Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM tiangolo/uwsgi-nginx-flask:python3.9

# NOTE(review): globally disabling TLS verification for git is a security
# risk — confirm the internal git host still requires it before keeping this.
RUN git config --global http.sslVerify false && \
    mkdir -p /home/nginx/.cloudvolume/secrets && \
    chown -R nginx /home/nginx && \
    usermod -d /home/nginx -s /bin/bash nginx
Expand Down
2 changes: 2 additions & 0 deletions envs.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# JSON map of dataset name -> segmentation API endpoint URL; parsed by
# common.py via json.loads(os.environ.get("PPROGRESS_DATASETS")).
export PPROGRESS_DATASETS='{"h01_full0_v2": "https://local.brain-wire-test.org/segmentation/1.0/h01_full0_v2/", "test0_parents_v0": "https://local.brain-wire-test.org/segmentation/1.0/test0_parents_v0/" }'
# JSON map of dataset name -> server base address; used to build
# chunkedgraph clients in common.py.
export PPROGRESS_SERVER_ADDRESSES='{"h01_full0_v2": "https://local.brain-wire-test.org", "test0_parents_v0": "https://local.brain-wire-test.org" }'
56 changes: 35 additions & 21 deletions proofreadingprogress/app/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,20 +42,21 @@ def index():
from .. import __version__
return f"ProofreadingProgress v{__version__}"


def query():
    """Render the query page, exposing the configured server addresses.

    `serverAddresses` is the module-level dict loaded from the
    PPROGRESS_SERVER_ADDRESSES environment variable.
    """
    # The pasted diff contained both the old and new return lines; keep the
    # post-change version that passes serverAddresses to the template.
    return render_template("query.html", prefix=__url_prefix__, serverAddresses=serverAddresses)


def user():
    """Render the per-user page, exposing the configured server addresses."""
    # Keep the post-diff version that passes serverAddresses to the template.
    return render_template("user.html", prefix=__url_prefix__, serverAddresses=serverAddresses)


def base():
    """Render the base page template with the application's URL prefix."""
    template_name = "base.html"
    return render_template(template_name, prefix=__url_prefix__)


def publish():
    """Render the publish page, exposing the configured dataset endpoints.

    `datasets` is the module-level dict loaded from the PPROGRESS_DATASETS
    environment variable.
    """
    # Keep the post-diff version that passes datasets to the template.
    return render_template("publish.html", prefix=__url_prefix__, datasets=datasets)


def table():
Expand All @@ -65,6 +66,7 @@ def table():
def getResource(name):
    """Serve the static file `name` from the application directory."""
    app_directory = "."
    return send_from_directory(app_directory, name)


def home():
resp = make_response()
resp.headers["Access-Control-Allow-Origin"] = "*"
Expand Down Expand Up @@ -136,36 +138,37 @@ def unhandled_exception(e):
)
return 500


# -------------------
# ------ Applications
# -------------------
# Dataset name -> segmentation endpoint URL, and dataset name -> server base
# address, both supplied as JSON in environment variables (see envs.sh).
# Use indexing rather than .get() so a missing variable fails fast with a
# clear KeyError instead of json.loads(None) raising an opaque TypeError.
datasets = json.loads(os.environ["PPROGRESS_DATASETS"])
serverAddresses = json.loads(os.environ["PPROGRESS_SERVER_ADDRESSES"])

print(datasets, serverAddresses)

def dataRequest(r):
reqs = []
graph = None
args = r.args
raw = json.loads(r.data)
single = args.get("query")
isFiltered = args.get("filtered", "false") == "true"
isLineage = False#args.get("lineage", "false") == "true"
isLineage = False # args.get("lineage", "false") == "true"
dataset = args.get("dataset", "default")
#use user token, instead of local token
client = chunkedgraph.ChunkedGraphClient(server_address=serverAddresses[dataset],
table_name=dataset,
auth_client=auth.AuthClient(token=g.auth_token))
#print(f"My current token is: {client.auth.token}")
# use user token, instead of local token
client = chunkedgraph.ChunkedGraphClient(server_address=serverAddresses[dataset],
table_name=dataset,
auth_client=auth.AuthClient(token=g.auth_token))
# print(f"My current token is: {auth.token}")
str_queries = raw.get("queries", "").split(",")
queries = list(set(convertValidRootIds([single] if single else str_queries)))

queries = list(set(convertValidRootIds(
[single] if single else str_queries)))
dictsBatched = multiThread(client, queries, isFiltered)
dfdict = {k: v for d in dictsBatched for k, v in d.items()}
if (isLineage):
graphsBatched = multiThread(client, queries, graph=True)
#graphs = [g for batch in graphsBatched for g in batch]
# graphs = [g for batch in graphsBatched for g in batch]
batches = [batch for batch in graphsBatched]
graphs = [g for g in batches]
graph = nx.compose_all(graphs) if len(graphs) > 1 else graphs[0]
Expand All @@ -183,14 +186,17 @@ def dataRequest(r):
"json": reqs,
}

def multiThread(client, queries, filter=True, graph=False, b_size=10, p_size=10):
    """Fan root-id queries out over a worker pool in batches.

    Splits `queries` into chunks of `b_size`, pairing each chunk with the
    client, the filter flag, and the chunk's start offset, then maps them
    over a pool of `p_size` workers.  `graph=True` fetches lineage graphs
    (caveGRPH); otherwise tabular change logs (caveCHLG).  Returns the lazy
    `imap` iterator of per-batch results.

    NOTE(review): `filter` shadows the builtin; kept for interface stability.
    """
    batched = [(client, queries[i:i + b_size], filter, i)
               for i in range(0, len(queries), b_size)]
    pool = Pool(p_size)
    results = pool.imap(caveGRPH if graph else caveCHLG, batched)
    pool.close()
    pool.join()
    return results


def caveCHLG(args):
try:
return args[0].get_tabular_change_log(args[1], args[2])
Expand All @@ -203,9 +209,9 @@ def caveCHLG(args):
except:
results[f'error_{args[3]}'].append(id)
return results


#return list of graphs

# return list of graphs
def caveGRPH(args):
try:
return args[0].get_lineage_graph(root_id=args[1], as_nx_graph=True)
Expand All @@ -219,6 +225,7 @@ def caveGRPH(args):
results[f'error_{args[3]}'].append(id)
return results


def processToJson(query, dataframe, graph=None):
pubdict = None
published = []
Expand All @@ -242,6 +249,7 @@ def processToJson(query, dataframe, graph=None):
"published": pubdict,
}


def publish_neurons(args):
if (True == True):
return {}
Expand Down Expand Up @@ -279,6 +287,7 @@ def publish_neurons(args):

return existing


def removeInvalidRootIds(ids):
valid = []
for id in ids:
Expand All @@ -289,6 +298,7 @@ def removeInvalidRootIds(ids):
pass
return valid


def convertValidRootIds(ids):
valid = []
for id in ids:
Expand All @@ -298,17 +308,21 @@ def convertValidRootIds(ids):
pass
return valid


def validDOI(doi):
    """Return `doi` unchanged if it is empty or a plausible DOI, else "".

    Fixes over the original: raw string (the old pattern contained invalid
    escape sequences like `\\d` in a normal string), the dot after "10" is
    escaped (previously `.` matched any character), and matching is
    case-insensitive to agree with the client-side DOITest in publish.js
    which uses the /i flag.
    """
    valid = not len(doi) or re.match(r"^10\.\d{4,9}[-._;()/:A-Z0-9]+$", doi, re.IGNORECASE)
    return doi if valid else ""


def validPaper(pname):
    """Return `pname` unchanged if empty or made only of word characters,
    hyphens, and whitespace; otherwise return "".

    Fix: the pattern is now a raw string — the original relied on invalid
    escape sequences (`\\w`, `\\s`) in a normal string, which emit
    DeprecationWarnings and will become errors in future Python versions.
    """
    valid = not len(pname) or re.match(r"^[\w\-\s]+$", pname)
    return pname if valid else ""


def publishRequest(args):
    """Publish the neurons described by `args` and render the result as an
    HTML table (rows keyed by the returned dict's keys)."""
    published = publish_neurons(args)
    frame = pd.DataFrame.from_dict(published, orient="index")
    return frame.to_html()


def publishDump():
    """Return the full publication table rendered as HTML, using a fresh
    database connection that is closed when the `with` block exits."""
    with engine.connect() as connection:
        frame = tableDump(connection)
        return frame.to_html()
135 changes: 69 additions & 66 deletions proofreadingprogress/app/scripts/publish.js
Original file line number Diff line number Diff line change
@@ -1,147 +1,150 @@
const base = `${window.location.origin}/${
document.getElementById('prefix').innerText || ''}/api/v1`;
const params = (new URL(document.location)).searchParams;
const auto_rootid = params.get('rootid');
document.getElementById("prefix").innerText || ""
}/api/v1`;
const params = new URL(document.location).searchParams;
const auto_rootid = params.get("rootid");
const wparams = `location=no,toolbar=no,menubar=no,width=620,left=0,top=0`;
const fly_v31 =
`https://prodv1.flywire-daf.com/segmentation/api/v1/table/fly_v31/`;
const fly_training_v2 =
`https://minnie.microns-daf.com/segmentation/api/v1/table/fly_training_v2/`;

const app = new Vue({
el: '#app',
el: "#app",
data: {
// INPUT
dataset: fly_v31,
doi: '',
pname: '',
str_rootids: auto_rootid || '',
dataset: default_dataset_url,
doi: "",
pname: "",
str_rootids: auto_rootid || "",
// OUTPUT
response: [],
headers: [],
csv: '',
csv: "",
// IMPORT
colChoices: [],
keyindex: 0,
importedCSVName: '',
importedCSVName: "",
importedCSVFile: [],
idToRowMap: {},
status: 'Submit',
loading: false
status: "Submit",
loading: false,
},
computed: {
isReady: function() {
isReady: function () {
return this.str_rootids.length && this.validateAll() && !this.loading;
},
customDataset: function() {
return [fly_v31, fly_training_v2].includes(this.dataset);
customDataset: function () {
return [dataset_name_list].includes(this.dataset);
},
validRoots: function() {
validRoots: function () {
const valid = this.rootsIDTest();
return {
'form-error': !valid, valid
}
"form-error": !valid,
valid,
};
},
validDOI: function() {
validDOI: function () {
const valid = this.DOITest();
return {
'form-error': !valid, valid
}
"form-error": !valid,
valid,
};
},
validTitle: function() {
validTitle: function () {
const valid = this.titleTest();
return {
'form-error': !valid, valid
}
}
"form-error": !valid,
valid,
};
},
},
methods: {
validateAll: function() {
validateAll: function () {
return this.rootsIDTest() && this.DOITest() && this.titleTest();
},
rootsIDTest: function() {
return /^ *\d+ *(?:, *\d+ *)*$/gm.test(this.str_rootids) ||
!this.str_rootids.length;
rootsIDTest: function () {
return (
/^ *\d+ *(?:, *\d+ *)*$/gm.test(this.str_rootids) ||
!this.str_rootids.length
);
},
DOITest: function() {
return /^10.\d{4,9}[-._;()/:A-Z0-9]+$/i.test(this.doi) ||
!this.doi.length;
DOITest: function () {
return (
/^10.\d{4,9}[-._;()/:A-Z0-9]+$/i.test(this.doi) || !this.doi.length
);
},
titleTest: function() {
titleTest: function () {
return /^[\w\-\s]+$/.test(this.pname) || !this.pname.length;
},
apiRequest: async function() {
apiRequest: async function () {
// Disable button, activate spinner
this.loading = true;
this.status = 'Loading...';
this.status = "Loading...";
this.processMQR();

const request = new URL(`${base}/pub/`);
request.searchParams.set('queries', this.str_rootids);
request.searchParams.set('verify', this.verify);
request.searchParams.set('doi', this.doi);
request.searchParams.set('pname', this.pname);
let path = new URL(this.dataset).pathname.split('/');
request.searchParams.set("queries", this.str_rootids);
request.searchParams.set("verify", this.verify);
request.searchParams.set("doi", this.doi);
request.searchParams.set("pname", this.pname);
let path = new URL(this.dataset).pathname.split("/");
path.pop();
request.searchParams.set('dataset', path.pop());
request.searchParams.set("dataset", path.pop());

try {
const response = await fetch(request);
await this.processData(await response.json());
this.status = 'Submit';
this.status = "Submit";
this.loading = false;
} catch (e) {
alert(e);
this.loading = false;
this.status = 'Submit';
this.status = "Submit";
throw e;
}
},
processData: function(response) {
processData: function (response) {
this.queryProcess(response);
},
queryProcess: function(data) {
queryProcess: function (data) {
let rows = Object.values(data);
rows.forEach(f => {
rows.forEach((f) => {
if (this.headers.length == 0) {
this.headers = Object.keys(f);
}
this.response = [...this.response, Object.values(f)];
});
},
exportCSV: function() {
const filename = 'edits.csv';
const blob = new Blob([this.csv], {type: 'text/csv;charset=utf-8;'});
const link = document.createElement('a');
exportCSV: function () {
const filename = "edits.csv";
const blob = new Blob([this.csv], {type: "text/csv;charset=utf-8;"});
const link = document.createElement("a");
const url = URL.createObjectURL(blob);
link.setAttribute('href', url);
link.setAttribute('download', filename);
link.setAttribute("href", url);
link.setAttribute("download", filename);
link.click();
},
importCSV: function(e) {
importCSV: function (e) {
Papa.parse(e.target.files[0], {
skipEmptyLines: true,
complete: (results) => {
this.importedCSVName = e.target.files[0];
this.importedCSVFile = results.data;
this.colChoices = results.data[0];
}
},
});
},
chooseCSV: function() {
document.getElementById('import').click();
chooseCSV: function () {
document.getElementById("import").click();
},
importCol: function(index) {
importCol: function (index) {
this.importedCSVFile.forEach((e, i) => {
this.keyindex = index;
// Ignore first row (header)
if (i) {
let rid = e[index];
if (rid[0] == '\'' && rid.length > 1) rid = rid.slice(1);
this.str_rootids = this.str_rootids.concat(i == 1 ? '' : ', ', rid);
if (rid[0] == "'" && rid.length > 1) rid = rid.slice(1);
this.str_rootids = this.str_rootids.concat(i == 1 ? "" : ", ", rid);
this.idToRowMap[rid] = i;
}
});
}
}
});
},
},
});
Loading