Commit 9f9a2289 authored by Ryan Herbert

add some debug logs for timing

parent dfbaffcf
Pipeline #185667 failed with stages in 9 minutes and 18 seconds
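
The diff below adds wall-clock timing around each stage of index(). The pattern is the same everywhere: snapshot time.time() before a stage, then pass the elapsed seconds to log.debug after it. A minimal, self-contained sketch of that pattern (the logging setup here is an assumption; the controller uses the application's existing log object):

    import logging
    import time

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("timing-demo")  # stand-in for the app's logger

    def expensive_stage():
        time.sleep(0.2)  # placeholder for a real database query

    # Same pattern as the diff: snapshot, run the stage, log the delta.
    perm_start = time.time()
    expensive_stage()
    log.debug("permission query: (%0.3fs)" % (time.time() - perm_start))

For measuring intervals, time.monotonic() is generally preferable to time.time() because it is immune to system clock adjustments, though time.time() matches the style already used in this controller.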
@@ -121,7 +121,11 @@ def index():
     search, tags = parse_search(request.vars["filter"])
     result = {}
+    perm_start = time.time()
     perm_query = get_permissions(group_list)
+    log.debug("permission query: (%0.3fs)" % (time.time()-perm_start))
+    init_data_start = time.time()
     for r in perm_query:
         if(r.auth_group.role not in result):
             result[r.auth_group.role] = {}
@@ -134,6 +138,7 @@ def index():
             result[r.auth_group.role]['analyses'] = []
             result[r.auth_group.role]['tags'] = []
             result[r.auth_group.role]['permissions'] = "" if r._extra[group_permissions()] is None else r._extra[group_permissions()]
+    log.debug("initialisation: (%0.3fs)" % (time.time()-init_data_start))
     query = access_query(group_list)
     if (tags is not None and len(tags) > 0):
@@ -171,7 +176,9 @@ def index():
     ]
     queries = {}
+    queries_start = time.time()
     for set_type in ['patient', 'run', 'generic']:
+        tmp_start = time.time()
         key = 'set' if set_type == 'generic' else set_type
         set_query = (query &
             (db[set_type].sample_set_id == db.auth_permission.record_id))
@@ -184,9 +191,13 @@ def index():
             groupby=(db.auth_group.role, db.sample_set.sample_type),
             orderby=~db.scheduler_task.start_time
         )
+        log.debug("\t%s query: (%0.3fs)" % (set_type, time.time()-tmp_start))
+    log.debug("total queries: (%0.3fs)" % (time.time()-queries_start))
     list_size = 50
+    aggregation_start = time.time()
     for key in queries: # patient, run, set
+        tmp_start = time.time()
         query = queries[key]
         for r in query: # 1 or 0 rows
             result[r.auth_group.role][key]['count']['num_sets'] += r.num_sets
@@ -201,11 +212,15 @@ def index():
             analyses = [] if r._extra[group_analyses[key]] is None else [analysis.split(';') for analysis in r._extra[group_analyses[key]].split(',')]
             result[r.auth_group.role]['analyses'] += analyses[:list_size]
+        log.debug("\t%s aggregation: (%0.3fs)" % (key, time.time()-tmp_start))
+    log.debug("total data aggregation: (%0.3fs)" % (time.time()-aggregation_start))
+    tags_start = time.time()
     tags = get_most_used_tags(group_list) # list tags used without filtering
     for r in tags:
         if(r.tag.name is not None):
             result[r.auth_group.role]['tags'].append("%s (%d)" % (r.tag.name, r.count))
+    log.debug("most used tags: (%0.3fs)" % (time.time()-tags_start))
     involved_group_ids = get_involved_groups() # for search autocomplete
......
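
The same start/log pair now appears several times in index(). One possible follow-up (a sketch only, not part of this commit) is a small context manager that times a block and logs on exit, cutting each instrumented stage down to one line:

    import logging
    import time
    from contextlib import contextmanager

    log = logging.getLogger("timing-demo")  # assumed logger, as above

    @contextmanager
    def timed(label):
        """Log the wall-clock duration of the enclosed block."""
        start = time.time()
        try:
            yield
        finally:
            log.debug("%s: (%0.3fs)" % (label, time.time() - start))

    # Hypothetical usage mirroring the diff:
    # with timed("permission query"):
    #     perm_query = get_permissions(group_list)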