Commit 218e5edb authored by Mathieu Giraud, committed by Vidjil Team

modules/defs.py: new file, grouping all DIR_* definitions

parent 6ef54bad
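The change groups all DIR_* filesystem constants into a single modules/defs.py, so that controllers, models and scripts refer to one shared definition (in web2py, `import defs` resolves to the application's modules/ folder). A minimal sketch of the resulting usage pattern; the helper name and file name below are purely illustrative, not part of the commit:

import os
import defs

def result_path(filename):
    # build an absolute path under the shared results directory;
    # defs.DIR_RESULTS ends with '/', and os.path.join copes with that
    return os.path.join(defs.DIR_RESULTS, filename)

# result_path('fused.data') -> '/mnt/result/results/fused.data'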
@@ -9,7 +9,7 @@
## - call exposes all registered services (none by default)
#########################################################################
-DIR_RESULTS = '/mnt/result/results/'
+import defs
import gluon.contrib.simplejson, time, datetime
if request.env.http_origin:
@@ -89,7 +89,7 @@ def get_data():
& ( db.fused_file.config_id == request.vars["config_id"] )
).select()
for row in query :
-fused_file = DIR_RESULTS+row.fused_file
+fused_file = defs.DIR_RESULTS+'/'+row.fused_file
if error == "" :
@@ -162,7 +162,7 @@ def get_analysis():
if not analysis_query.isempty() :
row = analysis_query.select().first()
-f = open(DIR_RESULTS+row.analysis_file, "r")
+f = open(defs.DIR_RESULTS+'/'+row.analysis_file, "r")
analysis = gluon.contrib.simplejson.loads(f.read())
f.close()
if 'cluster' in analysis:
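For reference, a hedged sketch of the same read done with os.path.join and a with block; load_analysis is a hypothetical helper, not code from this commit:

import os
import gluon.contrib.simplejson
import defs

def load_analysis(analysis_file):
    # analysis_file is the name stored in db.analysis_file.analysis_file
    path = os.path.join(defs.DIR_RESULTS, analysis_file)
    with open(path, "r") as f:
        # .analysis files hold plain JSON, parsed with simplejson as above
        return gluon.contrib.simplejson.loads(f.read())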
......
# -*- coding: utf-8 -*-
-DIR_UPLOADS = '/mnt/upload/uploads/'
-DIR_RESULTS = '/mnt/result/results/'
+import defs
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
@@ -115,7 +113,7 @@ db.define_table('sequence_file',
Field('producer','text'),
Field('size_file','integer'),
Field('data_file', 'upload',
-uploadfolder=DIR_UPLOADS,
+uploadfolder=defs.DIR_SEQUENCES,
length=1000000000000, autodelete=True))
@@ -125,7 +123,7 @@ db.define_table('standard_file',
Field('name', 'string'),
Field('info','text'),
Field('data_file', 'upload',
-uploadfolder=DIR_UPLOADS,
+uploadfolder=defs.DIR_SEQUENCES,
autodelete=True, length=1000000000000))
@@ -145,7 +143,7 @@ db.define_table('results_file',
Field('run_date','datetime'),
Field('scheduler_task_id', 'integer'),
Field('data_file', 'upload',
-uploadfolder=DIR_RESULTS,
+uploadfolder=defs.DIR_RESULTS,
length=1000000000000, autodelete=True))
db.define_table('fused_file',
@@ -154,7 +152,7 @@ db.define_table('fused_file',
Field('fuse_date','datetime'),
Field('status', 'string'),
Field('fused_file', 'upload',
-uploadfolder=DIR_RESULTS,
+uploadfolder=defs.DIR_RESULTS,
length=1000000000000, autodelete=True))
db.define_table('analysis_file',
@@ -163,7 +161,7 @@ db.define_table('analysis_file',
Field('analyze_date','datetime'),
Field('status', 'string'),
Field('analysis_file', 'upload',
-uploadfolder=DIR_RESULTS,
+uploadfolder=defs.DIR_RESULTS,
length=1000000000000, autodelete=True))
......
# coding: utf8
import os
import sys
+import defs
DIR_VIDJIL = '/home/vidjil/'
-DIR_UPLOAD = '/mnt/upload/uploads/'
-DIR_RESULTS = '/mnt/result/results/'
-DIR_OUT_VIDJIL_ID = '/mnt/result/vidjil/out-%06d/'
TASK_TIMEOUT = 10 * 60
@@ -72,8 +70,8 @@ def run_vidjil(id_file, id_config, id_data, id_fuse):
## paths to vidjil / to the sequence files
germline_folder = DIR_VIDJIL + '/germline/'
-upload_folder = DIR_UPLOAD
-out_folder = DIR_OUT_VIDJIL_ID % id_data
+upload_folder = defs.DIR_SEQUENCES
+out_folder = defs.DIR_OUT_VIDJIL_ID % id_data
cmd = "rm -rf "+out_folder
p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
@@ -143,7 +141,7 @@ def run_vidjil(id_file, id_config, id_data, id_fuse):
).select( orderby=db.sequence_file.id|db.results_file.run_date, groupby=db.sequence_file.id )
for row in query :
if row.results_file.data_file is not None :
-files += DIR_RESULTS + row.results_file.data_file+" "
+files += defs.DIR_RESULTS + row.results_file.data_file+" "
cmd = "python ../fuse.py -o "+output_file+" -t 100 "+files
@@ -188,8 +186,8 @@ def run_fuse_only(id_file, id_config, id_data, id_fuse):
## paths to vidjil / to the sequence files
germline_folder = DIR_VIDJIL + '/germline/'
-upload_folder = DIR_UPLOAD
-out_folder = DIR_OUT_VIDJIL_ID % id_data
+upload_folder = defs.DIR_SEQUENCES
+out_folder = defs.DIR_OUT_VIDJIL_ID % id_data
output_filename = "%06d" % id_data
#clean folder
@@ -223,7 +221,7 @@ def run_fuse_only(id_file, id_config, id_data, id_fuse):
).select( orderby=db.sequence_file.sampling_date )
for row in query :
if row.sequence_file.data_file is not None :
-files += DIR_RESULTS + row.sequence_file.data_file + " "
+files += defs.DIR_RESULTS + row.sequence_file.data_file + " "
cmd = "python ../fuse.py -o "+output_file+" -t 100 "+files
print cmd
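Both run_vidjil and run_fuse_only assemble the fuse.py command as a single shell string from defs.DIR_RESULTS plus the per-sample file names. A sketch of the equivalent call built as an argument list; the subprocess form is illustrative only, the commit keeps the Popen/shell-string style:

import subprocess

def call_fuse(output_file, result_files):
    # result_files: absolute paths already prefixed with defs.DIR_RESULTS
    cmd = ["python", "../fuse.py", "-o", output_file, "-t", "100"] + result_files
    return subprocess.call(cmd)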
......
+### Upload directory for .fasta/.fastq.
+### Old sequence files could be thrown away.
+### No regular backup.
+DIR_SEQUENCES = '/mnt/upload/uploads/'
+### Upload directory for .vidjil/.fused/.analysis
+### Regularly backed up
+DIR_RESULTS = '/mnt/result/results/'
+### Temporary directory to store vidjil results
+### Formatted with 'data_id' in models/task.py
+DIR_OUT_VIDJIL_ID = '/mnt/result/tmp/out-%06d/'
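With the definitions gathered here, callers either format or concatenate them, as in the hunks above; a short illustration (the data_id and file name are made up):

import defs

data_id = 42
out_folder = defs.DIR_OUT_VIDJIL_ID % data_id
# -> '/mnt/result/tmp/out-000042/'
fused_file = defs.DIR_RESULTS + '/' + 'fused.data'
# -> '/mnt/result/results//fused.data' (the doubled '/' is harmless on POSIX)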
-DIR_SEQUENCES = '/mnt/upload/uploads'
+import defs
for seq in db(db.sequence_file).select():
print "===", "seq-%d" % seq.id, "\t", seq.sampling_date, "\t", seq.filename, seq.data_file
@@ -19,7 +18,7 @@ for seq in db(db.sequence_file).select():
old = files[-2]
new = files[-1]
-com = 'python /home/vidjil/server/diff.py %s/%s %s/%s' % (DIR_SEQUENCES, old, DIR_SEQUENCES, new)
+com = 'python /home/vidjil/server/diff.py %s/%s %s/%s' % (defs.DIR_RESULTS, old, defs.DIR_RESULTS, new)
# print com
os.system(com)
+import defs
import datetime
LAST_HOURS = 24
-DIR_OUT_VIDJIL_ID = '/mnt/result/vidjil/out-%06d/'
def tmp_dir(scheduler_args):
data_id = int(scheduler_args.split(',')[2])
-return DIR_OUT_VIDJIL_ID % data_id
+return defs.DIR_OUT_VIDJIL_ID % data_id
def ellipsis(what, max_size=30):
ell = ' ...'
......