Commit b39e2205 authored by Mathieu Giraud

Merge branch 'feature-c/enhance_normalisation' into 'dev'

Feature c/enhance normalisation

Closes #2680, #2668, #2670, #2669, #2565, #2568, and #2681

See merge request !115
parents 965291c7 deb4253e
Pipeline #11636 passed with stages in 20 seconds
@@ -191,13 +191,6 @@
<form name="normalize_list" id="normalize_list">
</form>
</div>
<div class="menu_box">
normalization method<br/>
<label for="menuNormMethConstant" class="buttonSelector" onclick="m.changeNormMethod('constant')" ><input id="menuNormMethConstant" type="radio" name="normalize_method" value="constant" checked />constant</label>
<label for="menuNormMethPercent" class="buttonSelector" onclick="m.changeNormMethod('to-100')" ><input id="menuNormMethPercent" type="radio" name="normalize_method" value="to-100" />to 100%</label>
</div>
<div class="menu_box">
size display<br/>
<label for="menuNotationScientific" class="buttonSelector" onclick="m.changeNotation('scientific', true)"><input id="menuNotationScientific" type="radio" name="notation" value="scientific" />scientific notation</label>
@@ -144,56 +144,80 @@ Builder.prototype = {
normalize_list.removeAllChildren();
var input = document.createElement("input");
var label = document.createElement("label");
input.type = "radio";
input.value = -1;
input.name = "normalize_list";
input.id = "reset_norm";
input.checked = true;
label.htmlFor = "reset_norm";
var div = document.createElement("div");
div.onclick = function () {
self.m.compute_normalization(-1);
this.firstChild.checked = true;
self.m.update();
};
div.className = "buttonSelector";
div.appendChild(input);
div.appendChild(document.createTextNode("none"));
normalize_list.appendChild(div);
// Regroup Clones and Data into a single array with only critical data
var divElements = [];
for (var i = 0; i < self.m.clones.length; ++i) {
if (typeof self.m.clone(i).expected != "undefined") {
divElements.push({
id: i,
name: self.m.clone(i).getName(),
expected: self.m.clone(i).expected
});
}
}
for (var key in self.m.data) {
if (typeof self.m.data[key].expected != "undefined") {
divElements.push({
id: key,
name: self.m.data[key],
expected: self.m.data[key].expected
});
var tmp_norm_list = [];
if (m.normalization_list.length >= 1) {
for (var norm in m.normalization_list) {
var check = false;
for (var div_id in tmp_norm_list) {
if (tmp_norm_list[div_id] == m.normalization_list[norm].id) {
check = true;
}
}
if (check == false) {
var id = m.normalization_list[norm].id;
var expected_size = m.normalization_list[norm].expected_size;
var input_elem = document.createElement("input");
var label_elem = document.createElement("label");
label_elem.setAttribute("for", "reset_norm" + id);
input_elem.type = "radio";
input_elem.name = "normalize_list";
input_elem.id = "reset_norm" + id;
var form_div_elem = document.createElement("div");
form_div_elem.className = "buttonSelector";
form_div_elem.id = "normalizetest" + id;
form_div_elem.dataset.id = id;
form_div_elem.dataset.expected_size = expected_size;
var text = m.clone(id).getShortName() + " " + m.clone(id).getStrSize();
form_div_elem.appendChild(input_elem);
form_div_elem.appendChild(label_elem);
form_div_elem.appendChild(document.createTextNode(text));
form_div_elem.addEventListener('click', self.applyOldnormalization, false);
normalize_list.appendChild(form_div_elem);
tmp_norm_list.push(id);
}
}
// Create clickable div for each Clone and Data Entry
for (var j = 0; j < divElements.length; ++j) {
var elem = self.setupNormalizeDiv(divElements[j], "buttonSelector");
normalize_list.appendChild(elem);
}
},
applyOldnormalization: function () {
self.m.norm_input.value = "";
this.firstChild.checked = true;
self.m.clone(this.dataset.id).expected = this.dataset.expected_size;
self.m.compute_normalization(this.dataset.id, this.dataset.expected_size);
self.m.update();
},
/* "Unlocks" the call to compute_normalization() and checks the 'normalize' checkbox
* */
displayNormalizeButton: function() {
@@ -474,7 +474,7 @@ Clone.prototype = {
if (this.m.reads.segmented[time] === 0 ) return 0
var result = this.getReads(time) / this.m.reads.segmented[time]
if (this.m.norm && this.m.normalization.method!="constant") result = this.m.normalize(result, time)
if (this.m.norm) result = this.m.normalize(result, time)
return result
},
@@ -1277,7 +1277,13 @@ Clone.prototype = {
span_star.onclick = function (e) {
self.m.openTagSelector(self.index, e);
}
span_star.id = self.index
if (self.m.norm && self.index == self.m.normalization.id) {
span_star.appendChild(icon('icon-lock-1', 'clone tag'))
} else {
span_star.appendChild(icon('icon-star-2', 'clone tag'))
}
span_star.setAttribute('id', 'color' + this.index);
if (typeof this.tag != 'undefined')
span_star.style.color = this.m.tag[this.getTag()].color
@@ -971,7 +971,7 @@ Graph.prototype = {
var max = this.m.precision*this.m.max_size
//get ready for something really dirty
if (this.m.norm && this.m.normalization.method == "constant"){
if (this.m.norm){
for (var i=0; i<this.m.samples.order.length; i++) {
for (var j=0; j<this.m.clones.length; j++){
var size = this.m.precision*this.m.clone(j).getSize()
@@ -173,11 +173,11 @@ Model.prototype = {
this.norm = false;
this.normalization = {
"method" : "constant",
"A" : [],
"B" : 0,
"size_list" : [],
"expected_size" : 0,
"id" : -1
};
this.normalization_list=[]
/*Variables pour DBSCAN*/
this.eps = 0;
this.nbr = 0;
@@ -613,6 +613,7 @@ changeAlleleNotation: function(alleleNotation) {
/**
* normalize a size to match the normalization done on a given time/sample
* normalization is done when update() is called
* @param {float} original_size - size before normalization
* @param {integer} time - time/sample index of the timepoint where the normalization happens
* @return {float} normalized_size - size after normalization
@@ -620,16 +621,10 @@ changeAlleleNotation: function(alleleNotation) {
normalize: function (original_size, time) {
var normalized_size = 0;
if (this.normalization.A.length !== 0 && this.normalization.A[time] !== 0) {
var A = this.normalization.A[time] /* standard/spike at point time */
var B = this.normalization.B /* standard/spike expected value */
if (this.normalization.method=="constant" || original_size <= A){
normalized_size = (original_size * B) / A
}else{
normalized_size = B + ( (original_size - A) * ( (1 - B) / (1 - A) ) )
}
if (this.normalization.size_list.length !== 0 && this.normalization.size_list[time] !== 0) {
var A = this.normalization.size_list[time] /* standard/spike at point time */
var B = this.normalization.expected_size /* standard/spike expected value */
normalized_size = (original_size * B) / A
}else{
normalized_size = original_size
}
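For reference, a short worked example of the simplified rule above (all values invented): a standard/spike measured at 25% of a sample but expected at 20% rescales every size by the same B/A factor.
var A = 0.25                                   // this.normalization.size_list[time] (invented)
var B = 0.20                                   // this.normalization.expected_size (invented)
var original_size = 0.05
var normalized_size = (original_size * B) / A  // == 0.04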
@@ -651,8 +646,8 @@ changeAlleleNotation: function(alleleNotation) {
this.norm = true
expected_size = typeof expected_size !== 'undefined' ? expected_size : this.clone(cloneID).expected;
this.normalization.A = []
this.normalization.B = expected_size
this.normalization.size_list = []
this.normalization.expected_size = expected_size
this.normalization.id = cloneID
this.normalization.type = "clone"
@@ -660,15 +655,19 @@ changeAlleleNotation: function(alleleNotation) {
this.norm = false
for (var i=0; i<this.samples.number; i++){
this.normalization.A[i] = this.clone(cloneID).getSize(i)
this.normalization.size_list[i] = this.clone(cloneID).getSize(i)
}
this.norm = tmp
var norm_hash = jQuery.extend(true, {}, this.normalization)
this.normalization_list.push(norm_hash)
}
},
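As a sketch of the bookkeeping above (all values invented): after compute_normalization(3, 0.1) on a two-sample model, a deep copy shaped roughly like this is pushed onto m.normalization_list (any other fields of this.normalization are copied along with it).
var norm_hash_example = {
    size_list: [0.25, 0.30],   // size of clone 3 at each sample (invented)
    expected_size: 0.1,        // size the clone should have after normalization
    id: 3,                     // cloneID
    type: "clone"
}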
/**
* compute normalization factor needed to give a data an expected size
* first function called when normalization button is clicked
* @param {integer} data - index of the data used as pivot for normalization
* @param {float} expected_size - the size the data should have after normalization
* */
@@ -676,16 +675,16 @@ changeAlleleNotation: function(alleleNotation) {
expected_size = typeof expected_size !== 'undefined' ? expected_size : this.data[data].expected;
this.norm = true
this.normalization.A = []
this.normalization.B = expected_size
this.normalization.size_list = []
this.normalization.expected_size = expected_size
this.normalization.id = data
this.normalization.type = "data"
this.data[data].expected = expected_size
for (var i=0; i<this.samples.number; i++){
this.normalization.A[i] = this.data[data][i]
this.normalization.size_list[i] = this.data[data][i]
}
this.changeNormMethod("constant")
var norm_hash = jQuery.extend(true, {}, this.normalization)
this.normalization_list.push(norm_hash)
},
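A hypothetical call for the data case ('spike' is an invented key of m.data): pin the external data curve to 10% on every sample and store the normalization in the history list.
m.compute_data_normalization("spike", 0.1)  // 'spike' is an assumed data key, not from the diff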
/**
@@ -693,8 +692,8 @@ changeAlleleNotation: function(alleleNotation) {
* clone sizes can change depending on the parameters, so it is necessary to recompute the normalization from time to time
* */
update_normalization: function () {
if ((this.normalization.B !== 0 && this.normalization.type=="clone" )) {
this.compute_normalization( this.normalization.id, this.normalization.B);
if (this.normalization.expected_size !== 0 && this.normalization.type == "clone") {
this.compute_normalization(this.normalization.id, this.normalization.expected_size);
}
},
@@ -704,7 +703,7 @@ changeAlleleNotation: function(alleleNotation) {
* */
update_precision: function () {
var min_size = 1
var max
for (var i=0; i<this.samples.order.length; i++){
var t = this.samples.order[i]
var size = this.min_sizes[t]
@@ -714,14 +713,19 @@ changeAlleleNotation: function(alleleNotation) {
this.max_size = 1
this.min_size = min_size
if (this.norm && this.normalization.method=="constant"){
if (this.norm){
for (var j=0; j<this.samples.order.length; j++){
var max = this.normalization.B/this.normalization.A[j]
if (this.normalization.size_list[j] == 0) {
max = this.normalization.expected_size
} else {
max = this.normalization.expected_size / this.normalization.size_list[j]
}
if (max>this.max_size) this.max_size=max;
}
}
// *2 to keep a minimum margin of half a log
// 1/0 == infinity
this.precision=(1/this.min_size)*2
this.scale_color = d3.scale.log()
@@ -977,6 +981,15 @@ changeAlleleNotation: function(alleleNotation) {
for (var n = 0; n < this.clones.length; n++) {
this.clone(n).updateColor()
}
// update icons if a normalization is set up
if (this.norm) {
$("i.icon-lock-1").toggleClass('icon-lock-1 icon-star-2')
$("#" + this.normalization.id + " i.icon-star-2").toggleClass('icon-star-2 icon-lock-1')
$("#f" + this.normalization.id + " i.icon-star-2").toggleClass('icon-star-2 icon-lock-1')
} else {
$("i.icon-lock-1").toggleClass('icon-lock-1 icon-star-2')
}
},
@@ -1887,7 +1900,7 @@ changeAlleleNotation: function(alleleNotation) {
console.log({"type": "popup", "msg": "expected input between 0.0001 and 1"});
}
}
this.norm_input.onkeydown = function () {
this.norm_input.onkeydown = function (event) {
if (event.keyCode == 13) self.norm_button.click();
}
@@ -1916,20 +1929,8 @@ changeAlleleNotation: function(alleleNotation) {
if (top>maxTop) top=maxTop;
this.tagSelector.style.top=top+"px";
},
/**
* change the strategy for normalization
* @param {string} method - can be 'constant' or 'to-100'
* */
changeNormMethod : function (method){
this.normalization.method=method;
if (this.normalization.type=="data" && method !="constant"){
this.normalization.method="constant";
}
this.update()
},
/**
* load a new germline and update
* @param {string} system - system string to load
@@ -1274,8 +1274,8 @@ ScatterPlot.prototype = {
this.nodes[seqID].s = 0
this.nodes[seqID].r1 = 0
}
size = this.m.clone(cloneID)
.getSize2();
size = Math.min(1.5, this.m.clone(cloneID).getSize2())
if (this.m.clusters[cloneID].length === 0) size = this.m.clone(cloneID)
.getSequenceSize();
@@ -329,3 +329,25 @@ QUnit.test("model: primer detection", function(assert) {
QUnit.test("normalization: test", function(assert) {
var m = new Model();
m.parseJsonData(json_data, 100)
var c1 = new Clone(json_clone1, m, 0)
var c2 = new Clone(json_clone2, m, 1)
var c3 = new Clone(json_clone3, m, 2)
var c4 = new Clone(json_clone4, m, 3)
var c5 = new Clone(json_clone5, m, 4)
m.initClones()
assert.equal(c2.getSize(),0.05,"clone2 size")
m.compute_normalization(0,0.20)
assert.equal(m.normalization.expected_size,0.20, "expected value")
assert.equal(c1.getSize().toFixed(2),m.normalization.expected_size,"clone1 normalized size")
assert.equal(c1.getSize(1).toFixed(2),m.normalization.expected_size,"clone1 normalized size")
assert.equal(c1.getSize(2),0,"clone1 normalized size")
assert.equal(m.normalize(c2.getSize(),0),0.8000000000000002,"normalize")
m.compute_normalization(-1,0)
assert.equal(c2.getSize(), 0.05, "clone2 size")
// m.compute_data_normalization()
});
\ No newline at end of file
@@ -63,7 +63,7 @@
"hide": []
},
"normalization": {
"method": "constant",
"A": [],
"B": 0,
"id": -1
@@ -68,7 +68,6 @@ analysis_data = {
"hide": []
},
"normalization": {
"method": "constant",
"A": [],
"B": 0,
"id": -1
@@ -673,3 +673,143 @@ make functional
docker-compose up --build
#+END_SRC
$ docker-compose up --build
* Migrating Data
** Database
The easiest way to perform a database migration is to first extract the
data with the following command:
$ mysqldump -u <user> -p <db> -c --no-create-info > <file>
An important element to note here is the --no-create-info flag. We add this
parameter because web2py needs to be allowed to create the tables itself:
it keeps track of database migrations, and errors will occur if tables it
considers it needs to create already exist.
In order to import the data into an installation, you first need to ensure
the tables have been created by web2py. This can be achieved by simply
accessing a non-static page.
/!\ If the database has been initialised from the interface you will
likely encounter primary key collisions or duplicated data, so it is best
to skip the initialisation altogether.
Once the tables have been created, the data can be imported as follows:
$ mysql -u <user> -p <db> < <file>
Please note that with this method you should have at least one admin user
that is accessible in the imported data. Since the initialisation is being
skipped, you will not have the usual admin account present.
It is also possible to create a user directly from the database, although
this is not the recommended course of action.
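To summarize, a hypothetical end-to-end migration (the user name, database
name and dump file are invented for illustration):
#+BEGIN_SRC sh
# on the old installation
mysqldump -u vidjil -p vidjil_db -c --no-create-info > vidjil_data.sql
# on the new installation: let web2py create the tables by accessing a
# non-static page (skipping the interface initialisation), then import
mysql -u vidjil -p vidjil_db < vidjil_data.sql
#+END_SRC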
** Files
Files can simply be copied over to the new installation; their filenames
are stored in the database, so they should remain accessible as long as
they are in the correct directories.
** Filtering data
When extracting data for a given user, the whole database should not be
copied over.
There are two courses of action:
- create a copy of the existing database and remove the users that are
irrelevant. The cascading delete should remove any unwanted data
barring a few exceptions (notably fused_file, groups and sample_set_membership)
- export the relevant data directly from the database. This method
requires multiple queries which will not be detailed here.
Once the database has been correctly extracted, a list of files can be
obtained from sequence_file, fused_file, results_file and analysis_file
with the following query:
#+BEGIN_SRC sql
SELECT <filename field>
FROM <table name>
INTO OUTFILE 'filepath'
FIELDS TERMINATED BY ','
ENCLOSED BY ''
LINES TERMINATED BY '\n'
#+END_SRC
Note: we are handling filenames here, which should not contain characters
such as quotes or commas, so we can afford to refrain from enclosing the
data in quotes.
This query will output a CSV file containing one filename per line.
Copying the files is now just a matter of running the following script:
#+BEGIN_SRC sh
sh copy_files <file source> <file destination> <input file>
#+END_SRC
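The copy_files script itself is not reproduced in this document; a minimal
sketch of what it could look like, assuming it takes a source directory, a
destination directory, and the CSV produced above (one filename per line):
#+BEGIN_SRC sh
#!/bin/sh
# Hypothetical sketch of copy_files: copy each file listed in the input
# file from the source directory to the destination directory.
src="$1"
dest="$2"
while read -r filename; do
    cp "$src/$filename" "$dest/$filename"
done < "$3"
#+END_SRC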
** Exporting sample sets
The migrator script allows the export and import of data, whether it be a
single patient/run/set, a list of them, or even all the sample sets
associated with a group.
#+BEGIN_EXAMPLE
usage: migrator.py [-h] [-f FILENAME] [--debug] {export,import} ...
Export and import data
positional arguments:
{export,import} Select operation mode
export Export data from the DB into a JSON file
import Import data from JSON into the DB
optional arguments:
-h, --help show this help message and exit
-f FILENAME Select the file to be read or written to
--debug Output debug information
#+END_EXAMPLE
Export:
#+BEGIN_EXAMPLE
usage: migrator.py export [-h] {sample_set,group} ...
positional arguments:
{sample_set,group} Select data selection method
sample_set Export data by sample-set ids
group Extract data by groupid
optional arguments:
-h, --help show this help message and exit
#+END_EXAMPLE
#+BEGIN_EXAMPLE
usage: migrator.py export sample_set [-h] {patient,run,generic} ID [ID
...]
positional arguments:
{patient,run,generic}
Type of sample
ID Ids of sample sets to be extracted
optional arguments:
-h, --help show this help message and exit
#+END_EXAMPLE
#+BEGIN_EXAMPLE
usage: migrator.py export group [-h] groupid
positional arguments:
groupid The long ID of the group
optional arguments:
-h, --help show this help message and exit
#+END_EXAMPLE
Import:
#+BEGIN_EXAMPLE
usage: migrator.py import [-h] [--dry-run] [--config CONFIG] groupid
positional arguments:
groupid The long ID of the group
optional arguments:
-h, --help show this help message and exit
--dry-run With a dry run, the data will not be saved to the database
--config CONFIG Select the config mapping file
#+END_EXAMPLE
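For illustration, hypothetical invocations assembled from the usage above
(the ids and file name are invented):
#+BEGIN_SRC sh
# export patients with sample-set ids 12 and 13 to a JSON file
python migrator.py -f patients.json export sample_set patient 12 13
# rehearse an import of that file into a group, without saving to the DB
python migrator.py -f patients.json import --dry-run <groupid>
#+END_SRC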
@@ -165,6 +165,14 @@ def add_form():
except ValueError:
error += " Invalid %s %s" % (key, set_ids[key])
if request.vars['generic_id'] != '':
    try:
        generic_id = extract_id(request.vars['generic_id'], error)
        if not auth.can_modify('generic', generic_id):
            error += " missing permissions for sample_set %d" % generic_id
    except ValueError:
        error += " invalid sample_set %s" % request.vars['generic_id']
pre_process = None
pre_process_flag = "DONE"
if request.vars['pre_process'] is not None and request.vars['pre_process'] != "0":