Commit 3a64a574 authored by Ryan Herbert

refactor Tokeniser, delegate token creation to autocomplete

Tokens are now created from text rather than by reading the value of an
input, which lets us delegate token creation to the autocomplete's
beforeInsert callback for Samples.

This not only makes it impossible to create "trash" tokens, since a token
can only be created as the result of an autocompletion; it also means we
no longer bind events to the input, and any way of selecting an
autocomplete suggestion will create the token.
parent 7e2a3f61
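
For context, a hedged sketch of the new API shape (not part of the commit): the constructor captures the token container and the hidden form input once, later constructions return the same singleton instance, and tokenise() now takes the completed text directly. The element ids and the ":p 42 Doe" value shape are assumptions inferred from the substr calls in the diff below.

    // Hypothetical ids; in the template each input passes its own
    // set_div_{{=i}} / file_set_list_{{=i}} elements.
    var tokeniser = new Tokeniser(document.getElementById('set_div_0'),
                                  document.getElementById('file_set_list_0'));

    new Tokeniser() === tokeniser;          // true: the constructor returns the shared instance

    // Called from the autocomplete's beforeInsert callback with the selected value:
    new Tokeniser().tokenise(":p 42 Doe");  // appends a token to set_div_0 and
                                            // refreshes file_set_list_0.value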
@@ -173,8 +173,8 @@ VidjilAutoComplete.prototype = {
     };
     callbacks.beforeInsert = function(value, li) {
         $input.data('set-id', value);
-        return value.substr(3);
+        new Tokeniser().tokenise(value); // Tokeniser is a Singleton
+        return "";
     };
     $input.atwho({
......
@@ -20,42 +20,30 @@
  * along with "Vidjil". If not, see <http://www.gnu.org/licenses/>
  */
-function Tokeniser() {
+function Tokeniser(target, form_input) {
     if (typeof Tokeniser.instance === 'object') {
         return Tokeniser.instance;
     }
     Closeable.call(this);
+    this.target = target;
+    this.form_input = form_input
     Tokeniser.instance = this;
     return this;
 }
 Tokeniser.prototype = Object.create(Closeable.prototype);
-Tokeniser.prototype.setup = function(input, target, form_input) {
-    var self = this;
-    if ($(input).data('needs-tokeniser')) {
-        $(input).on("keydown", function(e) {
-            if (e.which === 13) {
-                e.preventDefault();
-                self.tokenise(this, target, form_input);
-            }
-        });
-        $(input).data('needs-tokeniser', false);
-    }
-}
-Tokeniser.prototype.tokenise = function(input, target, form_input) {
-    var token = this.createToken($(input));
-    $(input).val("");
-    $(input).removeData('set-id');
-    target.appendChild(token);
-    form_input.value = this.readTokens(target);
+Tokeniser.prototype.tokenise = function(text) {
+    var token = this.createToken(text);
+    this.target.appendChild(token);
+    this.form_input.value = this.readTokens();
 }
-Tokeniser.prototype.readTokens = function(target) {
-    var nodes = $(target).children('.set_token');
+Tokeniser.prototype.readTokens = function() {
+    var nodes = $(this.target).children('.set_token');
     return nodes.map(function callback() {
         return $(this).data('set-id');
     })
@@ -63,7 +51,7 @@ Tokeniser.prototype.readTokens = function(target) {
     .join();
 }
-Tokeniser.prototype.createToken = function($input) {
+Tokeniser.prototype.createToken = function(set_id) {
     var class_mapping = {
         ":p": "patient_token",
         ":r": "run_token",
@@ -71,8 +59,7 @@
     };
     var token = document.createElement('span');
-    var text = $input.val().trim();
-    var set_id = $input.data('set-id');
+    var text = set_id.substr(3).trim();
     var className = "set_token " + class_mapping[set_id.substr(0, 2)];
    token.className = className;
......
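
A further hedged sketch of what the reworked createToken/readTokens pair produces, reusing the singleton from the sketch above; the ":p 42 Doe" / ":r 7 run-A" shapes and the patient/run class names come from class_mapping, while anything beyond these hunks (including how the set-id is stored on the token) is assumed:

    // The first two characters of the set id pick the token class,
    // everything after the third character becomes the visible label.
    new Tokeniser().tokenise(":p 42 Doe");   // <span class="set_token patient_token">42 Doe ...
    new Tokeniser().tokenise(":r 7 run-A");  // <span class="set_token run_token">7 run-A ...
    // readTokens() maps every .set_token child of this.target to its set-id
    // data and joins the results with commas into this.form_input.value,
    // e.g. ":p 42 Doe,:r 7 run-A"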
@@ -71,7 +71,7 @@
     class="token_input"
     autocomplete="off"
     onfocus="new VidjilAutoComplete().setupSamples(this);
-             new Tokeniser().setup(this, document.getElementById('set_div_{{=i}}'), document.getElementById('file_set_list_{{=i}}'));"
+             new Tokeniser(document.getElementById('set_div_{{=i}}'), document.getElementById('file_set_list_{{=i}}'));"
     data-needs-atwho="true"
     data-needs-tokeniser="true"
     data-group-ids="{{=upload_group_ids}}"
......