diff --git a/app.py b/app.py
index 3a168ec..2752688 100644
--- a/app.py
+++ b/app.py
@@ -249,6 +249,7 @@ def GET(self, res_name):
with open(TEMPLATE_LIST,'r') as tpl_file:
tpl_list = json.load(tpl_file)
+ print(res_name)
res_type = [i['type'] for i in tpl_list if i["short_name"] == res_name][0]
res_full_name = [i['name'] for i in tpl_list if i["short_name"] == res_name][0]
@@ -513,6 +514,7 @@ def POST(self, page):
# create a new template
elif actions.action.startswith('createTemplate'):
print('create template')
+ print(actions)
is_git_auth = github_sync.is_git_auth()
res_type = actions.class_uri.strip() if "class_uri" in actions else conf.main_entity
res_name = actions.class_name.replace(' ','_').lower() if "class_name" in actions else "not provided"
@@ -524,9 +526,9 @@ def POST(self, page):
types = [t['type'] for t in templates]
now_time = str(time.time()).replace('.','-')
# check for duplicates
- res_n = actions.class_name if (res_type not in types and res_name not in names) else actions.class_name+'_'+now_time
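+ # on a name/type clash, suffix both the template name and its URL slug with a timestamp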
+ res_n, address = (actions.class_name, res_name) if (res_type not in types and res_name not in names) else (actions.class_name+'_'+now_time, res_name+'_'+now_time)
u.updateTemplateList(res_n,res_type)
- raise web.seeother(prefixLocal+'template-'+res_name)
+ raise web.seeother(prefixLocal+'template-'+address)
# login or create a new record
else:
@@ -827,7 +829,7 @@ def POST(self, name):
# save the new record for future publication
if actions.action.startswith('save'):
- if not f.validates() or not u.check_mandatory_fields(web.input()):
+ if not f.validates():
graphToRebuild = conf.base+name+'/'
recordID = name
data = queries.getData(graphToRebuild,templateID)
@@ -866,7 +868,7 @@ def POST(self, name):
# publish the record
elif actions.action.startswith('publish'):
- if not f.validates() or not u.check_mandatory_fields(web.input()):
+ if not f.validates():
graphToRebuild = conf.base+name+'/'
recordID = name
data = queries.getData(graphToRebuild,templateID)
diff --git a/forms.py b/forms.py
index e86db5b..f84f393 100644
--- a/forms.py
+++ b/forms.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-import web , datetime , os, time, re, cgi , json
+import web , datetime , os, time, re, cgi , json, html
from web import form
import conf
@@ -60,7 +60,7 @@ def get_form(json_form, from_dict=False, subtemplate=False):
description = field['label'] if 'label' in field and len(field['label']) > 0 else 'input'
pre_a = ' '
- prepend = pre_a+field['prepend']+pre_b if 'prepend' in field and len(field['prepend']) > 0 else ''
+ prepend = pre_a+html.escape(field['prepend'])+pre_b if 'prepend' in field and len(field['prepend']) > 0 else ''
disabled = 'disabled' if 'disabled' in field and field['disabled'] == "True" else ''
classes = field['class'] if 'class' in field and len(field['class']) > 0 else ''
if 'vocab' in field:
@@ -87,7 +87,7 @@ def get_form(json_form, from_dict=False, subtemplate=False):
# Text box
if field['type'] in ['Textbox','Vocab', 'WebsitePreview']:
if "disambiguate" in field and field["disambiguate"] == 'True':
- vpass = form.regexp(r".{1,200}$", 'must be between 1 and 200 characters')
+ vpass = form.regexp(r".{0,200}$", 'must be between 1 and 200 characters') # TODO: check the regex (either set it to {0, 200} or remove it in case of Subtemplates' primary keys)
params = params + (form.Textbox(myid, vpass,
description = description,
id=myid,
diff --git a/mapping.py b/mapping.py
index 4396c07..6be163b 100644
--- a/mapping.py
+++ b/mapping.py
@@ -58,7 +58,7 @@ def getValuesFromFields(fieldPrefix, recordData, fields=None, field_type=None):
def getRightURIbase(value):
- return WD+value if value.startswith('Q') else GEO+value if value.isdecimal() else VIAF+value[4:] if value.startswith("viaf") else ''+value if value.startswith("http") else base+value
+ return WD+value if value.startswith('Q') else GEO+value if value.isdecimal() else VIAF+value[4:] if value.startswith("viaf") else ''+value if value.startswith("http") else base+value.strip()
def inputToRDF(recordData, userID, stage, knowledge_extraction, graphToClear=None,tpl_form=None, subrecords_dict=None):
@@ -115,6 +115,7 @@ def inputToRDF(recordData, userID, stage, knowledge_extraction, graphToClear=Non
print(subject, predicate, obj, label)
if label:
wd.add((obj, RDFS.label, Literal(label, datatype="http://www.w3.org/2001/XMLSchema#string")))
+
queries.clearGraph(graphToClear)
wd.add(( URIRef(base+graph_name+'/'), PROV.generatedAtTime, Literal(datetime.datetime.now(),datatype=XSD.dateTime) ))
wd.add(( URIRef(base+graph_name+'/'), URIRef('http://dbpedia.org/ontology/currentStatus'), Literal(stage, datatype="http://www.w3.org/2001/XMLSchema#string") ))
@@ -198,11 +199,25 @@ def inputToRDF(recordData, userID, stage, knowledge_extraction, graphToClear=Non
server.update('load into graph <'+base+extraction_graph_name+'/>')
# SUBTEMPLATE
elif field['type']=="Subtemplate":
+ # check potential duplications:
+ #doubled_values = check_double_subrecords(recordData) if not doubled_values else doubled_values
+
+ # handle imported entities from catalogue (not newly created ones)
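+ # each such field holds an "id,label" pair; the label is URL-encoded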
+ imported_entities = [field_id for field_id in recordData if field_id.startswith(field['id']+"-") and "," in recordData[field_id]]
+ for imported_entity in imported_entities:
+ imported_entity_id, imported_entity_label = recordData[imported_entity].split(',')
+ imported_entity_label = urllib.parse.unquote(imported_entity_label)
+ entityURI = getRightURIbase(imported_entity_id)
+ print(entityURI)
+ wd.add(( URIRef(base+graph_name), URIRef(field['property']), URIRef(entityURI) ))
+ wd.add(( URIRef( entityURI ), RDFS.label, Literal(imported_entity_label.strip(), datatype="http://www.w3.org/2001/XMLSchema#string") ))
subrecords = process_subrecords(recordData, field['id']) if not subrecords_dict else subrecords_dict
- print("SUBRECORDS!!!!!!!!!!!!!!!\n", subrecords)
+ print("#### surbrecords:", subrecords)
if field['id'] in subrecords:
for subrecord_idx, subrecord in subrecords[field['id']].items():
- ID = str(int(time.time() * 1000))
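+ # subrecord ID: current timestamp with '.' replaced by '-'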
+ ct = datetime.datetime.now()
+ ts = ct.timestamp()
+ ID = str(ts).replace('.', '-')
subrecord['recordID'] = ID
label = find_label(field['import_subtemplate'], subrecord, field['label'])
inputToRDF(storify(subrecord),userID,stage,knowledge_extraction,tpl_form=field['import_subtemplate'],subrecords_dict=subrecord)
@@ -228,45 +243,63 @@ def inputToRDF(recordData, userID, stage, knowledge_extraction, graphToClear=Non
return 'records/'+recordID+'.ttl'
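+# map subrecord pointer fields to their "target-..." values and back (helper; its call above is currently commented out)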
+def check_double_subrecords(data):
+ results_dict = {
+ 'targets': {},
+ 'pointers' : {},
+ }
+ for key, value in data.items():
+ if value.startswith("target-"):
+ split_key = key.split("__")
+ new_key = split_key[0] + "__" + split_key[-1]
+ split_value = value.replace("target-", "").split("__")
+ new_value = split_value[0] + "__" + split_value[-1]
+ results_dict['targets'][new_value] = new_key
+ results_dict['pointers'][new_key] = new_value
+ return results_dict
+
+
# convert the dict of inputs into a series of nested dictionaries to be parsed as single records
-def process_subrecords(data, id):
- results = {}
- created_subrecords = [key for key in data if key.startswith(id+"__")]
- if created_subrecords != []:
- for subrecord in created_subrecords:
- add_results = {}
- subrecord_split = subrecord.split('__')
- prefix, num = subrecord_split[0], subrecord_split[-1]
- subrecord_fields = data[subrecord].split(',')
- inner_subrecords = [key for item in subrecord_fields for key in data.keys() if key.startswith(item + "__")]
- for key in subrecord_fields:
- if data[key] != "":
- add_results[key.split('__')[0]] = data[key]
- else:
- inner_subrecords = [inner_subrecord for inner_subrecord in data.keys() if inner_subrecord.startswith(key + "__")]
- if inner_subrecords != []:
- for inner_subrecord in inner_subrecords:
- inner_subrecord_split = inner_subrecord.split('__')
- inner_prefix, inner_num = inner_subrecord_split[0], inner_subrecord_split[-1]
- if inner_prefix in add_results:
- add_results[inner_prefix][inner_num] = process_subrecords(data, inner_subrecord)
- else:
- add_results[inner_prefix] = {
- inner_num: process_subrecords(data, inner_subrecord)
- }
- else:
- imported_values = [import_key for import_key in data.keys() if import_key.startswith(key + "-")]
- for imported_value in imported_values:
- new_key = imported_value.split('__')[0] + "-" + imported_value.split('-')[-1]
- add_results[new_key] = data[imported_value]
- if prefix in results:
- results[prefix][num] = add_results
- else:
- results[prefix] = { num: add_results }
- elif data[id] != "":
- for el in data[id].split(','):
- results[el.split('__')[0]] = data[el]
- return results
+def process_subrecords(data, id, created_subrecords=None):
+ results = {}
+ subrecords = [key for key in data if key.startswith(id+"__") and not data[key].startswith("target-")] if created_subrecords is None else created_subrecords
+
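+ # values prefixed with "target-" point to nested subrecords and are resolved recursively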
+ for subrecord in subrecords:
+ subrecord_split = subrecord.split('__')
+ prefix, num = subrecord_split[0], subrecord_split[-1]
+ if prefix not in results:
+ results[prefix] = { num: {} }
+ else:
+ results[prefix][num] = {}
+ add_results = {}
+ subrecord_fields = data[subrecord].split(',')
+ for key in subrecord_fields:
+ if data[key].startswith("target-"):
+ add_results[key.replace("target-", "").split('__')[0]] = {key.split('__')[-1] : process_subrecords(data, data[key].replace("target-", "")) }
+ elif data[key] != "":
+ add_results[key.split('__')[0]] = data[key]
+ else:
+ multiple_values_fields = [import_key for import_key in data.keys() if import_key.startswith(key + "-")]
+ for imported_value in multiple_values_fields:
+ new_key = imported_value.split('__')[0] + "-" + imported_value.split('-')[-1]
+ add_results[new_key] = data[imported_value]
+ inner_subrecords = [item for item in data.keys() if item.startswith(key + "__") and not data[item].startswith("target-") ]
+ if inner_subrecords:
+ add_results[key.split('__')[0]] = process_subrecords(data, key, inner_subrecords)[key.split('__')[0]]
+
+ results[prefix][num] = add_results
+
+ if not subrecords and data[id] != "":
+ for el in data[id].split(','):
+ imported_resources = [field_id for field_id in data if field_id.startswith(el+"-")]
+ for imported_res in imported_resources:
+ results[imported_res.split('__')[0]+"-"+imported_res.split("-")[-1]] = data[imported_res]
+ results[el.split('__')[0]] = data[el]
+
+ return results
+
def find_label(tpl, subrecord, alternative_label):
print(tpl)
diff --git a/static/js/main.js b/static/js/main.js
index 38b473a..13d16fe 100644
--- a/static/js/main.js
+++ b/static/js/main.js
@@ -138,7 +138,6 @@ $(document).ready(function() {
$(this).closest('form').parent().after(searchresult);
}
-
if ( $(this).hasClass('searchWikidata') ) {
searchWD(searchID);
};
@@ -174,6 +173,10 @@ $(document).ready(function() {
if ( $(this).hasClass('websitePreview')) {
addURL(searchID, iframe=true);
}
+
+ if ( $(this).attr('subtemplate') != undefined) {
+ searchCatalogueByClass(searchID);
+ }
});
// create preview buttons for multimedia urls (click on MMtag, i.e., MultiMediaTag)
@@ -587,6 +590,112 @@ function searchGeonames(searchterm) {
});
};
+// a function to look for catalogue's records belonging to a desired class
+function searchCatalogueByClass(searchterm) {
+ var resource_class = $('#'+searchterm).attr('subtemplate');
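+ // collect labels and hidden-input ids of subrecords created in this session but not yet saved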
+ var yet_to_save_keys = [];
+ var yet_to_save_resources = [];
+ $('.disambiguate[class*="' + resource_class + '"]').each(function() {
+ yet_to_save_keys.push($(this).val());
+ var key_id = $(this).attr('id');
+ var subrecord = $('input[type="hidden"][value*="'+key_id+'"]');
+ yet_to_save_resources.push(subrecord.attr('id'));
+ });
+
+
+ $('#'+searchterm).keyup(function(e) {
+ var useful_yet_to_save_keys = yet_to_save_keys.filter(function(value) {
+ return value.toLowerCase().includes($('#'+searchterm).val().toLowerCase()) && value.trim() !== '';
+ });
+
+ // autocomplete positioning;
+ var position = $('#'+searchterm).position();
+ var leftpos = $('.subform_section').length !== 0 ? position.left-35 : position.left+15;
+ var offset = $('#'+searchterm).offset();
+ var height = $('#'+searchterm).height();
+ var top = $('.subform_section').length !== 0 ? offset.top - $('.subform_section').offset().top + height + "px" : offset.top + height + "px";
+ var max_width = $('.subform_section').length !== 0 ? '90%' : '600px';
+ $('#searchresult').css( {
+ 'position': 'absolute',
+ 'margin-left': leftpos+'px',
+ 'top': top,
+ 'z-index':1000,
+ 'background-color': 'white',
+ 'border':'solid 1px grey',
+ 'max-width':max_width,
+ 'border-radius': '4px'
+ });
+ $("#searchresult").show();
+
+ // prepare the query
+ var query_term = $('#'+searchterm).val();
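+ // full-text search on labels (bds:), restricted to instances of the subtemplate's class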
+ var query = "prefix bds: select distinct ?s ?o where { ?o bds:search '"+query_term+"*'. ?o bds:minRelevance '0.3'^^xsd:double . ?s rdfs:label ?o ; a <"+resource_class+"> .}"
+ var encoded = encodeURIComponent(query);
+
+ // send the query request
+ $.ajax({
+ type: 'GET',
+ url: myPublicEndpoint + '?query=' + encoded,
+ headers: { Accept: 'application/sparql-results+json' },
+ success: function (returnedJson) {
+ $("#searchresult").empty();
+ var url = myPublicEndpoint + '?query=' + encoded
+ // show results
+ if (!returnedJson.results.bindings.length) {
+ $("#searchresult").append("No results in catalogue
");
+ // remove messages after 3 seconds
+ setTimeout(function(){ if ($('.noresults').length > 0) { $('.noresults').remove(); } }, 3000);
+ } else {
+ for (let i = 0; i < returnedJson.results.bindings.length; i++) {
+ var myUrl = returnedJson.results.bindings[i].s.value;
+ var resID = myUrl.substr(myUrl.lastIndexOf('/') + 1);
+ $("#searchresult").append("");
+ }
+ }
+
+ // add tag if the user chooses an item from the catalogue
+ $('a[data-id^="' + base + '"]').each(function () {
+ $(this).bind('click', function (e) {
+ e.preventDefault();
+ var oldID = this.getAttribute('data-id').substr(this.getAttribute('data-id').lastIndexOf('/') + 1);
+ var oldLabel = $(this).text();
+ $('#' + searchterm).next('i').after("" + oldLabel + " ");
+ $("#searchresult").hide();
+ $('#' + searchterm).val('');
+ });
+
+ });
+
+ // once external resources have been added, include newly created resources (yet to be saved)
+ for (let j = 0; j < useful_yet_to_save_keys.length; j++) {
+ var resource_id = yet_to_save_resources[j];
+ var resource_name = useful_yet_to_save_keys[j];
+ $('#searchresult').append("")
+ }
+
+ // add tag if the user chooses an item from yet to save resources
+ $('a[target]').each(function () {
+ $(this).bind('click', function (e) {
+ e.preventDefault();
+ console.log(this)
+ var target = $(this).attr('target');
+ var label = $(this).text();
+ var id_root = target.replace(/\d+$/, '');
+ var subrecord_idx = $('[id^="'+id_root+'"]').length + 1;
+ $('#' + searchterm).next('i').after("" + label + " ");
+ $("#searchresult").hide();
+ $('#' + searchterm).val('');
+ });
+
+ });
+ },
+ error: function (error) {
+ console.error(error);
+ }
+ });
+ })
+};
+
// a function to look through the catalogue while querying Wikidata and VIAF
function searchCatalogueIntermediate(q) {
return new Promise(function(resolve, reject) {
@@ -1317,28 +1426,29 @@ function create_subrecord(resource_class, field_name, el) {
// save or cancel subrecord
const subrecord_buttons = $("");
- const save_subrecord_btn = $(" ");
+ const save_subrecord_btn = $(" ");
const cancel_subrecord_btn = $(" ");
// SAVE
save_subrecord_btn.on('click', function(e) {
// generate a tag
- console.log("RQWEQ")
- var tag_label = subrecord_form.find('.disambiguate').val() || (field_name + "-" + $(".tag-subrecord[class~='"+resource_class+"']").length + 1);
- var subinputs = [];
- subrecord_form.find('input:not(.btn)').each(function() {
- $("#recordForm").append($(this));
- $(this).hide();
- if ($(this).attr('id') !== undefined) {subinputs.push($(this).attr('id'))};
- });
- var subrecord_index = $("[subtemplate='"+resource_class+"']").parent().parent().find('.tag-subrecord').length + 1;
- var subrecord_id = $("[subtemplate='"+resource_class+"']").attr('id') + "__" + subrecord_index;
- console.log(el);
- el.after("" + tag_label + " ");
- $('#recordForm').append(" ");
+ var is_valid = check_mandatory_fields(this);
+ if (is_valid) {
+ var tag_label = subrecord_form.find('.disambiguate').val() || (field_name + "-" + $(".tag-subrecord[class~='"+resource_class+"']").length + 1);
+ var subinputs = [];
+ subrecord_form.find('input:not(.btn)').each(function() {
+ $("#recordForm").append($(this));
+ $(this).hide();
+ if ($(this).attr('id') !== undefined) {subinputs.push($(this).attr('id'))};
+ });
+ var subrecord_index = $("[subtemplate='"+resource_class+"']").parent().parent().find('.tag-subrecord').length + 1;
+ var subrecord_id = $("[subtemplate='"+resource_class+"']").attr('id') + "__" + subrecord_index;
+ el.after("" + tag_label + " ");
+ $('#recordForm').append(" ");
- // hide_subform
- cancel_subrecord(this);
+ // hide_subform
+ cancel_subrecord(this);
+ }
});
// CANCEL
cancel_subrecord_btn.on('click', function(e) {
@@ -2557,18 +2667,20 @@ function next_extractor(element, id, type) {
// TODO: bring it to the right position within this file
-function check_mandatory_fields(){
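+// when called from a subrecord save button, only that subform's mandatory fields are checked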
+function check_mandatory_fields(subrecord_btn=false){
var is_valid = true;
-
- $('[mandatory="True"]:not(.original_subtemplate)').each(function() {
+
+ if (subrecord_btn) { var fields = $(subrecord_btn).parent().parent().find('[mandatory="True"]'); } else { var fields = $('[mandatory="True"]:not(.original_subtemplate)'); }
+ fields.each(function() {
if ($(this).val() === '' && !$('[data-input="'+$(this).attr('id')+'"]').length) {
console.log($(this));
/* in principle, the header could be changed through the back-end application.
However, this would cause the loss of all inserted values. */
- $('header').find('h3').eq(0).text("The form is not valid, please check mandatory fields")
+ if (subrecord_btn) { alert("Check Mandatory Fields!")}
+ else { $('header').find('h3').eq(0).text("The form is not valid, please check mandatory fields") }
window.scrollTo(0, 0);
is_valid = false;
}
})
return is_valid;
-}
\ No newline at end of file
+}
diff --git a/utils.py b/utils.py
index b6fc77d..80ad09b 100644
--- a/utils.py
+++ b/utils.py
@@ -294,10 +294,11 @@ def updateTemplateList(res_name=None,res_type=None,remove=False):
for i in range(len(data)):
if data[i]['short_name'] == res_name:
- #del data[i]
- #break
- data[i]["status"] = "deleted"
-
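+ # hard delete: remove the template file from disk and drop its entry (previously only flagged as "deleted")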
+ to_be_deleted = data[i]['template']
+ if os.path.exists(to_be_deleted):
+ os.remove(to_be_deleted)
+ del data[i]
+ break
with open(TEMPLATE_LIST,'w') as tpl_file:
json.dump(data, tpl_file)