def print_specs(specs): print Ut.headings("SPECIFICATIONS DATA", line=False) # PRINT SPECS for key, data in specs.items(): if key == "target" or key == "source": new_line = "\n" else: new_line = "" if type(data) == str or type(data) == int or type(data) == unicode: value = to_unicode(data) #.encode(encoding='utf-8') elif type(data) == float or type(data) == int: value = to_unicode(data) else: value = type(data) print "{}\t{:22}{}".format(new_line, key, "{}".format(": {}".format(to_bytes(value)))) if type(data) == dict: for detail, val in data.items(): print "\t\t{:18}: {}".format(detail, val) print ""
def expand_approx(specs, theta, stop_words_string, stop_symbols_string, linkset2expand, reorder=True): data = None inserted_1 = 0 inserted_2 = 0 total = 0 count= 0 abort = False for is_source in [True, False]: count += 1 print Ut.headings("********* PASS {} *********").format(count) # if is_source is False: # specs[St.corr_reducer] = data[St.result] # print data[St.result] data = prefixed_inverted_index( specs, theta=theta, reorder=reorder, stop_words_string=stop_words_string, stop_symbols_string=stop_symbols_string, expands=True, is_source=is_source, linkset2expand=linkset2expand, check_file=False) if count == 1: inserted_1 += data['inserted'] total += inserted_1 else: inserted_2 += data['inserted'] total += inserted_2 if data[St.message].__contains__('ALREADY EXISTS'): abort = True print "\n>>> THE PROCESS IS BEING ABORTED AS THE FIRST " \ "PASS REVEALS THE EXISTENCE OF AN EXPANSION OF THE GRAPH." break if abort is False: # REMOVE DUPLICATES print "REMOVING REPETITION" if data is not None and data[St.result] is not None: print "\t", Qry.remove_repetition_same_direction(data[St.result]) # PRINT THE FINAL TRIPLE COUNT final_inserted = Qry.get_triples_count(data[St.result]) if final_inserted is None: final_inserted = 0 else: final_inserted = int(final_inserted) print "\nOVERALL STATS:\n\tCORRESPONDENCES DISCOVERED AT PASS 1 : {}\n\tCORRESPONDENCES DISCOVERED AT PASS 2 : {}".format( inserted_1, inserted_2) print "\tOVERALL CORRESPONDENCES DISCOVERED : {}".format(total) print "\tTOTAL REPEATED CORRESPONDENCES REMOVED : {}".format(total - final_inserted) print "\tTOTAL CORRESPONDENCES INSERTED : {}".format(final_inserted) # print data return data
def export_flat_alignment(alignment): print Ut.headings("EXPORTING THE ALIGNMENT WITH NO METADATA") print "Export for: {}".format(alignment) alignment = str(alignment).strip() row_alignment = alignment alignment = alignment if Ut.is_nt_format( alignment) is True else "<{}>".format(alignment) # CONSTRUCT QUERY query = """ PREFIX ll: <{}> CONSTRUCT {{ ?x ll:mySameAs ?z }} WHERE {{ GRAPH {} {{ ?x ?y ?z }} }} order by ?x """.format(Ns.alivocab, alignment) # print query # FIRE THE CONSTRUCT AGAINST THE TRIPLE STORE alignment_construct = Qry.endpointconstruct(query) # REMOVE EMPTY LINES # COMMA IS COUNTED WHENEVER THERE ARE MORE OBJECTS FOR THE SUBJECT triples = len(regex.findall('ll:mySameAs', alignment_construct)) + len( regex.findall(',', alignment_construct)) alignment_construct = "\n".join( [line for line in alignment_construct.splitlines() if line.strip()]) alignment_construct = alignment_construct.replace( "{", "{}\n{{".format(alignment)) # RESULTS result = "### TRIPLE COUNT: {0}\n### LINKSET: {1}\n".format( triples, alignment) + alignment_construct message = "You have just downloaded the graph [{}] which contains [{}] correspondences. ".format( row_alignment, triples) return {'result': result, 'message': message, "triples": triples}
def specification_2_linkset_subset(specs, activated=False): if activated is True: print Ut.headings("EXECUTING LINKSET SUBSET SPECS...") else: print Ut.headings( "THE FUNCTION [specification_2_linkset_subset] IS NOT ACTIVATED") return {St.message: Ec.ERROR_CODE_0, St.error_code: 0, St.result: None} # ACCESS THE TASK SPECIFIC PREDICATE COUNT specs[St.sameAsCount] = Qry.get_same_as_count(specs[St.mechanism]) # UPDATE THE QUERY THAT IS GOING TO BE EXECUTED if specs[St.sameAsCount]: source = specs[St.source] target = specs[St.target] # UPDATE THE SPECS OF SOURCE AND TARGETS update_specification(source) update_specification(target) # GENERATE THE NAME OF THE LINKSET Ls.set_subset_name(specs) # SETTING SOME GENERIC METADATA INFO specs[St.link_name] = "same" specs[St.linkset_name] = specs[St.linkset_name] specs[St.link] = "http://risis.eu/linkset/predicate/{}".format( specs[St.link_name]) specs[ St. link_subpropertyof] = "http://risis.eu/linkset/predicate/{}".format( specs[St.link_name]) specs[St.linkset] = "{}{}".format(Ns.linkset, specs[St.linkset_name]) specs[St.assertion_method] = "{}{}".format(Ns.method, specs[St.linkset_name]) specs[St.justification] = "{}{}".format(Ns.justification, specs[St.linkset_name]) # COMMENT ON THE LINK PREDICATE specs[St.link_comment] = "The predicate <{}> is used in replacement of the linktype <{}> used in the " \ "original <{}> dataset.".format( specs[St.link], specs[St.source][St.link_old], specs[St.source][St.graph]) # COMMENT ON THE JUSTIFICATION FOR THIS LINKSET specs[St.justification_comment] = "In OrgRef's a set of entities are linked to GRID. The linking method " \ "used by OrgRef is unknown. Here we assume that it is a curated work " \ "and extracted it as a linkset.", # COMMENT ON THE LINKSET ITSELF specs[St.linkset_comment] = "The current linkset is a subset of the <{0}> dataset that links <{0}> to " \ "<{1}>. 
The methodology used by <{0}> to generate this builtin linkset in " \ "unknown.".format(specs[St.source][St.graph], specs[St.target][St.graph]) source[St.entity_ns] = str(source[St.entity_datatype]).replace( source[St.entity_name], '') target[St.entity_ns] = str(target[St.entity_datatype]).replace( target[St.entity_name], '') # GENERATE THE LINKSET inserted_linkset = spa_linkset_subset(specs, activated) # print "LINKSET SUBSET RESULT:", inserted_linkset if inserted_linkset[St.message].__contains__("ALREADY EXISTS"): return inserted_linkset if specs[St.triples] > "0": # REGISTER THE ALIGNMENT if inserted_linkset[St.message].__contains__("ALREADY EXISTS"): Urq.register_alignment_mapping(specs, created=False) else: Urq.register_alignment_mapping(specs, created=True) return inserted_linkset else: print Ec.ERROR_CODE_1 return {St.message: Ec.ERROR_CODE_1, St.error_code: 5, St.result: None}
def intersecting(specs, activated=False):
    """Create a lens that is the INTERSECTION of the graphs in specs['datasets'].

    Inserts the intersection graph and its generic metadata into the triple
    store and registers the lens. Returns a {St.message, St.error_code,
    St.result} dict: error_code 0 on success (result = lens URI), 71 when the
    lens already exists, 4 when no match was found, 5 on any exception, and
    1 when the function is not activated.
    """
    if activated is False:
        print("THE FUNCTION [intersecting] IS NOT ACTIVATED")
        return {
            St.message: "THE FUNCTION [intersecting] IS NOT ACTIVATED.",
            St.error_code: 1,
            St.result: None
        }

    print Ut.headings("EXECUTING INTERSECTION SPECS...")

    # 1. GENERATE THE LENS NAME
    lens_name = generate_lens_name(specs['datasets'], operator="intersection")
    specs[St.lens] = "{}{}".format(Ns.lens, lens_name['name'])
    Ut.update_specification(specs)

    # **********************************
    # 3. GOOD TO GO CHECK
    # **********************************
    query = """
    SELECT *
    {{
        <{}> ?predicate ?object .
    }}
    """.format(specs[St.lens])
    check = Lens_Union.run_checks(specs, query, operator="intersection")

    # NOT GOOD TO GO, IT ALREADY EXISTS
    if check[St.message].__contains__("ALREADY EXISTS"):
        return {
            St.message: check[St.message],
            St.error_code: 71,
            St.result: specs[St.lens]
        }

    # **********************************
    # GOOD TO GO
    # **********************************
    else:
        try:
            specs[St.lens_target_triples] = ""

            # DOCUMENTING START TIME
            lens_start = time.time()

            print "\n4. GENERATE THE INSERT QUERY"
            specs[St.insert_query] = intersection_extended(
                specs, lens_name=specs[St.lens_name])
            print specs[St.insert_query]

            print "\n5. >>> LOOKING FOR INTERSECTING LINKS"
            print "\t", Qry.boolean_endpoint_response(specs[St.insert_query])

            print "\n6. EXTRACTING THE NUMBER OF TRIPLES"
            specs[St.triples] = Qry.get_namedgraph_size("{0}{1}".format(
                Ns.lens, specs[St.lens_name]))

            lens_end = time.time()
            diff = lens_end - lens_start
            print " \n>>> Executed so far in : {:<14}".format(
                str(datetime.timedelta(seconds=diff)))

            if int(specs[St.triples]) > 0:
                # One "void:target" line per intersected dataset; consumed by
                # the metadata template below.
                for linkset in specs[St.datasets]:
                    specs[St.lens_target_triples] += \
                        "\n\t void:target <{}> ;".format(linkset)

                print "\n7. INSERTING THE GENERIC METADATA"
                metadata = Gn.intersection_meta(specs)
                # print metadata
                Qry.boolean_endpoint_response(metadata)

                # print "\n8. WRITING TO FILE"
                server_message = "Linksets created as: {}".format(
                    specs[St.lens])
                message = "The linkset was created as [{}] with {} triples found!".format(
                    specs[St.lens], specs[St.triples])
                print "\n\t", server_message
                Urq.register_lens(specs, is_created=True)

                ls_end_2 = time.time()
                diff = ls_end_2 - lens_end
                print ">>> Executed in : {:<14}".format(
                    str(datetime.timedelta(seconds=diff)))
                print "\t*** JOB DONE! ***"
                return {
                    St.message: message,
                    St.error_code: 0,
                    St.result: specs[St.lens]
                }
            else:
                print "The linkset was not generated as no match could be found"
                print "\t*** JOB DONE! ***"
                return {
                    St.message: "The linkset was not generated as no match could be found",
                    St.error_code: 4,
                    St.result: None
                }
        except Exception as err:
            # NOTE(review): 'err' is unused; the full traceback is printed
            # instead and a generic error dict is returned.
            traceback.print_exc()
            return {
                St.message: Ec.ERROR_CODE_1,
                St.error_code: 5,
                St.result: None
            }


# specs = {
#     'lens_operation': u'intersection',
#     'datasets': [u'http://risis.eu/linkset/grid_20170712_eter_2014_approxStrSim_Organization_altLabel_P1079405301',
#                  u'http://risis.eu/linkset/grid_20170712_eter_2014_approxStrSim_Organization_altLabel_P1661430032',
#                  u'http://risis.eu/linkset/grid_20170712_eter_2014_approxStrSim_Organization_label_N1860664105'],
#     'researchQ_URI': u'http://risis.eu/activity/idea_67a6ce'}

# specs_2 = {'lens_operation': u'intersection',
#            'datasets': [u'http://risis.eu/lens/union_Grid_20170712_Eter_2014_N291690309',
#                         u'http://risis.eu/lens/union_Orgreg_20170718_Eter_2014_P1061032980',
#                         u'http://risis.eu/lens/union_Orgreg_20170718_Grid_20170712_N1966224323'],
#            'researchQ_URI': u'http://risis.eu/activity/idea_67a6ce'}
#
# specs_3 = {'lens_operation': u'intersection',
#            'datasets': [
#                u'http://risis.eu/linkset/orgreg_20170718_grid_20170712_approxStrSim_University_Entity_current_name_English_N682223883',
#                u'http://risis.eu/linkset/orgreg_20170718_grid_20170712_approxStrSim_University_Entity_current_name_English_P2117262605',
#                u'http://risis.eu/lens/union_Grid_20170712_Eter_2014_N291690309',
#                u'http://risis.eu/lens/union_Orgreg_20170718_Eter_2014_P1061032980',
#                u'http://risis.eu/lens/union_Orgreg_20170718_Grid_20170712_N1966224323'],
#            'researchQ_URI': u'http://risis.eu/activity/idea_67a6ce'}
#
#
#
# print intersection_extended(specs_3, "lens_name", display=False)
# import Alignments.Manage.AdminGraphs as adm
# adm.drop_a_lens("http://risis.eu/lens/intersection_Grid_20170712_Eter_2014_P1326988364", display=True, activated=True)
# print intersecting(specs, activated=True)
def refine_lens(specs, activated=False, check_file=False):
    """Refine an existing linkset/lens with a geo-match and save it as a lens.

    Moves specs['linkset'] to specs[St.refined], runs the geo-match, extracts
    the resulting triple count, inserts metadata and writes the linkset to
    file. Returns a {St.message, St.error_code, St.result} dict: error_code 0
    on success (result = lens URI), 71 when the lens already exists, 4 when
    not activated or when no match was found, 5 on any exception.

    NOTE(review): when specs[St.sameAsCount] is falsy the function falls
    through and implicitly returns None — confirm callers handle that.
    """
    try:
        message = Ec.ERROR_CODE_0.replace('\n', "<br/>")
        if activated is False:
            print Ut.headings("THE FUNCTION [refine_lens] IS NOT ACTIVATED")
            return {St.message: message, St.error_code: 4, St.result: None}

        # 1. UPDATING THE SPECS BY CHANGING LINKSET TO TENS
        specs[St.refined] = specs['linkset']
        specs.pop('linkset')
        Ut.update_specification(specs)

        # CHECKING WHETHER THE LENS IS REFINENABLE
        # Refine.is_refinable(specs[St.refined])

        # PRINTING THE SPECIFICATIONS
        # lensUt.print_specs(specs)

        # ASSIGN THE SAME AS COUNT
        specs[St.sameAsCount] = Qry.get_same_as_count(specs[St.mechanism])
        # Default message from here on: no-match / not-executed (code 4).
        message = Ec.ERROR_CODE_4.replace('\n', "<br/>")

        if specs[St.sameAsCount]:
            source = specs[St.source]
            target = specs[St.target]

            # 2. SET THE LENS NAME
            # *******************************
            print "\n2. SET THE LENS NAME"
            # *******************************
            lensUt.lens_refine_name(specs, 'refine')

            # *******************************
            # GOOD TO GO CHECK
            # *******************************
            query = """
            SELECT *
            {{
                <{}> ?predicate ?object .
            }}
            """.format(specs[St.lens])
            check = Lens_Union.run_checks(specs, query, operator="refine")

            # NOT GOOD TO GO, IT ALREADY EXISTS
            if check[St.message].__contains__("ALREADY EXISTS"):
                return {
                    St.message: check[St.message],
                    St.error_code: 71,
                    St.result: specs[St.lens]
                }

            # *******************************
            # GOOD TO GO
            # *******************************
            else:
                lens_start = time.time()

                # UPDATE THE SPECIFICATION
                Ut.update_specification(specs[St.source])
                Ut.update_specification(specs[St.target])

                # PRINTING THE SPECIFICATIONS
                lensUt.print_specs(specs)

                ########################################################################
                print """\n4. EXECUTING THE GEO-MATCH """
                ########################################################################
                geo_match(specs)

                ########################################################################
                print """\n5. EXTRACT THE NUMBER OF TRIPLES """
                ########################################################################
                specs[St.triples] = Qry.get_namedgraph_size("{0}{1}".format(
                    Ns.lens, specs[St.lens_name]))

                ########################################################################
                print """\n6. ASSIGN THE SPARQL INSERT QUERY """
                ########################################################################
                specs[St.insert_query] = "{} ;\n{};\n{}".format(
                    geo_load_query(specs, True), geo_load_query(specs, False),
                    geo_match_query(specs))

                lens_end = time.time()
                diff = lens_end - lens_start
                print "\n\t>>> Executed so far in : {:<14}".format(
                    str(datetime.timedelta(seconds=diff)))

                if int(specs[St.triples]) > 0:
                    ########################################################################
                    print """\n4. INSERTING THE GENERIC METADATA """
                    ########################################################################
                    metadata = Gn.lens_refine_geo_metadata(specs)
                    Qry.boolean_endpoint_response(metadata)

                    ########################################################################
                    print """\n5. WRITING TO FILE """
                    ########################################################################
                    src = [source[St.graph_name], "", source[St.entity_ns]]
                    trg = [target[St.graph_name], "", target[St.entity_ns]]
                    # linkset_path = "D:\datasets\Linksets\ExactName"
                    linkset_path = DIRECTORY
                    writelinkset(src, trg, specs[St.lens_name], linkset_path,
                                 metadata, check_file=check_file)
                    server_message = "Linksets created as: {}".format(
                        specs[St.lens])
                    message = "The linkset was created as [{}] with {} triples found!".format(
                        specs[St.lens], specs[St.triples])
                    print "\n\t", server_message
                    Urq.register_lens(specs, is_created=True)

                    ls_end_2 = time.time()
                    diff = ls_end_2 - lens_end
                    print ">>> Executed in : {:<14}".format(
                        str(datetime.timedelta(seconds=diff)))
                    print "\t*** JOB DONE! ***"
                    return {
                        St.message: message,
                        St.error_code: 0,
                        St.result: specs[St.lens]
                    }
                else:
                    # No match: 'message' still holds ERROR_CODE_4 here.
                    print "\tThe linkset was not generated as no match could be found"
                    print "\t*** JOB DONE! ***"
                    return {
                        St.message: message,
                        St.error_code: 4,
                        St.result: None
                    }
    except Exception as err:
        # NOTE(review): 'err' is unused; the full traceback is printed instead.
        traceback.print_exc()
        return {St.message: Ec.ERROR_CODE_1, St.error_code: 5, St.result: None}


# print geo_load_query(specs, is_source=True)
# print geo_load_query(specs, is_source=False)
# geo_match_query(specs)
# traceback.print_exception()
# import Alignments.Manage.AdminGraphs as adm
# adm.drop_a_lens("http://risis.eu/lens/refine_union_Grid_20170712_Eter_2014_N291690309", display=True, activated=True)
# refine_lens(specs_example, activated=True, check_file=False)
#
# adm.drop_a_lens("http://risis.eu/lens/refine_union_Orgreg_20170718_Eter_2014_P1061032980", display=True, activated=True)
# refine_lens(specs_example_2, activated=True, check_file=False)
#
# adm.drop_a_lens("http://risis.eu/lens/refine_union_Orgreg_20170718_Grid_20170712_N1966224323", display=True, activated=True)
# refine_lens(specs_example_3, activated=True, check_file=False)
def export_alignment(alignment, limit=5000):
    """Export an alignment's correspondences plus basic metadata for visualisation.

    Queries the alignment's metadata graph to discover its rdf:type, source
    and target datasets (or, for a lens, its list of targets) and mechanism,
    then fetches up to *limit* correspondences (no limit when *limit* is
    falsy). Returns a dict with keys: type, result, message, source, target,
    lens_targets, mechanism.

    NOTE(review): 'triples' is initialised to 0 and never recomputed, so the
    "### TRIPLE COUNT" header and the message always report 0 — compare with
    export_flat_alignment(), which counts. Confirm whether this is intended.
    """
    # COMMENT THE LINKSET IF IT IS EQUAL TO NONE
    # This function returns all the links + some metadata about the alignment.
    # METADATA: source dataset, target dataset and mechanism
    print Ut.headings("EXPORTING THE ALIGNMENT FOR VISUALISATION")

    # Keep the raw value: it is the subject URI used with rdflib below.
    use = alignment
    alignment = str(alignment).strip()
    row_alignment = alignment
    alignment = alignment if Ut.is_nt_format(
        alignment) is True else "<{}>".format(alignment)

    src_dataset = None
    trg_dataset = None
    lens_targets = []
    mec_dataset = None
    rdf_type = None

    # GET THE METADATA OF THE ALIGNMENT: THE QUERY
    meta = """
    PREFIX ll: <{0}>
    CONSTRUCT {{ {1} ?y ?z. ?z ?p ?o . }}
    WHERE
    {{
        {1} ?y ?z .
        #?z ?p ?o .
    }} order by ?y
    """.format(Ns.alivocab, alignment)
    # print meta

    # GET THE METADATA OF THE ALIGNMENT: RUN THE QUERY
    print "GETTING THE METADATA OF THE ALIGNMENT: RUN THE QUERY"
    meta_construct = Qry.endpointconstruct(meta, clean=False)
    # Strip TriG braces so the result parses as plain Turtle, then patch
    # double-angle-bracket artefacts into quoted URIs (property-path fix).
    meta_construct = meta_construct.replace("{", "").replace("}", "")
    meta_construct = meta_construct.replace('<<', '"""<').replace(
        '>>', '>"""').replace('\>', '>')
    print meta_construct

    # LOAD THE METADATA USING RDFLIB
    sg = rdflib.Graph()
    sg.parse(data=meta_construct, format="turtle")

    # EXTRACT FROM THE RESPONSE: THE SOURCE AND TARGET DATASETS AND THE ALIGNMENT
    sbj = rdflib.URIRef(use)
    source = rdflib.URIRef("http://rdfs.org/ns/void#subjectsTarget")
    target = rdflib.URIRef("http://rdfs.org/ns/void#objectsTarget")
    lens_uri_targets = rdflib.URIRef("http://rdfs.org/ns/void#target")
    rdf_uri_type = rdflib.URIRef(
        "http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
    mechanism = rdflib.URIRef(
        "http://risis.eu/alignment/predicate/alignsMechanism")

    # EXTRACT THE ALIGNMENT TYPE (keeps the last value when several exist)
    for item in sg.objects(sbj, rdf_uri_type):
        rdf_type = item
    print "TYPE: ", rdf_type

    if str(rdf_type) == Ns.lens_type:
        # A lens has a flat list of void:target datasets.
        for item in sg.objects(sbj, lens_uri_targets):
            lens_targets += [str(item)]
        print "{} TARGETS in {}".format(len(lens_targets), alignment)
        for trg_item in lens_targets:
            print "\t- {}".format(trg_item)
    else:
        # A plain linkset has one source, one target and one mechanism.
        # EXTRACT THE SOURCE DATASET
        for item in sg.objects(sbj, source):
            src_dataset = item
        # EXTRACT THE TARGET DATASET
        for item in sg.objects(sbj, target):
            trg_dataset = item
        # EXTRACT THE MECHANISM USED FOR THIS ALIGNMENT
        for item in sg.objects(sbj, mechanism):
            mec_dataset = item

    # CONSTRUCT QUERY FOR EXTRACTING HE CORRESPONDENCES
    # A falsy limit comments out the LIMIT clause entirely.
    comment = "" if limit else "#"
    query = """
    PREFIX ll: <{}>
    CONSTRUCT {{ ?x ?y ?z }}
    WHERE
    {{
        GRAPH {}
        {{
            ?x ?y ?z
        }}
    }} order by ?x
    {}LIMIT {}
    """.format(Ns.alivocab, alignment, comment, limit)
    # print query

    # FIRE THE CONSTRUCT FOR CORRESPONDENCES AGAINST THE TRIPLE STORE
    alignment_construct = Qry.endpointconstruct(query, clean=False)

    triples = 0
    links = None

    # RESULTS
    if alignment_construct is not None:
        links = "### TRIPLE COUNT: {}\n### LINKSET: {}\n".format(
            triples, alignment) + alignment_construct
        links = links.replace("{", "").replace("}", "")

    message = "You have just downloaded the graph [{}] which contains [{}] correspondences. ".format(
        row_alignment, triples)

    # result = result
    # print result
    print "Done with graph: {}".format(alignment)

    return {
        "type": rdf_type,
        'result': links,
        'message': message,
        'source': src_dataset,
        "target": trg_dataset,
        "lens_targets": lens_targets,
        'mechanism': mec_dataset
    }
def export_alignment_all(alignment, directory=None, limit=5000):
    """Export an alignment graph with ALL of its metadata to disk and zip it.

    Writes three files into *directory* (created if missing):
      - metadata.ttl    : the generic metadata of the alignment
      - linkset.trig    : up to *limit* correspondences (no limit when falsy)
      - singletons.trig : the singleton metadata of the link predicates
    then zips the directory into the parent folder and returns
    {'result': <zip path>, 'message': <human-readable summary>}.
    """
    print Ut.headings("EXPORTING THE ALIGNMENT WITH ALL METADATA")

    # Normalise to a trailing-separator path and ensure it exists.
    # NOTE(review): os.mkdir fails when the parent directory is missing —
    # presumably callers pass a path only one level deep; confirm.
    directory = os.path.join(directory, "")
    print directory
    if os.path.isdir(os.path.dirname(directory)) is False or os.path.exists(
            directory) is False:
        print "CREATING THE DIRECTORY"
        os.mkdir(os.path.dirname(directory))

    # COMMENT THE LINKSET OIT IF IT IS EQUAL TO NONE
    # This function returns all the links + some metadata about the alignment.
    # METADATA: source dataset, target dataset and mechanism
    use = alignment  # raw value, used as the rdflib subject URI below
    alignment = str(alignment).strip()
    row_alignment = alignment
    alignment = alignment if Ut.is_nt_format(
        alignment) is True else "<{}>".format(alignment)

    # ****************************************************
    # 1. GET THE METADATA OF THE ALIGNMENT: THE QUERY
    # ****************************************************
    meta = """
    PREFIX ll: <{0}>
    CONSTRUCT {{ {1} ?y ?z. ?z ?p ?o . }}
    WHERE
    {{
        {1} ?y ?z .
        OPTIONAL{{ ?z ?p ?o . }}
        OPTIONAL{{ ?O ?Q ?R . }}
    }} order by ?y
    """.format(Ns.alivocab, alignment)
    # print meta

    # GET THE METADATA OF THE ALIGNMENT: RUN THE QUERY
    meta_construct = Qry.endpointconstruct(meta, clean=False)
    # Strip TriG braces so the payload is plain Turtle.
    meta_construct = meta_construct.replace("{", "").replace("}", "")
    with open(os.path.join(directory, "metadata.ttl"), "wb") as metadata:
        metadata.write(meta_construct)
    # print meta_construct

    # ****************************************************
    # 2. GET THE CORRESPONDENCES OF THE LINKSET
    # ****************************************************
    # CONSTRUCT QUERY FOR EXTRACTING HE CORRESPONDENCES
    # A falsy limit comments out the LIMIT clause entirely.
    comment = "" if limit else "#"
    query = """
    PREFIX ll: <{}>
    CONSTRUCT {{ ?x ?y ?z }}
    WHERE
    {{
        GRAPH {}
        {{
            ?x ?y ?z
        }}
    }} order by ?x
    {}LIMIT {}
    """.format(Ns.alivocab, alignment, comment, limit)
    # print query

    # FIRE THE CONSTRUCT FOR CORRESPONDENCES AGAINST THE TRIPLE STORE
    alignment_construct = Qry.endpointconstruct(query, clean=False)
    if alignment_construct:
        # Re-introduce the named-graph wrapper around the construct output.
        alignment_construct = alignment_construct.replace(
            "{", "{}\n{{".format(alignment))
        # print alignment_construct
        with open(os.path.join(directory, "linkset.trig"), "wb") as links:
            links.write(alignment_construct)

    # ****************************************************
    # 3. GET THE METADATA CORRESPONDENCES' PREDICATES
    # ****************************************************
    singleton_graph_uri = Ut.from_alignment2singleton(alignment)
    singleton_query = """
    PREFIX ll: <{0}>
    PREFIX singletons: <{1}>
    CONSTRUCT {{ ?predicate ?x ?y }}
    WHERE
    {{
        {{
            SELECT ?predicate
            {{
                GRAPH {2} {{ ?subject ?predicate ?object }}
            }} order by ?x {3}LIMIT {4}
        }}
        GRAPH {5} {{ ?predicate ?x ?y }}
    }}
    """.format(Ns.alivocab, Ns.singletons, alignment, comment, limit,
               singleton_graph_uri)
    # print singleton_query

    # FIRE THE CONSTRUCT FOR SINGLETON AGAINST THE TRIPLE STORE
    singleton_construct = Qry.endpointconstruct(singleton_query, clean=False)
    if singleton_construct:
        singleton_construct = singleton_construct.replace(
            "{", "{}\n{{".format(singleton_graph_uri))
        # print singleton_construct
        with open(os.path.join(directory, "singletons.trig"), "wb") as singletons:
            singletons.write(singleton_construct)

    # LOAD THE METADATA USING RDFLIB
    # THIS IS A QUICK FIX FOR THE PROPERTY PATH ISSUE
    meta_construct = meta_construct.replace('<<', '"""<').replace(
        '>>', '>"""').replace('\>', '>')
    sg = rdflib.Graph()
    sg.parse(data=meta_construct, format="turtle")

    # EXTRACT FROM THE RESPONSE: THE SOURCE AND TARGET DATASETS AND THE ALIGNMENT
    sbj = rdflib.URIRef(use)
    triples_uri = rdflib.URIRef("http://rdfs.org/ns/void#triples")

    # EXTRACT THE ALIGNMENT TYPE (keeps the last void:triples value found)
    triples = ""
    for item in sg.objects(sbj, triples_uri):
        triples = item
    print "TRIPLES: ", triples

    # NOTE(review): 'links' below is built but never used after assignment.
    if alignment_construct is not None:
        links = "### TRIPLE COUNT: {}\n### LINKSET: {}\n".format(
            triples, alignment) + alignment_construct
        links = links.replace("{", "").replace("}", "")

    message = "You have just downloaded the graph [{}] which contains [{}] correspondences. ".format(
        row_alignment, triples)

    host = Svr.settings[St.stardog_host_name]
    # NOTE(review): 'endpoint' is computed but never used in this function.
    endpoint = b"http://{}/annex/{}/sparql/query?".format(
        host, Svr.settings[St.database])

    local_name = Ut.get_uri_local_name_plus(alignment)
    file_at_parent_directory = os.path.join(
        os.path.abspath(os.path.join(directory, os.pardir)),
        "{}.zip".format(local_name))
    zipped_file = Ut.zip_folder(directory,
                                output_file_path=file_at_parent_directory)
    print "\t>>> THE ZIPPED FILE IS LOCATED AT:\n\t\t- {}".format(zipped_file)

    # result = result
    # print result
    print "Done with graph: {}".format(alignment)

    # return {'result': {
    #     "generic_metadata": meta_construct,
    #     'specific_metadata': singleton_construct,
    #     'data': alignment_construct}, 'message': message}
    return {'result': zipped_file, 'message': message}
# print lock_file except Exception as err: print traceback.print_exc() if lock_file is not None: # SETTING THE PORT port = int(os.environ['LL_PORT'] ) if 'LL_PORT' in os.environ else Svr.settings[St.ll_port] # DO THIS ONLY IF THE RERVER IS READY, MEANING AFTER THE SECOND FLASK LOAD if "WERKZEUG_RUN_MAIN" in os.environ: # ******************************************************************************** print Ut.headings("RUNNING THE LENTICULAR LENS SERVER...") # ******************************************************************************** try: response = requests.get("http://{}".format( Svr.settings[St.stardog_host_name])) # response = Qry.endpoint("SELECT * {?sub ?obj ?pred} LIMIT 1") except Exception as err: response = str(err) # print response if str(response).__contains__("401"): print "THE STARDOG SERVER IS ON AND REQUIRES PASSWORD." elif len(lock_file) > 0 and (