def generate_map(storage_path, storage_name, output_path):

    # Read patent data from storage
    result = p2n.storage.read(storage_path, storage_name)
    if not result:
        logger.error('Could not read storage "{}"'.format(storage_name))
        sys.exit(1)

    # Some "country" entries are single-element lists; unwrap them to plain values
    for bre in result['brevets']:
        if isinstance(bre['country'], list) and len(bre['country']) == 1:
            # Well, taking the first one, this is an approximation
            bre['country'] = bre['country'][0]

    # Status message
    logger.info("Mapping {count} patents. Excepting EP and WO.".format(count=len(result['brevets'])))

    # Compute map data
    mapdata = p2n.formatter.maps.d3plus_data_brevets(result['brevets'], 'country')

    # Render map
    jsonfile = '{storage_name}CountryMap.json'.format(**locals())
    htmlfile = '{storage_name}Carto.html'.format(**locals())
    with open(os.path.join(output_path, jsonfile), "w") as mapdatafile:
        json.dump(mapdata, mapdatafile)

    RenderTemplate(
        "ModeleCarto.html",
        os.path.join(output_path, htmlfile),
        request=result["requete"],
        jsonFile=jsonfile,
    )
def generate_map(storage_path, storage_name, output_path):

    # Read patent data from storage
    result = p2n.storage.read(storage_path, storage_name)
    if not result:
        logger.error('Could not read "{storage_name}" from storage path "{storage_path}".'.format(**locals()))
        sys.exit(1)

    # Status message
    logger.info("Mapping {count} patents. Excepting EP and WO.".format(count=len(result['brevets'])))

    # Compute map data
    mapdata_all = {
        'Applicant-Country': p2n.formatter.maps.d3plus_data_brevets(result['brevets'], 'Applicant-Country'),
        'Inventor-Country': p2n.formatter.maps.d3plus_data_brevets(result['brevets'], 'Inventor-Country'),
    }

    # Create output path
    if not os.path.isdir(output_path):
        os.makedirs(output_path)

    # Render maps
    for field, mapdata in mapdata_all.items():
        field_name = field.split('-')[0]
        jsonfile = '{storage_name}Map{field_name}.json'.format(**locals())
        htmlfile = '{storage_name}Carto{field_name}.html'.format(**locals())
        with open(os.path.join(output_path, jsonfile), "w") as mapdatafile:
            json.dump(mapdata, mapdatafile)
        RenderTemplate(
            "ModeleCartoDeposant.html",
            os.path.join(output_path, htmlfile),
            field=field_name,
            request=result["requete"],
            jsonFile=jsonfile,
        )
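# A minimal usage sketch for the function above. The argument values below are
# hypothetical examples, not taken from the project configuration; in this
# codebase they would normally come from the LoadConfig() object
# (ResultBiblioPath, ndf, ResultPath).
if __name__ == '__main__':
    generate_map(
        storage_path='DATA/PatentBiblios',   # assumed storage directory
        storage_name='Lentille',             # assumed collection / DataDirectory name
        output_path='RESULT/maps',           # assumed output directory for the JSON + HTML maps
    )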
    # tempo2[ket] = brev[ket]

Exclude = []
print("entering formatting html process")
dicoRes = dict()
dicoRes['data'] = LstExp
contenu = json.dumps(dicoRes, indent=3)  # ensure_ascii=True,
compt = 0
Dones = []
Double = dict()  # dictionary to manage multiple bib entries (same authors and date)

with open(ResultPathContent + '//' + ndf + '.json', 'w') as resFic:
    resFic.write(contenu)

RenderTemplate(
    "ModeleFamille.html",
    ResultPathContent + '//' + ndf + '.html',
    fichier=ndf + '.json',
    fichierPivot=ndf + 'Pivot.html',
    requete=requete.replace('"', ''),
)

with open("searchScript.js", 'r') as Source:
    js = Source.read()
js = js.replace('***fichierJson***', ndf + '.json')
js = js.replace('{ "data": "application-ref"},', '')
with open(ResultPathContent + '//' + 'searchScript.js', 'w') as resFic:
    resFic.write(js)

# os.system('start firefox -url ' + URLs.replace('//', '/'))
    fictemp.write(lig + '\n')
fictemp.close()
fic.close()

try:
    # os.remove(ResultGephiPath + '\\' + ndf + '.gexf')
    os.remove(ResultGephiPath + '/' + outputFile)
except OSError:
    pass
os.rename(ResultGephiPath + '/' + "Good" + outputFile, ResultGephiPath + '/' + outputFile)

print("Network file written in", ResultGephiPath + ' directory.\n See file: ' + outputFile)
print()
print()

# Making the html from model
RenderTemplate(
    "Graphe.html",
    ResultGephiPath + '/' + outputFile.replace('.gexf', '.html'),
    TitleNet=network[1:] + ' Network for ' + requete,
    fichierConfigJS=outputFile.replace('.gexf', '') + 'Conf.js',
    mediaStyle='../../../Patent2Net/media/styles',
    mediaJs='../../../Patent2Net/media/js',
)

# Making the js from model
# Maybe we could adjust node size and other parameters here
RenderTemplate(
    "gephiConfig.js",
    ResultGephiPath + '/' + outputFile.replace('.gexf', '') + 'Conf.js',
    FicRezo=outputFile,
)
try:
    # os.remove(ResultGephiPath + '\\' + ndf + '.gexf')
    os.remove(ResultGephiPath + '/' + outputFile)
except OSError:
    pass
os.rename(ResultGephiPath + '/' + "Good" + outputFile, ResultGephiPath + '/' + outputFile)

AnnonceLog(Appli='p2n_network',
           texte="Network file written in " + ResultGephiPath + ' directory.\n See file: ' + outputFile)

# Making the html from model
RenderTemplate(
    "Graphe.html",
    ResultGephiPath + '/' + outputFile.replace('.gexf', '.html'),
    TitleNet=network[1:] + ' Network for ' + requete,
    fichierConfigJS=outputFile.replace('.gexf', '') + 'Conf.js',
    mediaStyle='../../../Patent2Net/media/styles',
    mediaJs='../../../Patent2Net/media/js',
)

# Making the js from model
# Maybe we could adjust node size and other parameters here
RenderTemplate(
    "gephiConfig.js",
    ResultGephiPath + '/' + outputFile.replace('.gexf', '') + 'Conf.js',
    FicRezo=outputFile,
)

RenderTemplate("GraphIndex.html", configFile.ResultPath + "/GraphIndex" + projectName + ".html")
for k in cptPay.keys():
    tempo = dict()
    tempo["value"] = cptPay[k]
    tempo["name"] = k
    tempo["country"] = NomTopoJSON[k]
    if "data" in dico.keys():
        dico["data"].append(tempo)
    else:
        dico["data"] = [tempo]

with open(ResultPathContent + '//' + ndf + "CountryMap.json", "w") as fic:
    json.dump(dico, fic)
resJsonName = ndf + "CountryMap.json"

RenderTemplate(
    "ModeleCarto.html",
    ResultPathContent + '//' + ndf + 'Carto.html',
    jsonFile=resJsonName,
    request=requete.replace('"', ''),
)

# Due to a limitation of D3, the countries resources must be placed in the same
# working directory. The alternative is to start an HTTP server:
# http://stackoverflow.com/questions/17077931/d3-samples-in-a-microsoft-stack
with open(ResultPathContent + '//' + prefix + 'Countries.json', "w") as fic:
    with open('countries.json', 'r') as fic2:
        tempo = fic2.read()
    fic.write(tempo)
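# For reference, the dictionary serialised above follows the d3plus-style shape
# sketched below; the values are placeholders for illustration only, derived from
# the loop above rather than from any real collection:
#
# {
#    "data": [
#       {"value": <patent count>, "name": <key of cptPay>, "country": <NomTopoJSON[key]>},
#       ...
#    ]
# }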
RenderTemplate(
    "ModeleContenuIndex.html",
    outfile,
    GlobalPath=GlobalPath,
    CollectName=ndf,
    Request=requete,
    TotalPatents=totalPatents,
    TotalFamily=nbFam,
    HasFamily=GatherFamilly,
    Date=date,
    TotalsPerType=totalsPerType,
    TotalsPerFamilyType=totalsPerFamilyType,
    InventorNetwork=configFile.InventorNetwork,
    ApplicantNetwork=configFile.ApplicantNetwork,
    ApplicantInventorNetwork=configFile.ApplicantInventorNetwork,
    InventorCrossTechNetwork=configFile.InventorCrossTechNetwork,
    ApplicantCrossTechNetwork=configFile.ApplicantCrossTechNetwork,
    CountryCrossTechNetwork=configFile.CountryCrossTechNetwork,
    CrossTechNetwork=configFile.CrossTechNetwork,
    CompleteNetwork=configFile.CompleteNetwork,
    References=configFile.References,
    Citations=configFile.Citations,
    Equivalents=configFile.Equivalents,
    FormateExportCountryCartography=configFile.FormateExportCountryCartography,
    FormateExportBiblio=configFile.FormateExportBiblio,
    FormateExportDataTable=configFile.FormateExportDataTable,
    FormateExportPivotTable=configFile.FormateExportPivotTable,
    FreePlane=configFile.FreePlane,
    FusionCarrot2=configFile.FusionCarrot2,
    Images=configFile.GatherImages,
    Cluster=configFile.Cluster,
)
def run():

    # Bootstrap logging
    boot_logging()

    # Load configuration
    config = LoadConfig()

    # Run this only if enabled
    if not config.GatherImages:
        return

    # Get some information from configuration
    expression = config.requete
    storage_basedir = config.ResultBiblioPath
    storage_dirname = config.ndf
    output_path = config.ResultPathImages

    # Compute prefixes
    prefixes = [""]
    if config.GatherFamilly:
        prefixes.append("Families")

    # Build galleries for all prefixes
    for prefix in prefixes:

        # Status message
        label = label_from_prefix(prefix)
        logger.info("Generating gallery of drawings for {}.".format(label))

        # Compute storage slot using prefix and DataDirectory
        # e.g. "Lentille" vs. "FamiliesLentille"
        storage_name = prefix + storage_dirname

        # Load bibliographic data
        biblio_file = LoadBiblioFile(storage_basedir, storage_name)

        # Generate thumbnails
        gallery = []
        patents = biblio_file['brevets']
        for patent in patents:
            patent_label = get_patent_label(patent)
            i = 1
            logger.info('Processing patent {}'.format(patent_label))
            path_img_base = '{}//{}-{}.tiff'.format(output_path, patent_label, '{}')
            path = path_img_base.format(i)
            while os.path.exists(path):
                thumb, orig, tiff = generate_thumbnails(path)
                gallery.append({
                    "_id": '{}-{}'.format(patent_label, i),
                    'thumb': thumb,
                    'orig': orig,
                    'label': patent['title'],
                    'ipcr7': patent['IPCR7'],
                    'code': patent_label,
                    'tiff': tiff,
                })
                i += 1
                path = path_img_base.format(i)

        # Render gallery
        RenderTemplate(
            'ModeleImages.html',
            output_path + '/index' + prefix + '.html',
            request=expression.replace('"', ''),
            gallery=gallery,
            json=json.dumps(gallery),
        )
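# If this module is meant to be executed directly, an entry-point guard like the
# following would be the usual idiom. This guard is an assumption for illustration,
# not part of the source shown above; the function is otherwise driven by the
# Patent2Net pipeline.
if __name__ == '__main__':
    run()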
"utf8") as resFic: # encode and append line by line to avoid memory error try: resFic.write(contenu2) for contenu2 in json.JSONEncoder(indent=3).iterencode(LstExp2): #with codecs.open(ResultPathContent + '//' + ndf+'Pivot.json', 'a', "utf8") as resFic: resFic.write(contenu2) except: resFic.write(contenu2) contenu2 = json.dumps(LstExp2, indent=3, ensure_ascii=False, encoding='utf8') #, #with codecs.open(ResultPathContent + '//' + ndf+'Pivot.json', 'w', "utf8") as resFic: resFic.write(contenu2) FichierHtml = ndf + '.html' if ndf.startswith('Families'): ModelePivot = "ModeleFamillePivot.html" else: ModelePivot = "Pivot.html" RenderTemplate(ModelePivot, ResultPathContent + '//' + ndf + 'Pivot.html', fichier=ndf + 'Pivot.json', requete=DataBrevet['requete'].replace('"', ''), FichierHtml=FichierHtml, FichierHtmlFamille='Families' + FichierHtml) #os.system('start firefox -url '+ URLs.replace('//','/') )
for k in cptPay.keys():
    tempo = dict()
    tempo["value"] = cptPay[k]
    tempo["name"] = k
    tempo["country"] = NomTopoJSON[k]
    if "data" in dico.keys():
        dico["data"].append(tempo)
    else:
        dico["data"] = [tempo]

nameFic = field.split('-')[0]
with open(ResultPathContent + '//' + ndf + "Map" + nameFic + ".json", "w") as fic:
    json.dump(dico, fic)
resJsonName = ndf + "Map" + nameFic + ".json"

RenderTemplate("ModeleCartoDeposant.html",
               ResultPathContent + '//' + ndf + "Carto" + nameFic + ".html",
               field=nameFic,
               request=DataBrevet["requete"],
               jsonFile=resJsonName)

# Due to a limitation of D3, the countries resources must be placed in the same
# working directory. The alternative is to start an HTTP server:
# http://stackoverflow.com/questions/17077931/d3-samples-in-a-microsoft-stack
# with open(ResultPathContent + '//' + "countries.json", "w") as fic:
#     with open('countries.json', 'r') as fic2:
#         tempo = fic2.read()
#         fic.write(tempo)