def process(self, bib_parse_dict):
    """Apply every registered BibTeX quirk transform to the parsed entries.

    Each BIBTEX_QUIRKS_KEY plugin class is instantiated once and offered
    every entry in *bib_parse_dict*; entries the quirk reports applicable
    are transformed in place.
    """
    for quirk_cls in lookup_plugin_by_type(BIBTEX_QUIRKS_KEY):
        quirk = quirk_cls()
        for entry_id in bib_parse_dict:
            entry = bib_parse_dict[entry_id]
            if quirk.isApplicable(entry):
                quirk.transform(entry)
def process(self, id, rdf_dict): try: if ns.bibo["doi"] in rdf_dict[id]: url = "http://dx.doi.org/%s" % rdf_dict[id][ns.bibo["doi"]][0] self.logger.info("Grabbing data from: %s" % url) graph = create_graph() graph.parse(url) self.logger.info("Retrieved data from: %s" % url, extra= { notify_user: True, }) plugins = lookup_plugin_by_type(GREF_DOI_RDF_PROCESSOR) for updater in plugins: try: updater().process(id, url, rdf_dict, graph) except Exception, e: self.logger.exception(e.message) except Exception, e: self.logger.exception(e.message)
def post_to_bib_conversion_processing(self, id, bib_dict, rdf_dict):
    """Run every post-conversion processor plugin over one converted entry.

    The plugin class list is looked up lazily on first call and cached on
    the instance; each processor is constructed with the current model.
    """
    if self.post_to_bib_conversion_processors is None:
        self.post_to_bib_conversion_processors = lookup_plugin_by_type(
            self.post_to_bib_conversion_plugin_key)
    for processor_cls in self.post_to_bib_conversion_processors:
        processor = processor_cls(model=self.model)
        processor.process(id, bib_dict, rdf_dict, self.bib_type)
def pre_process(self, bib_dict):
    """Run every pre-process plugin over the first element of *bib_dict*.

    Plugins are looked up fresh on each call and constructed with the
    current model.
    """
    for plugin_cls in lookup_plugin_by_type(self.pre_process_plugin_key):
        # NOTE(review): only bib_dict[0] is handed to the plugins --
        # presumably the parsed-entry mapping; confirm against callers.
        plugin_cls(model=self.model).process(bib_dict[0])
def process(self, id, bib_dict):
    """Hand the entry *id* to every registered GREF RDF processor plugin."""
    for processor_cls in lookup_plugin_by_type(GREF_RDF_PROCESSOR):
        processor_cls(model=self.model).process(id, bib_dict)
def create_filer_map(model):
    """Return a dict mapping file extension -> filer instance.

    Every BIB_FILER_KEY plugin class is instantiated with *model*; when two
    filers claim the same extension, the later one in plugin order wins
    (same as the original loop semantics).
    """
    # Direct dict construction replaces the original map()-for-side-effects
    # idiom, which would silently do nothing under Python 3 (lazy map).
    filers = [cls(model) for cls in lookup_plugin_by_type(BIB_FILER_KEY)]
    return dict((f.getExtension(), f) for f in filers)