def submit_contribution(self, mpfile, contributor_email, project=None):
    """submit a single contribution to `mpcontribs.contributions` collection

    Args:
        mpfile: single-section MPFile holding the contribution data.
        contributor_email: email address of the submitting contributor.
        project: optional project name stored with the contribution.

    Returns:
        the prepared document when no DB is attached (``self.db is None``),
        otherwise the contribution ObjectId.

    Raises:
        ValueError: for multi-section MPFiles, or when the contributor is
            not a collaborator on the contribution being updated.
    """
    if len(mpfile.document) > 1:
        raise ValueError('submission only possible for single section MPFiles')
    # next(iter(...)) is py2/py3-safe; dict.keys()[0] fails on py3 key views
    mp_cat_id = next(iter(mpfile.document.keys()))
    data = mpfile.document[mp_cat_id]
    update = ('cid' in data)  # new vs update
    cid = bson.ObjectId(data['cid']) if update else bson.ObjectId()
    cid_short = get_short_object_id(cid)
    collaborators = [contributor_email]
    if update and self.db is not None:
        # check contributor permissions if update mode
        data.pop('cid')
        collaborators = self.contributions.find_one(
            {'_id': cid}, {'collaborators': 1})['collaborators']
        if contributor_email not in collaborators:
            raise ValueError(
                "Submission stopped: update of contribution #{} not "
                "allowed due to insufficient permissions of {}! Ask "
                "someone of {} to make you a collaborator on #{}.".format(
                    cid_short, contributor_email, collaborators, cid_short))
    # prepare document
    doc = {
        'collaborators': collaborators,
        'mp_cat_id': mp_cat_id,
        'content': data
    }
    if project is not None:
        doc['project'] = project
    if self.db is None:
        doc['_id'] = cid
        return doc
    self.contributions.find_and_modify({'_id': cid}, doc, upsert=True)
    return cid
def get_contributions(self, phase=None):
    """Return a DataFrame of LBNL/MIT contributions, optionally for one phase.

    Args:
        phase: phase name to filter on; when None, all phases are returned
            and a 'phase' column is added.

    Returns:
        pandas.DataFrame indexed by mp-id with one row per contribution.
    """
    data = []
    phase_query_key = {'$exists': 1} if phase is None else phase
    columns = ['mp-id', 'contribution', 'formula']
    if phase is None:
        columns.append('phase')
    columns += ['dH (formation)', 'dH (hydration)', 'GS?', 'CIF']
    docs = self.query_contributions(
        criteria={
            'project': {'$in': ['LBNL', 'MIT']},
            'content.info.Phase': phase_query_key
        },
        projection={'_id': 1, 'mp_cat_id': 1, 'content': 1})
    for doc in docs:
        mpfile = MPFile.from_contribution(doc)
        mp_id = mpfile.ids[0]
        info = mpfile.hdata[mp_id]['info']
        row = [mp_id, get_short_object_id(doc['_id']), info['Formula']]
        if phase is None:
            row.append(info['Phase'])
        row += [info['dHf'], info['dHh'], info['GS'], 'TODO']
        # TODO URLs for mp_id and cid
        data.append((mp_id, row))
    # DataFrame.from_items was deprecated and removed in pandas 1.0;
    # building explicitly preserves row order on every pandas version
    return DataFrame(
        [row for _, row in data],
        index=[mp_id for mp_id, _ in data],
        columns=columns)
def get_uwsi2_contributions(self):
    """Collect UW/SI2 solute-diffusion contributions, one dict per host."""
    criteria = {
        'project': {'$in': ['LBNL', 'UW-Madison']},
        'content.figshare_id': {'$exists': 1},
    }
    projection = {'_id': 1, 'mp_cat_id': 1, 'content': 1}
    results = []
    for doc in self.query_contributions(criteria=criteria, projection=projection):
        mpfile = MPFile.from_contribution(doc)
        mp_id = mpfile.ids[0]
        tbl = mpfile.tdata[mp_id]['data_D0_Q']
        # prepend the solute's atomic number and order rows by it
        atomic_numbers = Series(
            [self.z[el] for el in tbl['element']], index=tbl.index)
        tbl.insert(0, 'Z', atomic_numbers)
        tbl.sort_values('Z', inplace=True)
        tbl.reset_index(drop=True, inplace=True)
        results.append({
            'mp_id': mp_id,
            'cid': doc['_id'],
            'short_cid': get_short_object_id(doc['_id']),
            'formula': mpfile.hdata[mp_id]['formula'],
            'table': tbl,
        })
    return results
def get_uwsi2_contributions(self):
    """Return UW/SI2 display data, one dict per LBNL contribution:

    - [<host(pretty-formula)>] <mp_cat_id-linked-to-materials-details-page>
          <cid-linked-to-contribution-details-page>
      |- <solute> <D0-value> <Q-value> <toggle-in-graph>
      |- ...
    - ...
    """
    labels = ["El.", "Z", "D0", "Q"]
    contribs = list(self.query_contributions(
        criteria={'project': 'LBNL'}, projection={'mp_cat_id': 1}
    ))
    mp_ids, projection, data = set(), {}, []
    for c in contribs:
        mp_ids.add(c['mp_cat_id'])
        projection['.'.join(['LBNL', c['_id'], 'tables'])] = 1
        projection['.'.join(['LBNL', c['_id'], 'tree_data', 'formula'])] = 1
    # BUG FIX: BSON cannot encode a python `set`; `$in` requires a list
    for doc in self.query_contributions(
            criteria={'_id': {'$in': list(mp_ids)}},
            projection=projection, collection='materials'):
        for cid in doc['LBNL']:
            d = {
                'mp_id': doc['_id'],
                'cid': cid,
                'short_cid': get_short_object_id(cid),
                'formula': doc['LBNL'][cid]['tree_data']['formula']
            }
            d['tables'] = doc['LBNL'][cid]['tables']
            cols = d['tables']['data_supporting']['columns']
            # normalize column labels and make cells read-only for display
            for idx, col in enumerate(cols):
                col['label'] = labels[idx]
                col['editable'] = 'false'
            data.append(d)
    return data
def submit_contribution(self, mpfile, contributor_email, project=None):
    """submit a single contribution to `mpcontribs.contributions` collection

    Args:
        mpfile: single-section MPFile holding the contribution data.
        contributor_email: email address of the submitting contributor.
        project: optional project name stored with the contribution.

    Returns:
        the prepared document when no DB is attached (``self.db is None``),
        otherwise the contribution ObjectId.

    Raises:
        ValueError: for multi-section MPFiles, or when the contributor is
            not a collaborator on the contribution being updated.
    """
    if len(mpfile.document) > 1:
        raise ValueError('submission only possible for single section MPFiles')
    # next(iter(...)) is py2/py3-safe; dict.keys()[0] fails on py3 key views
    mp_cat_id = next(iter(mpfile.document.keys()))
    data = mpfile.document[mp_cat_id]
    update = ('cid' in data)  # new vs update
    cid = bson.ObjectId(data['cid']) if update else bson.ObjectId()
    cid_short = get_short_object_id(cid)
    collaborators = [contributor_email]
    # guard on self.db for consistency with the DB-less return path below:
    # without a DB there is no collaborators list to check against
    if update and self.db is not None:
        # check contributor permissions if update mode
        data.pop('cid')
        collaborators = self.contributions.find_one(
            {'_id': cid}, {'collaborators': 1}
        )['collaborators']
        if contributor_email not in collaborators:
            raise ValueError(
                "Submission stopped: update of contribution #{} not "
                "allowed due to insufficient permissions of {}! Ask "
                "someone of {} to make you a collaborator on #{}.".format(
                    cid_short, contributor_email, collaborators, cid_short))
    # prepare document
    doc = {
        'collaborators': collaborators,
        'mp_cat_id': mp_cat_id,
        'content': data
    }
    if project is not None:
        doc['project'] = project
    if self.db is None:
        doc['_id'] = cid
        return doc
    self.contributions.find_and_modify({'_id': cid}, doc, upsert=True)
    return cid
def index(request):
    """Render the dilute-solute-diffusion explorer page (login required)."""
    ctx = RequestContext(request)
    if request.user.is_authenticated():
        from ..rest.rester import DiluteSoluteDiffusionRester
        # BUG FIX: `user` was an undefined name (NameError at runtime);
        # the API key lives on request.user
        with DiluteSoluteDiffusionRester(
                request.user.api_key, endpoint=get_endpoint(request)) as mpr:
            try:
                prov = mpr.get_provenance()
                ctx['title'] = prov.pop('title')
                ctx['provenance'] = render_dict(prov, webapp=True)
                ranges, contribs = {}, []
                for host in mpr.get_hosts():
                    contrib = {}
                    df = mpr.get_contributions(host)
                    contrib['table'] = render_dataframe(df, webapp=True, paginate=False)
                    contrib['formula'] = host
                    contrib.update(mpr.get_table_info(host))
                    contrib['short_cid'] = get_short_object_id(contrib['cid'])
                    contribs.append(contrib)
                    # track global [min, max] per column for plot axis limits
                    for col in df.columns:
                        if col == 'El.':
                            continue
                        low, upp = min(df[col]), max(df[col])
                        if col == 'Z':
                            # pad atomic-number axis by one on each side
                            low -= 1
                            upp += 1
                        if col not in ranges:
                            ranges[col] = [low, upp]
                        else:
                            if low < ranges[col][0]:
                                ranges[col][0] = low
                            if upp > ranges[col][1]:
                                ranges[col][1] = upp
                ctx['ranges'] = dumps(ranges)
                ctx['contribs'] = contribs
            except Exception as ex:
                ctx['alert'] = str(ex)
    else:
        return redirect('{}?next={}'.format(reverse('cas_ng_login'), request.path))
    return render_to_response("dilute_solute_diffusion_explorer_index.html", ctx)
def index(request):
    """Render the dilute-solute-diffusion explorer page for logged-in users."""
    ctx = RequestContext(request)
    if not request.user.is_authenticated():
        # anonymous visitors just get the login prompt
        ctx['alert'] = 'Please log in!'
        return render_to_response("dilute_solute_diffusion_explorer_index.html", ctx)
    API_KEY = request.user.api_key
    ENDPOINT = request.build_absolute_uri(get_endpoint())
    from ..rest.rester import DiluteSoluteDiffusionRester
    with DiluteSoluteDiffusionRester(API_KEY, endpoint=ENDPOINT) as mpr:
        try:
            prov = mpr.get_provenance()
            ctx['title'] = prov.pop('title')
            ctx['provenance'] = render_dict(prov, webapp=True)
            ranges, contribs = {}, []
            for host in mpr.get_hosts():
                df = mpr.get_contributions(host)
                entry = {
                    'table': render_dataframe(df, webapp=True, paginate=False),
                    'formula': host,
                }
                entry.update(mpr.get_table_info(host))
                entry['short_cid'] = get_short_object_id(entry['cid'])
                contribs.append(entry)
                # widen the per-column [min, max] plot ranges with this host
                for col in df.columns:
                    if col == 'El.':
                        continue
                    low, upp = min(df[col]), max(df[col])
                    if col == 'Z':
                        # pad atomic-number axis by one on each side
                        low, upp = low - 1, upp + 1
                    if col in ranges:
                        ranges[col][0] = min(ranges[col][0], low)
                        ranges[col][1] = max(ranges[col][1], upp)
                    else:
                        ranges[col] = [low, upp]
            ctx['ranges'] = dumps(ranges)
            ctx['contribs'] = contribs
        except Exception as ex:
            ctx['alert'] = str(ex)
    return render_to_response("dilute_solute_diffusion_explorer_index.html", ctx)
def get_contributions(self):
    """Collect all contributions with their solute-diffusion data tables."""
    entries = []
    docs = self.query_contributions(
        projection={'_id': 1, 'mp_cat_id': 1, 'content': 1})
    for doc in docs:
        mpfile = MPFile.from_contribution(doc)
        mp_id = mpfile.ids[0]
        tbl = mpfile.tdata[mp_id][mp_level01_titles[1]+'_D0_Q']
        # prepend the solute's atomic number and order rows by it
        atomic_numbers = Series(
            [self.z[el] for el in tbl['element']], index=tbl.index)
        tbl.insert(0, 'Z', atomic_numbers)
        tbl.sort_values('Z', inplace=True)
        tbl.reset_index(drop=True, inplace=True)
        entries.append({
            'mp_id': mp_id,
            'cid': doc['_id'],
            'short_cid': get_short_object_id(doc['_id']),
            'formula': mpfile.hdata[mp_id]['formula'],
            'table': tbl,
        })
    return entries
def process_mpfile(path_or_mpfile, target=None, fmt='archieml'):
    """Split an MPFile into per-material contributions and process each.

    Generator yielding progress strings (HTML fragments) and, in local mode
    (``target is None``), per-contribution build documents plus a final
    overview-data dict.

    Args:
        path_or_mpfile: path to an MPFile (or an object MPFile.from_file accepts).
        target: optional MP rester used to submit/build contributions remotely.
        fmt: IO format module name under ``mpcontribs.io``.
    """
    try:
        if isinstance(path_or_mpfile, six.string_types) and \
                not os.path.isfile(path_or_mpfile):
            raise Exception('{} not found'.format(path_or_mpfile))
        mod = import_module('mpcontribs.io.{}.mpfile'.format(fmt))
        MPFile = getattr(mod, 'MPFile')
        full_name = pwd.getpwuid(os.getuid())[4]
        contributor = '{} <*****@*****.**>'.format(full_name)  # fake
        cma = ContributionMongoAdapter()
        axes, ov_data = set(), dict()
        # split input MPFile into contributions: treat every mp_cat_id as separate DB insert
        mpfile, cid_shorts = MPFile.from_dict(), []  # output
        for idx, mpfile_single in enumerate(
                MPFile.from_file(path_or_mpfile).split()):
            # py2/3-safe first key (dict.keys()[0] breaks on py3 views)
            mp_cat_id = next(iter(mpfile_single.document.keys()))
            # TODO test update mode
            cid = mpfile_single.document[mp_cat_id].get('cid', None)
            update = bool(cid is not None)
            if update:
                cid_short = get_short_object_id(cid)
                yield 'use contribution #{} to update ID #{} ... '.format(
                    idx, cid_short)
            # always run local "submission" to catch failure before interacting with DB
            yield 'locally process contribution #{} ... '.format(idx)
            doc = cma.submit_contribution(
                mpfile_single, contributor)  # does not use get_string
            cid = doc['_id']
            yield 'check consistency ... '
            mpfile_single_cmp = MPFile.from_string(mpfile_single.get_string())
            if mpfile_single.document != mpfile_single_cmp.document:
                # render floats compactly so diffs are not pure formatting noise
                json.encoder.FLOAT_REPR = lambda o: format(o, 'g')
                # compare json strings to find first inconsistency
                for a, b in zip(
                        json.dumps(mpfile_single.document, indent=4).split('\n'),
                        json.dumps(mpfile_single_cmp.document, indent=4).split('\n')):
                    if a != b:
                        raise Exception('{} <====> {}'.format(a.strip(), b.strip()))
            if target is not None:
                yield 'submit to MP ... '
                cid = target.submit_contribution(mpfile_single, fmt)  # uses get_string
            cid_short = get_short_object_id(cid)
            mpfile_single.insert_id(mp_cat_id, cid)
            cid_shorts.append(cid_short)
            yield 'build notebook ... '
            if target is not None:
                url = target.build_contribution(cid)
                url = '/'.join(
                    [target.preamble.rsplit('/', 1)[0], 'explorer', url])
                yield (
                    "OK. <a href='{}' class='btn btn-default btn-xs' " +
                    "role='button' target='_blank'>View</a></br>").format(url)
            else:
                mcb = MPContributionsBuilder(doc)
                build_doc = mcb.build(contributor, cid)
                yield build_doc
                yield 'determine overview axes ... '
                scope, local_axes = [], set()
                mpfile_for_axes = MPFile.from_contribution(doc)
                for k, v in mpfile_for_axes.hdata[mp_cat_id].iterate():
                    if v is None:
                        # key-only node: rewind scope to this depth
                        scope = scope[:k[0]]
                        scope.append(k[1])
                    else:
                        try:
                            if k[0] == len(scope):
                                scope.append(k[1])
                            else:
                                scope[-1] = k[1]
                            vf = float(v)  # trigger exception
                            scope_str = '.'.join(scope)
                            if idx == 0:
                                axes.add(scope_str)
                                ov_data[scope_str] = {
                                    cid_short: (vf, mp_cat_id)
                                }
                            else:
                                local_axes.add(scope_str)
                                ov_data[scope_str][cid_short] = (vf, mp_cat_id)
                        except:
                            # non-numeric leaves are not plottable axes
                            pass
                if idx > 0:
                    # keep only axes common to all contributions
                    axes.intersection_update(local_axes)
                yield 'OK.</br>'.format(idx, cid_short)
            mpfile.concat(mpfile_single)
            time.sleep(.01)
        ncontribs = len(cid_shorts)
        if target is not None:
            yield '<strong>{} contributions successfully submitted.</strong>'.format(
                ncontribs)
        else:
            # BUG FIX: popping from ov_data while iterating it raises
            # RuntimeError; iterate over a snapshot of the keys instead
            for k in list(ov_data):
                if k not in axes:
                    ov_data.pop(k)
            yield ov_data
            yield '<strong>{} contributions successfully processed.</strong>'.format(
                ncontribs)
    except:
        ex = sys.exc_info()[1]
        yield 'FAILED.</br>'
        yield str(ex).replace('"', "'")
        return
def process_mpfile(path_or_mpfile, target=None, fmt='archieml', ids=None):
    """Split an MPFile into per-material contributions and process each.

    Generator yielding progress strings (HTML fragments) and, in local mode
    (``target is None``), per-contribution build documents plus a final
    overview-data dict.

    Args:
        path_or_mpfile: file path, StringIO, or MPFile object.
        target: optional MP rester; when given, contributions are submitted
            and built remotely instead of locally.
        fmt: IO format module name under ``mpcontribs.io``.
        ids: optional [mp_cat_id, cid_short] pair to restrict processing to
            a single contribution.
    """
    try:
        if isinstance(path_or_mpfile, six.string_types) and \
                not os.path.isfile(path_or_mpfile):
            raise Exception('{} not found'.format(path_or_mpfile))
        # NOTE(review): chained `and` means this only raises when ids is
        # neither None, nor a list, nor length-2; a length-3 list slips
        # through -- presumably `or` was intended. Left unchanged here.
        if ids is not None and not isinstance(ids, list) and not len(ids) == 2:
            raise Exception('{} is not list of length 2!'.format(ids))
        from pymatgen.analysis.structure_matcher import StructureMatcher
        mod = import_module('mpcontribs.io.{}.mpfile'.format(fmt))
        MPFile = getattr(mod, 'MPFile')
        full_name = pwd.getpwuid(os.getuid())[4]
        contributor = '{} <*****@*****.**>'.format(full_name)  # fake
        cma = ContributionMongoAdapter()
        axes, ov_data = set(), dict()
        mpfile_out, cid_shorts = MPFile(), []  # output
        # tolerant structure comparison: no primitive-cell reduction/scaling
        sm = StructureMatcher(primitive_cell=False, scale=False)
        # split input MPFile into contributions: treat every mp_cat_id as separate DB insert
        mpfile_in = path_or_mpfile
        if isinstance(path_or_mpfile, six.string_types) or isinstance(
                path_or_mpfile, StringIO):
            mpfile_in = MPFile.from_file(path_or_mpfile)
        for idx, mpfile_single in enumerate(mpfile_in.split()):
            # py2-style access: document.keys() is a list here
            mp_cat_id = mpfile_single.document.keys()[0]
            if ids is None or mp_cat_id == ids[0]:
                cid = mpfile_single.document[mp_cat_id].get('cid', None)
                update = bool(cid is not None)
                if update:
                    cid_short = get_short_object_id(cid)
                    yield 'use #{} to update #{} ... '.format(idx, cid_short)
                # always run local "submission" to catch failure before interacting with DB
                yield 'process #{} ({}) ... '.format(idx, mp_cat_id)
                doc = cma.submit_contribution(
                    mpfile_single, contributor)  # does not use get_string
                cid = doc['_id']
                cid_short = get_short_object_id(cid)
                if ids is None or cid_short == ids[1]:
                    yield 'check ... '
                    # round-trip consistency check is skipped for large objects
                    obj_size = asizeof.asizeof(mpfile_single) / 1024. / 1024.
                    if obj_size > 0.5:
                        yield 'skip ({:.3f}MB) ... '.format(obj_size)
                    else:
                        try:
                            mpfile_single_cmp_str = mpfile_single.get_string()
                        except Exception as ex:
                            yield 'get_string() FAILED!<br>'
                            continue
                        try:
                            mpfile_single_cmp = MPFile.from_string(
                                mpfile_single_cmp_str)
                        except Exception as ex:
                            yield 'from_string() FAILED!<br>'
                            continue
                        if mpfile_single.document != mpfile_single_cmp.document:
                            yield 'check again ... '
                            found_inconsistency = False
                            # check structural data
                            structures_ok = True
                            for name, s1 in mpfile_single.sdata[
                                    mp_cat_id].iteritems():
                                s2 = mpfile_single_cmp.sdata[mp_cat_id][name]
                                if s1 != s2:
                                    if len(s1) != len(s2):
                                        yield 'different number of sites: {} -> {}!<br>'.format(
                                            len(s1), len(s2))
                                        structures_ok = False
                                        break
                                    if s1.lattice != s2.lattice:
                                        yield 'lattices different!<br>'
                                        structures_ok = False
                                        break
                                    for site in s1:
                                        if site not in s2:
                                            # sites differ but StructureMatcher may
                                            # still consider the structures equivalent
                                            found_inconsistency = True
                                            if not sm.fit(s1, s2):
                                                yield 'structures do not match!<br>'
                                                structures_ok = False
                                            break
                                    if not structures_ok:
                                        break
                            if not structures_ok:
                                continue
                            # check hierarchical and tabular data
                            # compare json strings to find first inconsistency
                            json_compare(mpfile_single.hdata,
                                         mpfile_single_cmp.hdata)
                            json_compare(mpfile_single.tdata,
                                         mpfile_single_cmp.tdata)
                            if not found_inconsistency:
                                # documents are not equal, but all components checked, skip contribution
                                # should not happen
                                yield 'inconsistency found but not identified!<br>'
                                continue
                    if target is not None:
                        yield 'submit ... '
                        cid = target.submit_contribution(
                            mpfile_single, fmt)  # uses get_string
                    mpfile_single.insert_id(mp_cat_id, cid)
                    cid_shorts.append(cid_short)
                    if target is not None:
                        # only build the first few inline; queue the rest
                        if idx < 5:
                            yield 'build ... '
                            url = target.build_contribution(cid)
                            url = '/'.join([
                                target.preamble.rsplit('/', 1)[0],
                                'explorer', url
                            ])
                            yield (
                                "OK. <a href='{}' class='btn btn-default btn-xs' "
                                + "role='button' target='_blank'>View</a></br>"
                            ).format(url)
                        else:
                            target.set_build_flag(cid, True)
                            yield 'OK (queued).</br>'
                    else:
                        if (ids is None and idx < 5) or ids is not None:
                            yield 'build ... '
                            mcb = MPContributionsBuilder(doc)
                            build_doc = mcb.build(contributor, cid)
                        else:
                            # placeholder build document for skipped builds
                            yield 'skip ... '
                            from pymatgen.util.provenance import Author
                            author = Author.parse_author(contributor)
                            build_doc = [mp_cat_id, author.name, cid_short, '']
                        yield build_doc
                        yield 'overview axes ... '
                        scope, local_axes = [], set()
                        mpfile_for_axes = MPFile.from_contribution(doc)
                        for k, v in mpfile_for_axes.hdata[mp_cat_id].iterate():
                            if v is None:
                                # key-only node: rewind scope to this depth
                                scope = scope[:k[0]]
                                scope.append(k[1])
                            else:
                                try:
                                    if k[0] == len(scope):
                                        scope.append(k[1])
                                    else:
                                        scope[-1] = k[1]
                                    vf = float(v)  # trigger exception
                                    scope_str = '.'.join(scope)
                                    if idx == 0:
                                        axes.add(scope_str)
                                        ov_data[scope_str] = {
                                            cid_short: (vf, mp_cat_id)
                                        }
                                    else:
                                        local_axes.add(scope_str)
                                        ov_data[scope_str][cid_short] = (
                                            vf, mp_cat_id)
                                except:
                                    # non-numeric leaves are not plottable axes
                                    pass
                        if idx > 0:
                            # keep only axes common to all contributions
                            axes.intersection_update(local_axes)
                        yield 'OK.</br>'.format(idx, cid_short)
                else:
                    yield 'wrong CID.</br>'
            mpfile_out.concat(mpfile_single)
            time.sleep(.01)
        ncontribs = len(cid_shorts)
        if target is not None:
            yield '<strong>{} contributions successfully submitted.</strong>'.format(
                ncontribs)
        else:
            # py2: .keys() returns a list copy, so popping while looping is safe
            for k in ov_data.keys():
                if k not in axes:
                    ov_data.pop(k)
            yield ov_data
            yield '<strong>{} contributions successfully processed.</strong>'.format(
                ncontribs)
    except:
        ex = sys.exc_info()[1]
        yield 'FAILED.</br>'
        yield str(ex).replace('"', "'")
        return
def build(self, contributor_email, cid, api_key=None, endpoint=None):
    """update materials/compositions collections with contributed data

    Args:
        contributor_email: must be a collaborator on the contribution.
        cid: contribution ObjectId.
        api_key: MP API key, exported for the generated notebook (DB mode).
        endpoint: MP REST endpoint, exported for the generated notebook.

    Returns:
        [mp_cat_id, project, cid_short, html] in DB-less mode, otherwise the
        URL path of the contribution page.

    Raises:
        Exception: when the contribution is not found.
        ValueError: when the contributor lacks permission.
    """
    cid_short, cid_str = get_short_object_id(cid), str(cid)
    contrib = self.find_contribution(cid)
    if not contrib:
        raise Exception('Contribution {} not found!'.format(cid))
    if contributor_email not in contrib['collaborators']:
        raise ValueError(
            "Build stopped: building contribution {} not "
            "allowed due to insufficient permissions of {}! Ask "
            "someone of {} to make you a collaborator on {}.".format(
                cid_short, contributor_email, contrib['collaborators'],
                cid_short))
    from pymatgen.util.provenance import Author
    mpfile = MPFileCore.from_contribution(contrib)
    mp_cat_id = mpfile.ids[0]
    is_mp_id = mp_id_pattern.match(mp_cat_id)
    self.curr_coll = self.materials if is_mp_id else self.compositions
    author = Author.parse_author(contributor_email)
    # str.translate(None, '.') is py2-only; replace() strips dots portably
    project = str(author.name).replace('.', '') \
        if 'project' not in contrib else contrib['project']
    nb = nbf.new_notebook()
    if isinstance(self.db, dict):
        # DB-less mode: embed the contribution directly in the notebook
        contrib.pop('_id')
        if 'cid' in contrib['content']:
            contrib['content'].pop('cid')
        nb['cells'].append(
            nbf.new_code_cell(
                "from mpcontribs.io.core.mpfile import MPFileCore\n"
                "from mpcontribs.io.core.recdict import RecursiveDict\n"
                "mpfile = MPFileCore.from_contribution({})\n"
                "identifier = '{}'".format(contrib, mp_cat_id)))
    else:
        # DB mode: the notebook fetches the contribution via the rester
        nb['cells'].append(
            nbf.new_code_cell(
                "from mpcontribs.rest.rester import MPContribsRester"))
        os.environ['PMG_MAPI_KEY'] = api_key
        os.environ['PMG_MAPI_ENDPOINT'] = endpoint
        nb['cells'].append(
            nbf.new_code_cell(
                "with MPContribsRester() as mpr:\n"
                "    mpfile = mpr.find_contribution('{}')\n"
                "    identifier = mpfile.ids[0]".format(cid)))
    nb['cells'].append(
        nbf.new_markdown_cell("## Contribution #{} for {}".format(
            cid_short, mp_cat_id)))
    nb['cells'].append(nbf.new_markdown_cell("### Hierarchical Data"))
    nb['cells'].append(nbf.new_code_cell("mpfile.hdata[identifier]"))
    # one markdown header + code cell per table/plot/structure
    if mpfile.tdata[mp_cat_id]:
        nb['cells'].append(nbf.new_markdown_cell("### Tabular Data"))
        for table_name in mpfile.tdata[mp_cat_id]:
            nb['cells'].append(
                nbf.new_markdown_cell("#### {}".format(table_name)))
            nb['cells'].append(
                nbf.new_code_cell(
                    "mpfile.tdata[identifier]['{}']".format(table_name)))
    if mpfile.gdata[mp_cat_id]:
        nb['cells'].append(nbf.new_markdown_cell("### Graphical Data"))
        for plot_name in mpfile.gdata[mp_cat_id]:
            nb['cells'].append(
                nbf.new_markdown_cell("#### {}".format(plot_name)))
            nb['cells'].append(
                nbf.new_code_cell(
                    "mpfile.gdata[identifier]['{}']".format(plot_name)))
    if mpfile.sdata[mp_cat_id]:
        nb['cells'].append(nbf.new_markdown_cell("### Structural Data"))
        for structure_name in mpfile.sdata[mp_cat_id]:
            nb['cells'].append(
                nbf.new_markdown_cell("#### {}".format(structure_name)))
            nb['cells'].append(
                nbf.new_code_cell(
                    "mpfile.sdata[identifier]['{}']".format(structure_name)))
    # execute the notebook in place
    self.ep.preprocess(nb, {'metadata': {'path': self.nbdir}})
    if isinstance(self.db, dict):
        return [mp_cat_id, project, cid_short, export_notebook(nb, cid)]
    else:
        build_doc = RecursiveDict()
        build_doc['mp_cat_id'] = mp_cat_id
        build_doc['project'] = project
        build_doc['nb'] = nb
        self.curr_coll.update({'_id': cid}, {'$set': build_doc}, upsert=True)
        return '{}/{}'.format(  # return URL for contribution page
            ('materials' if is_mp_id else 'compositions'), cid_str)
def process_mpfile(path_or_mpfile, target=None, fmt='archieml'):
    """Split an MPFile into per-material contributions and process each.

    Generator yielding progress strings (HTML fragments) and, in local mode
    (``target is None``), per-contribution build documents plus a final
    overview-data dict.

    Args:
        path_or_mpfile: path to an MPFile (or an object MPFile.from_file accepts).
        target: optional MP rester used to submit/build contributions remotely.
        fmt: IO format module name under ``mpcontribs.io``.
    """
    try:
        if isinstance(path_or_mpfile, six.string_types) and \
                not os.path.isfile(path_or_mpfile):
            raise Exception('{} not found'.format(path_or_mpfile))
        mod = import_module('mpcontribs.io.{}.mpfile'.format(fmt))
        MPFile = getattr(mod, 'MPFile')
        full_name = pwd.getpwuid(os.getuid())[4]
        contributor = '{} <*****@*****.**>'.format(full_name)  # fake
        cma = ContributionMongoAdapter()
        axes, ov_data = set(), dict()
        # split input MPFile into contributions: treat every mp_cat_id as separate DB insert
        mpfile, cid_shorts = MPFile.from_dict(), []  # output
        for idx, mpfile_single in enumerate(MPFile.from_file(path_or_mpfile).split()):
            # py2/3-safe first key (dict.keys()[0] breaks on py3 views)
            mp_cat_id = next(iter(mpfile_single.document.keys()))
            # TODO test update mode
            cid = mpfile_single.document[mp_cat_id].get('cid', None)
            update = bool(cid is not None)
            if update:
                cid_short = get_short_object_id(cid)
                yield 'use contribution #{} to update ID #{} ... '.format(idx, cid_short)
            # always run local "submission" to catch failure before interacting with DB
            yield 'locally process contribution #{} ... '.format(idx)
            doc = cma.submit_contribution(mpfile_single, contributor)  # does not use get_string
            cid = doc['_id']
            yield 'check consistency ... '
            mpfile_single_cmp = MPFile.from_string(mpfile_single.get_string())
            if mpfile_single.document != mpfile_single_cmp.document:
                # compare json strings to find first inconsistency
                for a, b in zip(
                    json.dumps(mpfile_single.document, indent=4).split('\n'),
                    json.dumps(mpfile_single_cmp.document, indent=4).split('\n')
                ):
                    if a != b:
                        raise Exception('{} <====> {}'.format(a.strip(), b.strip()))
            if target is not None:
                yield 'submit to MP ... '
                cid = target.submit_contribution(mpfile_single, fmt)  # uses get_string
            cid_short = get_short_object_id(cid)
            mpfile_single.insert_id(mp_cat_id, cid)
            cid_shorts.append(cid_short)
            yield 'build into {} ... '.format(mp_cat_id)
            if target is not None:
                url = target.build_contribution(cid)
                url = '/'.join([target.preamble.rsplit('/', 1)[0], 'explorer', url])
                yield ("OK. <a href='{}' class='btn btn-default btn-xs' " +
                       "role='button' target='_blank'>View</a></br>").format(url)
            else:
                mcb = MPContributionsBuilder(doc)
                build_doc = mcb.build(contributor, cid)
                yield build_doc
                yield 'determine overview axes ... '
                scope, local_axes = [], set()
                for k, v in build_doc[3]['tree_data'].iterate():
                    if v is None:
                        # key-only node: rewind scope to this depth
                        scope = scope[:k[0]]
                        scope.append(k[1])
                    else:
                        try:
                            if k[0] == len(scope):
                                scope.append(k[1])
                            else:
                                scope[-1] = k[1]
                            vf = float(v)  # trigger exception
                            scope_str = '.'.join(scope)
                            if idx == 0:
                                axes.add(scope_str)
                                ov_data[scope_str] = {cid_short: (vf, mp_cat_id)}
                            else:
                                local_axes.add(scope_str)
                                ov_data[scope_str][cid_short] = (vf, mp_cat_id)
                        except:
                            # non-numeric leaves are not plottable axes
                            pass
                if idx > 0:
                    # keep only axes common to all contributions
                    axes.intersection_update(local_axes)
                yield 'OK.</br>'.format(idx, cid_short)
            mpfile.concat(mpfile_single)
            time.sleep(.01)
        ncontribs = len(cid_shorts)
        if target is not None:
            yield '<strong>{} contributions successfully submitted.</strong>'.format(ncontribs)
        else:
            # BUG FIX: popping from ov_data while iterating it raises
            # RuntimeError; iterate over a snapshot of the keys instead
            for k in list(ov_data):
                if k not in axes:
                    ov_data.pop(k)
            yield ov_data
            yield '<strong>{} contributions successfully processed.</strong>'.format(ncontribs)
    except:
        ex = sys.exc_info()[1]
        yield 'FAILED.</br>'
        yield str(ex).replace('"', "'")
        return