def add_snl(mat, new_style_mat):
    snl = new_style_mat.get("snl", None)
    mat["snl"] = copy.deepcopy(mat["structure"])
    if snl:
        mat["snl"].update(snl)
    else:
        mat["snl"] = StructureNL(Structure.from_dict(mat["structure"]), []).as_dict()
        mat["snl"]["about"].update(mp_default_snl_fields)

    mat["snl_final"] = mat["snl"]
    mat["icsd_ids"] = [int(i) for i in get(mat["snl"], "about._db_ids.icsd_ids", [])]
    mat["pf_ids"] = get(mat["snl"], "about._db_ids.pf_ids", [])

    # Extract tags from remarks by looking for just nouns and adjectives
    mat["exp"] = {"tags": []}
    for remark in mat["snl"]["about"].get("_tags", []):
        tokens = set(
            tok[1]
            for tok in nltk.pos_tag(nltk.word_tokenize(remark), tagset="universal")
        )
        if len(tokens.intersection({"ADV", "ADP", "VERB"})) == 0:
            mat["exp"]["tags"].append(remark)
def update_current_schedule():
    schedule_data = fetch_current_schedule()
    if not schedule_data:
        return

    game_dates = get(schedule_data, "leagueSchedule.gameDates")
    games = flat_map(game_dates, lambda game_date: get(game_date, "games"))
    reg_season_games = filter(lambda game: get(game, "weekNumber") > 0, games)
    game_data = list(
        map(
            lambda game: {
                **omit(game, ["broadcasters", "homeTeam", "awayTeam", "pointsLeaders"]),
                "homeTeamId": get(game, "homeTeam.teamId"),
                "awayTeamId": get(game, "awayTeam.teamId"),
            },
            reg_season_games,
        ))

    primary_keys = ["gameId"]
    update_data_table_from_dicts("game_schedule", game_data, primary_keys)
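# A minimal sketch of the pydash reshaping used above (the sample game dict
# is an illustrative assumption, not from the source).
from pydash.objects import get, omit

game = {"gameId": "001", "homeTeam": {"teamId": 1}, "awayTeam": {"teamId": 2}}
flat = {**omit(game, ["homeTeam", "awayTeam"]),
        "homeTeamId": get(game, "homeTeam.teamId"),
        "awayTeamId": get(game, "awayTeam.teamId")}
# flat -> {'gameId': '001', 'homeTeamId': 1, 'awayTeamId': 2}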
def getValidMethod(obj):
    """
    Get the valid request method types supported by a request object.
    :param obj: the request object
    :return: a list of the names of the valid request methods (empty if none)
    """
    methods = ['get', 'post', 'put', 'delete', 'patch']
    valids = []
    for method in methods:
        if objects.get(obj, method) is not None:
            valids.append(method)
    return valids
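# Hypothetical usage sketch for getValidMethod; the Handler class is an
# illustrative assumption (pydash's get falls back to attribute access on
# plain objects).
from pydash import objects

class Handler:
    def get(self):
        return "ok"

    def post(self):
        return "created"

print(getValidMethod(Handler()))  # -> ['get', 'post']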
def create_confirmation_email_dict(response, confirmationEmailInfo):
    # Creates dict with confirmation email info.
    if not confirmationEmailInfo:
        return

    templateText = get(
        confirmationEmailInfo, "template.html", "Confirmation Email Sample Text"
    )
    msgBody = fill_string_from_template(response, templateText)
    addCSS = False
    toField = confirmationEmailInfo["toField"]
    if type(toField) is not list:
        toField = [toField]

    # pre-process attachment templates
    attachments = [
        {
            "fileName": item.get("fileName", "attachment.pdf"),
            "contents": convert_html_to_pdf(
                fill_string_from_template(response, item["template"]["html"])
            ),
        }
        for item in confirmationEmailInfo.get("attachments", [])
    ]

    emailOptions = dict(
        toEmail=[get(response.value, i) for i in toField],
        fromEmail=confirmationEmailInfo.get("from", "*****@*****.**"),
        fromName=confirmationEmailInfo.get("fromName", "Webmaster"),
        subject=confirmationEmailInfo.get("subject", "Confirmation Email"),
        bccEmail=confirmationEmailInfo.get("bcc", ""),
        ccEmail=confirmationEmailInfo.get("cc", ""),
        replyToEmail=confirmationEmailInfo.get("replyTo", ""),
        msgBody=msgBody,
        attachments=attachments,
    )
    return emailOptions
def filter_and_group_tasks(self, tasks):
    """
    Groups tasks by structure matching
    """
    filtered_tasks = [
        t for t in tasks if task_type(t["orig_inputs"]) in self.allowed_tasks
    ]

    structures = []
    for idx, t in enumerate(filtered_tasks):
        s = Structure.from_dict(t["output"]["structure"])
        s.index = idx
        total_mag = get(t, "calcs_reversed.0.output.outcar.total_magnetization", 0)
        s.total_magnetization = total_mag if total_mag else 0
        # a fix for very old tasks that did not report site-projected magnetic
        # moments so that we can group them appropriately
        if (
            ("magmom" not in s.site_properties)
            and (get(t, "input.parameters.ISPIN", 1) == 2)
            and has(t, "input.parameters.MAGMOM")
        ):
            # TODO: map input structure sites to output structure sites
            s.add_site_property("magmom", t["input"]["parameters"]["MAGMOM"])
        structures.append(s)

    grouped_structures = group_structures(
        structures,
        ltol=self.ltol,
        stol=self.stol,
        angle_tol=self.angle_tol,
        separate_mag_orderings=self.separate_mag_orderings,
    )

    for group in grouped_structures:
        yield [filtered_tasks[struc.index] for struc in group]
def fill_paymentMethods_with_data(paymentMethods, form_data):
    new_paymentMethods = {}
    for paymentMethodName, paymentMethod in paymentMethods.items():
        new_paymentMethods[paymentMethodName] = {}
        for key, value in paymentMethod.items():
            if type(value) is str:
                value = [value]
            if type(value) is list:
                value = [
                    (get(form_data, x[1:], ".") if x.startswith("$") else x)
                    for x in value
                ]
                value = "".join(value)
            new_paymentMethods[paymentMethodName][key] = value
    return new_paymentMethods
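# Minimal usage sketch: "$"-prefixed entries are resolved as pydash paths
# into form_data; the sample payment method config is an assumption.
methods = {"paypal": {"payee": ["$contact.email"], "memo": "Order"}}
data = {"contact": {"email": "jane@example.com"}}
print(fill_paymentMethods_with_data(methods, data))
# -> {'paypal': {'payee': 'jane@example.com', 'memo': 'Order'}}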
def old_style_mat(new_mat):
    """
    Creates the base document for the old MP mapidoc style
    from the new document structure
    """
    mat = {}
    for mp, new_key in mp_conversion_dict.items():
        if has(new_mat, new_key):
            set_(mat, mp, get(new_mat, new_key))

    mat["is_ordered"] = True
    mat["is_compatible"] = True

    struc = Structure.from_dict(mat["structure"])
    mat["oxide_type"] = oxide_type(struc)
    mat["reduced_cell_formula"] = struc.composition.as_dict()
    mat["full_formula"] = "".join(struc.formula.split())
    vals = sorted(mat["reduced_cell_formula"].values())
    mat["anonymous_formula"] = {
        string.ascii_uppercase[i]: float(vals[i]) for i in range(len(vals))
    }

    set_(mat, "pseudo_potential.functional", "PBE")
    set_(mat, "pseudo_potential.labels", [
        p["titel"].split()[1] for p in get(new_mat, "calc_settings.potcar_spec")
    ])
    mat["ntask_ids"] = len(get(new_mat, "task_ids"))
    set_(mat, "pseudo_potential.pot_type", "paw")

    add_bv_structure(mat)
    add_blessed_tasks(mat, new_mat)
    add_cifs(mat)

    return mat
def inner_hand(data, index):
    key = key_split[index]
    value = objects.get(data, key)
    if index == (len(key_split) - 1):
        if value is not None:
            result.append(str(value))
        return
    if isinstance(value, list):
        k_index = index + 1
        for item in value:
            inner_hand(item, k_index)
    else:
        inner_hand(value, index + 1)
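# Sketch of the enclosing pattern this recursive helper assumes: key_split
# and result live in the surrounding scope. collect_values is a hypothetical
# wrapper, not from the source.
from pydash import objects

def collect_values(data, dotted_key):
    key_split = dotted_key.split(".")
    result = []

    def walk(node, index):
        value = objects.get(node, key_split[index])
        if index == len(key_split) - 1:
            if value is not None:
                result.append(str(value))
            return
        if isinstance(value, list):
            # fan out over list elements
            for item in value:
                walk(item, index + 1)
        else:
            walk(value, index + 1)

    walk(data, 0)
    return result

print(collect_values({"a": [{"b": 1}, {"b": 2}]}, "a.b"))  # -> ['1', '2']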
def _create_one_to_one_record(table_name, message) -> dict:
    """
    Build a dictionary with a list for the field names (columns) and the
    associated values
    :param table_name: table definition with 'name' and 'fields' entries
    :param message: source message to pull field values from
    :return: dict with 'columns', 'values', and 'table' keys
    """
    table = {'columns': [], 'values': [], 'table': table_name['name']}
    for field in table_name['fields']:
        value = objects.get(message, field['json_name'])
        if value:
            table['columns'].append(field['destination_name'])
            table['values'].append(value)
    return table
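# Hypothetical usage sketch; the table definition shape is inferred from the
# keys read above ('name', 'fields', 'json_name', 'destination_name').
# Note that falsy values (0, "") are skipped by the `if value:` check.
table_def = {
    "name": "users",
    "fields": [{"json_name": "profile.email", "destination_name": "email"}],
}
message = {"profile": {"email": "jane@example.com"}}
print(_create_one_to_one_record(table_def, message))
# -> {'columns': ['email'], 'values': ['jane@example.com'], 'table': 'users'}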
def get_implemented_filter_function(function_name: str, debug: bool) -> Callable:
    def param_resolver(f):
        def wrap(*args, **kwargs):
            calculated_parameters = [arg(kwargs) for arg in args]
            result = f(*calculated_parameters, **kwargs)
            if debug:
                logger.debug(
                    f"{function_name}({', '.join(str(x) for x in calculated_parameters)}) -> {result}"
                )
            return result

        return wrap

    implemented_filter_functions = {
        "eq": param_resolver(lambda a, b, **kwargs: a == b),
        "ne": param_resolver(lambda a, b, **kwargs: a != b),
        "lt": param_resolver(lambda a, b, **kwargs: a < b),
        "gt": param_resolver(lambda a, b, **kwargs: a > b),
        "le": param_resolver(lambda a, b, **kwargs: a <= b),
        "ge": param_resolver(lambda a, b, **kwargs: a >= b),
        "not": param_resolver(lambda a, **kwargs: not a),
        "or": lambda *args, **kwargs: any(arg(kwargs) for arg in args),
        "and": lambda *args, **kwargs: all(arg(kwargs) for arg in args),
        "in": param_resolver(lambda a, b, **kwargs: a in b),
        "regex": param_resolver(lambda *args, **kwargs: bool(re.match(*args))),
        "regex:ignorecase": param_resolver(
            lambda *args, **kwargs: bool(re.match(*args, re.IGNORECASE))),
        "exists": param_resolver(lambda a, **kwargs: a is not None),
        "empty": param_resolver(lambda *args, **kwargs: len(args) == 0),
        "ref": param_resolver(lambda param_name, **kwargs: get(kwargs, param_name)),
    }
    return implemented_filter_functions[function_name]
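# Hypothetical usage sketch: each positional argument is a resolver that is
# called with the keyword context; the parameter names are assumptions.
eq = get_implemented_filter_function("eq", debug=False)
print(eq(lambda params: params["status"],
         lambda params: "active",
         status="active"))  # -> True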
def search_bulk(self, keywords=None, author=None):
    """Build a list of all books for given author/keyword combination"""
    rv = list()
    if not keywords and not author:
        raise KeyError("No value for search!")
    while self.current_result <= self.max_results:
        r = self.search_books(keywords=keywords, author=author,
                              index=self.current_result)
        if get(r, 'items'):
            for book in r['items']:
                rv.append(book)
            self.current_result += 1
        else:
            return rv  # as there are no more results, we exit the while loop
    return rv
def update_response_path(response, path, value, response_base_path):
    from ..main import app

    existing_value = get(getattr(response, response_base_path), path, value)
    if type(value) is str:
        if type(existing_value) is bool and value.lower() in ("true", "false"):
            value = value.lower() == "true"
        elif type(existing_value) is float and value.isdigit():
            value = float(value)
        elif type(existing_value) is int and value.isdigit():
            value = int(value)
    set_(getattr(response, response_base_path), path, value)
    response.update_trail.append(
        UpdateTrailItem(path=path,
                        old_value=existing_value,
                        new_value=value,
                        date=datetime.datetime.now(),
                        user=app.get_current_user_id(),
                        response_base_path=response_base_path))
def buildTop(data):
    """
    Build the top-level Swagger directory structure
    :param data: the raw data returned by Swagger
    :return: the list of top-level entries
    """
    try:
        dataObj = json.loads(data)
        tags = objects.get(dataObj, 'tags', [])
        tops = arrays.compact(tags)
        tops = arrays.mapcat(
            tops, lambda item: objects.assign(objects.clone(item), {'child': []}))
        return tops
    except Exception:
        sysLog.warn('JSON parsing failed; please check that the address is correct')
        return []
def convert_mpworks_to_atomate(mpworks_doc, update_mpworks=True):
    """
    Function to convert an mpworks document into an atomate
    document, uses schema above and a few custom cases

    Args:
        mpworks_doc (dict): mpworks task document
        update_mpworks (bool): flag to indicate that mpworks
            schema should be updated to final MPWorks version
    """
    if update_mpworks:
        update_mpworks_schema(mpworks_doc)

    atomate_doc = {}
    for key_mpworks, key_atomate in settings['task_conversion_keys'].items():
        val = get(mpworks_doc, key_mpworks)
        set_(atomate_doc, key_atomate, val)

    # Task type
    atomate_doc["task_label"] = settings['task_label_conversions'].get(
        mpworks_doc["task_type"])

    # calculations
    atomate_doc["calcs_reversed"] = mpworks_doc["calculations"][::-1]

    # anonymous formula
    comp = Composition(atomate_doc['composition_reduced'])
    atomate_doc["formula_anonymous"] = comp.anonymized_formula

    # deformation matrix and original_task_id
    if "deformation_matrix" in mpworks_doc:
        # Transpose this b/c of old bug, should verify in doc processing
        defo = mpworks_doc["deformation_matrix"]
        if isinstance(defo, str):
            defo = convert_string_deformation_to_list(defo)
        defo = np.transpose(defo).tolist()
        set_(atomate_doc, "transmuter.transformations",
             ["DeformStructureTransformation"])
        set_(atomate_doc, "transmuter.transformation_params",
             [{"deformation": defo}])

    return atomate_doc
def _get_insert_sql(self, table, entity, fields, from_id):
    if from_id:
        entity['from_id'] = from_id
    entity['branch'] = objects.get(entity, '_metadata.branch')

    columns = [field['column'] for field in fields if field['name'] in entity]
    values = ['%s' for field in fields if field['name'] in entity]
    params = tuple(entity[field['name']] for field in fields
                   if field['name'] in entity)

    return {
        'query': self.QUERIES['insert'].format(table=table,
                                               values=str.join(',', values),
                                               columns=str.join(',', columns)),
        'params': params
    }
def old_style_mat(new_mat):
    mat = {}
    for mp, new_key in mp_conversion_dict.items():
        if has(new_mat, new_key):
            set_(mat, mp, get(new_mat, new_key))

    mat["is_ordered"] = True
    mat["is_compatible"] = True

    struc = Structure.from_dict(mat["structure"])
    mat["oxide_type"] = oxide_type(struc)
    mat["reduced_cell_formula"] = struc.composition.as_dict()
    mat["full_formula"] = "".join(struc.formula.split())
    vals = sorted(mat["reduced_cell_formula"].values())
    mat["anonymous_formula"] = {
        string.ascii_uppercase[i]: float(vals[i]) for i in range(len(vals))
    }

    set_(mat, "original_task_id", get(new_mat, "material_id"))
    set_(mat, "ntask_ids", len(get(new_mat, "task_ids")))

    set_(mat, "input.incar", get(new_mat, "inputs.structure_optimization.incar"))
    set_(mat, "input.kpoints", get(new_mat, "inputs.structure_optimization.kpoints"))
    set_(mat, "encut", get(new_mat, "inputs.structure_optimization.incar.ENCUT"))
    mat["pseudo_potential"] = {
        "pot_type": "paw",
        "labels": get(new_mat, "inputs.structure_optimization.potcar.symbols"),
        "functional": get(new_mat, "inputs.structure_optimization.potcar.functional")
    }
    return mat
def update_targets(self, items):
    """
    Inserts the new SNL docs into the SNL collection
    """
    snls = []
    for snl_dict in filter(None, items):
        for mat_id, snl_list in snl_dict.items():
            snl = sorted(
                snl_list, key=lambda x: StructureNL.from_dict(x).created_at)[0]
            icsd_ids = [
                get(snl, "about._icsd.icsd_id") for snl in snl_list
                if has(snl, "about._icsd")
            ]
            snls.append(
                {self.snls.key: mat_id, "snl": snl, "icsd_ids": icsd_ids})

    if len(snls) > 0:
        self.snls.update(snls)
    else:
        self.logger.info("No items to update")
def add_thermo(mat, new_style_mat):
    """
    Adds the thermo values in with sandboxing
    """
    if "thermo_docs" not in new_style_mat:
        mat["deprecated"] = True

    if not mat["deprecated"]:
        thermo = new_style_mat["thermo_docs"]

        if "core" in mat["sbxn"]:
            main_sbx = "core"
        else:
            main_sbx = mat["sbxn"][0]

        # Get the primary document and set in mat document
        core_thermo = next(d for d in thermo if main_sbx in d["_sbxn"])

        mat["e_above_hull"] = core_thermo["thermo"]["e_above_hull"]
        mat["formation_energy_per_atom"] = core_thermo["thermo"][
            "formation_energy_per_atom"]
        if "decomposes_to" in core_thermo["thermo"]:
            mat["decomposes_to"] = core_thermo["thermo"]["decomposes_to"]

        sbxd = {}
        sandbox_props = {
            "e_above_hull": "thermo.e_above_hull",
            "decomposes_to": "thermo.decomposes_to",
        }
        for doc in thermo:
            for sbx in doc["_sbxn"]:
                sbx_d = {
                    k: get(doc, v) for k, v in sandbox_props.items() if has(doc, v)
                }
                sbx_d["id"] = sbx
                sbxd[sbx] = sbx_d

        mat["sbxd"] = list(sbxd.values())
def _get_update_sql(self, entity_id, table, entity, fields, change_track,
                    from_id=None):
    if from_id:
        entity['from_id'] = from_id
    entity['deleted'] = change_track == 'destroy'
    entity['branch'] = objects.get(entity, '_metadata.branch')

    values = [f"{field['column']}=%s" for field in fields]
    params = tuple(
        entity[field['name']] if field['name'] in entity else None
        for field in fields)
    params += (entity_id, )  # entity id parameter

    return {
        'query': self.QUERIES['update'].format(table=table,
                                               values=str.join(',', values)),
        'params': params
    }
def get_elastic_analysis(opt_task, defo_tasks):
    """
    Performs the analysis of opt_tasks and defo_tasks necessary for
    an elastic analysis

    Args:
        opt_task: task doc corresponding to optimization
        defo_tasks: task_doc corresponding to deformations

    Returns:
        elastic document with fitted elastic tensor and analysis
    """
    elastic_doc = {"warnings": []}
    opt_struct = Structure.from_dict(opt_task['output']['structure'])
    input_struct = Structure.from_dict(opt_task['input']['structure'])
    # For now, discern order (i.e. TOEC) using parameters from optimization
    # TODO: figure this out more intelligently
    diff = get(opt_task, "input.incar.EDIFFG", 0)
    order = 3 if np.isclose(diff, -0.001) else 2
    explicit, derived = process_elastic_calcs(opt_task, defo_tasks)
    all_calcs = explicit + derived
    stresses = [c.get("cauchy_stress") for c in all_calcs]
    pk_stresses = [c.get("pk_stress") for c in all_calcs]
    strains = [c.get("strain") for c in all_calcs]
    elastic_doc['calculations'] = all_calcs
    vstrains = [s.zeroed(0.002).voigt for s in strains]
    if np.linalg.matrix_rank(vstrains) == 6:
        if order == 2:
            et_fit = legacy_fit(strains, stresses)
        elif order == 3:
            # Test for TOEC
            if len(strains) < 70:
                logger.info("insufficient valid strains for {} TOEC".format(
                    opt_task['formula_pretty']))
                return None
            eq_stress = -0.1 * Stress(opt_task['output']['stress'])
            # strains = [s.zeroed(0.0001) for s in strains]
            # et_expansion = pdb_function(ElasticTensorExpansion.from_diff_fit,
            #     strains, pk_stresses, eq_stress=eq_stress, tol=1e-5)
            et_exp_raw = ElasticTensorExpansion.from_diff_fit(
                strains, pk_stresses, eq_stress=eq_stress, tol=1e-6)
            et_exp = et_exp_raw.voigt_symmetrized.convert_to_ieee(opt_struct)
            et_exp = et_exp.round(1)
            et_fit = ElasticTensor(et_exp[0])
            # Update elastic doc with TOEC stuff
            tec = et_exp.thermal_expansion_coeff(opt_struct, 300)
            elastic_doc.update({
                "elastic_tensor_expansion": elastic_sanitize(et_exp),
                "elastic_tensor_expansion_original": elastic_sanitize(et_exp_raw),
                "thermal_expansion_tensor": tec,
                "average_linear_thermal_expansion": np.trace(tec) / 3
            })
        et = et_fit.voigt_symmetrized.convert_to_ieee(opt_struct)
        vasp_input = opt_task['input']
        if 'structure' in vasp_input:
            vasp_input.pop('structure')
        completed_at = max([d['completed_at'] for d in defo_tasks])
        elastic_doc.update({
            "optimization_task_id": opt_task['task_id'],
            "optimization_dir_name": opt_task['dir_name'],
            "cauchy_stresses": stresses,
            "strains": strains,
            "elastic_tensor": elastic_sanitize(et.zeroed(0.01).round(0)),
            # Convert compliance to 10^-12 Pa
            "compliance_tensor": elastic_sanitize(et.compliance_tensor * 1000),
            "elastic_tensor_original": elastic_sanitize(et_fit),
            "optimized_structure": opt_struct,
            "spacegroup": input_struct.get_space_group_info()[0],
            "input_structure": input_struct,
            "completed_at": completed_at,
            "optimization_input": vasp_input,
            "order": order,
            "pretty_formula": opt_struct.composition.reduced_formula
        })
        # Add magnetic type
        mag = CollinearMagneticStructureAnalyzer(opt_struct).ordering.value
        elastic_doc['magnetic_type'] = mag_types[mag]
        try:
            prop_dict = et.get_structure_property_dict(opt_struct)
            prop_dict.pop('structure')
        except ValueError:
            logger.debug("Negative K or G found, structure property "
                         "dict not computed")
            prop_dict = et.property_dict
        for k, v in prop_dict.items():
            if k in ['homogeneous_poisson', 'universal_anisotropy']:
                prop_dict[k] = np.round(v, 2)
            else:
                prop_dict[k] = np.round(v, 0)
        elastic_doc.update(prop_dict)
        # Update with state and warnings
        state, warnings = get_state_and_warnings(elastic_doc)
        elastic_doc.update({"state": state, "warnings": warnings})
        # TODO: add kpoints params?
        return elastic_doc
    else:
        logger.info("insufficient valid strains for {}".format(
            opt_task['formula_pretty']))
        return None
def querySelector(tree, css):
    doms = querySelectorAll(tree, css)
    return get(doms, 0)
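# Sketch of the pydash idiom behind querySelector: get(list, 0) is a safe
# "first element or None" (sample lists are illustrative).
from pydash.objects import get

print(get([10, 20], 0))  # -> 10
print(get([], 0))        # -> None (no IndexError on an empty result)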
def add_bonds(mat, new_style_mat):
    if get(new_style_mat, "bonds.successful", False):
        mat["bonds"] = get(new_style_mat, "bonds.summary")
import os

os.environ["TABLE_PREFIX"] = "cff_prod"

from chalicelib.main import TABLES
from chalicelib.util import get_all_responses
from boto3.dynamodb.conditions import Key
import boto3
import json
from pynliner import Pynliner
from pydash.objects import get
import datetime

formId = "31571110-483c-4b72-b4b8-5b1ce0b9348b"
client = boto3.client("ses")
responses = boto3.resource('dynamodb').Table("cff_prod.responses")

print("Querying all responses...")
responses = get_all_responses(KeyConditionExpression=Key('formId').eq(formId),
                              FilterExpression=Key('PAID').eq(True))
print("Got responses.")

used_bibs = []
duplicate_bibs = []
for rNum, response in enumerate(responses):
    print(f"=== Starting response number {rNum} ===")
    for i, participant in enumerate(get(response, "value.participants", [])):
        oldbib = str(get(participant, "bib_number"))
        if oldbib in used_bibs and oldbib != 'None':
            duplicate_bibs.append(oldbib)
        used_bibs.append(oldbib)

print("Done! Duplicates:")
print(duplicate_bibs)
def run_task(self, fw_spec):
    # get the directory that contains the VASP dir to parse
    calc_dir = os.getcwd()
    if "calc_dir" in self:
        calc_dir = self["calc_dir"]
    elif self.get("calc_loc"):
        calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

    # parse the VASP directory
    logger.info("PARSING DIRECTORY: {}".format(calc_dir))

    drone = VaspDrone(additional_fields=self.get("additional_fields"),
                      parse_dos=self.get("parse_dos", False),
                      bandstructure_mode=self.get("bandstructure_mode", False),
                      parse_chgcar=self.get("parse_chgcar", False),
                      parse_aeccar=self.get("parse_aeccar", False))

    # assimilate (i.e., parse)
    task_doc = drone.assimilate(calc_dir)

    # Check for additional keys to set based on the fw_spec
    if self.get("fw_spec_field"):
        task_doc.update(fw_spec[self.get("fw_spec_field")])

    # get the database connection
    db_file = env_chk(self.get('db_file'), fw_spec)

    # db insertion or taskdoc dump
    if not db_file:
        with open("task.json", "w") as f:
            f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
    else:
        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        t_id = mmdb.insert_task(
            task_doc,
            use_gridfs=self.get("parse_dos", False)
            or bool(self.get("bandstructure_mode", False))
            or self.get("parse_chgcar", False)
            or self.get("parse_aeccar", False))
        logger.info("Finished parsing with task_id: {}".format(t_id))

    defuse_children = False
    if task_doc["state"] != "successful":
        defuse_unsuccessful = self.get("defuse_unsuccessful",
                                       DEFUSE_UNSUCCESSFUL)
        if defuse_unsuccessful is True:
            defuse_children = True
        elif defuse_unsuccessful is False:
            pass
        elif defuse_unsuccessful == "fizzle":
            raise RuntimeError(
                "VaspToDb indicates that job is not successful "
                "(perhaps your job did not converge within the "
                "limit of electronic/ionic iterations)!")
        else:
            raise RuntimeError("Unknown option for defuse_unsuccessful: "
                               "{}".format(defuse_unsuccessful))

    task_fields_to_push = self.get("task_fields_to_push", None)
    update_spec = {}
    if task_fields_to_push:
        if isinstance(task_fields_to_push, dict):
            for key, path_in_task_doc in task_fields_to_push.items():
                if has(task_doc, path_in_task_doc):
                    update_spec[key] = get(task_doc, path_in_task_doc)
                else:
                    logger.warn("Could not find {} in task document. "
                                "Unable to push to next firetask/firework"
                                .format(path_in_task_doc))
        else:
            raise RuntimeError("Inappropriate type {} for task_fields_to_push. "
                               "It must be a dictionary of format: {key: path} "
                               "where key refers to a field in the spec and "
                               "path is a full mongo-style path to a "
                               "field in the task document"
                               .format(type(task_fields_to_push)))

    return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                    defuse_children=defuse_children,
                    update_spec=update_spec)
def update_ccavenue_hash(formId, ccavenuePaymentMethodInfo, response):
    from ...main import app

    def fill_ccavenue_paymentinfo(key):
        value = ccavenuePaymentMethodInfo.get(key)
        return fill_string_from_template(response, value) if value else ""

    merchant_id = ccavenuePaymentMethodInfo["merchant_id"]
    config = CCAvenueConfig.objects.get({"merchant_id": merchant_id})
    if not config:
        raise Exception(
            f"CCAvenue config not found for merchant id: {merchant_id}.")

    # Supported languages:
    # en - English, hi - Hindi, gu - Gujarati, mr - Marathi, bn - Bengali
    responseId = str(response.id)
    orderId = str(ObjectId())
    data = {
        "merchant_id": merchant_id,
        "order_id": orderId,
        "currency": response.paymentInfo["currency"],
        "amount": str(
            Decimal(response.paymentInfo["total"]) -
            Decimal(response.amount_paid)),
        "redirect_url": app.get_url(
            f"responses/{responseId}/ccavenueResponseHandler"),
        "cancel_url": "http://www.chinmayamission.com",  # todo: fix this.
        "language": "en",
        "billing_name": fill_ccavenue_paymentinfo("billing_name"),
        "billing_address": fill_ccavenue_paymentinfo("billing_address"),
        "billing_city": fill_ccavenue_paymentinfo("billing_city"),
        "billing_state": fill_ccavenue_paymentinfo("billing_state"),
        "billing_zip": fill_ccavenue_paymentinfo("billing_zip"),
        "billing_country": fill_ccavenue_paymentinfo("billing_country"),
        "billing_tel": fill_ccavenue_paymentinfo("billing_tel"),
        "billing_email": fill_ccavenue_paymentinfo("billing_email"),
        "merchant_param1": formId,
        "merchant_param2": responseId,
        "merchant_param3": get(ccavenuePaymentMethodInfo, "redirectUrl",
                               "http://www.chinmayamission.com"),
    }
    sub_account_id = ccavenuePaymentMethodInfo.get("sub_account_id")
    if sub_account_id is not None:
        data["sub_account_id"] = sub_account_id
    # Other supported CCAvenue fields, currently unused:
    # "delivery_name", "delivery_address", "delivery_city", "delivery_state",
    # "delivery_zip", "delivery_country", "delivery_tel",
    # "merchant_param4", "merchant_param5", "integration_type",
    # "promo_code", "customer_identifier"
    ccavenuePaymentMethodInfo["encRequest"] = encrypt(
        data, config.SECRET_working_key)
    ccavenuePaymentMethodInfo["access_code"] = config.access_code
    ccavenuePaymentMethodInfo["merchant_id"] = merchant_id
    return ccavenuePaymentMethodInfo
def group_by_material_id(materials_dict, docs, structure_key='structure',
                         tol=1e-6, loosen=True, structure_matcher=None):
    """
    Groups a collection of documents by material id
    as found in a materials collection

    Args:
        materials_dict (dict): dictionary of structures keyed by task_id
        docs ([dict]): list of documents
        tol (float): tolerance for lattice grouping
        loosen (bool): whether or not to loosen criteria if no matches
            are found
        structure_key (string): mongo-style key of documents where structures
            are contained (e. g. input.structure or output.structure)
        structure_matcher (StructureMatcher): structure matcher for finding
            equivalent structures

    Returns:
        documents grouped by task_id from the materials collection
    """
    # Structify all input structures
    materials_dict = {
        mp_id: Structure.from_dict(struct)
        for mp_id, struct in materials_dict.items()
    }
    # Get magnetic phases
    mags = {}
    # TODO: refactor this with data from materials collection?
    for mp_id, structure in materials_dict.items():
        mag = CollinearMagneticStructureAnalyzer(structure).ordering.value
        mags[mp_id] = mag_types[mag]
    docs_by_mp_id = {}
    for doc in docs:
        sm = structure_matcher or StructureMatcher(
            comparator=ElementComparator())
        structure = Structure.from_dict(get(doc, structure_key))
        input_sg_symbol = SpacegroupAnalyzer(structure,
                                             0.1).get_space_group_symbol()
        # Iterate over all candidates until match is found
        matches = {
            c_id: candidate
            for c_id, candidate in materials_dict.items()
            if sm.fit(candidate, structure)
        }
        niter = 0
        if not matches:
            # First try with conventional structure then loosen match criteria
            convs = {
                c_id: SpacegroupAnalyzer(
                    candidate, 0.1).get_conventional_standard_structure()
                for c_id, candidate in materials_dict.items()
            }
            matches = {
                c_id: candidate
                for c_id, candidate in materials_dict.items()
                if sm.fit(convs[c_id], structure)
            }
            while len(matches) < 1 and niter < 4 and loosen:
                logger.debug("Loosening sm criteria")
                sm = StructureMatcher(sm.ltol * 2, sm.stol * 2,
                                      sm.angle_tol * 2, primitive_cell=False)
                matches = {
                    c_id: candidate
                    for c_id, candidate in materials_dict.items()
                    if sm.fit(convs[c_id], structure)
                }
                niter += 1
        if matches:
            # Get best match by spacegroup, then mag phase, then closest density
            mag = doc['magnetic_type']

            def sort_criteria(m_id):
                dens_diff = abs(matches[m_id].density - structure.density)
                sg = matches[m_id].get_space_group_info(0.1)[0]
                mag_id = mags[m_id]
                # prefer explicit matches, allow non-mag materials match with FM tensors
                if mag_id == mag:
                    mag_match = 0
                elif mag_id == 'Non-magnetic' and mag == 'FM':
                    mag_match = 1
                else:
                    mag_match = 2
                return (sg != input_sg_symbol, mag_match, dens_diff)

            sorted_ids = sorted(list(matches.keys()), key=sort_criteria)
            mp_id = sorted_ids[0]
            if mp_id in docs_by_mp_id:
                docs_by_mp_id[mp_id].append(doc)
            else:
                docs_by_mp_id[mp_id] = [doc]
        else:
            logger.debug("No material match found for formula {}".format(
                structure.composition.reduced_formula))
    return docs_by_mp_id
def handle_columns_reduce(total, column):
    value_ = get(item, column.label)
    if value_ is None:
        value_ = get(item, column.order_by)
    total.append(value_)
    return total
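# Minimal sketch of how this reducer is driven (functools.reduce over column
# definitions, with `item` coming from the enclosing scope); Column and the
# sample data are assumptions, not from the source.
from functools import reduce
from collections import namedtuple

Column = namedtuple("Column", ["label", "order_by"])
item = {"name": "Widget", "price": 9.5}
columns = [Column(label="name", order_by="id"),
           Column(label="missing", order_by="price")]
print(reduce(handle_columns_reduce, columns, []))  # -> ['Widget', 9.5]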
def form_response_new(formId):
    """
    Payload:
    {
        "data": formData,
        "modifyLink": "...",
        "responseId"?: "asdsadasd"
    }
    If responseId is defined, it is an update to an existing submission.
    Otherwise, it is a new submission.
    """
    from ..main import app

    email_sent = False
    responseId = app.current_request.json_body.get("responseId", None)
    if not responseId:
        responseId = ObjectId()
        newResponse = True
    else:
        responseId = ObjectId(responseId)
        newResponse = False
    form = Form.objects.get({"_id": ObjectId(formId)})
    response_data = app.current_request.json_body["data"]
    response_data = process_response_data_images(response_data)
    postprocess = form.formOptions.postprocess
    if postprocess and "patches" in postprocess and type(
            postprocess["patches"]) is list:
        response_data = patch_predicate(response_data, postprocess["patches"])
    counter_value = None
    counter = form.formOptions.counter
    if newResponse and counter and "enabled" in counter and counter[
            "enabled"] == True:
        counter_value = get_counter(formId)
    modify_link = app.current_request.json_body.get('modifyLink', '')
    paymentInfo = form.formOptions.paymentInfo
    confirmationEmailInfo = form.formOptions.confirmationEmailInfo
    paymentMethods = fill_paymentMethods_with_data(
        form.formOptions.paymentMethods, response_data)

    def calc_item_total_to_paymentInfo(paymentInfoItem, paymentInfo):
        paymentInfoItem['amount'] = calculate_price(
            paymentInfoItem.get('amount', '0'), response_data)
        paymentInfoItem['quantity'] = calculate_price(
            paymentInfoItem.get('quantity', '0'), response_data)
        paymentInfo['total'] += (
            paymentInfoItem['amount'] * paymentInfoItem['quantity'])
        if "couponCode" in paymentInfoItem and paymentInfoItem[
                "amount"] * paymentInfoItem["quantity"] != 0:
            slots_maximum = calculate_price(
                paymentInfoItem.get("couponCodeMaximum", "-1"), response_data)
            if slots_maximum != -1:
                slots_requested = calculate_price(
                    paymentInfoItem.get("couponCodeCount", "1"), response_data)
                slots_used = form.couponCodes_used.get(
                    paymentInfoItem["couponCode"], 0)
                slots_available = slots_maximum - slots_used
                slots_remaining = slots_available - slots_requested
                if slots_remaining < 0:
                    message = ("Coupon code maximum reached.\n"
                               "Submitting this form will cause you to exceed "
                               "the coupon code maximum.\n"
                               "Number of spots remaining: {}".format(
                                   int(slots_available)))
                    return False, {
                        "res": {
                            "success": False,
                            "message": message,
                            "fields_to_clear": ["couponCode"]
                        }
                    }
                form.couponCodes_used[paymentInfoItem[
                    "couponCode"]] = slots_used + slots_requested
                Form.objects.raw({"_id": form.id}).update({
                    "$set": {
                        f"couponCodes_used.{paymentInfoItem['couponCode']}":
                        slots_used + slots_requested
                    }
                })
        return True, {}

    paymentInfoItemsWithTotal = []
    paymentInfoItemsInstallment = []
    paymentInfo['total'] = 0
    for paymentInfoItem in paymentInfo.setdefault('items', []):
        paymentInfoItem.setdefault("name", "Payment Item")
        paymentInfoItem.setdefault("description", "Payment Item")
        paymentInfoItem.setdefault("quantity", "1")
        if paymentInfoItem.get("installment", False) == True:
            # Don't count "installment" payments towards the total.
            paymentInfoItemsInstallment.append(paymentInfoItem)
            continue
        if "$total" in paymentInfoItem.get(
                "amount", "0") or "$total" in paymentInfoItem.get(
                    "quantity", "0"):
            # Take care of this at the end.
            paymentInfoItemsWithTotal.append(paymentInfoItem)
            continue
        success, error = calc_item_total_to_paymentInfo(
            paymentInfoItem, paymentInfo)
        if success is False:
            return error

    # Now take care of items for round off, etc. -- which need the total
    # value to work.
    response_data["total"] = float(paymentInfo["total"])
    for paymentInfoItem in paymentInfoItemsWithTotal:
        success, error = calc_item_total_to_paymentInfo(
            paymentInfoItem, paymentInfo)
        if success is False:
            return error

    # Take care of installment payments now.
    response_data["total"] = float(paymentInfo["total"])
    for paymentInfoItem in paymentInfoItemsInstallment:
        paymentInfoItem['amount'] = calculate_price(
            paymentInfoItem.get('amount', '0'), response_data)
        paymentInfoItem['quantity'] = calculate_price(
            paymentInfoItem.get('quantity', '0'), response_data)
    response_data.pop("total", None)
    paymentInfo['items'] = [
        item for item in paymentInfo['items']
        if item['quantity'] * item['amount'] != 0
    ]
    userId = app.get_current_user_id()
    paid = paymentInfo["total"] == 0
    if newResponse:
        response = Response(form=form,
                            id=responseId,
                            date_created=datetime.datetime.now(),
                            modify_link=(modify_link + "?responseId=" +
                                         str(responseId)) if modify_link else "",
                            counter=counter_value)
        if get(form, "formOptions.loginRequired", False) is True and \
                userId != "cm:cognitoUserPool:anonymousUser":
            user = get_user_or_create_one(userId)
            response.user = userId
            # Only one response per user.
            try:
                Response.objects.get({
                    "form": ObjectId(formId),
                    "user": userId
                })
                raise Exception(
                    f"Response with userId {userId} already exists!")
            except DoesNotExist:
                pass
    else:
        response = Response.objects.get({"_id": responseId})
        response.update_trail.append(
            UpdateTrailItem(old=response.value,
                            new=response_data,
                            date=datetime.datetime.now(),
                            update_type="update"))
        if (response.paid == True
                and paymentInfo["total"] <= response.paymentInfo["total"]):
            paid = True
        if form.id != response.form.id:
            raise UnauthorizedError(
                f"Response {response.id} does not belong to form {form.id}; "
                f"it belongs to form {response.form.id}.")
        if response.user and response.user.id != userId:
            app.check_permissions(form, 'Responses_Edit')
            # raise UnauthorizedError(f"User {userId} does not own response {response.id} (owner is {response.user.id})")
    if newResponse or (not newResponse and paid):
        response.value = response_data
        response.date_modified = datetime.datetime.now()
        response.paymentInfo = paymentInfo
        response.paid = paid
        if not newResponse:
            response.update_trail.append(
                UpdateTrailItem(date=datetime.datetime.now(),
                                update_type="apply_update"))
        if paid and confirmationEmailInfo:
            # If total amount is zero (user uses coupon code to get for free)
            send_confirmation_email(response, confirmationEmailInfo)
            email_sent = True
        # todo: fix this, should auto_email come even if not paid?
        if "auto_email" in paymentMethods and get(
                paymentMethods, "auto_email.enabled", True) == True and type(
                    get(paymentMethods,
                        "auto_email.confirmationEmailInfo")) is dict:
            send_confirmation_email(
                response, get(paymentMethods, "auto_email.confirmationEmailInfo"))
            email_sent = True
        response.save()
        if "description" in paymentInfo and type(
                paymentInfo["description"]) is str:
            paymentInfo["description"] = fill_string_from_template(
                response, paymentInfo["description"])
        if "ccavenue" in paymentMethods and response.paid == False:
            paymentMethods["ccavenue"] = update_ccavenue_hash(
                formId, paymentMethods["ccavenue"], response)
        return {
            "res": {
                "value": response_data,
                "paid": paid,
                "success": True,
                "action": "insert",
                "email_sent": email_sent,
                "responseId": str(responseId),
                "paymentInfo": paymentInfo,
                "paymentMethods": paymentMethods
            }
        }
    elif not newResponse:
        # Update.
        response.date_modified = datetime.datetime.now()
        # Not using pending_update for now.
        # response.pending_update = {
        #     "value": response_data,
        #     "paymentInfo": paymentInfo,
        # }
        response.value = response_data
        response.paymentInfo = paymentInfo
        response.paid = paid
        response.save()
        if "description" in paymentInfo and type(
                paymentInfo["description"]) is str:
            paymentInfo["description"] = fill_string_from_template(
                response, paymentInfo["description"])
        if "ccavenue" in paymentMethods and response.paid == False:
            paymentMethods["ccavenue"] = update_ccavenue_hash(
                formId, paymentMethods["ccavenue"], response)
        return {
            "res": {
                "value": response_data,
                "paid": paid,
                "success": True,
                "action": "update",
                "email_sent": email_sent,
                "responseId": str(responseId),
                "paymentInfo": paymentInfo,
                "paymentMethods": paymentMethods,
                "amt_received": {
                    "currency": paymentInfo["currency"],
                    "total": float(response.amount_paid or 0)
                }
            }
        }
def get(path):
    return objects.get(_config, path)
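# Hypothetical usage sketch; _config is module-level state set elsewhere,
# and the sample structure is an assumption.
_config = {"db": {"host": "localhost", "port": 5432}}
print(get("db.host"))     # -> 'localhost'
print(get("db.missing"))  # -> None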
def active_version(self, data, **kwargs):
    from implement_table.core.classes import BConfig

    reference_id = data.pop(self.reference_field)
    date_ = get(data, 'max_date', None)
    query = {
        self.reference_field: int(reference_id),
    }
    if date_ is not None:
        query[self.date_field] = {
            "$lt": datetime.strptime(date_, BConfig().date_format) +
                   timedelta(days=1)
        }

    subscriber = kwargs.get('subscriber')
    query.update(
        self.permission_filter(subscriber.user, subscriber.staff, 'get'))

    sort = [{
        '$match': query
    }, {
        "$group": {
            "_id": {
                "$dateToString": {
                    "format": "%Y-%m-%d",
                    "date": "${}".format(self.date_field)
                }
            },
            "pk": {"$last": "$_id"},
            "created_at": {"$last": "${}".format(self.date_field)}
        }
    }, {
        '$sort': {self.date_field: -1}
    }, {
        '$limit': 1
    }, {
        "$project": {"_id": "$pk"}
    }]

    object__ = self.get_model().objects.aggregate(*sort)
    if len(object__) > 0:
        pk = get(object__, '0._id')
        object__ = self.get_model().objects.get(id=pk)
        serializer = self.get_serializer(object__)
        return serializer.data
    else:
        return dict()
def _search(form, query, autocomplete, search_by_id, show_unpaid):
    search_fields = get(form.formOptions.dataOptions, "search.searchFields",
                        ["_id"])
    if search_by_id is not None:
        search_fields = ["_id"]
    result_limit = get(form.formOptions.dataOptions, "search.resultLimit", 10)
    result_fields = get(form.formOptions.dataOptions, "search.resultFields",
                        ["_id"])
    autocomplete_fields = get(
        form.formOptions.dataOptions, "search.autocompleteFields", ["_id"]
    )
    if show_unpaid is not None:
        default_mongo_query = {"paid": False}
    else:
        default_mongo_query = {"paid": True}
    mongo_query = {"$or": []}
    for word in query.split(" "):
        for field in search_fields:
            if field == "_id":
                if len(word) <= 24:
                    try:
                        # fill in zeroes to create an object id,
                        # e.g. 5cba --> 5cba00000000000000000000
                        queryObjectIdStart = ObjectId(
                            word + "0" * (24 - len(word)))
                        queryObjectIdEnd = ObjectId(
                            word + "e" * (24 - len(word)))
                        mongo_query["$or"].append(
                            {
                                field: {
                                    "$gte": queryObjectIdStart,
                                    "$lte": queryObjectIdEnd,
                                }
                            }
                        )
                    except bson.errors.InvalidId:
                        pass
            else:
                if field.startswith("value.participants."):
                    _, subfield = field.split("value.participants.")
                    mongo_query["$or"].append(
                        {
                            "value.participants": {
                                "$elemMatch": {
                                    subfield: {"$regex": "^" + word,
                                               "$options": "i"}
                                }
                            }
                        }
                    )
                else:
                    mongo_query["$or"].append(
                        {field: {"$regex": "^" + word, "$options": "i"}}
                    )
    mongo_query["form"] = form.id
    if len(mongo_query["$or"]) == 0:
        del mongo_query["$or"]
    # Default query: paid == True
    if mongo_query:
        mongo_query = {"$and": [default_mongo_query, mongo_query]}
    else:
        mongo_query = default_mongo_query
    if autocomplete is not None:
        projection = {field: 1 for field in autocomplete_fields}
        result_limit = 5
    else:
        projection = {}
        for field in result_fields:
            projection[field] = 1
    responses = (
        Response.objects.raw(mongo_query).limit(result_limit).project(projection)
    )
    return {"res": [serialize_model(r) for r in responses]}