def _handle_identifier_not_name(*, concept, prefix, identifier) -> bool:
    """Try to resolve a human-readable name for *identifier* and store it on *concept*.

    Returns True when ``concept[NAME]`` was populated, False when no name
    could be found for this prefix/identifier pair.
    """
    # Some namespaces are just too much of a problem at the moment to look up
    if prefix in SKIP:
        return False

    # Namespaces with no meaningful names: fall back to the identifier itself.
    if prefix in NO_NAMES:
        concept[NAME] = concept[IDENTIFIER]
        return True

    # UniProt has its own dedicated mnemonic lookup.
    if prefix == 'uniprot':
        concept[NAME] = get_mnemonic(identifier)
        return True

    # Everything else goes through the generic OBO id->name mapping.
    try:
        names = get_id_name_mapping(prefix)
    except (NoOboFoundry, MissingOboBuild):
        return False

    if names is None:
        logger.warning('could not get names for prefix %s', prefix)
        return False

    if (resolved := names.get(identifier)) is None:
        logger.warning('could not get name for %s:%s', prefix, identifier)
        return False

    concept[NAME] = resolved
    return True
def upload_artifacts_for_prefix(*, prefix: str, bucket: str):
    """Upload compiled parts for the given prefix to AWS.

    For each artifact kind (names, synonyms, xrefs, relations, properties),
    first invoke the corresponding getter so the cached TSV is (re)built,
    then upload that file to ``bucket`` under ``<prefix>/cache/<filename>``.

    :param prefix: The ontology prefix whose artifacts should be uploaded.
    :param bucket: The target AWS S3 bucket name.
    """
    # (log label, builder function, cached filename) — the builder's side
    # effect is writing the TSV that prefix_directory_join then locates.
    artifacts = [
        ('id->name mapping', get_id_name_mapping, 'names.tsv'),
        ('id->synonyms mapping', get_id_synonyms_mapping, 'synonyms.tsv'),
        ('xrefs', get_xrefs_df, 'xrefs.tsv'),
        ('relations', get_relations_df, 'relations.tsv'),
        ('properties', get_properties_df, 'properties.tsv'),
    ]
    for label, getter, filename in artifacts:
        logger.info('[%s] getting %s', prefix, label)
        getter(prefix)
        path = prefix_directory_join(prefix, 'cache', filename)
        # S3 keys use '/' separators; os.path.join matches the original
        # behavior on POSIX hosts.
        key = os.path.join(prefix, 'cache', filename)
        logger.info('[%s] uploading %s', prefix, label)
        upload_file(path=path, bucket=bucket, key=key)