Example no. 1
0
async def send_file_block(server_id, file_path, piece):
    try:
        buffer = pickle.dumps(piece)
        if server_id == settings.server_id:
            # Target is this node: write the serialized piece straight to the local filesystem.
            await write_file_block_in_fs(file_path, buffer)
            logger.info('%s: sendblock %d to server %d', file_path, piece.piece_id, server_id)
            return True
        else:
            # Target is a remote node: POST the serialized piece to its /writeblock endpoint.
            port = settings.base_port + server_id
            url = 'http://%s:%d/writeblock/%s' % (settings.host, port, file_path)
            headers = {
                'content-type': 'application/octet-stream'
            }
            session = get_session()
            async with session.post(url, data=buffer, headers=headers) as resp:
                resp: aiohttp.ClientResponse
                if resp.status == 200:
                    logger.info('%s: sendblock %d to server %d', file_path, piece.piece_id, server_id)
                    return True
                else:
                    raise Exception(resp)
    except Exception as e:
        logger.error('%s failed: sendblock %d to server %d', file_path, piece.piece_id, server_id)
        logger.exception(e)
    return False
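A minimal fan-out sketch built on the coroutine above; replicate_piece and target_servers are hypothetical names introduced here for illustration, not part of the original code:

import asyncio

async def replicate_piece(piece, file_path, target_servers):
    # Send the same block to every target server concurrently;
    # send_file_block returns True on success, False on failure.
    results = await asyncio.gather(
        *(send_file_block(server_id, file_path, piece) for server_id in target_servers))
    return sum(1 for ok in results if ok)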
Example no. 2
0
    def response_keyed(self, query: str, sources: Set[str]) -> Dict:
        """Return response as dict where key is source name and value
        is a list of records. Corresponds to `keyed=true` API parameter.

        :param str query: string to match against
        :param Set[str] sources: sources to match from
        :return: completed response object to return to client
        """
        resp = {
            'query': query,
            'warnings': self.emit_warnings(query),
            'source_matches': {source: None
                               for source in sources}
        }
        if query == '':
            return self.post_process_resp(resp)
        query_l = query.lower()

        # Build candidate partition keys: direct identity lookups plus one
        # lookup per indexed item type.
        queries = []
        if any(query_l.startswith(p) for p in PREFIX_LOOKUP):
            pk = f'{query_l}##identity'
            queries.append(pk)

        for prefix in [
                p for p in NAMESPACE_LOOKUP if query_l.startswith(p)
        ]:
            pk = f'{NAMESPACE_LOOKUP[prefix].lower()}:{query_l}##identity'
            queries.append(pk)

        for match in ITEM_TYPES.values():
            pk = f'{query_l}##{match}'
            queries.append(pk)

        matched_concept_ids = []
        for q in queries:
            try:
                query_resp = self.db.genes.query(
                    KeyConditionExpression=Key('label_and_type').eq(q))
                for record in query_resp['Items']:
                    concept_id = record['concept_id']
                    if concept_id in matched_concept_ids:
                        continue
                    if record['item_type'] == "identity":
                        self.add_record(resp, record, MatchType.CONCEPT_ID)
                    else:
                        self.fetch_record(
                            resp, concept_id,
                            MatchType[record['item_type'].upper()])
                    matched_concept_ids.append(concept_id)

            except ClientError as e:
                logger.error(e.response['Error']['Message'])
                continue

        # remaining sources get no match
        return self.post_process_resp(resp)
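For context, a minimal sketch of the boto3 key-condition query pattern the method relies on; the table name gene_concepts and the lookup value are made up, and self.db.genes above is assumed to be a boto3 Table resource:

import boto3
from boto3.dynamodb.conditions import Key

dynamodb = boto3.resource('dynamodb')
genes = dynamodb.Table('gene_concepts')  # hypothetical table name

# Return every item whose partition key equals the composite "<label>##<item_type>" key.
resp = genes.query(KeyConditionExpression=Key('label_and_type').eq('braf##identity'))
for item in resp['Items']:
    print(item['concept_id'], item['item_type'])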
Example no. 3
0
    def _handle_failed_merge_ref(record: Dict, response: Dict,
                                 query: str) -> Dict:
        """Log + fill out response for a failed merge reference lookup.

        :param Dict record: record containing failed merge_ref
        :param Dict response: in-progress response object
        :param str query: original query value
        :return: response with no match
        """
        logger.error(f"Merge ref lookup failed for ref {record['merge_ref']} "
                     f"in record {record['concept_id']} from query {query}")
        response['match_type'] = MatchType.NO_MATCH
        return response
Example no. 4
0
    def bind_socket(self):
        # An IPv6 literal host contains ':', so use AF_INET6 in that case.
        family = socket.AF_INET
        if self.cfgdata["host"] and ":" in self.cfgdata["host"]:
            family = socket.AF_INET6

        sock = socket.socket(family=family)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            sock.bind((self.cfgdata["host"], self.cfgdata["port"]))
        except OSError as exc:
            uvlogger.error(exc)
            sys.exit(1)
        # Allow child/worker processes to inherit the listening socket.
        sock.set_inheritable(True)
        return sock
Example no. 5
0
    def fetch_meta(self, src_name: str) -> SourceMeta:
        """Fetch metadata for src_name.

        :param str src_name: name of source to get metadata for
        :return: SourceMeta object containing source metadata
        """
        if src_name in self.db.cached_sources:
            return self.db.cached_sources[src_name]
        try:
            db_response = self.db.metadata.get_item(
                Key={'src_name': src_name})
            response = SourceMeta(**db_response['Item'])
            self.db.cached_sources[src_name] = response
            return response
        except ClientError as e:
            # On a DynamoDB client error, only log it; the method then falls
            # through and implicitly returns None instead of a SourceMeta.
            logger.error(e.response['Error']['Message'])
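For reference, a minimal sketch of the boto3 get_item call behind it; the table name and source name are placeholders, and self.db.metadata above is assumed to be a boto3 Table resource:

import boto3

dynamodb = boto3.resource('dynamodb')
metadata = dynamodb.Table('gene_metadata')  # hypothetical table name

# get_item returns {'Item': {...}} when the key exists; 'Item' is absent otherwise.
item = metadata.get_item(Key={'src_name': 'SomeSource'}).get('Item')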
Example no. 6
0
    def fetch_record(self, response: Dict[str, Dict], concept_id: str,
                     match_type: MatchType) -> None:
        """Add fetched record to response

        :param Dict[str, Dict] response: in-progress response object to return
            to client.
        :param str concept_id: Concept id to fetch record for.
            Should be all lower-case.
        :param MatchType match_type: match type for record
        """
        try:
            pk = f'{concept_id}##identity'
            filter_exp = Key('label_and_type').eq(pk)
            result = self.db.genes.query(KeyConditionExpression=filter_exp)
            match = result['Items'][0]
            self.add_record(response, match, match_type)
        except ClientError as e:
            logger.error(e.response['Error']['Message'])
Example no. 7
0
async def write_file_block(file_path: str, request: Request):
    try:
        async with write_file_block_lock:
            new_buffer = await request.body()
            new_piece = pickle.loads(new_buffer)
            replace = True
            try:
                # Keep the existing block if it is newer than the incoming one.
                old_buffer = await read_file_block_from_fs(file_path)
                old_piece = pickle.loads(old_buffer)
                if new_piece.timestamp < old_piece.timestamp:
                    replace = False
            except Exception:
                # No readable existing block: accept the incoming one.
                pass
            if replace:
                await write_file_block_in_fs(file_path, new_buffer)
            else:
                logger.error('%s failed: writeblock into server %d, received block has an older timestamp',
                             file_path, settings.server_id)
    except Exception as e:
        logger.exception(e)
        raise HTTPException(400, str(e))
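One way such a handler could be registered in FastAPI, inferred from the /writeblock/<path> URL built in Example no. 1; the app instance, route path, and lock shown here are assumptions:

import asyncio
from fastapi import FastAPI

app = FastAPI()
write_file_block_lock = asyncio.Lock()  # serializes block writes on this node

# ":path" lets file_path contain slashes, matching the URL format used by send_file_block.
app.add_api_route('/writeblock/{file_path:path}', write_file_block, methods=['POST'])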
Example no. 8
0
async def receive_file_block(server_id, file_path):
    try:
        if server_id == settings.server_id:
            # Block is stored on this node: read and deserialize it locally.
            buffer = await read_file_block_from_fs(file_path)
            piece = pickle.loads(buffer)
            logger.info('%s: receiveblock %d from server %d', file_path, piece.piece_id, server_id)
            return piece
        else:
            # Block is on a remote node: fetch it from that node's /readblock endpoint.
            port = settings.base_port + server_id
            url = 'http://%s:%d/readblock/%s' % (settings.host, port, file_path)
            session = get_session()
            async with session.get(url) as resp:
                resp: aiohttp.ClientResponse
                if resp.status == 200:
                    buffer = await resp.read()
                    piece = pickle.loads(buffer)
                    logger.info('%s: receiveblock %d from server %d', file_path, piece.piece_id, server_id)
                    return piece
                else:
                    raise Exception(resp)
    except Exception as e:
        logger.error('%s failed: receiveblock from server %d', file_path, server_id)
        logger.exception(e)
    return None
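A hypothetical read path built on receive_file_block; read_file, block_locations, and the piece .data attribute are illustrative assumptions, not part of the original code:

import asyncio

async def read_file(block_locations):
    # block_locations: assumed list of (server_id, block_path) pairs, one per block.
    pieces = await asyncio.gather(
        *(receive_file_block(server_id, block_path)
          for server_id, block_path in block_locations))
    # Drop failed fetches (None) and reassemble in piece order.
    ordered = sorted((p for p in pieces if p is not None), key=lambda p: p.piece_id)
    return b''.join(p.data for p in ordered)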
Example no. 9
0
    def error(self, func: str, msg: str) -> None:
        """Log an error to both the instance and module loggers."""
        self.logger.error(f"cls: {self.cls}, func: {func}, msg: {msg}")
        logger.error(f"cls: {self.cls}, func: {func}, msg: {msg}")