def job_metadata_api(command=None, subcommand=None, run_id=None, status=None, show_details=True, order=None, offset=None, limit=None):
    """Handler for the job-metadata GET API (version 2.0).

    :param command: command name (default None)
    :param subcommand: sub-command name (default None)
    :param run_id: job run id (default None)
    :param status: job execution status (default None)
    :param show_details: show full job details (default True)
    :param order: sorting order on run_id ('Ascending'/'Descending', default None)
    :param offset: offset of data (default None)
    :param limit: limit of the data (default None)
    :return: json response
    """
    result = get_metadata(command, subcommand, run_id, status)

    # One schema covers both branches: hide extra_metadata when details are off.
    if show_details:
        job_schema = JobMetadata()
    else:
        job_schema = JobMetadata(exclude=('extra_metadata',))

    # Paginated/sorted path: triggered by an explicit order OR a complete
    # offset+limit pair.
    if order is not None or (offset is not None and limit is not None):
        records = [rec._asdict() for rec in result]
        page = Pagination.paginate(records, offset, limit)
        page_rows = page.get('data')
        if order == 'Ascending':
            page_rows.sort(key=operator.itemgetter('run_id'))
        elif order == 'Descending':
            page_rows.sort(key=operator.itemgetter('run_id'), reverse=True)
        return jsonify({
            '_keys': JobKeys().dump(dict(page.get('keys'))).data,
            'jobs': [job_schema.dump(dict(row)).data for row in page_rows]
        })

    # Unpaginated path: return everything with empty paging keys.
    keys = {'offset': '', 'limit': '', 'previous_key': '', 'next_key': '', 'result_size': len(result)}
    return jsonify({
        '_keys': JobKeys().dump(dict(keys)).data,
        'jobs': [job_schema.dump(rec._asdict()).data for rec in result]
    })
def get_pairings(self, imei, **kwargs):
    """Handler method for IMEI-Pairings API (version 2.0).

    :param imei: IMEI to look up (normalised via _validate_imei before querying)
    :param kwargs: supports 'offset' (default 1), 'limit' (default 10) and
                   'order' ('Ascending'/'Descending') sorting on last_seen
    :return: json response
    """
    imei_norm = self._validate_imei(imei)
    offset = kwargs.get('offset')
    limit = kwargs.get('limit')
    order = kwargs.get('order')
    if offset is None:
        offset = 1
    if limit is None:
        limit = 10
    with get_db_connection() as db_conn, db_conn.cursor() as cursor:
        # FIX: pass imei_norm as a bound query parameter. The previous code
        # interpolated it with str.format(), which is an SQL-injection vector
        # and was also inconsistent with get_subscribers().
        cursor.execute("""SELECT pairing_list.imsi, network_triplets.last_seen
                            FROM pairing_list
                       LEFT JOIN monthly_network_triplets_country_no_null_imeis AS network_triplets
                              ON network_triplets.imsi = pairing_list.imsi
                             AND network_triplets.imei_norm = pairing_list.imei_norm
                           WHERE pairing_list.imei_norm = %(imei_norm)s""",
                       {'imei_norm': imei_norm})
        if cursor is not None:
            pairings = [{'imsi': x.imsi, 'last_seen': x.last_seen} for x in cursor]
            paginated_data = Pagination.paginate(pairings, offset, limit)
            # Sort in place (if requested), then serialise once for all branches.
            if order == 'Ascending':
                paginated_data.get('data').sort(key=operator.itemgetter('last_seen'))
            elif order == 'Descending':
                paginated_data.get('data').sort(key=operator.itemgetter('last_seen'), reverse=True)
            return jsonify(IMEIPairings().dump(dict(imei_norm=imei_norm,
                                                    pairs=paginated_data.get('data'),
                                                    _keys=paginated_data.get('keys'))).data)
    # Fallback when no cursor was produced: empty result with paging keys.
    keys = {'offset': offset, 'limit': limit, 'current_key': offset, 'next_key': '', 'result_size': 0}
    # FIX: dump(...).data was missing here — jsonify() was handed the raw
    # marshmallow MarshalResult instead of the serialised dict.
    return jsonify(IMEIPairings().dump(dict(imei_norm=imei_norm, pairs=None, _keys=keys)).data)
def get_subscribers(self, imei, **kwargs):
    """Handler method for IMEI-Subscribers API (version 2.0)."""
    imei_norm = self._validate_imei(imei)
    # Pagination/sorting parameters with defaults: page 1, 10 rows.
    offset = kwargs.get('offset')
    limit = kwargs.get('limit')
    order = kwargs.get('order')
    offset = 1 if offset is None else offset
    limit = 10 if limit is None else limit
    with get_db_connection() as db_conn, db_conn.cursor() as cursor:
        cursor.execute("""SELECT DISTINCT imsi, msisdn, last_seen
                            FROM monthly_network_triplets_country_no_null_imeis
                           WHERE imei_norm = %(imei_norm)s
                             AND virt_imei_shard = calc_virt_imei_shard(%(imei_norm)s)""",
                       {'imei_norm': imei_norm})
        if cursor is not None:
            rows = [{'imsi': rec.imsi, 'msisdn': rec.msisdn, 'last_seen': rec.last_seen} for rec in cursor]
            page = Pagination.paginate(rows, offset, limit)
            page_rows = page.get('data')
            # Optional in-place sort on last_seen before serialising.
            if order == 'Ascending':
                page_rows.sort(key=operator.itemgetter('last_seen'))
            elif order == 'Descending':
                page_rows.sort(key=operator.itemgetter('last_seen'), reverse=True)
            return jsonify(IMEISubscribers().dump(dict(imei_norm=imei_norm,
                                                       subscribers=page_rows,
                                                       _keys=page.get('keys'))).data)
    # Fallback when no cursor was produced: empty result with paging keys.
    keys = {'offset': offset, 'limit': limit, 'current_key': offset, 'next_key': '', 'result_size': 0}
    return jsonify(IMEISubscribers().dump(dict(imei_norm=imei_norm, subscribers=None, _keys=keys)))
def catalog_api(**kwargs):
    """Handler for Catalog API (version 2.0) GET method.

    :param kwargs: input args (filter params plus 'order', 'offset', 'limit')
    :return: json
    """
    sorting_order = kwargs.get('order')
    offset_key = kwargs.get('offset')
    per_page_limit = kwargs.get('limit')

    # Build filters to be applied to the SQL query
    filters, filter_params = _build_sql_query_filters(**kwargs)
    query = sql.SQL(
        """SELECT array_agg(status ORDER BY run_id DESC)::TEXT[] AS status_list, dc.*
             FROM (SELECT file_id, filename, file_type, compressed_size_bytes, modified_time,
                          is_valid_zip, is_valid_format, md5, extra_attributes, first_seen,
                          last_seen, uncompressed_size_bytes, num_records
                     FROM data_catalog
                          {filters}
                 ORDER BY last_seen DESC, file_id DESC
                    LIMIT ALL) dc
        LEFT JOIN (SELECT run_id, status, extra_metadata
                     FROM job_metadata
                    WHERE command = 'dirbs-import') jm
               ON md5 = (extra_metadata->>'input_file_md5')::uuid
         GROUP BY file_id, filename, file_type, compressed_size_bytes, modified_time,
                  is_valid_zip, is_valid_format, md5, extra_attributes, first_seen,
                  last_seen, uncompressed_size_bytes, num_records
         ORDER BY last_seen DESC, file_id DESC""")  # noqa Q444

    where_clause = sql.SQL('')
    if len(filters) > 0:
        where_clause = sql.SQL('WHERE {0}').format(sql.SQL(' AND ').join(filters))

    with get_db_connection() as conn, conn.cursor() as cursor:
        cursor.execute(cursor.mogrify(query.format(filters=where_clause), filter_params))
        resp = [CatalogFile().dump(rec._asdict()).data for rec in cursor]

        # Paginated/sorted path: explicit order OR a complete offset+limit pair.
        if sorting_order is not None or (offset_key is not None and per_page_limit is not None):
            page = Pagination.paginate(resp, offset_key, per_page_limit)
            page_rows = page.get('data')
            if sorting_order == 'Ascending':
                page_rows.sort(key=operator.itemgetter('file_id'))
            elif sorting_order == 'Descending':
                page_rows.sort(key=operator.itemgetter('file_id'), reverse=True)
            return jsonify({
                '_keys': Keys().dump(dict(page.get('keys'))).data,
                'files': [file_data for file_data in page_rows]
            })

        # Unpaginated path: all files with empty paging keys.
        keys = {
            'offset': '',
            'limit': '',
            'previous_key': '',
            'next_key': '',
            'result_size': len(resp)
        }
        return jsonify({'_keys': Keys().dump(dict(keys)).data, 'files': resp})
def get_job_metadata(self, command=None, subcommand=None, run_id=None, status=None,
                     show_details=True, order=None, offset=None, limit=None):
    """Defines handler method for job-metadata GET API (version 2.0)."""
    result = self.get_metadata(command, subcommand, run_id, status)

    # One schema covers both branches: hide extra_metadata when details are off.
    if show_details:
        job_schema = JobMetadata()
    else:
        job_schema = JobMetadata(exclude=('extra_metadata',))

    # Paginated/sorted path: explicit order OR a complete offset+limit pair.
    if order is not None or (offset is not None and limit is not None):
        records = [rec._asdict() for rec in result]
        page = Pagination.paginate(records, offset, limit)
        page_rows = page.get('data')
        if order == 'Ascending':
            page_rows.sort(key=operator.itemgetter('run_id'))
        elif order == 'Descending':
            page_rows.sort(key=operator.itemgetter('run_id'), reverse=True)
        return jsonify({
            '_keys': Keys().dump(dict(page.get('keys'))).data,
            'jobs': [job_schema.dump(dict(row)).data for row in page_rows]
        })

    # Unpaginated path: return everything with empty paging keys.
    keys = {
        'offset': '',
        'limit': '',
        'previous_key': '',
        'next_key': '',
        'result_size': len(result)
    }
    return jsonify({
        '_keys': Keys().dump(dict(keys)).data,
        'jobs': [job_schema.dump(rec._asdict()).data for rec in result]
    })