def g_json_all_handler():
    db_results = util.db_find('roadDB', {})
    #db_results.sort(key=lambda r: str(r['start_timestamp']) + '_' + str(r['end_timestamp']))
    results = g_json_handler.parse_json_results(db_results)
    return results
def post_all_ad_versions_handler():
    db_results = util.db_find(
        'bee_csv',
        {"is_processed_ad_version": True, "is_processed_address": True},
        {
            "_id": False,
            "town": True,
            "count": True,
            "deliver_time": True,
            "deliver_date": True,
            "save_time": True,
            "geo": True,
            "county": True,
            "address": True,
            "user_name": True,
            "is_processed_ad_version": True,
            "is_processed_address": True,
            "csv_key": True,
            "ad_versions": True,
            "version_text": True,
            "memo": True,
            "deliver_status": True,
        })

    for db_result in db_results:
        csv_key = db_result.get('csv_key', '')
        util.db_update('bee', {'csv_key': csv_key}, db_result)
def g_search_by_location_handler(params):
    lat = util._float(params.get('lat', 25))
    lon = util._float(params.get('lng', 121))
    dist = util._float(params.get('distance', 10))
    dist = max(dist, 10.0)

    dist_x = 360.0 * dist / EARTH_EQUATOIAL_CIRCUMFERENCE
    dist_y = 360.0 * dist / EARTH_MERIDIONAL_CIRCUMFERENCE

    min_x = lon - dist_x
    max_x = lon + dist_x
    min_y = lat - dist_y
    max_y = lat + dist_y

    query = {
        'geo': {
            '$geoIntersects': {
                '$geometry': {
                    'type': 'Polygon',
                    'coordinates': [[[min_x, min_y], [min_x, max_y], [max_x, max_y], [max_x, min_y], [min_x, min_y]]],
                },
            },
        },
    }
    cfg.logger.debug('dist: %s dist_x: %s dist_y: %s to db_find: query: %s', dist, dist_x, dist_y, query)

    results = util.db_find('roadDB', query)
    cfg.logger.debug('results: %s len: %s', results, len(results))
    return results
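# The bounding polygon in g_search_by_location_handler is built by converting the search
# radius into degrees: one full circumference spans 360 degrees, so dividing the radius by
# the equatorial / meridional circumference and multiplying by 360 gives the box half-width
# in longitude / latitude. The constant values and units (km) below are assumptions, not
# taken from the project's config; a minimal sketch of the conversion:
def _km_to_degrees_sketch(dist_km, equatorial_km=40075.017, meridional_km=40007.863):
    # illustrative only: mirrors the dist_x / dist_y computation above
    dist_x = 360.0 * dist_km / equatorial_km    # degrees of longitude
    dist_y = 360.0 * dist_km / meridional_km    # degrees of latitude
    return (dist_x, dist_y)

# _km_to_degrees_sketch(10.0) -> (~0.0898, ~0.0900): the default 10 km radius becomes a box
# roughly 0.18 degrees on a side, centred on (lng, lat).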
def get_db_results_by_the_timestamp(start_timestamp, end_timestamp):
    start_timestamp = util._int(start_timestamp)
    end_timestamp = util._int(end_timestamp)

    result_all = util.db_find(
        'roadDB',
        {'end_timestamp': {'$gte': start_timestamp}, 'start_timestamp': {'$lte': end_timestamp}})

    results = [result for result in result_all if _is_valid(result, start_timestamp, end_timestamp)]
    return results
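# get_db_results_by_the_timestamp keeps every document whose [start_timestamp, end_timestamp]
# interval overlaps the requested window: the Mongo query asks for end >= window.start and
# start <= window.end, and _is_valid then re-checks each candidate in Python. _is_valid's
# body is not shown here, so the predicate below is only a sketch of what such an overlap
# check could look like, not the project's implementation:
def _overlaps_window_sketch(result, start_timestamp, end_timestamp):
    doc_start = util._int(result.get('start_timestamp', 0))
    doc_end = util._int(result.get('end_timestamp', 0))
    # two closed intervals overlap iff each one starts before the other ends
    return doc_end >= start_timestamp and doc_start <= end_timestamp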
def g_ad_data_handler():
    db_results = util.db_find('bee_img', {}, {"_id": False, "name": True, "the_type": True, "the_id": True})
    return db_results
def _parse_csv(data):
    f = StringIO(data)
    df = pd.read_csv(f)

    funnel_dict = {"error_code": S_OK, "error_msg": "", "fail": set()}

    for each_column in df.columns:
        df[each_column].fillna('', inplace=True)

    df['csv_key'] = df.apply(lambda x: _parse_csv_key(dict(x), funnel_dict), axis=1)

    csv_key_list = list(df['csv_key'])
    db_csv_keys = util.db_find('bee', {'csv_key': {'$in': csv_key_list}}, {"_id": False, "csv_key": True})
    db_csv_keys = [db_csv_key.get('csv_key', '') for db_csv_key in db_csv_keys]
    db_csv_keys = [each_key for each_key in db_csv_keys if each_key]

    #is_csv_key_not_in_db = df['csv_key'].isin(db_csv_keys) == False
    #df = df[is_csv_key_not_in_db]

    df['address'] = df.apply(lambda x: _parse_address(dict(x), funnel_dict), axis=1)
    df['county_and_town'] = df.apply(lambda x: _parse_county_and_town(dict(x), funnel_dict), axis=1)
    df['google_address'] = df.apply(lambda x: _parse_google_address(dict(x), funnel_dict), axis=1)
    df['deliver_time'] = df.apply(lambda x: _parse_deliver_time(dict(x), funnel_dict), axis=1)
    df['save_time'] = df.apply(lambda x: _parse_save_time(dict(x), funnel_dict), axis=1)
    df['deliver_date'] = df.apply(lambda x: _parse_deliver_date(dict(x), funnel_dict), axis=1)
    df['user_name'] = df.apply(lambda x: _parse_user_name(dict(x), funnel_dict), axis=1)
    df['count'] = df.apply(lambda x: _parse_count(dict(x), funnel_dict), axis=1)
    df['deliver_status'] = df.apply(lambda x: _parse_deliver_status(dict(x), funnel_dict), axis=1)
    df['memo'] = df.apply(lambda x: _parse_memo(dict(x), funnel_dict), axis=1)
    df['version_text'] = df.apply(lambda x: _parse_version_text(dict(x), funnel_dict), axis=1)
    df['versions'] = df.apply(lambda x: _parse_versions(dict(x), funnel_dict), axis=1)

    cfg.logger.debug('df_len: %s', len(df))

    parsed_dict_list = [_parse_dict_row(row, funnel_dict) for (idx, row) in df.iterrows()]
    df = pd.DataFrame(parsed_dict_list)
    df = df[['csv_key', 'deliver_time', 'deliver_date', 'user_name', 'address', 'county_and_town', 'google_address', 'versions', 'version_text', 'count', 'save_time', 'deliver_status', 'memo']]

    results = util.df_to_dict_list(df)

    for each_result in results:
        csv_key = each_result.get('csv_key', '')
        versions = each_result.get('versions', [])
        version_text = each_result.get('version_text', [])

        cfg.logger.debug('to db_update: each_result: %s', each_result)
        util.db_update('bee_csv', {'csv_key': csv_key}, each_result)

        for each_version in versions:
            util.db_update('bee_csv_versions', {'version': each_version}, {csv_key: version_text})

    return (funnel_dict['error_code'], funnel_dict['error_msg'], len(results), results)
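# _parse_csv takes the raw CSV text (not a file path) and returns an
# (error_code, error_msg, count, results) tuple. A hedged usage sketch of how a caller might
# drive it; the file name is hypothetical, and the CSV schema itself lives in the _parse_*
# helpers, which are not shown here:
def _parse_csv_usage_sketch():
    with open('upload.csv', 'rb') as f:    # hypothetical upload path
        data = f.read().decode('utf-8')
    (error_code, error_msg, n_results, results) = _parse_csv(data)
    if error_code != S_OK:
        cfg.logger.warning('unable to parse csv: %s', error_msg)
    return results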
def g_json_by_id_list_handler(params):
    id_list = params.get('id_list', '').split(',')
    id_list = [each_id for each_id in id_list if each_id]
    if not id_list:
        return []

    db_results = util.db_find('roadDB', {'the_id': {'$in': id_list}})
    results = g_json_handler.parse_json_results(db_results)
    return results
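# g_json_by_id_list_handler expects a comma-separated string of ids under 'id_list'; empty
# fragments (e.g. 'a,,b') are dropped before the $in query. A usage sketch with made-up ids:
def _json_by_id_list_usage_sketch():
    params = {'id_list': 'id_0001,id_0002'}    # hypothetical ids
    return g_json_by_id_list_handler(params)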
def g_json_handler(start_timestamp, end_timestamp):
    start_timestamp = int(start_timestamp)
    end_timestamp = int(end_timestamp)
    cfg.logger.debug("start_timestamp: %s end_timestamp: %s", start_timestamp, end_timestamp)

    result_all = util.db_find("roadDB")
    # cfg.logger.debug('result_all: %s', result_all)
    results = [result for result in result_all if _is_valid(result, start_timestamp, end_timestamp)]
    results.sort(key=lambda r: str(r["beginAt"]) + "_" + str(r["endAt"]), reverse=True)

    for result in results:
        del result["_id"]
        result["beginDate"] = util.timestamp_to_date(result["beginAt"])
        result["endDate"] = util.timestamp_to_date(result["endAt"])

    return results
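# The sort above compares "beginAt_endAt" as strings, which matches numeric order only while
# every timestamp has the same number of digits. A sketch of an equivalent numeric key, should
# that assumption ever matter:
def _sort_by_begin_end_sketch(results):
    # tuple key keeps the same (beginAt, endAt) ordering but compares numerically
    results.sort(key=lambda r: (r["beginAt"], r["endAt"]), reverse=True)
    return results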
def g_json_handler():
    db_result = util.db_find('bee', {})
    return db_result