def test_split_collection(self):
    coll = list(range(3))

    # Chunk size equal to the collection length yields a single chunk.
    chunks = list(split_collection(coll, chunk_size=len(coll)))
    self.assertEqual(1, len(chunks))
    self.assertListEqual(chunks[0], coll)

    # Chunk size greater than the collection length still yields one chunk.
    chunks = list(split_collection(coll, chunk_size=len(coll) + 1))
    self.assertEqual(1, len(chunks))
    self.assertListEqual(chunks[0], coll)

    # Chunk size smaller than the collection length puts the remainder
    # into a trailing chunk.
    chunks = list(split_collection(coll, chunk_size=len(coll) - 1))
    self.assertEqual(2, len(chunks))
    self.assertListEqual(chunks[0], coll[:-1])
    self.assertListEqual(chunks[1], coll[-1:])
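# A minimal sketch of a split_collection generator consistent with the test
# above. The real helper lives in the project's util module; the default
# chunk_size and the slicing approach here are assumptions, shown only to
# illustrate the chunking semantics the test pins down.
def split_collection(coll, chunk_size=1000):
    """Yield successive chunks of at most chunk_size items from coll."""
    for start in range(0, len(coll), chunk_size):
        yield coll[start:start + chunk_size]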
def post():
    app.logger.debug(
        "Handling action_logs post request: {}".format(request.json)
    )
    action_logs = request.json['action_logs']
    app.logger.debug("Inserting {} action logs".format(len(action_logs)))

    objects_info = []
    for chunk in util.split_collection(action_logs, chunk_size=1000):
        existed_objs, action_logs_to_add = _separate_action_logs(chunk)
        objects_info.extend(_extract_objects_info(existed_objs))

        skipped_objs = []
        for obj in action_logs_to_add:
            # Scan index of JSON fields is slow, thus we are copying
            # action name and action type to columns of action_logs.
            obj['action_type'] = obj['body'].get('action_type')
            obj['action_name'] = obj['body'].get('action_name')
            if obj['body']['action_type'] == 'nailgun_task' and \
                    not obj['body']['end_timestamp']:
                skipped_objs.append(obj)
            else:
                obj['body'] = json.dumps(obj['body'])

        for obj in skipped_objs:
            action_logs_to_add.remove(obj)

        objects_info.extend(_extract_dicts_info(
            skipped_objs, consts.ACTION_LOG_STATUSES.skipped))
        objects_info.extend(_save_action_logs(action_logs_to_add))

    return 200, {'status': 'ok', 'action_logs': list(objects_info)}
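# Hypothetical sketches of the two "extract info" helpers referenced above.
# They presumably build the per-log entries returned to the client; the exact
# field names and the "existed" status value are assumptions inferred from
# how the handler uses them, not the project's actual implementation.
def _extract_dicts_info(dicts, status):
    return [{'master_node_uid': d['master_node_uid'],
             'external_id': d['external_id'],
             'status': status}
            for d in dicts]

def _extract_objects_info(objects):
    # Already-stored ORM objects are only reported back, not re-inserted.
    return [{'master_node_uid': obj.master_node_uid,
             'external_id': obj.external_id,
             'status': consts.ACTION_LOG_STATUSES.existed}
            for obj in objects]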
def post(): app.logger.debug("Handling oswl post request: %s", request.json) oswls = request.json['oswl_stats'] oswls_resp = [] dict_idx_names = ('master_node_uid', 'id') obj_idx_names = ('master_node_uid', 'external_id') dict_to_obj_fields_mapping = zip(dict_idx_names, obj_idx_names) for chunk in util.split_collection(oswls): app.logger.debug("Processing oswls chunk of size: %d", len(chunk)) dicts_new, objs_updated = \ util.split_new_dicts_and_updated_objs( chunk, dict_to_obj_fields_mapping, OpenStackWorkloadStats) try: app.logger.debug("Bulk insert of oswls started") util.bulk_insert(dicts_new, OpenStackWorkloadStats) app.logger.debug("Bulk insert of oswls finished") db.session.commit() oswls_resp.extend(generate_success_response(dicts_new, objs_updated)) app.logger.debug("Oswls chunk of size: %d is processed", len(chunk)) except Exception: app.logger.exception("Oswls chunk of size: %d processing failed", len(chunk)) db.session.rollback() oswls_resp.extend(generate_error_response(dicts_new, objs_updated)) return 200, {'status': 'ok', 'oswl_stats': oswls_resp}
def post():
    app.logger.debug("Handling action_logs post request: {}".format(
        request.json))
    action_logs = request.json['action_logs']
    app.logger.debug("Inserting {} action logs".format(len(action_logs)))

    objects_info = []
    for chunk in util.split_collection(action_logs, chunk_size=1000):
        existed_objs, action_logs_to_add = _separate_action_logs(chunk)
        _handle_existed_objects(objects_info, existed_objs)
        _save_action_logs(objects_info, action_logs_to_add)

    return {'status': 'ok', 'action_logs': list(objects_info)}
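# Hypothetical sketches of the two helpers used by the refactored handler
# above. The ActionLog model and the "added"/"failed" status constants are
# assumptions; only the chunked insert-and-report flow is taken from the
# handlers shown here.
def _handle_existed_objects(objects_info, existed_objs):
    # Records already stored are only reported back, not inserted again.
    objects_info.extend(_extract_objects_info(existed_objs))

def _save_action_logs(objects_info, action_logs_to_add):
    # New records are bulk-inserted; the whole chunk is reported as added
    # or, on failure, rolled back and reported as failed.
    try:
        util.bulk_insert(action_logs_to_add, ActionLog)
        db.session.commit()
        objects_info.extend(_extract_dicts_info(
            action_logs_to_add, consts.ACTION_LOG_STATUSES.added))
    except Exception:
        app.logger.exception("Processing of action logs chunk failed")
        db.session.rollback()
        objects_info.extend(_extract_dicts_info(
            action_logs_to_add, consts.ACTION_LOG_STATUSES.failed))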