def add_rl_save_state_data(uuid):
    """Attach run-length-encoded save-state data to the save record `uuid`.

    Reads base64-encoded 'rl_starts' and 'rl_lengths' buffers plus their
    declared lengths from the request form, restores any trailing zero bytes
    stripped during base64 decoding, persists both buffers to the store, and
    records their hashes (and the total RL length) on the GAME_SAVE_TABLE row.

    Returns a JSON response wrapping the updated save state record.
    """
    rl_starts_b64 = request.form.get('rl_starts')
    rl_lengths_b64 = request.form.get('rl_lengths')
    rl_total_length = int(request.form.get('rl_total_length'))
    rl_lengths_length = int(request.form.get('rl_lengths_length'))
    rl_starts_length = int(request.form.get('rl_starts_length'))

    rl_starts_b_array = bytearray(base64.b64decode(rl_starts_b64))
    rl_lengths_b_array = bytearray(base64.b64decode(rl_lengths_b64))

    # The base64 library interprets trailing zeros as padding and removes
    # them; pad each buffer back out to its declared length with zero bytes.
    if len(rl_starts_b_array) < rl_starts_length:
        rl_starts_b_array.extend(bytes(rl_starts_length - len(rl_starts_b_array)))
    if len(rl_lengths_b_array) < rl_lengths_length:
        rl_lengths_b_array.extend(bytes(rl_lengths_length - len(rl_lengths_b_array)))

    # Only the content hashes are needed; the stored file names are discarded.
    rl_start_hash, _ = save_byte_array_to_store(rl_starts_b_array, file_name=uuid)
    rl_lengths_hash, _ = save_byte_array_to_store(rl_lengths_b_array, file_name=uuid)

    dbm.update_table(dbm.GAME_SAVE_TABLE,
                     ['rl_starts_data', 'rl_lengths_data', 'rl_total_length', 'compressed'],
                     [rl_start_hash, rl_lengths_hash, rl_total_length, True],
                     ['uuid'], [uuid])
    return jsonify({'record': dbm.retrieve_save_state(uuid=uuid)[0]})
def update_save_state(uuid):
    """Update fields on the save state record `uuid` from the request form.

    Expects a JSON object of column -> value pairs in the 'update_fields'
    form parameter. If the update includes a performance link
    ('performance_uuid' / 'performance_time_index' / 'action'), the link is
    recorded separately and those keys are removed, since GAME_SAVE_TABLE
    does not refer to performances.

    Returns the updated save state record as JSON.
    """
    update_fields = json.loads(request.form.get('update_fields'))

    # If linking to a performance, do that and discard the link fields —
    # pop() raises KeyError on a missing key exactly like subscripting did.
    if 'performance_uuid' in update_fields:
        performance_uuid = update_fields.pop('performance_uuid')
        performance_time_index = update_fields.pop('performance_time_index')
        action = update_fields.pop('action')
        dbm.link_save_state_to_performance(uuid, performance_uuid,
                                           performance_time_index, action)

    # Only issue a table update if ordinary fields remain.
    if update_fields:
        dbm.update_table(dbm.GAME_SAVE_TABLE,
                         update_fields.keys(), update_fields.values(),
                         ['uuid'], [uuid])
    return jsonify(dbm.retrieve_save_state(uuid=uuid)[0])
def add_save_state_data(uuid):
    """Attach raw save-state buffer data to the save record `uuid`.

    Reads a base64-encoded 'buffer', its declared 'data_length', and a
    'compressed' flag from the request form, restores trailing zero bytes
    stripped during base64 decoding, stores the buffer, and records its hash
    and compression flag on the GAME_SAVE_TABLE row.

    Returns a JSON response wrapping the updated save state record.
    """
    save_state_data = request.form.get('buffer')
    # Form values are strings; only the literal 'true' means compressed.
    compressed = request.form.get('compressed') == u'true'
    data_length = int(request.form.get('data_length'))

    save_state_b_array = bytearray(base64.b64decode(save_state_data))
    # The base64 library interprets trailing zeros as padding and removes
    # them; pad the buffer back out to its declared length with zero bytes.
    if len(save_state_b_array) < data_length:
        save_state_b_array.extend(bytes(data_length - len(save_state_b_array)))

    # Only the content hash is needed; the stored file name is discarded.
    source_data_hash, _ = save_byte_array_to_store(save_state_b_array, file_name=uuid)

    dbm.update_table(dbm.GAME_SAVE_TABLE,
                     ['save_state_source_data', 'compressed'],
                     [source_data_hash, compressed],
                     ['uuid'], [uuid])
    return jsonify({'record': dbm.retrieve_save_state(uuid=uuid)[0]})
def performance_add_data(uuid):
    """Receive one chunk of a performance replay file and assemble when done.

    Reads a base64-encoded 'chunk_data' plus its 'chunk_id', 'chunk_size',
    'total_chunks', and the whole file's 'sha1_hash' from the request form.
    Each chunk is written to a per-hash temp directory; once all chunks have
    arrived they are concatenated in index order into a final .mp4, which is
    saved to the store and attached to the performance record `uuid`.

    Returns the plain string "OK".
    """
    sha1_hash = request.form.get('sha1_hash')
    chunk_id = request.form.get('chunk_id')
    total_chunks = int(request.form.get('total_chunks'))
    chunk_data = request.form.get('chunk_data')
    chunk_size = int(request.form.get('chunk_size'))

    chunk_b_array = bytearray(base64.b64decode(chunk_data))
    # The base64 library interprets trailing zeros as padding and removes
    # them; pad the chunk back out to its declared size with zero bytes.
    if len(chunk_b_array) < chunk_size:
        chunk_b_array.extend(bytes(chunk_size - len(chunk_b_array)))

    # One temp directory per upload, keyed by the final file's hash.
    temp_path = os.path.join(LOCAL_DATA_ROOT, "tmp_{}".format(sha1_hash))
    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    # Write this chunk as "<1-based index>_of_<total>".
    with open("{}/{}_of_{}".format(temp_path, (int(chunk_id) + 1), total_chunks), "wb") as temp:
        temp.write(chunk_b_array)

    chunk_paths = fnmatch.filter(os.listdir(temp_path), "*_of_*")

    # When every chunk is present, concatenate in index order into the final
    # file, persist it to the store, and remove the temp directory.
    if total_chunks == len(chunk_paths):
        final_name = "final_{}.mp4".format(sha1_hash)
        final_path = os.path.join(temp_path, final_name)
        # 'with' guarantees the handle is closed even if a chunk read fails.
        with open(final_path, "ab") as final_file:
            for cp in sorted(chunk_paths, key=lambda p: int(p.split("_")[0])):
                with open("{}/{}".format(temp_path, cp), "rb") as cf:
                    final_file.write(cf.read())
        final_hash = save_file_to_store(final_path)
        shutil.rmtree(temp_path)
        # Attach the stored file to the performance record.
        dbm.update_table(dbm.PERFORMANCE_CITATION_TABLE,
                         ['replay_source_file_ref', 'replay_source_file_name'],
                         [final_hash, final_name],
                         ["uuid"], [uuid])
    return "OK"
def game_update(uuid):
    """Apply caller-supplied field updates to the game citation `uuid`.

    Expects a JSON object of column -> value pairs in the 'update_fields'
    form parameter. Returns the updated game record as a JSON string.
    """
    fields = json.loads(request.form.get('update_fields'))
    dbm.update_table(dbm.GAME_CITATION_TABLE,
                     fields.keys(), fields.values(),
                     ["uuid"], [uuid])
    game_ref = dbm.retrieve_game_ref(uuid)
    # Full-text-search index refresh is currently disabled:
    #dbm.update_table(dbm.FTS_INDEX_TABLE, ['content'], [game_ref.to_json_string()], ["uuid"], [uuid])
    return game_ref.to_json_string()
def performance_update(uuid):
    """Apply caller-supplied field updates to the performance citation `uuid`.

    Expects a JSON object of column -> value pairs in the 'update_fields'
    form parameter. Returns the updated performance record as a JSON string.
    """
    fields = json.loads(request.form.get('update_fields'))
    dbm.update_table(dbm.PERFORMANCE_CITATION_TABLE,
                     fields.keys(), fields.values(),
                     ["uuid"], [uuid])
    perf_ref = dbm.retrieve_perf_ref(uuid)
    # Full-text-search index refresh is currently disabled:
    #dbm.update_table(dbm.FTS_INDEX_TABLE, ['content'], [perf_ref.to_json_string()],["uuid"], [uuid])
    return perf_ref.to_json_string()