def api(ctx, username, password, apikey, add, unwatched, rename, files,
        keep_structure, date_format, delete_empty, link, softlink,
        persistent, abort, state):
    if not add and not rename:
        ctx.obj["output"].info("Nothing to do.")
        return
    try:
        conn = get_connector(apikey, username, password, persistent)
    except Exception as e:
        # Report the failure instead of re-raising; a bare `raise e` here would
        # make the error message and exit code below unreachable.
        ctx.obj["output"].error(e)
        exit(1)
    pipeline = []
    pipeline.append(operations.HashOperation(ctx.obj["output"]))
    if add:
        pipeline.append(
            operations.MylistAddOperation(conn, ctx.obj["output"], state, unwatched))
    if rename:
        pipeline.append(
            operations.GetFileInfoOperation(conn, ctx.obj["output"]))
        pipeline.append(
            operations.RenameOperation(ctx.obj["output"], rename, date_format,
                                       delete_empty, keep_structure, softlink,
                                       link, abort))
    to_process = get_files_to_process(files, ctx)
    for file in to_process:
        file_obj = {}
        file_obj["path"] = file
        ctx.obj["output"].info("Processing file \"" + file + "\"")
        for operation in pipeline:
            res = operation.Process(file_obj)
            if not res:  # Critical error, cannot proceed with pipeline
                break
    conn.close(persistent, get_persistent_file_path())
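# The refactored api() above delegates connector creation to get_connector(),
# which is not part of this excerpt. A minimal sketch of what it might look
# like, assuming it simply wraps the create_secure/create_plain dispatch that
# the older api() further below performs inline; how the `persistent` flag
# restores a saved session is not shown in the excerpt and is omitted here.
# It also assumes `anidbconnector` is imported the same way as in the rest of
# this module.
def get_connector(apikey, username, password, persistent):
    # Persistent-session restore would happen here; left out in this sketch.
    if apikey:
        return anidbconnector.AnidbConnector.create_secure(
            username, password, apikey)
    return anidbconnector.AnidbConnector.create_plain(username, password)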
def test_hash_error():
    filename = "asdasdasdasoasdjasiasd.tmp"
    out = flexmock.flexmock()
    out.should_receive("error").once()
    oper = operations.HashOperation(out, False)
    f = {"path": filename}
    assert not oper.Process(f)  # pipeline should not continue without valid hash
def test_hash_operation():
    filename = "file.tmp"
    with open(filename, "wb") as f:
        f.write(b"\x6F" * 31457280)  # 30 MiB of 0x6F bytes
    f = {"path": filename}
    out = flexmock.flexmock(error=lambda x: print(x))
    out.should_receive("success").once()
    oper = operations.HashOperation(out)
    oper.Process(f)
    os.remove(filename)
    assert f["size"] == 31457280
    assert f["ed2k"] == "7e7611fe2ffc72398124dd3f24c4135e"
def api(ctx, username, password, apikey, add, rename, files, keep_structure,
        date_format, delete_empty, link, softlink):
    if not add and not rename:
        ctx.obj["output"].info("Nothing to do.")
        return
    try:
        if apikey:
            conn = anidbconnector.AnidbConnector.create_secure(
                username, password, apikey)
        else:
            conn = anidbconnector.AnidbConnector.create_plain(
                username, password)
    except Exception as e:
        ctx.obj["output"].error(e)
        exit(1)
    pipeline = []
    pipeline.append(operations.HashOperation(ctx.obj["output"]))
    if add:
        pipeline.append(operations.MylistAddOperation(conn, ctx.obj["output"]))
    if rename:
        pipeline.append(
            operations.GetFileInfoOperation(conn, ctx.obj["output"]))
        pipeline.append(
            operations.RenameOperation(ctx.obj["output"], rename, date_format,
                                       delete_empty, keep_structure, softlink, link))
    to_process = get_files_to_process(files, ctx)
    for file in to_process:
        file_obj = {}
        file_obj["path"] = file
        ctx.obj["output"].info("Processing file \"" + file + "\"")
        for operation in pipeline:
            res = operation.Process(file_obj)
            if not res:  # Critical error, cannot proceed with pipeline
                break
    conn.close()
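# Both versions of api() drive a list of operation objects that share the same
# contract: Process(file_dict) enriches the dict in place (e.g. HashOperation
# adds "size" and "ed2k", as the tests above check) and returns a falsy value
# on a critical error so the pipeline stops for that file. A minimal sketch of
# that contract; the class name below is hypothetical and not taken from the
# excerpt.
class PipelineOperation:
    def __init__(self, output):
        self.output = output  # object exposing info/success/error methods

    def Process(self, file_obj):
        # Subclasses mutate file_obj and return True to continue the pipeline,
        # or a falsy value to abort processing of the current file.
        raise NotImplementedError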