def test_encrypt_decrypt_ok(self):
    """GPG round-trip: encrypt a file, decrypt it back, verify the plaintext."""
    gpg = Gpg(self._gpg_dir)
    gpg.generate_key(
        dest_dir=self._data_dir,
        name_email="*****@*****.**",
        passphrase="password",
    )

    # Encryption
    encrypt = GpgEncrypt()
    for attr, value in [
        ("logger", LisboaLog.get_logger(__name__)),
        ("gnupghome", self._gpg_dir),
        ("src_dir", self._data_dir),
        ("src_pattern", r"test\.txt"),
        ("dest_dir", self._result_dir),
        ("recipients", ["*****@*****.**"]),
    ]:
        Helper.set_property(encrypt, attr, value)
    encrypt.execute()

    # Decryption
    decrypt = GpgDecrypt()
    for attr, value in [
        ("logger", LisboaLog.get_logger(__name__)),
        ("gnupghome", self._gpg_dir),
        ("src_dir", self._result_dir),
        ("src_pattern", r"test\.txt\.gpg"),
        ("passphrase", "password"),
    ]:
        Helper.set_property(decrypt, attr, value)
    decrypt.execute()

    with open(
        os.path.join(self._result_dir, self._file_name), mode="r", encoding="utf-8"
    ) as f:
        txt = f.read()
    assert txt == "This is test"
def test_multi_process_error_stop(self):
    """A failing step must abort multi-process execution when force_continue is False."""
    py_info = sys.version_info
    major_ver = py_info[0]
    minor_ver = py_info[1]
    # NOTE(review): concatenating version digits ("3" + "10" -> 310) is a fragile
    # comparison scheme, but it stays monotonic for the versions compared here,
    # so the existing semantics are preserved.
    py_ver = int(str(major_ver) + str(minor_ver))
    log = LisboaLog.get_logger(self.__class__.__name__)
    log.info(minor_ver)
    if py_ver >= self.MULTI_PROC_SUPPORT_VER:
        step1 = SampleStep()
        Helper.set_property(step1, "logger", LisboaLog.get_logger(step1.__class__.__name__))
        step2 = ErrorSampleStep()
        Helper.set_property(step2, "logger", LisboaLog.get_logger(step2.__class__.__name__))
        q = StepQueue()
        q.force_continue = False
        setattr(ScenarioQueue, "step_queue", q)
        executor = MultiProcExecutor([step1, step2])
        try:
            executor.execute_steps(None)
            # Fix: failure message was ungrammatical ("Error must be occured").
            self.fail("Error must have occurred")
        except StepExecutionFailed:
            # Expected: the error step stops execution.
            pass
def _async_step_execute(cls):
    """Deserialize a pickled step, trigger it, and report success as "OK"/"NG"."""
    try:
        step = cloudpickle.loads(cls)
        step.trigger()
    except Exception as e:
        LisboaLog.get_logger(__name__).error(e)
        return "NG"
    return "OK"
def __invoke_steps(self, yaml_scenario_list):
    """
    Create executable instance and push them to queue

    Args:
        yaml_scenario_list: parsed yaml list
    """
    self._logger.info("Start to invoke scenario")

    def _build(block):
        # Instantiate one step, attach its logger, and register its arguments.
        obj = self.__create_instance(block, yaml_scenario_list)
        Helper.set_property(
            obj,
            "logger",
            LisboaLog.get_logger(obj.__class__.__name__),
        )
        StepArgument._put(block["step"], obj)
        return obj

    # Queue that collects the step instances.
    q = StepQueue()
    for s_dict in yaml_scenario_list:
        # Scenario-level settings are consumed here and produce no steps.
        if "multi_process_count" in s_dict.keys():
            q.multi_proc_cnt = s_dict.get("multi_process_count")
            continue
        if "force_continue" in s_dict.keys():
            q.force_continue = s_dict.get("force_continue")
            continue
        if "parallel" in s_dict.keys():
            instances = [_build(row) for row in s_dict.get("parallel")]
        else:
            instances = [_build(s_dict)]
        # Put instance to queue
        q.push(instances)

    # save queue to static area
    setattr(ScenarioQueue, "step_queue", q)
    self._logger.info("Finish to invoke scenario")
def test_convert_entire(self):
    """CsvConvert renames a header key and rewrites the file as fully-quoted TSV."""
    try:
        # Prepare the csv file to be converted.
        csv_list = [["key", "data"], ["1", "spam"], ["2", "spam"], ["3", "spam"]]
        os.makedirs(self._data_dir, exist_ok=True)
        test_csv = os.path.join(self._data_dir, "test.csv")
        with open(test_csv, "w") as t:
            csv.writer(t).writerows(csv_list)

        # Configure the step under test.
        instance = CsvConvert()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("src_dir", self._data_dir),
            ("src_pattern", r"test\.csv"),
            ("headers", [{"key": "new_key"}]),
            ("quote", "QUOTE_ALL"),
            ("after_format", "tsv"),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        with open(os.path.join(self._data_dir, "test.tsv"), "r") as t:
            for i, row in enumerate(csv_list):
                line = t.readline()
                if i == 0:
                    assert line == '"new_key"\t"data"\n'
                else:
                    assert line == '"%s"\t"%s"\n' % (row[0], row[1])
    finally:
        shutil.rmtree(self._data_dir)
def test_compress_with_path(self):
    """FileArchive zips the matched file into dest_dir under the given name."""
    try:
        # Prepare a file to archive and a separate output directory.
        result_dir = os.path.join(self._data_dir, "out")
        os.makedirs(self._data_dir, exist_ok=True)
        os.makedirs(result_dir, exist_ok=True)
        test_file = os.path.join(self._data_dir, "test.txt")
        with open(test_file, "w") as t:
            t.write("ABCDEF")

        # Configure the step under test.
        instance = FileArchive()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("src_dir", self._data_dir),
            ("src_pattern", r"test\.txt"),
            ("format", "zip"),
            ("dest_dir", result_dir),
            ("dest_pattern", "foo"),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        files = glob(os.path.join(result_dir, "foo.zip"))
        assert len(files) == 1
        assert os.path.basename(files[0]) == "foo.zip"
    finally:
        shutil.rmtree(self._data_dir)
def test_execute_with_key_content(self):
    """SftpUpload with an inline key calls put_file for the matched files."""
    try:
        os.makedirs(self._data_dir)
        dir_path = Path(self._data_dir)
        for fname in ("a.txt", "b.txt", "c.exe"):
            (dir_path / fname).touch()

        instance = SftpUpload()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("host", "dummy.host"),
            ("user", "dummy_user"),
            ("key", {"content": "dummy_rsa"}),
            ("src_dir", self._data_dir),
            ("src_pattern", ".*.txt"),
            ("dest_dir", self._data_dir),
            ("step", "sftp_class"),
        ]:
            Helper.set_property(instance, attr, value)

        with ExitStack() as stack:
            mock_sftp = stack.enter_context(
                patch("cliboa.util.sftp.Sftp.put_file"))
            instance.execute()
            assert mock_sftp.called
    finally:
        shutil.rmtree(self._data_dir)
def __init__(self, url, dest_path, timeout, retry_cnt, retry_intvl_sec):
    """
    Store download settings for later use.

    Args:
        url: download source url
        dest_path: local path to save the downloaded content
        timeout: timeout seconds
        retry_cnt: retry count
        retry_intvl_sec: interval seconds between retries
    """
    self._logger = LisboaLog.get_logger(__name__)
    self._url = url
    self._dest_path = dest_path
    self._timeout = timeout
    self._retry_cnt = retry_cnt
    self._retry_intvl_sec = retry_intvl_sec
def test_execute_with_key(self):
    """SftpDownload with a key file lists remote files and stores the result."""
    try:
        # A dummy private key file is enough because Sftp is mocked below.
        os.makedirs(self._data_dir)
        dummy_pass = os.path.join(self._data_dir, "id_rsa")
        with open(dummy_pass, "w") as f:
            f.write("test")

        instance = SftpDownload()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("host", "dummy.host"),
            ("user", "dummy_user"),
            ("key", dummy_pass),
            ("src_dir", "/"),
            ("src_pattern", ".*.txt"),
            ("dest_dir", self._data_dir),
            ("step", "sftp_class"),
        ]:
            Helper.set_property(instance, attr, value)

        with ExitStack() as stack:
            mock_sftp = stack.enter_context(
                patch("cliboa.util.sftp.Sftp.list_files"))
            mock_sftp.return_value = ["test.txt"]
            instance.execute()
            assert mock_sftp.called
            assert ObjectStore.get("sftp_class") == ["test.txt"]
    finally:
        shutil.rmtree(self._data_dir)
def test_execute_ok_with_remain_column_numbers(self):
    """CsvColumnExtract keeps only the single requested column number."""
    # Prepare the input csv.
    os.makedirs(self._data_dir, exist_ok=True)
    test_csv = os.path.join(self._data_dir, "test.csv")
    test_csv_data = [["1", "spam"], ["2", "spam"]]
    with open(test_csv, "w") as t:
        writer = csv.writer(t)
        writer.writerows(test_csv_data)
        t.flush()

    # Configure the step under test.
    instance = CsvColumnExtract()
    for attr, value in [
        ("logger", LisboaLog.get_logger(__name__)),
        ("src_dir", self._data_dir),
        ("src_pattern", "test.csv"),
        ("dest_dir", self._data_dir),
    ]:
        Helper.set_property(instance, attr, value)
    remain_column_number = 1
    Helper.set_property(instance, "column_numbers", remain_column_number)
    try:
        instance.execute()
        output_file = os.path.join(self._data_dir, "test.csv")
        with open(output_file, "r") as o:
            for row in csv.DictReader(o):
                assert row[test_csv_data[0][0]] == test_csv_data[1][0]
    finally:
        shutil.rmtree(self._data_dir)
def test_execute_ng_no_multiple_files(self):
    """CsvHeaderConvert must raise InvalidCount when the pattern matches two files."""
    # create test files
    os.makedirs(self._data_dir, exist_ok=True)
    test1_csv = os.path.join(self._data_dir, "test1.csv")
    test2_csv = os.path.join(self._data_dir, "test2.csv")
    # Fix: the original referenced `.close` without calling it
    # (`open(...).close`), leaking both file handles.
    open(test1_csv, "w").close()
    open(test2_csv, "w").close()
    try:
        with pytest.raises(InvalidCount) as execinfo:
            # set the essential attributes
            instance = CsvHeaderConvert()
            Helper.set_property(instance, "logger", LisboaLog.get_logger(__name__))
            Helper.set_property(instance, "src_dir", self._data_dir)
            Helper.set_property(instance, "src_pattern", r"test(.*)\.csv")
            Helper.set_property(instance, "dest_dir", self._data_dir)
            Helper.set_property(instance, "dest_pattern", "test_new.csv")
            Helper.set_property(
                instance, "headers", [{"key": "new_key"}, {"data": "new_data"}]
            )
            instance.execute()
    finally:
        # Fix: clean up the temp dir even if the expected exception is not raised.
        shutil.rmtree(self._data_dir)
    assert "only one" in str(execinfo.value)
def test_excute_ng_multiple_target2(self):
    """CsvMerge must raise InvalidCount when src2_pattern matches two files."""
    with pytest.raises(InvalidCount) as execinfo:
        try:
            # One file for src1 and two files matching src2_pattern.
            os.makedirs(self._data_dir, exist_ok=True)
            for csv_name in ("test1.csv", "test22.csv", "test222.csv"):
                open(os.path.join(self._data_dir, csv_name), "w").close()

            # Configure the step under test.
            instance = CsvMerge()
            for attr, value in [
                ("logger", LisboaLog.get_logger(__name__)),
                ("src_dir", self._data_dir),
                ("src1_pattern", "test1.csv"),
                ("src2_pattern", "test2(.*).csv"),
                ("dest_dir", self._data_dir),
                ("dest_pattern", "test.csv"),
            ]:
                Helper.set_property(instance, attr, value)
            instance.execute()
        finally:
            shutil.rmtree(self._data_dir)
    assert "must be only one" in str(execinfo.value)
def test_execute_ok(self, m_get_client):
    """AzureBlobUpload uploads one blob per file matching src_pattern."""
    # Arrange
    service = m_get_client.return_value
    blob_client = service.get_blob_client.return_value
    try:
        os.makedirs(self._data_dir)
        dir_path = Path(self._data_dir)
        for fname in ("a.txt", "b.txt", "c.exe"):
            (dir_path / fname).touch()

        # Act
        instance = AzureBlobUpload()
        Helper.set_property(instance, "logger", LisboaLog.get_logger(__name__))
        # use Postman echo
        Helper.set_property(
            instance,
            "account_url",
            "https://testtesttest.blob.core.windows.example/",
        )
        for attr, value in [
            ("account_access_key", "dummy"),
            ("container_name", "test"),
            ("src_dir", self._data_dir),
            ("src_pattern", r"(.*)\.txt"),
            ("dest_dir", "out"),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        # Assert: only the two *.txt files are uploaded, c.exe is skipped.
        assert blob_client.upload_blob.call_count == 2
    finally:
        shutil.rmtree(self._data_dir)
def test_execute_ok_2(self):
    """CsvHeaderConvert writes a new file whose header row has a renamed key."""
    try:
        # Prepare the input csv.
        csv_list = [["key", "data"], ["1", "spam"], ["2", "spam"], ["3", "spam"]]
        os.makedirs(self._data_dir, exist_ok=True)
        test_csv = os.path.join(self._data_dir, "test.csv")
        with open(test_csv, "w") as t:
            csv.writer(t).writerows(csv_list)

        # Configure the step under test.
        instance = CsvHeaderConvert()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("src_dir", self._data_dir),
            ("src_pattern", r"test\.csv"),
            ("dest_dir", self._data_dir),
            ("dest_pattern", "test_new.csv"),
            ("headers", [{"key": "new_key"}]),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        test_new_csv = os.path.join(self._data_dir, "test_new.csv")
        with open(test_new_csv, "r") as t:
            line = next(csv.reader(t))
    finally:
        shutil.rmtree(self._data_dir)
    assert line == ["new_key", "data"]
def test_source_path_reader_with_none(self):
    """_source_path_reader passes None through unchanged."""
    instance = SampleCustomStep()
    Helper.set_property(
        instance, "logger", LisboaLog.get_logger(instance.__class__.__name__)
    )
    assert instance._source_path_reader(None) is None
def test_execute_encode_error_ignore(self):
    """FileConvert with errors='ignore' drops characters shift_jis cannot encode."""
    STR_UTF8 = "いろはにほへと☺"
    try:
        # Prepare a utf-8 file containing a character absent from shift_jis.
        os.makedirs(self._data_dir, exist_ok=True)
        test_file = os.path.join(self._data_dir, "test.txt")
        with open(test_file, "w") as t:
            t.write(STR_UTF8)

        # Configure the step under test.
        instance = FileConvert()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("src_dir", self._data_dir),
            ("src_pattern", r"test\.txt"),
            ("encoding_from", "utf-8"),
            ("encoding_to", "shift_jis"),
            ("errors", "ignore"),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        with open(test_file, encoding="shift_jis", errors="ignore") as t:
            str_output = t.read()
        # The smiley is not representable in shift_jis and was silently dropped.
        assert str_output == "いろはにほへと"
    finally:
        shutil.rmtree(self._data_dir)
def test_execute_ok3(self):
    """CsvConcat with a single matching file simply copies its rows."""
    try:
        os.makedirs(self._data_dir, exist_ok=True)
        # Prepare the only input file.
        csv_list1 = [["key", "data"], ["c1", "spam"], ["c2", "spam"]]
        with open(os.path.join(self._data_dir, "test1.csv"), "w") as t1:
            csv.writer(t1).writerows(csv_list1)

        # Configure the step under test.
        instance = CsvConcat()
        for attr, value in [
            ("logger", LisboaLog.get_logger(__name__)),
            ("src_dir", self._data_dir),
            ("src_pattern", r"test.*\.csv"),
            ("dest_dir", self._data_dir),
            ("dest_pattern", "test.csv"),
        ]:
            Helper.set_property(instance, attr, value)
        instance.execute()

        with open(os.path.join(self._data_dir, "test.csv")) as t:
            concatenated_list = list(csv.reader(t))
    finally:
        shutil.rmtree(self._data_dir)
    assert concatenated_list == [
        ["key", "data"],
        ["c1", "spam"],
        ["c2", "spam"],
    ]
def __init__(self, cmd_args):
    """
    Resolve the project and common scenario file paths from the command
    line arguments. The scenario file extension depends on the chosen
    format: "yaml" maps to ".yml", anything else maps to "." + format.
    """
    self._logger = LisboaLog.get_logger(__name__)
    self._cmd_args = cmd_args
    self._pj_dir = os.path.join(env.PROJECT_DIR, cmd_args.project_name)
    self._pj_scenario_dir = os.path.join(
        env.PROJECT_DIR, cmd_args.project_name, env.SCENARIO_DIR_NAME
    )
    # Same extension rule for both the project and the common scenario file.
    ext = ".yml" if cmd_args.format == "yaml" else "." + cmd_args.format
    self._pj_scenario_file = (
        os.path.join(env.PROJECT_DIR, cmd_args.project_name, env.SCENARIO_FILE_NAME)
        + ext
    )
    self._cmn_scenario_file = (
        os.path.join(env.COMMON_DIR, env.SCENARIO_FILE_NAME) + ext
    )
    # key and value of dynamic variables
    self._dynamic_key_and_val = {}
def test_end_with_noerror(self):
    """A successful run fires before/after/completion listeners but never error_step."""
    if sys.version_info.minor < 6:
        # ignore test if python version is less 3.6(assert_called is not supported)
        return
    with ExitStack() as stack:
        mocks = [
            stack.enter_context(patch(target))
            for target in (
                "cliboa.core.listener.StepStatusListener.before_step",
                "cliboa.core.listener.StepStatusListener.error_step",
                "cliboa.core.listener.StepStatusListener.after_step",
                "cliboa.core.listener.StepStatusListener.after_completion",
            )
        ]
        mock_before_step, mock_error_step, mock_after_step, mock_post_step = mocks

        step = SampleCustomStep()
        Helper.set_property(
            step, "logger", LisboaLog.get_logger(step.__class__.__name__)
        )
        Helper.set_property(step, "listeners", [StepStatusListener()])
        executor = SingleProcExecutor([step])
        executor.execute_steps(None)

        mock_before_step.assert_called_once()
        mock_error_step.assert_not_called()
        mock_after_step.assert_called_once()
        mock_post_step.assert_called_once()
def __init__(self):
    """Initialize holders for the step attributes parsed later."""
    self._logger = LisboaLog.get_logger(__name__)
    self._s = StorageIO()
    # Parsed attributes; all start empty and are filled in afterwards.
    self._step = None
    self._symbol = None
    self._parallel = None
    self._io = None
def __init__(
    self,
    host,
    user,
    password,
    timeout=TIMEOUT_SEC,
    retryTimes=3,
    port=21,
    tls=False,
):
    """
    Store the ftp connection settings; no connection is opened here.

    Args:
        host (str): hostname
        user (str): username
        password (str): password
        timeout=30 (int): timeout seconds
        retryTimes=3 (int): retry count
        port=21 (int): port number
        tls=False (bool): use secure connection
    """
    self.__host = host
    self.__user = user
    self.__password = password
    self.__timeout = timeout
    self.__retryTimes = retryTimes
    self.__port = port
    self.__tls = tls
    self._logger = LisboaLog.get_logger(__name__)
def _create_instance(self):
    """Build a SqliteExport step wired with the test fixture settings."""
    instance = SqliteExport()
    for attr, value in [
        ("logger", LisboaLog.get_logger(__name__)),
        ("dbname", self._DB_NAME),
        ("dest_path", self._RESULT_FILE),
        ("tblname", self._TBL_NAME),
    ]:
        Helper.set_property(instance, attr, value)
    return instance
def test_logging_mask_password(self):
    """
    In log file, 'password' is masked.
    """
    instance = SampleCustomStep()
    instance._logger = LisboaLog.get_logger(__name__)
    Helper.set_property(
        instance, "logger", LisboaLog.get_logger(instance.__class__.__name__)
    )
    Helper.set_property(instance, "password", "test")
    instance.trigger()

    # Scan the log file for the masked value.
    with open(self._log_file, mode="r", encoding="utf-8") as f:
        ret = any("password : ****" in line for line in f)
    self.assertTrue(ret)
def test_source_path_reader_with_content(self):
    """A {'content': ...} source is materialized into a readable file path."""
    instance = SampleCustomStep()
    Helper.set_property(
        instance, "logger", LisboaLog.get_logger(instance.__class__.__name__)
    )
    path = instance._source_path_reader({"content": "test"})
    with open(path, "r") as fp:
        assert fp.read() == "test"
def __init__(self, obj):
    """
    Args:
        obj: execution target step instance
    """
    # NOTE: the previous docstring documented `q`/`cmd_args`, which do not
    # exist on this signature; the only parameter is the step object.
    self._logger = LisboaLog.get_logger(__name__)
    self._step = obj
def __init__(self, host, user, password, dbname, encoding="UTF8"):
    """
    Store the database connection settings; the connection itself is
    opened later (self._con starts as None).

    Args:
        host: database host
        user: database user
        password: database password
        dbname: database name
        encoding="UTF8": client encoding
    """
    self._logger = LisboaLog.get_logger(__name__)
    self._host = host
    self._user = user
    self._password = password
    self._dbname = dbname
    self._encoding = encoding
    self._con = None
def __init__(self, cmd_args):
    """
    Args:
        cmd_args: command line arguments
    """
    self._logger = LisboaLog.get_logger(__name__)
    # Reference to the shared queue class (not an instance).
    self._scenario_queue = ScenarioQueue
    self._cmd_args = cmd_args
    # Registered listeners; starts empty.
    self._listeners = []
def _create_instance(self, pattern, refresh):
    """Build a SqliteWrite step for the given source pattern and refresh flag."""
    instance = SqliteWrite()
    for attr, value in [
        ("logger", LisboaLog.get_logger(__name__)),
        ("dbname", self.DB_NAME),
        ("src_dir", "."),
        ("src_pattern", pattern),
        ("tblname", self.TBL_NAME),
        ("refresh", refresh),
    ]:
        Helper.set_property(instance, attr, value)
    return instance
def __init__(self, cmd_args):
    """
    Set project directory, scenario file path, scenario file format,
    other command line arguments
    """
    self._logger = LisboaLog.get_logger(__name__)
    # <project root>/<project name>/<scenario dir name>
    self._pj_scenario_dir = os.path.join(
        env.PROJECT_DIR, cmd_args.project_name, env.SCENARIO_DIR_NAME
    )
    self._cmd_args = cmd_args
class BigQuery(object):
    """
    bigquery api wrapper
    """

    _logger = LisboaLog.get_logger(__name__)

    @staticmethod
    def get_bigquery_client(credentials):
        """
        get bigquery client object

        Args:
            credentials: gcp service account json
        """
        credentials_info = ServiceAccount.auth(credentials)
        if credentials_info:
            # Authenticated client bound to the service account's project.
            return bigquery.Client(
                credentials=credentials_info, project=credentials_info.project_id
            )
        # Fall back to default credentials when none were supplied.
        return bigquery.Client()

    @staticmethod
    def get_extract_job_config(print_header=True):
        """Job config for extract jobs; print_header controls the header row."""
        return bigquery.ExtractJobConfig(print_header=print_header)

    @staticmethod
    def get_query_job_config():
        """Default job config for query jobs."""
        return bigquery.QueryJobConfig()

    @staticmethod
    def get_write_disposition():
        """Write disposition: always truncate the table before writing."""
        return bigquery.WriteDisposition.WRITE_TRUNCATE

    @staticmethod
    def get_compression_type():
        """
        Output compression type
        """
        return bigquery.Compression.GZIP

    @classmethod
    def get_destination_format(cls, ext):
        """
        Output file format

        Args:
            ext: destination file extension
        """
        cls._logger.info("bigquery destination format: %s" % ext)
        format_and_dest_format = {
            ".csv": bigquery.DestinationFormat.CSV,
            ".json": bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON,
        }
        # Unknown extensions yield None.
        return format_and_dest_format.get(ext)