Example No. 1
    def setUp(self):
        self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
        self.ssh_params = {
            'cfg_mgr': self.cfg_mgr,
            'action_type': 'ssh',
            'name': 'fake_model',
            'user': '******',
            'error': 'fake_model_run_log_status_failure',
            'execute': 'exec bash /home/fake_open/scripts/fake_openSAS.sh'
                       ' fake_model_v001_t001.sas 57 1005',
            'ok': 'fake_model_run_log_status_success',
            'host': 'fake.sas.server',
            'args': ['argument1'],
            'capture_output': 'false'
        }

        self.my_ssh = SSH(**self.ssh_params)
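
A hedged companion check for this fixture, assuming the SSH action keeps its constructor keywords as attributes (the attribute names below are a guess, not a confirmed API):

    def test_ssh_params_round_trip(self):
        # Sketch only: attribute names are assumed from the kwargs above.
        self.assertEqual(self.my_ssh.name, 'fake_model')
        self.assertEqual(self.my_ssh.host, 'fake.sas.server')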
Example No. 2
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.shell_params = {
         'cfg_mgr': self.cfg_mgr,
         'action_type': 'shell',
         'name': 'import_prep',
         'ok': 'import',
         'error': 'kill',
         'execute': 'import_prep.sh',
         'delete': ['/foo', '/foo/bar'],
         'mkdir': ['/foo1', '/foo1/bar'],
         'config': [{
             'config1': 'val1',
             'config2': 'val2'
         }],
         'arg': ['arg1', 'arg2', 'arg3'],
         'env_var': [
             'table_name=test', 'target_dir=test', 'database=test',
             'domain=test', 'HADOOP_CONF_DIR=/etc/hadoop/conf'
         ],
         'file': ['/import_prep.sh#import_prep.sh'],
         'archive': ['foo.zip'],
         'capture_output': 'true'
     }
     self.my_shell = Shell(**self.shell_params)
Example No. 3
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.sqoop_params = {
         'cfg_mgr': self.cfg_mgr,
         'action_type': 'sqoop',
         'name': 'import',
         'ok': 'ok',
         'error': 'kill',
         'delete': ['foo', 'bar'],
         'mkdir': ['foo1', 'bar1'],
         'config': [{
             'fs.hdfs.impl.disable.cache': 'true',
             'sqoop.connection.factories': '{connection_factories}'
         }],
         'command': 'import',
         'arg': ['-Doraoop.timestamp.string=false', '--as-avrodatafile'],
         'job_tracker': 'track_me',
         'name_node': 'node',
         'file': ['/f1.txt', '/f2.txt'],
         'archive': ['1.zip']
     }
     self.my_sqoop = Sqoop(**self.sqoop_params)
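
The keyword names above track the child elements of an Oozie sqoop action (job-tracker, name-node, command, arg, file, archive; delete and mkdir feed the prepare block). A rough sketch of the XML such a fixture is expected to render to, under the assumption that the Sqoop class emits standard Oozie markup:

# Assumed output shape; namespace version and element order may differ.
EXPECTED_SQOOP_XML = """
<action name="import">
    <sqoop xmlns="uri:oozie:sqoop-action:0.2">
        <job-tracker>track_me</job-tracker>
        <name-node>node</name-node>
        <command>import</command>
        <arg>-Doraoop.timestamp.string=false</arg>
        <arg>--as-avrodatafile</arg>
        <file>/f1.txt</file>
        <archive>1.zip</archive>
    </sqoop>
    <ok to="ok"/>
    <error to="kill"/>
</action>
"""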
Example No. 4
def ingest_version(args):
    """Used for deploying ibis."""
    cfg_mgr = ConfigManager(args.env[0], args.env[0], 'True')
    print 'Ingest version--', cfg_mgr.hdfs_ingest_version
    with open('ingest_version.txt', 'w') as file_h:
        file_h.write(cfg_mgr.hdfs_ingest_version)
    print 'Ingest version:', cfg_mgr.hdfs_ingest_version
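
Because --env is declared with nargs=1 (see the parser in Example No. 24 below), args.env arrives as a one-element list. A minimal invocation sketch, with an illustrative Namespace standing in for parsed arguments:

from argparse import Namespace

args = Namespace(env=['dev'])  # nargs=1 yields a one-element list
ingest_version(args)           # writes ingest_version.txt to the working dir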
Example No. 5
 def setUp(self):
     """Setup."""
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.cfg_mgr.requests_dir = BASE_DIR
     self.dsl_parser = DSLParser(self.cfg_mgr,
                                 ['import_prep', 'import', 'avro'],
                                 self.cfg_mgr.requests_dir)
     self.cfg_mgr.hadoop_credstore_password_disable = False
Example No. 6
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.fork_params = {
         'cfg_mgr': self.cfg_mgr,
         'action_type': 'fork',
         'name': 'pipeline1',
         'to_nodes': ['tbl1', 'tbl2', 'tbl3', 'tbl4']
     }
     self.fork = Fork(**self.fork_params)
Example No. 7
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.join_params = {
         'cfg_mgr': self.cfg_mgr,
         'action_type': 'join',
         'name': 'pipeline1_join',
         'to_node': 'pipeline2'
     }
     self.join = Join(**self.join_params)
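
In Oozie, every fork that fans a workflow out must be closed by a matching join, so the fixtures in Examples No. 6 and 7 describe the two ends of one parallel section. A sketch wiring them together (cfg_mgr is the ConfigManager from the fixtures; values reuse the params above):

# A fork fans out to parallel nodes; its matching join converges them
# onto a single downstream node.
fork = Fork(cfg_mgr=cfg_mgr, action_type='fork', name='pipeline1',
            to_nodes=['tbl1', 'tbl2', 'tbl3', 'tbl4'])
join = Join(cfg_mgr=cfg_mgr, action_type='join', name='pipeline1_join',
            to_node='pipeline2')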
Example No. 8
 def setUp(self, mock_connect, m_U):
     """Setup."""
     mock_util_methods = MagicMock()
     mock_util_methods.run_subprocess = MagicMock()
     mock_util_methods.run_subprocess.return_value = 0
     m_U.return_value = mock_util_methods
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.driver = Driver(self.cfg_mgr)
     self.start_time = time.time()
Example No. 9
 def setUp(self, mock_connect):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.req_inventory = RequestInventory(self.cfg_mgr)
     self.driver = Driver(self.cfg_mgr)
     self.generator = WorkflowGenerator('test_workflow', self.cfg_mgr)
     # Expected workflows hardcoded
     self.generator.action_builder.cfg_mgr.host = \
         'fake.workflow.host'
     # Hadoop credstore enabled case
     self.cfg_mgr.hadoop_credstore_password_disable = False
     self.generator.action_builder.workflowName = 'test_workflow'
Example No. 10
 def setUp(self):
     """Setup."""
     self.it_table = {
         'source_database_name': 'fake_database',
         'source_table_name': 'fake_cen_tablename',
         'db_username': '******',
         'password_file': 'test-passwd',
         'connection_factories': 'test-conn-factory',
         'jdbcurl': 'oracle:jdbcurl',
         'mappers': '5'
     }
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.it_table_obj = ItTable(self.it_table, self.cfg_mgr)
     self.src_obj = SourceTable(self.cfg_mgr, self.it_table_obj)
Example No. 11
 def setUp(self):
     """Setup."""
     it_table = {
         'source_database_name': 'fake_database',
         'source_table_name': 'fake_cen_tablename',
         'db_username': '******',
         'password_file': 'test-passwd',
         'connection_factories': 'test-conn-factory',
         'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle'
         ':1521/fake_servicename',
         'mappers': '10'
     }
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     it_table_obj = ItTable(it_table, self.cfg_mgr)
     self.ora_obj = OracleTable(self.cfg_mgr, it_table_obj)
Example No. 12
 def setUp(self):
     """Setup."""
     self.file_h = open(BASE_DIR + '/fixtures/it_table_gen_split_by.txt',
                        'r')
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     it_table = {
         'source_database_name': 'fake_database',
         'source_table_name': 'fake_cen_tablename',
         'db_username': '******',
         'password_file': 'test-passwd',
         'connection_factories': 'test-conn-factory',
         'jdbcurl': 'jdbc:db2://fake.db2:50400/fake_servicename',
         'mappers': '10'
     }
     self.it_table_obj = ItTable(it_table, self.cfg_mgr)
Example No. 13
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.hive_params = {
         'cfg_mgr': self.cfg_mgr, 'action_type': 'hive',
         'name': 'avro_parquet',
         'ok': 'parquet_swap', 'error': 'kill',
         'script': 'parquet_live.hql',
         'job_tracker': 'my_tracker', 'name_node': 'node',
         'delete': ['foo', 'bar'],
         'mkdir': ['foo1', 'bar1'], 'job_xml': ['hive.xml'],
         'config': [{'oozie.launcher.action.main.class':
                     'org.apache.oozie.action.hadoop.Hive2Main'}],
         'param': ['InputDir=/home/tucu/input-data',
                   'OutputDir=${jobOutput}'],
         'file': ['my_file.txt'], 'archive': ['hive.zip']}
     self.my_hive = Hive(**self.hive_params)
Example No. 14
 def setUp(self):
     """setup"""
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.it_table_dict = {
         'db_username': '******',
         'password_file': 'test_passwd',
         'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle:1521/fake_servicename',
         'db_env': 'dev',
         'domain': 'test_domain',
         'source_database_name': 'test_DB',
         'source_table_name': 'test_TABLE',
         'split_by': 'test_column',
         'mappers': 3,
         'connection_factories': 'org.cloudera.test',
         'fetch_size': 100,
         'esp_appl_id': 'test_esp_id',
         'views': 'fake_view_im'
     }
Example No. 15
 def setUp(self):
     """Set Up."""
     self.perf_inventory = PerfInventory(ConfigManager(UNIT_TEST_ENV))
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
Example No. 16
 def setUp(self, mock_con):
     """Setup."""
     self.inventory = CheckBalancesInventory(ConfigManager(UNIT_TEST_ENV))
Example No. 17
 def test_unit_test_props(self, m_set):
     """test unit_test properties file"""
     _ = ConfigManager('unit_test', 'dev', True)
     self.assertTrue(isinstance(_, ConfigManager))
Example No. 18
 def test_jenkins_props(self, m_set):
     """test jenkins properties file"""
     _ = ConfigManager('jenkins', 'dev', True)
     self.assertTrue(isinstance(_, ConfigManager))
Example No. 19
 def test_perf_props(self, m_set):
     """test perf properties file"""
     _ = ConfigManager('perf', 'dev', True)
     self.assertTrue(isinstance(_, ConfigManager))
Example No. 20
 def setUp(self):
     """Setup."""
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
Example No. 21
 def setUp(self, m):
     """Setup."""
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     self.builder = ActionBuilder(self.cfg_mgr)
     self.cfg_mgr.hadoop_credstore_password_disable = False
Example No. 22
 def setUp(self):
     """Setup."""
     it_table = {
         'source_database_name': 'fake_database',
         'source_table_name': 'client',
         'db_username': '******',
         'password_file': 'test-passwd',
         'connection_factories': 'test-conn-factory',
         'jdbcurl': 'jdbc:sqlserver:',
         'mappers': '10'
     }
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     it_table_obj = ItTable(it_table, self.cfg_mgr)
     self.sql_obj = SqlServerTable(self.cfg_mgr, it_table_obj)
     self.mock_claim_tbl_dict_sqlserver = [{
         'full_table_name': 'claim.fake_database_fake_clm_tablename',
         'domain': 'claim',
         'target_dir': 'mdm/claim/fake_database/fake_clm_tablename',
         'split_by': 'fake_split_by',
         'mappers': '20',
         'jdbcurl': 'jdbc:sqlserver://fake.teradata/DATABASE=fake_database',
         'connection_factories':
             'com.cloudera.connector.teradata.TeradataManagerFactory',
         'db_username': '******',
         'password_file': 'jceks://hdfs/user/dev/fake.passwords.jceks#'
                          'fake.password.alias',
         'load': '010001',
         'fetch_size': '50000',
         'hold': '0',
         'automation_appl_id': 'TEST01',
         'views': 'fake_view_im|fake_view_open',
         'automation_group': '',
         'check_column': 'test_inc_column',
         'source_schema_name': 'dbo',
         'sql_query': 'TRANS > 40',
         'actions': '',
         'db_env': 'sys',
         'source_database_name': 'fake_database',
         'source_table_name': 'fake_clm_tablename'
     }]
Example No. 23
 def setUpClass(cls):
     cls.utilities = Utilities(ConfigManager(UNIT_TEST_ENV))
     cls.utilities_run = Utilities(ConfigManager('JENKINS', '', True))
     cls.td_tbl_weekly = {
         'load': '100001',
         'mappers': 20,
         'domain': 'fake_view_im',
         'target_dir': 'mdm/fake_view_im/fake_database_3/'
         'fake_services_tablename',
         'password_file': '/user/dev/fake.password.file',
         'source_table_name': 'fake_services_tablename',
         'hold': 0,
         'split_by': 'epi_id',
         'fetch_size': 50000,
         'source_database_name': 'fake_database_3',
         'connection_factories': 'com.cloudera.connector.teradata.'
         'TeradataManagerFactory',
         'full_table_name': 'fake_view_im.'
         'fake_database_3_fake_services_tablename',
         'db_username': '******',
         'jdbcurl': 'jdbc:teradata://fake.teradata/DATABASE='
         'fake_database_3',
         'views': 'analytics'
     }
     cls.oracle_tbl_monthly = {
         'load': '010001',
         'mappers': 20,
         'domain': 'fake_domain',
         'target_dir': 'mdm/fake_domain/fake_database_1/fake_svc_tablename',
         'password_file': '/user/dev/fake.password.file',
         'source_table_name': 'fake_svc_tablename',
         'hold': 0,
         'split_by': '',
         'fetch_size': 50000,
         'source_database_name': 'fake_database_1',
         'connection_factories': 'com.quest.oraoop.OraOopManagerFactory',
         'full_table_name':
         'fake_domain_fake_database_1_fake_svc_tablename',
         'db_username': '******',
         'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle:1521/fake_servicename'
     }
     cls.sql_tbl_quarterly = {
         'full_table_name': 'logs.fake_database_2_fake_tools_tablename',
         'domain': 'logs',
         'target_dir': 'mdm/logs/fake_database_2/fake_tools_tablename',
         'split_by': 'fake_split_by',
         'mappers': 10,
         'jdbcurl': 'jdbc:sqlserver://fake.sqlserver:'
         '1433;database=fake_database_2',
         'connection_factories': 'com.cloudera.sqoop.manager.'
         'DefaultManagerFactory',
         'db_username': '******',
         'password_file': '/user/dev/fake.password.file',
         'load': '001100',
         'fetch_size': 50000,
         'hold': 0,
         'source_database_name': 'fake_database_2',
         'source_table_name': 'fake_tools_tablename'
     }
     cls.db2_tbl_fortnightly = {
         'full_table_name': 'rx.fake_database_4_fake_rx_tablename',
         'domain': 'rx',
         'target_dir': 'mdm/rx/fake_database_4/fake_rx_tablename',
         'split_by': '',
         'mappers': 1,
         'jdbcurl': 'jdbc:db2://fake.db2:50400/fake_servicename',
         'connection_factories': 'com.cloudera.sqoop.manager.'
         'DefaultManagerFactory',
         'db_username': '******',
         'password_file': '/user/dev/fake.password.file',
         'load': '110100',
         'fetch_size': 50000,
         'hold': 0,
         'source_database_name': 'fake_database_4',
         'source_table_name': 'fake_rx_tablename'
     }
     cls.mysql_tbl_fortnightly = {
         'full_table_name': 'dashboard.fake_servicename',
         'domain': 'dashboard',
         'target_dir': 'mdm/dashboard/fake_servicename/',
         'split_by': '',
         'mappers': 1,
          'jdbcurl': 'jdbc:mysql://fake.mysql:3306/fake_servicename',
         'connection_factories': 'com.cloudera.sqoop.manager.'
         'DefaultManagerFactory',
         'db_username': '******',
         'password_file': '/user/dev/fake.password.file',
         'load': '110100',
         'fetch_size': 50000,
         'hold': 0,
         'source_database_name': 'dashboard',
         'source_table_name': 'fake_servicename'
     }
     cls.appl_ref_id_tbl = {
         'job_name': 'C1_FAKE_CALL_FAKE_DATABASE_DAILY',
         'frequency': 'Daily',
         'time': '6:00',
         'string_date': 'Every Day',
         'ksh_name': 'call_fake_database_daily',
         'domain': 'call',
         'db': 'fake_database',
         'environment': 'DEV'
     }
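
Fixtures built in setUpClass are created once and shared by every test in the class; a minimal illustrative test against one of the dicts above:

     def test_td_weekly_fixture(self):
         # Class attributes set in setUpClass are reachable through self.
         self.assertEqual(self.td_tbl_weekly['mappers'], 20)
         self.assertEqual(self.td_tbl_weekly['load'], '100001')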
Example No. 24
def main():
    """Command line arguments parser.
    Calls the appropriate handler method
    """
    global driver
    global logger

    parser = ArgumentParser()

    # Properties
    parser.add_argument('--db',
                        nargs=1,
                        type=str,
                        help='Used to provide a database name')
    parser.add_argument('--table',
                        nargs=1,
                        type=str,
                        help='Used to provide a table name')
    parser.add_argument('--frequency',
                        nargs=1,
                        type=str,
                        help='Used to provide a frequency')
    parser.add_argument('--teamname',
                        nargs=1,
                        type=str,
                        help='Used to provide a team name')
    parser.add_argument('--activate',
                        nargs=1,
                        type=str,
                        help='Used to provide an activator (yes/no)')
    parser.add_argument('--env',
                        nargs=1,
                        type=str,
                        required=True,
                        help='REQUIRED. Used to provide the ibis '
                        'environment for properties file.')
    parser.add_argument('--for-env',
                        nargs=1,
                        type=str,
                        help='Optional. Used to create a dev workflow '
                        'on prod.')
    # Checks and Balances
    parser.add_argument('--checks-balances',
                        action='store_true',
                        help='Used to interact with check balances table. '
                        'required: --db {db_name}, --table {tbl_name}. '
                        'options: --update-lifespan list[str], '
                        '--update-all-lifespan')

    # Business Table
    parser.add_argument('--submit-request',
                        type=FileType('r'),
                        help='Used to generate oozie workflow')
    parser.add_argument('--export-request',
                        type=FileType('r'),
                        help='Used to generate oozie workflow')
    parser.add_argument('--submit-request-prod',
                        type=FileType('r'),
                        help='Used to mark changes in it table '
                        'into staging_it_table')

    # IT Table
    parser.add_argument('--save-it-table',
                        action='store_true',
                        help='Saves all records in it_table to file')

    parser.add_argument('--update-it-table',
                        type=FileType('r'),
                        help='Used to submit text file containing table '
                        'properties for the IT table')

    # IT Table Export
    parser.add_argument('--update-it-table-export',
                        type=FileType('r'),
                        help='Used to submit text file containing table '
                        'properties for the IT table export')

    # Run
    parser.add_argument('--run-job',
                        type=str,
                        help='Used to submit a workflow to run an oozie job')

    # View generation
    parser.add_argument('--view',
                        action='store_true',
                        help='Create a view. required: --view-name '
                        '{name}, --db {db_name}, '
                        '--table {tbl_name}. optional: '
                        '--select {cols}, '
                        '--where {statement}')
    parser.add_argument('--view-name',
                        nargs=1,
                        type=str,
                        help='Used to provide a view name')
    parser.add_argument('--select',
                        nargs='+',
                        type=str,
                        help='Used to provide a list of columns')
    parser.add_argument('--where',
                        nargs=1,
                        type=str,
                        help='Used to provide a where statement')

    # Generate workflows base on filter
    parser.add_argument('--gen-esp-workflow',
                        nargs='+',
                        type=str,
                        help='Create workflow(s) based on a list of ESP '
                        'ids separated by spaces.')
    parser.add_argument('--gen-esp-workflow-tables',
                        type=FileType('r'),
                        help='Create workflow(s) based on a list of '
                        'tables from request file')

    # config based workflows
    parser.add_argument('--gen-config-workflow',
                        nargs=1,
                        type=FileType('r'),
                        help='Used to generate custom hive or'
                        ' shell scripts in workflows')
    parser.add_argument('--config-workflow-properties',
                        nargs=1,
                        type=str,
                        help='Used to provide config workflow properties')
    parser.add_argument('--queue-name',
                        nargs=1,
                        type=str,
                        help='Used for providing hadoop queue name')

    parser.add_argument('--esp-id', nargs=1, type=str, help='esp-appl-id')
    parser.add_argument('--message',
                        nargs=1,
                        type=str,
                        help='Provide description for bmrs')

    parser.add_argument('--export',
                        action='store_true',
                        help='Export hadoop table to Teradata. '
                        'required: --db {db}, '
                        'name of db you want to export, '
                        '--table {table}, name of table '
                        'you want to export, --to {db}.{table}, '
                        'name of database and '
                        'table to export to')
    parser.add_argument('--to',
                        nargs=1,
                        type=str,
                        help='Used to provide {database}.{table} '
                        'to export to in Teradata')

    parser.add_argument('--auth-test',
                        action='store_true',
                        help='Test sqoop auth. '
                        'required: --source-db {db}, name of db'
                        ' you want to export, '
                        '--source-table {table}, name of table '
                        'you want to export, '
                        '--jdbc-url {jdbcurl}, connection string '
                        'for target schema, '
                        '--user-name {user_name}, db user name, '
                        '--password-file {hdfs_path}, hdfs'
                        ' password file path')

    # Export to Oracle
    parser.add_argument('--export-oracle',
                        action='store_true',
                        help='Export hadoop table to Oracle. '
                        'required: --source-db {db}, name of db '
                        'you want to export, '
                        '--source-table {table}, name of table '
                        'you want to export, '
                        '--source-dir {dir}, hdfs location of '
                        'export table, '
                        '--jdbc-url {jdbcurl}, connection string'
                        ' for target schema, '
                        '--target-schema {targetdb}, oracle schema, '
                        '--target-table {targettable}, oracle table, '
                        '--update-key {updatekey}, non-mandatory'
                        ' param - primary key on target table, '
                        '--user-name {username}, oracle username, '
                        '--pass-alias {passalias}, oracle password'
                        ' alias or jceks url')
    parser.add_argument('--source-db',
                        nargs=1,
                        type=str,
                        help='Used to provide source hive schema to'
                        ' export to oracle')
    parser.add_argument('--source-table',
                        nargs=1,
                        type=str,
                        help='Used to provide source hive table to '
                        'export to oracle')
    parser.add_argument('--source-dir',
                        nargs=1,
                        type=str,
                        help='Used to provide hdfs source directory '
                        'to export to oracle; directory should not '
                        'include the final table directory')
    parser.add_argument('--jdbc-url',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle connection '
                        'information to export to oracle')
    parser.add_argument('--target-schema',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle target schema '
                        'to export to oracle')
    parser.add_argument('--target-table',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle target table to '
                        'export to oracle')
    parser.add_argument('--update-key',
                        nargs='*',
                        type=str,
                        help='Used to provide oracle primary key to'
                        ' export to oracle')
    parser.add_argument('--user-name',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle user name to export'
                        ' to oracle')
    parser.add_argument('--password-file',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle password file')
    parser.add_argument('--pass-alias',
                        nargs=1,
                        type=str,
                        help='Used to provide oracle password alias to'
                        ' export to oracle')
    parser.add_argument('--source-type',
                        nargs=1,
                        type=str,
                        help='Used to provide source vendor type')

    # Export to Teradata
    parser.add_argument(
        '--export_teradata',
        action='store_true',
        help='Export hadoop table to Teradata. '
        'required: --source-db {db}, name of db you want to export, '
        '--source-table {table}, name of table you want to export, '
        '--source-dir {dir}, hdfs location of export table, '
        '--jdbc-url {jdbcurl}, connection string for target schema, '
        '--target-schema {targetdb}, teradata database, '
        '--target-table {targettable}, teradata table, '
        '--user-name {username}, teradata username, '
        '--pass-alias {passalias}, teradata password alias or jceks url')

    # Generate IT request file input file
    parser.add_argument('--gen-it-table',
                        type=FileType('r'),
                        help='Generate IT table with automatic split-by '
                        'if possible')
    parser.add_argument('--gen-qa-data-sampling',
                        type=FileType('r'),
                        help='Generate workflow for QA data sampling')
    parser.add_argument('--parse-request-file',
                        type=FileType('r'),
                        help='Print each table in request file as json')

    # Workflow actions
    parser.add_argument('--hive',
                        nargs='*',
                        type=str,
                        help='Generates hive action workflow')
    parser.add_argument('--shell',
                        nargs='*',
                        type=str,
                        help='Generates shell action workflow')
    parser.add_argument('--impala',
                        nargs='*',
                        type=str,
                        help='Generate impala action workflow')
    parser.add_argument('--gen-action',
                        nargs='*',
                        type=str,
                        help='Generates actions for hive, shell, impala '
                        'in one xml')

    # Copy backup files to live
    parser.add_argument('--retrieve-backup',
                        action='store_true',
                        help='Copies backup files to live. required: '
                        '--db {name} --table {name}')

    # Update freq_ingest Activator
    parser.add_argument('--update-activator',
                        action='store_true',
                        help='Provide team frequency, activator (yes/no), '
                        'team name and full table name')

    # Drop all the table from selected database
    parser.add_argument('--wipe-perf-env',
                        nargs=1,
                        type=str,
                        help='Provide the team_name or database '
                        'name for dropping all tables')

    parser.add_argument('--reingest-all',
                        action='store_true',
                        help='Use this option with wipe-perf-env to '
                        'reingest all tables')

    # Not saving workflows to git
    parser.add_argument('--no-git',
                        action='store_true',
                        help='Do not save workflows to git')
    # No dry run workflow
    parser.add_argument('--no-dry-run',
                        action='store_true',
                        help='Do not dry-run the workflow')

    parser.add_argument('--timeout',
                        type=str,
                        help='Timeout duration for auto split by')
    parser.add_argument('--ingest-version',
                        action='store_true',
                        help='Get the ingest version used for the xml')
    parser.add_argument('--kite-ingest',
                        type=FileType('r'),
                        help='Used to generate kite-ingest workflow')

    args = parser.parse_args()

    usr_opts = vars(args)
    # Filter usr_opt of None values
    usr_opts = {k: usr_opts[k] for k in usr_opts if usr_opts[k] is not None}
    # Filter usr_opt of False values
    usr_opts = {k: usr_opts[k] for k in usr_opts if usr_opts[k] is not False}

    ibis_opts = {
        'checks_balances': checks_balances,
        'export': export,
        'gen_esp_workflow_tables': gen_esp_workflow_tables,
        'update_activator': update_activator,
        'wipe_perf_env': wipe_perf_env,
        'gen_esp_workflow': gen_esp_workflow,
        'gen_config_workflow': gen_config_workflow,
        'retrieve_backup': retrieve_backup,
        'run_job': run_job,
        'gen_it_table': gen_it_table,
        'submit_request': submit_request,
        'export_request': export_request,
        'export_oracle': export_oracle,
        'save_it_table': save_it_table,
        'update_it_table': update_it_table,
        'update_it_table_export': update_it_table_export,
        'auth_test': auth_test,
        'ingest_version': ingest_version,
        'parse_request_file': parse_request_file,
        'kite_ingest': gen_kite_workflow
    }

    is_failed = False
    if args.env:
        cfg_mgr = ConfigManager(args.env[0], args.for_env)
        file_permission = 0774

        if not os.path.isdir(cfg_mgr.files):
            os.mkdir(cfg_mgr.files)
            os.chmod(cfg_mgr.files, file_permission)
        if not os.path.isdir(cfg_mgr.logs):
            os.mkdir(cfg_mgr.logs)
            os.chmod(cfg_mgr.logs, file_permission)
        if not os.path.isdir(cfg_mgr.saves):
            os.mkdir(cfg_mgr.saves)
            os.chmod(cfg_mgr.saves, file_permission)

        # clear log file
        with open(cfg_mgr.log_file, 'wb'):
            pass
        logger = get_logger(cfg_mgr)

        driver = Driver(cfg_mgr)

        try:
            # Utilize ibis_opts to call correct function(s)
            for key in usr_opts.keys():
                if ibis_opts.get(key, None):
                    # call the appropriate method
                    success = ibis_opts[key](args)
                    if success is False:
                        is_failed = True
            inventory.Inventory.close()
        except Exception:
            logger.error('\n' + traceback.format_exc())
            is_failed = True

        # print the log
        with open(cfg_mgr.log_file, 'rb') as file_handler:
            log_text = file_handler.read()
            if log_text:
                print '+' * 20
                print 'Printing ibis.log'
                print '=' * 20
                print log_text
                print '+' * 20
    else:
        is_failed = True
        err_msg = ('Environment required for ibis. '
                   'Please specify the --env argument and provide '
                   'an environment.')
        print err_msg

    if is_failed:
        # expose ibis failure to the calling env
        sys.exit(1)
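
Putting the parser and the ibis_opts dispatch table together, typical invocations look like the following; the entry-point script name is an assumption:

# Hypothetical command lines; only --env is mandatory.
#   python ibis.py --env dev --save-it-table
#   python ibis.py --env dev --submit-request request.txt --no-git
#   python ibis.py --env prod --export --db src_db --table src_tbl \
#       --to tgt_db.tgt_tbl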
Example No. 25
 def setUpClass(cls):
     cls.sqooper = SqoopHelper(ConfigManager(UNIT_TEST_ENV))
Example No. 26
 def setUpClass(cls, mock_connect):
     """Setup."""
     cls.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     cls.inventory = ESPInventory(cls.cfg_mgr)
Example No. 27
 def setUpClass(cls):
     """setup"""
     cls.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
Example No. 28
 def setUp(self):
     self.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     jdbcurl = ('jdbc:oracle:thin:@//fake.oracle:1521/'
                'fake_servicename')
     self.auth_test = AuthTest(self.cfg_mgr, 'fake_database', 'test_table',
                               jdbcurl)
Example No. 29
 def setUpClass(cls, mock_connect):
     """setup"""
     cls.cfg_mgr = ConfigManager(UNIT_TEST_ENV)
     cls.inventory = ExportITInventory(cls.cfg_mgr)
Example No. 30
 def setUp(self):
     unittest.TestCase.setUp(self)
     self.vizoozie = VizOozie(ConfigManager(UNIT_TEST_ENV))