def connect(self):
        """Open (or reuse) a psycopg2 connection using the schema config.

        Returns True when a usable connection exists, False when the
        schema is invalid or the connection attempt fails.
        """
        if self.is_valid_postgres_schema() is False:
            return False

        # Reuse a live connection (psycopg2 sets .closed to 0 while open).
        if self.conn is not None and self.conn.closed == 0:
            return True
        try:
            # Each setting may be a vault reference that must be resolved
            # to its concrete value before use.
            svr = ACHelpers.resolve_vault_references(
                self.config_dict[POSTGRESQL_SCHEMA_HOSTNAME])
            prt = ACHelpers.resolve_vault_references(
                self.config_dict[POSTGRESQL_SCHEMA_PORT])
            usr = ACHelpers.resolve_vault_references(
                self.config_dict[POSTGRESQL_SCHEMA_USERNAME])
            pwd = ACHelpers.resolve_vault_references(
                self.config_dict[POSTGRESQL_SCHEMA_PASSWORD])
            db = ACHelpers.resolve_vault_references(
                self.config_dict[POSTGRESQL_SCHEMA_DATABASE])
            self.conn = psycopg2.connect(host=svr,
                                         port=prt,
                                         user=usr,
                                         password=pwd,
                                         database=db)
        except Exception as e:
            self.conn = None
            ACLogger().get_logger().error('Failed PostgreSQL login: %s' %
                                          str(e))
            return False
        # Fix: report success explicitly; previously the function fell off
        # the end and returned None on a successful connect.
        return True
# 예제 #2 ("Example #2") -- stray scraper/pagination artifact, kept as comment
# 0
 def execute(self, key_values_dict, pdi_path, pdi_file_path=None):
     """Run the Pentaho DI 'pan.sh' transformation for each configured key.

     For every key a compressed output blob and a boolean status are stored
     in key_values_dict[key]; rv tracks overall success (set False when a
     run produces no shell output).

     NOTE(review): the trailing 'finally:' clause has no body here -- this
     block appears truncated; confirm against the original source.
     """
     rv = True
     # Log level passed to pan.sh; defaults to 'Minimal' when unconfigured.
     if PENTAHODI_LOGLEVEL in self.config_dict:
         loglevel = '%s' % self.config_dict[PENTAHODI_LOGLEVEL]
     else:
         loglevel = 'Minimal'
     shellret = ''
     if self.config_dict[PENTAHODI_KEYS] is not None and \
         isinstance(self.config_dict[PENTAHODI_KEYS], dict) is True:
         # NOTE(review): the guard above checks PENTAHODI_KEYS but the loop
         # iterates TABLEAU_SERVER_KEYS -- looks like a copy/paste slip;
         # confirm which key set is intended.
         for key, item in self.config_dict[TABLEAU_SERVER_KEYS].iteritems():
             # Default every key to "no output / failed" before running.
             key_values_dict[key] = dict()
             key_values_dict[key]['output'] = ACHelpers.compress('')
             key_values_dict[key]['status'] = False
             pdi = os.path.join(pdi_path, 'pan.sh')
             # Optional file the transformation writes its results into.
             if PENTAHODI_INTEGRATION_OUTPUT_FILENAME in item:
                 output_file_path = os.path.join(os.getcwd(), item[PENTAHODI_INTEGRATION_OUTPUT_FILENAME])
             else:
                 output_file_path = None
             if pdi_file_path is None:
                 pdi_file = os.path.join(os.getcwd(), 'docker-share', item[PENTAHODI_FILENAME])
             else:
                 pdi_file = os.path.join(pdi_file_path, item[PENTAHODI_FILENAME])
             try:
                 # pan.sh -file="/PRD/Customer Dimension.ktr" -level=Minimal -param:MASTER_HOST=192.168.1.3 -param:MASTER_PORT=8181
                 params = [pdi, '-file=%s' % pdi_file, '-level=%s' % loglevel]
                 if PENTAHODI_FILE_PARAMETERS in item:
                     for p, pval in item[PENTAHODI_FILE_PARAMETERS].iteritems():
                         params.append('-param:%s=%s' % (p, pval))
                 shellret = subprocess.check_output(params)
                 ACLogger().get_logger().info("calling pan by: %s" % params)
                 key_values_dict[key]['output'] = ACHelpers.compress('')
                 # Any non-empty stdout from pan.sh counts as success.
                 if shellret is not None and shellret != '':
                     key_values_dict[key]['status'] = True
                     if output_file_path is not None:
                         ACLogger().get_logger().info("looking for file @: %s" % output_file_path)
                         try:
                             statinfo = os.stat(output_file_path)
                         except:
                             # Missing output file is tolerated; 'output'
                             # simply stays empty for this key.
                             pass
                         else:
                             if  statinfo.st_size > 0:
                                 with open(output_file_path, 'rb') as f:
                                     d = f.read()
                                     if d is not None and len(d) > 0:
                                         key_values_dict[key]['output'] = ACHelpers.compress(d)
                                         ACLogger().get_logger().info(" got outuput data for key %s: %s" % (key,d))
                 else:
                     key_values_dict[key]['status'] = False
                     rv = False
             except subprocess.CalledProcessError, e:
                 ACLogger().get_logger().error('PentahoDI execture exception: %s' % str(e))
                 key_values_dict[key]['status'] = False
             finally:
                 # NOTE(review): the 'finally' body is missing in this copy.
    def postgres_execute(self, key_name, the_file):
        """Execute the configured SQL statement(s) for key_name using the
        open file object the_file.

        Returns False on any validation or configuration failure.

        NOTE(review): this copy ends at a bare 'else:' inside the nested
        handle_exceptions helper -- the block appears truncated.
        """
        # Guard: need config, a key, a live DB connection and a file object.
        if self.config_dict is None or key_name is None or self.connect() is False or the_file is None:
            return False
        # 'file' is the Python 2 built-in type; this check is Python 2 only.
        if isinstance(the_file, file) is False or the_file.closed is True:
            ACLogger().get_logger().error('ACPostgreSQL: must have open file object')
            return False
        try:
            # Look up the SQL text and statement type configured for this key;
            # the SQL string itself may contain vault references.
            stmnts = self.config_dict[POSTGRESQL_STATEMENTS]
            query_set = stmnts[key_name]
            sql = query_set[POSTGRESQL_SQL_STRING]
            sql = ACHelpers.resolve_vault_references(sql)
            query_type = query_set[POSTGRESQL_TYPE]
        except KeyError:
            ACLogger().get_logger().error('ACPostgreSQL: cannot get statements from config for key %s', key_name)
            return False
        # Optional truthy config flag; its exact use is outside this view.
        insert_column_names = False
        try:
            if ACHelpers.string_is_true(query_set[POSTGRESQL_INSERT_COLUMN_NAMES]) is True:
                insert_column_names = True
        except KeyError:
            pass

        # Optional to-JSON parser flag; when present, insert_column_names is
        # forced True (see the logged error below).
        to_json_parser = False
        try:
            if POSTGRESQL_SQL_PARSER in query_set:
                if POSTGRESQL_SQL_TO_JSON_PARSER in query_set[POSTGRESQL_SQL_PARSER]:
                    to_json_parser = True
                    if insert_column_names is False:
                        ACLogger().get_logger().error('ACPostgreSQL:  key %s must be true when %s present, resetting'
                                                  % (POSTGRESQL_INSERT_COLUMN_NAMES, POSTGRESQL_SQL_TO_JSON_PARSER))
                        insert_column_names = True
        except KeyError:
            pass

        def handle_exceptions(cursor, the_key_name, ee, the_query_set, commit=False):
            # Shared failure path: log, optionally commit prior work, close
            # the cursor and return False -- unless exceptions are configured
            # to be ignored for this query set.
            # NOTE(review): the membership test uses the enclosing query_set
            # while the value is read from the_query_set (the parameter);
            # a KeyError is possible if they ever differ.
            if POSTGRESQL_IGNORE_EXCEPTIONS in query_set:
                ignore_exceptions = ACHelpers.string_is_true(the_query_set[POSTGRESQL_IGNORE_EXCEPTIONS])
            else:
                ignore_exceptions = False

            if ignore_exceptions is False:
                ACLogger().get_logger().error('PostgreSql exception for key %s (%s)' % (the_key_name, ee))
                if commit is True:
                    try:
                        self.conn.commit()
                    except Exception, exe:
                         ACLogger().get_logger().info('PostgreSql unable to commit post exception for key %s (%s)' %
                                                      (the_key_name, exe))
                cursor.close()
                return False
            else:
                # NOTE(review): 'else' body missing in this copy (truncated).
    def connect(self):
        """Open (or reuse) a psycopg2 connection from the schema config.

        Returns True on success, False when the schema is invalid or the
        connection attempt fails.
        """
        if self.is_valid_postgres_schema() is False:
            return False

        # Reuse the existing connection while it is still open
        # (psycopg2 sets .closed to 0 for a live connection).
        if self.conn is not None and self.conn.closed == 0:
            return True
        try:
            # Settings may be vault references; resolve to concrete values.
            svr = ACHelpers.resolve_vault_references(self.config_dict[POSTGRESQL_SCHEMA_HOSTNAME])
            prt = ACHelpers.resolve_vault_references(self.config_dict[POSTGRESQL_SCHEMA_PORT])
            usr = ACHelpers.resolve_vault_references(self.config_dict[POSTGRESQL_SCHEMA_USERNAME])
            pwd = ACHelpers.resolve_vault_references(self.config_dict[POSTGRESQL_SCHEMA_PASSWORD])
            db = ACHelpers.resolve_vault_references(self.config_dict[POSTGRESQL_SCHEMA_DATABASE])
            self.conn = psycopg2.connect(host=svr, port=prt,
                                         user=usr, password=pwd, database=db)
        except Exception as e:
            self.conn = None
            ACLogger().get_logger().error('Failed PostgreSQL login: %s' % str(e))
            return False
        # Fix: report success explicitly; previously the function fell off
        # the end and returned None on a successful connect.
        return True
        def handle_exceptions(cursor, the_key_name, ee, the_query_set, commit=False):
            """Shared failure handler for a statement that raised.

            Logs the error, optionally commits work done before the failure,
            closes the cursor and returns False.  When the query set enables
            POSTGRESQL_IGNORE_EXCEPTIONS the error is swallowed instead.
            """
            # Fix: consult the_query_set (the parameter) for the membership
            # test as well; the original tested the enclosing query_set,
            # which could KeyError if a different set were passed in.
            if POSTGRESQL_IGNORE_EXCEPTIONS in the_query_set:
                ignore_exceptions = ACHelpers.string_is_true(the_query_set[POSTGRESQL_IGNORE_EXCEPTIONS])
            else:
                ignore_exceptions = False

            if ignore_exceptions is False:
                ACLogger().get_logger().error('PostgreSql exception for key %s (%s)' % (the_key_name, ee))
                if commit is True:
                    try:
                        # Keep whatever succeeded before the failure.
                        self.conn.commit()
                    except Exception as exe:
                        ACLogger().get_logger().info('PostgreSql unable to commit post exception for key %s (%s)' %
                                                     (the_key_name, exe))
                cursor.close()
                return False
        def handle_exceptions(cursor,
                              the_key_name,
                              ee,
                              the_query_set,
                              commit=False):
            """Shared failure handler for a statement that raised.

            Logs the error, optionally commits work done before the failure,
            closes the cursor and returns False.  When the query set enables
            POSTGRESQL_IGNORE_EXCEPTIONS the error is swallowed instead.
            """
            # Fix: consult the_query_set (the parameter) for the membership
            # test as well; the original tested the enclosing query_set,
            # which could KeyError if a different set were passed in.
            if POSTGRESQL_IGNORE_EXCEPTIONS in the_query_set:
                ignore_exceptions = ACHelpers.string_is_true(
                    the_query_set[POSTGRESQL_IGNORE_EXCEPTIONS])
            else:
                ignore_exceptions = False

            if ignore_exceptions is False:
                ACLogger().get_logger().error(
                    'PostgreSql exception for key %s (%s)' %
                    (the_key_name, ee))
                if commit is True:
                    try:
                        # Keep whatever succeeded before the failure.
                        self.conn.commit()
                    except Exception as exe:
                        ACLogger().get_logger().info(
                            'PostgreSql unable to commit post exception for key %s (%s)'
                            % (the_key_name, exe))
                cursor.close()
                return False
 def execute(self, key_values_dict, pdi_path, pdi_file_path=None):
     """Run the Pentaho DI 'pan.sh' transformation for each configured key.

     For every key a compressed output blob and a boolean status are stored
     in key_values_dict[key]; rv tracks overall success (set False when a
     run produces no shell output).

     NOTE(review): the trailing 'finally:' clause has no body here -- this
     block appears truncated; confirm against the original source.
     """
     rv = True
     # Log level passed to pan.sh; defaults to 'Minimal' when unconfigured.
     if PENTAHODI_LOGLEVEL in self.config_dict:
         loglevel = '%s' % self.config_dict[PENTAHODI_LOGLEVEL]
     else:
         loglevel = 'Minimal'
     shellret = ''
     if self.config_dict[PENTAHODI_KEYS] is not None and \
         isinstance(self.config_dict[PENTAHODI_KEYS], dict) is True:
         # NOTE(review): guard checks PENTAHODI_KEYS but the loop iterates
         # TABLEAU_SERVER_KEYS -- likely a copy/paste slip; confirm intent.
         for key, item in self.config_dict[TABLEAU_SERVER_KEYS].iteritems():
             # Default every key to "no output / failed" before running.
             key_values_dict[key] = dict()
             key_values_dict[key]['output'] = ACHelpers.compress('')
             key_values_dict[key]['status'] = False
             pdi = os.path.join(pdi_path, 'pan.sh')
             # Optional file the transformation writes its results into.
             if PENTAHODI_INTEGRATION_OUTPUT_FILENAME in item:
                 output_file_path = os.path.join(
                     os.getcwd(),
                     item[PENTAHODI_INTEGRATION_OUTPUT_FILENAME])
             else:
                 output_file_path = None
             if pdi_file_path is None:
                 pdi_file = os.path.join(os.getcwd(), 'docker-share',
                                         item[PENTAHODI_FILENAME])
             else:
                 pdi_file = os.path.join(pdi_file_path,
                                         item[PENTAHODI_FILENAME])
             try:
                 # pan.sh -file="/PRD/Customer Dimension.ktr" -level=Minimal -param:MASTER_HOST=192.168.1.3 -param:MASTER_PORT=8181
                 params = [
                     pdi,
                     '-file=%s' % pdi_file,
                     '-level=%s' % loglevel
                 ]
                 if PENTAHODI_FILE_PARAMETERS in item:
                     for p, pval in item[
                             PENTAHODI_FILE_PARAMETERS].iteritems():
                         params.append('-param:%s=%s' % (p, pval))
                 shellret = subprocess.check_output(params)
                 ACLogger().get_logger().info("calling pan by: %s" % params)
                 key_values_dict[key]['output'] = ACHelpers.compress('')
                 # Any non-empty stdout from pan.sh counts as success.
                 if shellret is not None and shellret != '':
                     key_values_dict[key]['status'] = True
                     if output_file_path is not None:
                         ACLogger().get_logger().info(
                             "looking for file @: %s" % output_file_path)
                         try:
                             statinfo = os.stat(output_file_path)
                         except:
                             # Missing output file is tolerated; 'output'
                             # simply stays empty for this key.
                             pass
                         else:
                             if statinfo.st_size > 0:
                                 with open(output_file_path, 'rb') as f:
                                     d = f.read()
                                     if d is not None and len(d) > 0:
                                         key_values_dict[key][
                                             'output'] = ACHelpers.compress(
                                                 d)
                                         ACLogger().get_logger().info(
                                             " got outuput data for key %s: %s"
                                             % (key, d))
                 else:
                     key_values_dict[key]['status'] = False
                     rv = False
             except subprocess.CalledProcessError, e:
                 ACLogger().get_logger().error(
                     'PentahoDI execture exception: %s' % str(e))
                 key_values_dict[key]['status'] = False
             finally:
                 # NOTE(review): the 'finally' body is missing in this copy.
    def execute(self, key_values_dict, jupyter_dir, notebook_file_dir=None):
        """Execute each configured Jupyter notebook via 'jupyter nbconvert'.

        Per key, stores a compressed output blob and a boolean status in
        key_values_dict[key]; rv tracks overall success.

        NOTE(review): the trailing 'finally:' clause has no body -- this
        block appears truncated; confirm against the original source.
        """
        rv = True
        shellret = ''
        if self.config_dict[PYTHON_ANACONDA_KEYS] is not None and \
            isinstance(self.config_dict[PYTHON_ANACONDA_KEYS], dict) is True:
            # NOTE(review): guard checks PYTHON_ANACONDA_KEYS but the loop
            # iterates TABLEAU_SERVER_KEYS -- likely a copy/paste slip.
            for key, item in self.config_dict[TABLEAU_SERVER_KEYS].iteritems():
                key_values_dict[key] = dict()
                output_file_path = None
                # Use the provided jupyter dir, else the conda default path.
                if jupyter_dir is not None:
                    jupyter = os.path.join(jupyter_dir, 'jupyter')
                else:
                    jupyter = '/opt/conda/bin/jupyter'
                # Resolve the notebook (and optional output file) location.
                if notebook_file_dir is None:
                    notebook_file = os.path.join(os.getcwd(), 'docker-share', item[PYTHON_ANACONDA_FILENAME])
                    if PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME in item:
                        output_file_path = os.path.join(os.getcwd(), 'docker-share',
                                                    item[PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME])
                else:
                    notebook_file = os.path.join(notebook_file_dir, item[PYTHON_ANACONDA_FILENAME])
                    if PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME in item:
                        output_file_path = os.path.join(notebook_file_dir,
                                                    item[PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME])
                try:
                    try:
                        statinfo = os.stat(notebook_file)
                    except:
                        # Stat failure is ignored here (bare except).
                        pass
                    else:
                        # Empty notebook file: abort the whole run.
                        # NOTE(review): this early return skips any
                        # remaining keys in the loop.
                        if statinfo is None or statinfo.st_size == 0:
                            ACLogger().get_logger().error(
                                'ACPythonAnaconda unable to open notebook_file %s' % notebook_file)
                            key_values_dict[key]['status'] = False
                            return False

                    params = [jupyter, 'nbconvert', '--execute' , '%s' % notebook_file]
                    # Notebook parameters are passed as environment variables.
                    my_env = os.environ.copy()
                    if PYTHON_ANACONDA_FILE_PARAMETERS in item:
                        for p, pval in item[PYTHON_ANACONDA_FILE_PARAMETERS].iteritems():
                            my_env[p] = pval
                    #ACLogger.log_and_print("calling jupyter with env variables of: %s" % my_env)
                    shellret = subprocess.check_output(params, env=my_env, stderr=subprocess.STDOUT)
                    key_values_dict[key]['output'] = ACHelpers.compress('')
                    ACLogger.log_and_print("calling jupyter by: %s" % params)
                    # Any non-None result (even empty) counts as success here.
                    if shellret is not None:
                        key_values_dict[key]['status'] = True
                        if output_file_path is not None:
                            try:
                                statinfo = os.stat(output_file_path)
                            except:
                                # Missing output file tolerated; 'output'
                                # stays empty for this key.
                                pass
                            else:
                                if statinfo.st_size > 0:
                                    with open(output_file_path, 'rb') as f:
                                        d = f.read()
                                        if d is not None and len(d) > 0:
                                            key_values_dict[key]['output'] = ACHelpers.compress(d)
                                            ACLogger().get_logger().info(" got outuput data for key %s: %s" % (key,d))
                    else:
                        key_values_dict[key]['status'] = False
                        rv = False

                except Exception, e:
                    ACLogger().get_logger().error('ACPythonAnaconda execute exception: %s shellret(%s)' % (str(e), shellret))
                    key_values_dict[key]['status'] = False
                finally:
                    # NOTE(review): the 'finally' body is missing in this copy.
    def postgres_execute(self, key_name, the_file):
        """Execute the configured SQL statement(s) for key_name using the
        open file object the_file.

        Returns False on any validation or configuration failure.

        NOTE(review): this copy ends at a bare 'else:' inside the nested
        handle_exceptions helper -- the block appears truncated.
        """
        # Guard: need config, a key, a live DB connection and a file object.
        if self.config_dict is None or key_name is None or self.connect(
        ) is False or the_file is None:
            return False
        # 'file' is the Python 2 built-in type; this check is Python 2 only.
        if isinstance(the_file, file) is False or the_file.closed is True:
            ACLogger().get_logger().error(
                'ACPostgreSQL: must have open file object')
            return False
        try:
            # Look up the SQL text and statement type configured for this key;
            # the SQL string itself may contain vault references.
            stmnts = self.config_dict[POSTGRESQL_STATEMENTS]
            query_set = stmnts[key_name]
            sql = query_set[POSTGRESQL_SQL_STRING]
            sql = ACHelpers.resolve_vault_references(sql)
            query_type = query_set[POSTGRESQL_TYPE]
        except KeyError:
            ACLogger().get_logger().error(
                'ACPostgreSQL: cannot get statements from config for key %s',
                key_name)
            return False
        # Optional truthy config flag; its exact use is outside this view.
        insert_column_names = False
        try:
            if ACHelpers.string_is_true(
                    query_set[POSTGRESQL_INSERT_COLUMN_NAMES]) is True:
                insert_column_names = True
        except KeyError:
            pass

        # Optional to-JSON parser flag; when present, insert_column_names is
        # forced True (see the logged error below).
        to_json_parser = False
        try:
            if POSTGRESQL_SQL_PARSER in query_set:
                if POSTGRESQL_SQL_TO_JSON_PARSER in query_set[
                        POSTGRESQL_SQL_PARSER]:
                    to_json_parser = True
                    if insert_column_names is False:
                        ACLogger().get_logger().error(
                            'ACPostgreSQL:  key %s must be true when %s present, resetting'
                            % (POSTGRESQL_INSERT_COLUMN_NAMES,
                               POSTGRESQL_SQL_TO_JSON_PARSER))
                        insert_column_names = True
        except KeyError:
            pass

        def handle_exceptions(cursor,
                              the_key_name,
                              ee,
                              the_query_set,
                              commit=False):
            # Shared failure path: log, optionally commit prior work, close
            # the cursor and return False -- unless exceptions are configured
            # to be ignored for this query set.
            # NOTE(review): the membership test uses the enclosing query_set
            # while the value is read from the_query_set (the parameter);
            # a KeyError is possible if they ever differ.
            if POSTGRESQL_IGNORE_EXCEPTIONS in query_set:
                ignore_exceptions = ACHelpers.string_is_true(
                    the_query_set[POSTGRESQL_IGNORE_EXCEPTIONS])
            else:
                ignore_exceptions = False

            if ignore_exceptions is False:
                ACLogger().get_logger().error(
                    'PostgreSql exception for key %s (%s)' %
                    (the_key_name, ee))
                if commit is True:
                    try:
                        self.conn.commit()
                    except Exception, exe:
                        ACLogger().get_logger().info(
                            'PostgreSql unable to commit post exception for key %s (%s)'
                            % (the_key_name, exe))
                cursor.close()
                return False
            else:
                # NOTE(review): 'else' body missing in this copy (truncated).
# 예제 #10 ("Example #10") -- stray scraper/pagination artifact, kept as comment
# 0
    def execute(self, key_values_dict, jupyter_dir, notebook_file_dir=None):
        rv = True
        shellret = ''
        if self.config_dict[PYTHON_ANACONDA_KEYS] is not None and \
            isinstance(self.config_dict[PYTHON_ANACONDA_KEYS], dict) is True:
            for key, item in self.config_dict[TABLEAU_SERVER_KEYS].iteritems():
                key_values_dict[key] = dict()
                output_file_path = None
                if jupyter_dir is not None:
                    jupyter = os.path.join(jupyter_dir, 'jupyter')
                else:
                    jupyter = '/opt/conda/bin/jupyter'
                if notebook_file_dir is None:
                    notebook_file = os.path.join(
                        os.getcwd(), 'docker-share',
                        item[PYTHON_ANACONDA_FILENAME])
                    if PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME in item:
                        output_file_path = os.path.join(
                            os.getcwd(), 'docker-share',
                            item[PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME])
                else:
                    notebook_file = os.path.join(
                        notebook_file_dir, item[PYTHON_ANACONDA_FILENAME])
                    if PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME in item:
                        output_file_path = os.path.join(
                            notebook_file_dir,
                            item[PYTHON_ANACONDA_NOTEBOOK_OUTPUT_FILENAME])
                try:
                    try:
                        statinfo = os.stat(notebook_file)
                    except:
                        pass
                    else:
                        if statinfo is None or statinfo.st_size == 0:
                            ACLogger().get_logger().error(
                                'ACPythonAnaconda unable to open notebook_file %s'
                                % notebook_file)
                            key_values_dict[key]['status'] = False
                            return False

                    params = [
                        jupyter, 'nbconvert', '--execute',
                        '%s' % notebook_file
                    ]
                    my_env = os.environ.copy()
                    if PYTHON_ANACONDA_FILE_PARAMETERS in item:
                        for p, pval in item[
                                PYTHON_ANACONDA_FILE_PARAMETERS].iteritems():
                            my_env[p] = pval
                    #ACLogger.log_and_print("calling jupyter with env variables of: %s" % my_env)
                    shellret = subprocess.check_output(
                        params, env=my_env, stderr=subprocess.STDOUT)
                    key_values_dict[key]['output'] = ACHelpers.compress('')
                    ACLogger.log_and_print("calling jupyter by: %s" % params)
                    if shellret is not None:
                        key_values_dict[key]['status'] = True
                        if output_file_path is not None:
                            try:
                                statinfo = os.stat(output_file_path)
                            except:
                                pass
                            else:
                                if statinfo.st_size > 0:
                                    with open(output_file_path, 'rb') as f:
                                        d = f.read()
                                        if d is not None and len(d) > 0:
                                            key_values_dict[key][
                                                'output'] = ACHelpers.compress(
                                                    d)
                                            ACLogger().get_logger().info(
                                                " got outuput data for key %s: %s"
                                                % (key, d))
                    else:
                        key_values_dict[key]['status'] = False
                        rv = False

                except Exception, e:
                    ACLogger().get_logger().error(
                        'ACPythonAnaconda execute exception: %s shellret(%s)' %
                        (str(e), shellret))
                    key_values_dict[key]['status'] = False
                finally: