    def export_sql_statement(self):
        if self.validate_query_parameters():
            qry = self.make_query_object()
            sql = qry.sql
            if qry.status is not None:
                # The query object reported an error; surface it and bail out.
                self.error = qry.status
                return False
            else:
                # Write the rendered SQL to a text file and open it for review.
                name = qry.name
                path = FSO.write_text(sql, name)
                FSO.open_file(path)
                return True
        return False

    def export_query_results(self, parent, name, tbl, sql, pw):
        """
         Writes a sql query to a csv.
        """
        start_datetime = timestr()
        start_time = just_time()
        logger = logging.getLogger(
            'main.sql_exporter.ExportSql.export_query_results')
        logger.debug('Pulling query ' + name)

        fso = FSO()
        fso.make_dir('output')
        csv_path = 'output\\' + name + '_' + start_datetime + '.csv'

        con = Connection(table=tbl, password=pw)

        def result_iter(cursor, chunksize=1000):
            # Fetch rows in chunks so a large result set never has to sit in memory all at once.
            while True:
                results = cursor.fetchmany(chunksize)
                if not results:
                    break
                for result in results:
                    yield result

        def call_grandfather(status, done=False):
            # Report progress up through the grandparent widget's callback.
            if done:
                finish = just_time()
            else:
                finish = '...'

            parent.parent.callback(
                name
                , start_time
                , finish
                , status
            )

        call_grandfather(status='Connecting')
        if con.open():
            cursor = con.connection.cursor()
            call_grandfather(status='Executing query')
            try:
                cursor.execute(sql)
                with open(csv_path, 'w', newline='') as csv_file:
                    call_grandfather(status='Writing csv')
                    writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
                    writer.writerow([i[0] for i in cursor.description]) # header
                    for r, row in enumerate(result_iter(cursor, 1000)):
                        if r > 100000:
                            break  # safety cap on the number of exported rows
                        if r % 1000 == 0:
                            logger.info('Writing row ' + str(r))
                        writer.writerow(list(row) + ['', '', '', ''])  # pad each row with four empty trailing columns
                call_grandfather(status='Great Success!', done=True)
                fso.open_file(csv_path)
            except Exception as e:
                err = str(e)
                logger.error(err)
                call_grandfather(status=str(e), done=True)
            finally:
                con.close()
                parent.call_dad()
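
For context, here is a minimal, self-contained sketch of the same fetchmany-based chunking pattern, written against the stdlib sqlite3 and csv modules so it runs without the project's Connection/FSO helpers; every name in it is illustrative and not part of the original class.

import csv
import sqlite3


def rows_in_chunks(cursor, chunksize=1000):
    # Same idea as result_iter above: pull rows with fetchmany so a large
    # result set is streamed to disk instead of loaded into memory.
    while True:
        rows = cursor.fetchmany(chunksize)
        if not rows:
            break
        for row in rows:
            yield row


con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE claims (claim_no INTEGER, amount REAL)')
con.executemany('INSERT INTO claims VALUES (?, ?)', [(i, i * 1.5) for i in range(5000)])

cur = con.execute('SELECT claim_no, amount FROM claims')
with open('claims.csv', 'w', newline='') as f:
    writer = csv.writer(f, quoting=csv.QUOTE_ALL)
    writer.writerow([col[0] for col in cur.description])  # header row
    for row in rows_in_chunks(cur, 1000):
        writer.writerow(row)
con.close()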
Example #3
def read_config_file(path: str) -> list:
    return FSO.read_json(path)
Example #4
                , 'facility': 'ss_loc'
                , 'accounting_date': 'accounting_date'
                , 'service_date': 'service_month'
                , 'check': ''
                , 'batch': ''
            }
        )
        , 'AR for Reserves': ('TopLine_SQL', 'mssql', 'Y', 'queries\\ar_for_reserves.sql', {
                'mpi': 'patient_mpi'
                , 'claim': 'claim_no'
                , 'facility': 'ss_loc'
                , 'accounting_date': 'accounting_date'
                , 'service_date': 'service_month'
                , 'check': ''
                , 'batch': ''
            }
        )
        , 'Test': (
            'AdventureWorks_32bit', 'mssql', 'Y', 'queries\\test.sql', {
                'mpi': 'mpi'
                , 'claim': 'claim'
                , 'facility': 'facility'
                , 'accounting_date': 'accounting_date'
                , 'service_date': 'service_date'
                , 'check': 'check_no'
                , 'batch': ''
            }
        )
    }
    FSO.write_json(dsn_dict, 'config.json')
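
Assuming FSO.write_json simply serializes the dict with the stdlib json module, the generated config.json can be read back as sketched below; note that the tuple values come back as plain lists after a JSON round trip.

import json

with open('config.json') as f:
    dsn_dict = json.load(f)

# Each entry unpacks into (DSN, db type, enabled flag, query path, column map).
dsn, db_type, enabled, sql_path, column_map = dsn_dict['Test']
print(dsn, sql_path, column_map['check'])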
def clean_up_output_folder(self):
    FSO.clean_up('output')

def open_output_folder(self, path='output'):
    self.logger.info('Opening ' + path + ' folder...')
    FSO.make_dir(path)
    FSO.open_file(path)
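
The FSO class itself is not shown in these examples. Purely for orientation, the hypothetical sketch below guesses at the kind of thin standard-library wrappers such a helper usually provides; it is not the project's actual implementation.

import json
import os


class FSO:
    # Hypothetical sketch only; the real FSO helper is defined elsewhere in the project.
    @staticmethod
    def make_dir(path):
        # Create the directory if it does not already exist.
        os.makedirs(path, exist_ok=True)

    @staticmethod
    def open_file(path):
        # Open a file or folder with its default application; os.startfile is
        # Windows-only, which matches the backslash paths used above.
        os.startfile(path)

    @staticmethod
    def write_json(data, path):
        with open(path, 'w') as f:
            json.dump(data, f, indent=4)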