Example #1
def export_file(conn, dbf_file, shop_number):
    sales_table = []

    # the file name (without extension) serves as the date string
    str_date = os.path.splitext(str(dbf_file.name))[0]

    d = Table(str(dbf_file), codepage='cp866')
    d.open()
    # keep only records whose operation code is ' 9'
    for r in d:
        if r.oper == ' 9':
            sales_table.append((shop_number, str_date, r.time.strip(), r.sum))
    d.close()

    conn.executemany("INSERT OR IGNORE INTO sales VALUES(?,?,?,?);",
                     sales_table)
    conn.commit()
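A minimal caller sketch for export_file, assuming a sqlite3 connection and a sales table whose four columns match the inserted tuple; the database name, directory and shop number below are invented for illustration:

import sqlite3
from pathlib import Path

conn = sqlite3.connect("sales.db")
# INSERT OR IGNORE only skips duplicates if the table carries a uniqueness constraint
conn.execute("CREATE TABLE IF NOT EXISTS sales("
             "shop INTEGER, day TEXT, time TEXT, total REAL, "
             "UNIQUE(shop, day, time, total))")

for dbf_path in Path("exports").glob("*.dbf"):  # e.g. 20240131.dbf -> str_date '20240131'
    export_file(conn, dbf_path, shop_number=12)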
Example #2
def viewExportMetrics():
    monthDateStr = request.form.get("monthDate", None)
    if not monthDateStr:
        raise RuntimeError("Month is not specified.")
    month = datetime.strptime(monthDateStr, "%m.%Y")
    metrics = db.session.execute(
        selectQuery.format(month=month.strftime("%Y%m"))).fetchall()

    if "view" in request.form:
        return render_template("export/viewExportMetrics.tpl",
                               metrics=metrics,
                               serviceNames=serviceNames,
                               datetime=datetime,
                               months=months)
    else:
        fullname = os.path.join(current_app.config['UPLOAD_FOLDER'],
                                "export.dbf")
        columns = "code_ls C(7); np C(20); st C(30); dom C(4); korp C(7); kw C(9); komn C(9); abonent C(30); s_code C(2); s_name C(25); code_nn C(2); sn C(20); date C(10); quan1 C(15); quan1n C(15); quan2 C(15); quan2n C(15)"
        table = Table(fullname, columns, codepage="cp866")
        try:
            table.open()
            for row in metrics:
                m1, m2 = row.m.split(";")
                table.append(
                    (row.number, row.cityName, row.streetName, row.houseNumber,
                     row.buildingName, row.flatNumber, row.room, row.fio,
                     row.service,
                     serviceNames[row.service], row.order, row.serial,
                     datetime.fromtimestamp(row.time).strftime("%d.%m.%Y"), "",
                     m1, "", m2))
        finally:
            table.close()
        return send_file(fullname, as_attachment=True)
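The field-spec string passed to Table above follows the dbf library's 'name TYPE(size)' syntax with fields separated by semicolons. A small standalone sketch of the same pattern (file name and fields invented here):

import dbf

table = dbf.Table("readings.dbf", "code_ls C(7); abonent C(30); date C(10)",
                  codepage="cp866")
table.open(dbf.READ_WRITE)  # open explicitly for writing before appending
try:
    table.append(("0000001", "Ivanov I. I.", "31.01.2024"))
finally:
    table.close()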
Example #3
 def ingest(self, file_path):
     self.result.flag(self.result.FLAG_TABULAR)
     try:
         table = Table(file_path).open()
         self.result.emit_rows(self.generate_rows(table))
     except DbfError as err:
         raise ProcessingException('Cannot open DBF file: %s' % err)
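generate_rows is defined elsewhere in this ingestor; a hedged sketch of what such a helper could look like for an opened dbf.Table, written here as a standalone function (the lower-cased dict keys are an assumption):

def generate_rows(table):
    # yield every record as a dict keyed by (lower-cased) field name
    headers = [name.lower() for name in table.field_names]
    for record in table:
        yield dict(zip(headers, (record[name] for name in table.field_names)))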
Example #4
 def ingest(self, file_path, entity):
     entity.schema = model.get('Table')
     try:
         table = Table(file_path.as_posix()).open()
         self.emit_row_dicts(entity, self.generate_rows(table))
     except DbfError as err:
         raise ProcessingException('Cannot open DBF file: %s' % err) from err  # noqa
Example #5
def parseDBF(dbffile):
    """parse dbf with ratezones return dict"""
    from dbf import Table, DbfError, FieldMissingError
    try:
        t = Table(dbffile)
        t.open()
    except DbfError:
        log.error("Can't open dbf file %s", dbffile)
        return None
    zones = {}
    try:
        for record in t:
            try:
                zones[str(record[INDEXFIELD])] = record[RATEFIELD]
            except FieldMissingError:
                log.error('Wrong field names in dbf file %s', dbffile)
                return None
    finally:
        # close the table whether parsing succeeds or not
        t.close()
    return zones
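parseDBF relies on module-level names (log, INDEXFIELD, RATEFIELD) that are not part of the excerpt; a hypothetical setup and call might look like this:

import logging

log = logging.getLogger(__name__)
INDEXFIELD = 'zone_id'  # assumed name of the zone-identifier field
RATEFIELD = 'rate'      # assumed name of the rate field

zones = parseDBF('ratezones.dbf')
if zones is not None:
    log.info('loaded %d rate zones', len(zones))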
Example #6
    def __init__(
            self,
            dbf_path: str,           #directory containing the dbf file
            dbf_filename: str,       #file/table name
            target_db=None,          #target db connection
            target_tablename=None,   #target table name
            append=False,            #append, or drop and create new
            ignore_memos=True,       #ignore memo fields
            codepage=None,           #codepage (https://en.wikipedia.org/wiki/Code_page)
            quietmode=False,         #don't output to stdout
    ):

        self.__quietmode = quietmode
        dbf_file = path.join(dbf_path, dbf_filename)
        #open the dbf table as read only
        self.__table = Table(dbf_file,
                             ignore_memos=ignore_memos,
                             codepage=codepage)

        self.__table.open(DbfStatus.READ_ONLY)

        self.__record_count = len(self.__table)
        self.__written_count = 0

        if target_tablename:
            self.__table_name = target_tablename
        else:
            self.__table_name = dbf_filename.lower().replace('.dbf', '')

        self.__structure = self.__table.structure()
        self.__target_db = target_db
        self.__dbms_name = self.__get_dbms_info()
        self.__dtype_map = DTYPE_MAP[self.__dbms_name]
        self.__append = append
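DTYPE_MAP is referenced by the constructor but not included in the excerpt; judging from __table_def in Example #10, it maps dbf field-type letters (as found in table.structure() entries such as 'name C(25)') to SQL type names per target DBMS. A plausible, purely illustrative shape:

# Assumed structure; the real mapping lives elsewhere in the module.
DTYPE_MAP = {
    'sqlite':    {'C': 'TEXT',    'N': 'NUMERIC', 'F': 'REAL',   'D': 'DATE',     'L': 'INTEGER'},
    'access':    {'C': 'VARCHAR', 'N': 'DOUBLE',  'F': 'DOUBLE', 'D': 'DATETIME', 'L': 'BIT'},
    'sqlserver': {'C': 'VARCHAR', 'N': 'FLOAT',   'F': 'FLOAT',  'D': 'DATETIME', 'L': 'BIT'},
}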
Example #7
def importMetrics(file, importMetricsFlag):
    existedPUs = {}
    table = Table(file)
    try:
        table.open()

        clients = {}
        for row in table:
            number, notUse1, notUse2, notUse3, notUse4, notUse5, notUse6, notUse7, service, notUse8, order, serial, date, m1, m1n, m2, m2n = row
            if number in clients:
                client = clients[number]
            else:
                client = Client.query.filter_by(number=number).one_or_none()
                clients[number] = client

            if client:
                # load all of this account's meters at once
                if number not in existedPUs:
                    pus = PU.query.filter_by(client=client).all()
                    existedPUs[number] = pus

                # if the meter is found, remove it from the list of existing ones
                # after the import completes, any meters (and their readings) still left in this dict are deleted
                pu = searchPU(existedPUs[number], order, service)
                if pu:
                    pu.setSerial(serial)
                    existedPUs[number].remove(pu)
                else:
                    pu = PU(client, order, service, serial)
                    db.session.add(pu)
                # if the form requests importing readings, import the readings in addition to the meters
                if importMetricsFlag:
                    print("Импорт показаний")
                    date = datetime.strptime(date, "%d.%m.%Y")
                    m = Metric.query.filter_by(
                        pu=pu, month=date.strftime("%Y%m")).one_or_none()
                    if not m:
                        m = Metric(pu, date, m1, m2)
                        db.session.add(m)
                    m.setM(m1, m2)

        # delete meters that exist in the database but are missing from the imported file
        for pus in existedPUs.values():
            for pu in pus:
                db.session.delete(pu)
        db.session.commit()
    finally:
        table.close()
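searchPU is not shown; a sketch consistent with the call above, where the attribute names order and service on PU are assumptions:

def searchPU(pus, order, service):
    # return the meter (PU) matching the given order and service, or None
    for pu in pus:
        if pu.order == order and pu.service == service:
            return pu
    return None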
Example #8
def importLsAndDebt(file):
    table = Table(file)
    try:
        table.open()
        for row in table:
            number, city, street, dom, building, flat, room, fio, debt, penalty, notUse1, notUse2, payDate = row
            client = Client.query.filter_by(number=number).one_or_none()
            if not client:
                client = Client(number, city, street, dom, building, flat,
                                room, fio)
            client.setDebt(debt)
            client.setPenalty(penalty)
            client.setPayDate(payDate)
            db.session.add(client)
        db.session.commit()
    finally:
        table.close()
Example #9

#------------------------------------------------------------------------
#
# TMG DBF tables
#
#------------------------------------------------------------------------
'''
All TMG DBF Tables
'''
#------------------------------------------------------------------------
#
#  Person File
##TODO
#------------------------------------------------------------------------
'''

Table(s):
$.DBF - tmgPeople - Person File
'''


#------------------------------------------------------------------------
#
#  Source Type File
##TODO
#------------------------------------------------------------------------
'''

Table(s):
A .dbf - tmgSourceCategories - Source Type File
Example #10
class Dbf2Db:
    def __init__(
            self,
            dbf_path: str,           #directory containing the dbf file
            dbf_filename: str,       #file/table name
            target_db=None,          #target db connection
            target_tablename=None,   #target table name
            append=False,            #append, or drop and create new
            ignore_memos=True,       #ignore memo fields
            codepage=None,           #codepage (https://en.wikipedia.org/wiki/Code_page)
            quietmode=False,         #don't output to stdout
    ):

        self.__quietmode = quietmode
        dbf_file = path.join(dbf_path, dbf_filename)
        #open the dbf table as read only
        self.__table = Table(dbf_file,
                             ignore_memos=ignore_memos,
                             codepage=codepage)

        self.__table.open(DbfStatus.READ_ONLY)

        self.__record_count = len(self.__table)
        self.__written_count = 0

        if target_tablename:
            self.__table_name = target_tablename
        else:
            self.__table_name = dbf_filename.lower().replace('.dbf', '')

        self.__structure = self.__table.structure()
        self.__target_db = target_db
        self.__dbms_name = self.__get_dbms_info()
        self.__dtype_map = DTYPE_MAP[self.__dbms_name]
        self.__append = append

    def update_target(self, new_table_name=None, close_dbf=True):
        '''update the target database'''

        if new_table_name:
            self.__table_name = new_table_name

        time_start = datetime.datetime.now()

        if not self.__quietmode:
            print(
                f'Extracting {self.__record_count} records to target database...'
            )

        self.__update_table(append=self.__append)

        time_finish = datetime.datetime.now()
        time_elapsed = (time_finish - time_start).total_seconds()
        if not self.__quietmode:
            print(
                f'{self.__written_count} records of {self.__record_count} updated successfully in {time_elapsed} seconds.'
            )

        if close_dbf:
            #close the link to dbf file
            self.__table.close()
        else:
            print('dbf source table remains open.')

    def close_dbf(self):
        #close the link to dbf file
        self.__table.close()

    @property
    def table_structure(self):
        return self.__structure

    @property
    def dbms_name(self):
        return self.__dbms_name

    @property
    def record_count(self):
        return self.__record_count

    @property
    def written_count(self):
        return self.__written_count

    def __table_def(self, field):
        '''extract the name and datatype of each field'''
        field_name, field_def = field.split(' ')
        field_def = field_def.replace(')', '').split('(')

        if len(field_def) == 1 or ',' in field_def[1]:
            field_def = field_def[0], ''
            return ' '.join(
                [f'[{field_name}]', self.__dtype_map[field_def[0]]])
        else:
            field_def = field_def[0], f'({field_def[1]})'
            return ' '.join([
                f'[{field_name}]', self.__dtype_map[field_def[0]], field_def[1]
            ])

    def __get_dbms_info(self):
        '''try to find out what kind of target database it is'''
        info = None
        #databases handled by pypyodbc
        try:
            info = self.__target_db.getinfo(SQL_DBMS_NAME)
            if info.lower() == 'access':
                return 'access'
            elif info.lower() == 'microsoft sql server':
                return 'sqlserver'
        #sqlite3 - not handled by pypyodbc
        except Exception:
            try:
                info = self.__target_db.Warning.__dict__['__module__']
                if info.lower() == 'sqlite3':
                    return 'sqlite'
            except Exception:
                pass

        return info

    def __update_table(self, append):
        ''' create and update table with source data'''
        if self.__table_exists():
            if append:
                self.__insert_data()
            else:
                self.__drop_table()
                self.__make_table()
                self.__insert_data()
        else:
            self.__make_table()
            self.__insert_data()

    def __table_exists(self):
        '''function to check if table exists'''
        cur = self.__target_db.cursor()

        if self.__dbms_name == 'sqlite':
            cur.execute(
                f"SELECT name FROM sqlite_master WHERE type='table' AND name='{self.__table_name}'"
            )
            if len(cur.fetchall()) > 0:
                print(f'... table [{self.__table_name}] exists')
                return True
            else:
                return False

        elif self.__dbms_name in ['access', 'sqlserver']:
            if len([
                    x for x in cur.tables()
                    if x[2].lower() == self.__table_name
                    and x[3].lower() == 'table'
            ]):
                return True
            else:
                return False

    def __drop_table(self):
        '''drop a given table_name in db'''
        print(f'... dropping table [{self.__table_name}]')
        self.__target_db.cursor().execute(f'DROP TABLE {self.__table_name}')
        self.__target_db.commit()

    def __make_table_sql(self):
        '''assemble a create table sql command'''
        fields = [self.__table_def(x) for x in self.__structure]
        fields = ' ,'.join(fields)
        sql = f'CREATE TABLE {self.__table_name} ({fields})'
        return sql

    def __make_table(self):
        '''make table if not exists'''
        query = self.__make_table_sql()
        self.__target_db.cursor().execute(query)
        self.__target_db.commit()
        print(f'... table [{self.__table_name}] created')

    def __insert_data(self):
        '''insert data into table'''
        print(f'... inserting data into [{self.__table_name}]')
        field_size = len(self.__structure)
        values = ', '.join(['?' for x in range(field_size)])
        query = f'insert into {self.__table_name} values ({values})'
        cur = self.__target_db.cursor()
        count = 0
        for record in self.__table:
            try:
                _record = [self.__record_processing(x) for x in record]
                cur.execute(query, _record)
                count += 1
            except Exception as e:
                print(e)
                print(_record)
        self.__target_db.commit()
        self.__written_count = count

    def __record_processing(self, record):
        '''clean up a single field value'''
        #access and sql server don't accept dates earlier than 1753-01-01
        #https://msdn.microsoft.com/en-us/library/system.data.sqltypes.sqldatetime(v=vs.110).aspx
        #tables with many datetime fields seem to be much slower
        if self.__dbms_name in ['access', 'sqlserver'] and \
                isinstance(record, (datetime.date, datetime.datetime)):
            #comparing a datetime to a date raises TypeError, so normalize first
            as_date = record.date() if isinstance(record, datetime.datetime) else record
            if as_date < datetime.date(1753, 1, 1):
                return None
            return record
        #get rid of unnecessary white space
        #this doesn't seem to affect speed much
        elif isinstance(record, str):
            return record.strip()
        else:
            return record
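A hedged usage sketch for Dbf2Db against a sqlite3 target; the paths and file names are invented, and DTYPE_MAP must provide a matching 'sqlite' entry (see the sketch after Example #6):

import sqlite3

target = sqlite3.connect("warehouse.db")
loader = Dbf2Db("data", "customers.dbf", target_db=target, append=False)
loader.update_target()  # creates the target table if needed and inserts the rows
print(loader.written_count, "of", loader.record_count, "records written")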
Example #11
 def closeEtalonDfb(self, table: dbf.Table):
     table.close()
     return None
Example #12
 def writeCortege(self, cortege: str, table: dbf.Table):
     table.append(cortege)
     return None
Example #13
 def ingest(self, file_path):
     table = Table(file_path).open()
     self.result.flag(self.result.FLAG_TABULAR)
     self.result.emit_rows(self.generate_rows(table))