def create_dbf():
    """Create the 'configuratie_debietberekening.dbf' file in settings.DBF_DIR.

    Returns:
        True on success, False on any failure (including a missing
        DBF_DIR setting).
    """
    success = False
    try:
        if settings.DBF_DIR is None:
            logger.error('DBF_DIR setting is NOT available')
            # Return False (not a bare None) so this failure path is
            # consistent with the exception path below.
            return success
        else:
            dbf_filepath = os.path.join(settings.DBF_DIR,
                                        'configuratie_debietberekening.dbf')
            logger.info("Create en open dbf file='{}'.".format(dbf_filepath))
            out = Dbf(dbf_filepath, new=True)
            logger.info("Add fields.")
            fields_to_dbf(out)
            logger.info("Store data.")
            store_data(out)
            logger.info("Close file.")
            out.close()
            success = True
    except Exception as ex:
        logger.error(','.join(map(str, ex.args)))
    return success
def import_structures(self, model_name, v_config=None):
    """Import structures from dbf. Omit not existing objects.

    Arguments:
    model_name -- name of model as string, 'Structure'
    v_config -- optional configuration; when given, only records whose
        'GEBIED' field equals v_config.area.ident are imported.

    Returns a (success, message) tuple; message is empty on success.
    """
    status_tuple = (True, "")
    mapping = WBConfigurationDBFMapping.objects.filter(
        model_name=model_name)
    db = Dbf(self.structures_filepath)
    for rec in db:
        if v_config is not None and rec['GEBIED'] != v_config.area.ident:
            continue
        structure = self._get_structure(rec['GEBIED'], rec['ID'])
        if structure is None:
            # Structure not known locally: skip the record.
            continue
        for item in mapping:
            if item.wbfield_name.lower() in self.read_only_fields:
                self.logger.debug(
                    "Omit readonly field, dbf_fieldname='%s'." % (
                        item.wbfield_name))
                continue
            # Pre-set so the except-clause message below cannot raise
            # UnboundLocalError when _retrieve_importvalue() itself fails.
            value = None
            try:
                value = self._retrieve_importvalue(rec, item, structure)
                self.logger.debug(
                    "Set value='%s' of dbffield='%s' into field='%s'." % (
                        value, item.dbffield_name, item.wbfield_name))
                if value is not None:
                    setattr(structure, item.wbfield_name, value)
                if self.logger.getEffectiveLevel() == 10:
                    # 10 == logging.DEBUG: save per-field only in debug mode.
                    structure.lizard_history_summary = self.fews_meta_info
                    structure.save()
            except Exception as ex:
                msg = "Error: '%s', bucket: '%s', item: '%s', value: '%s'." % (
                    ','.join(map(str, ex.args)), rec['ID'],
                    item.wbfield_name, value)
                self.logger.error(msg)
                return (False, msg)
        if self.logger.getEffectiveLevel() > 10:
            # Non-debug levels: save once per structure instead.
            structure.fews_meta_info = self.fews_meta_info
            structure.lizard_history_summary = self.fews_meta_info
            structure.save()
    db.close()
    return status_tuple
def import_areaconfigurations(self, model_name, v_config=None):
    """Import areaconfigurations from dbf. Omit not existing objects.

    Arguments:
    model_name -- name of model as string, 'AreaConfiguration'
    v_config -- optional configuration; when given, only records whose
        'GAFIDENT' field equals v_config.area.ident are imported.

    Returns a (success, message) tuple; message is empty on success.
    """
    status_tuple = (True, "")
    mapping = WBConfigurationDBFMapping.objects.filter(
        model_name=model_name)
    db = Dbf(self.areas_filepath)
    self.logger.debug("Import areaconfiguration %s" % self.areas_filepath)
    for rec in db:
        if v_config is not None and rec['GAFIDENT'] != v_config.area.ident:
            continue
        areaconfiguration = self._get_areaconfiguration(rec['GAFIDENT'])
        if areaconfiguration is None:
            # Area configuration not known locally: skip the record.
            continue
        for item in mapping:
            if item.wbfield_name.lower() in self.read_only_fields:
                self.logger.debug(
                    "Omit readonly field dbf_fieldname='%s'." % (
                        item.wbfield_name))
                continue
            # Pre-set so the except-clause message below cannot raise
            # UnboundLocalError when _retrieve_importvalue() itself fails.
            value = None
            try:
                value = self._retrieve_importvalue(
                    rec, item, areaconfiguration)
                self.logger.debug(
                    "Set value='%s' of dbffield='%s' in field='%s'." % (
                        value, item.dbffield_name, item.wbfield_name))
                if value is not None:
                    setattr(areaconfiguration, item.wbfield_name, value)
            except Exception as ex:
                msg = "Error: '%s', ident: '%s', item: '%s', value: '%s'." % (
                    ','.join(map(str, ex.args)), rec['GAFIDENT'],
                    item.wbfield_name, value)
                self.logger.error(msg)
                return (False, msg)
        areaconfiguration.fews_meta_info = self.fews_meta_info
        areaconfiguration.lizard_history_summary = self.fews_meta_info
        areaconfiguration.save()
    db.close()
    return status_tuple
def create_dbf():
    """Create the 'configuratie.dbf' file in settings.DBF_DIR.

    Returns:
        True on success, False on any failure (including a missing
        DBF_DIR setting).
    """
    success = False
    try:
        if settings.DBF_DIR is None:
            logger.error('DBF_DIR setting is NOT available')
            # Return False (not a bare None) so this failure path is
            # consistent with the exception path below.
            return success
        else:
            dbf_filepath = os.path.join(settings.DBF_DIR, 'configuratie.dbf')
            logger.info("Create en open dbf file='{}'.".format(dbf_filepath))
            out = Dbf(dbf_filepath, new=True)
            logger.info("Add fields.")
            fields_to_dbf(out)
            logger.info("Store data.")
            store_data(out)
            logger.info("Close file.")
            out.close()
            success = True
    except Exception as ex:
        logger.error(','.join(map(str, ex.args)))
    return success
def load_dataset(self, name, ref):
    """Load one UIR-ZSJ .DBF dataset into self.data[name].

    name -- dataset name (e.g. "ZSJ"); selects column mappings from
        self.columns and the .DBF file in self.directory.
    ref -- place-reference prefix; only rows whose LAU1+ICOB key starts
        with it are kept.
    """
    # Column names for this dataset; the "xy" entry is a template like
    # "...{}" filled with "X"/"Y" — presumably coordinate columns (confirm).
    col_name = self.columns["name"][name]
    col_pop = self.columns["population"][name]
    col_x = self.columns["xy"][name].format("X")
    col_y = self.columns["xy"][name].format("Y")
    # ref_key: secondary key column for nested datasets; None means flat.
    ref_key = self.datasets.get(name)
    if ref_key is None:
        dataset = {}
    else:
        dataset = defaultdict(dict)
    # The ZSJ dataset lives in ZSJD.DBF rather than ZSJ.DBF.
    if name == "ZSJ":
        filename = os.path.join(self.directory, "ZSJD.DBF")
    else:
        filename = os.path.join(self.directory, "{}.DBF".format(name))
    for row in Dbf(filename, True):
        row = row.asDict()
        place_ref = row["LAU1"] + row["ICOB"]
        # For ZSJ, keep only rows where DD == DIL — assumed to filter
        # duplicate sub-parts; TODO confirm against the UIR-ZSJ spec.
        if not place_ref.startswith(ref) or (name == "ZSJ" and row["DD"] != row["DIL"]):
            continue
        place = {
            # Names are stored in the Czech cp852 codepage.
            "name": row[col_name].decode("cp852"),
            "population": row[col_pop],
            "xy": (row[col_x], row[col_y])
        }
        if name == "ZSJ":
            place["COBE"] = row["KOD_CAST"]
        # Split a trailing parenthesized note out of the name, if present.
        match = self.fix_name.search(place["name"])
        if match is not None:
            place["name"] = match.group(1)
            place["note"] = match.group(2)
        if ref_key is None:
            dataset[place_ref] = place
        else:
            dataset[place_ref][row[ref_key]] = place
    if ref_key is None:
        log.info(
            _("Loaded {} nodes from UIR-ZSJ dataset {}.").format(
                len(dataset), name))
    else:
        log.info(
            _("Loaded {} nodes from UIR-ZSJ dataset {}.").format(
                sum(len(row) for row in dataset.values()), name))
    self.data[name] = dataset
class DBFExporter(object):
    """Export water-balance configuration objects into dbf files."""

    def __init__(self, logger=None):
        # Fall back to a module-named logger when none is injected.
        if logger is not None:
            self.logger = logger
        else:
            self.logger = logging.getLogger(__name__)

    def export_aanafvoergebieden(self, owner, save_to, filename):
        """Export areas into dbf."""
        filepath = self.file_path(save_to, filename)
        if owner is not None:
            areas = Area.objects.filter(data_set=owner)
        else:
            areas = Area.objects.exclude(data_set=None)
        areas = areas.exclude(area_class=Area.AREA_CLASS_KRW_WATERLICHAAM)
        success = self.create_dbf('area', areas, filepath)
        self.logger.debug("Status export areas is '%s' for %s to %s" % (
            success, owner, filepath))

    def export_areaconfiguration(self, owner, save_to, filename):
        """Export areaconfigurations into dbf."""
        filepath = self.file_path(save_to, filename)
        area_configurations = AreaConfiguration.objects.filter(data_set=owner)
        success = self.create_dbf('areaconfiguration',
                                  area_configurations,
                                  filepath)
        self.logger.debug("Status export areaconfig. is '%s' for %s to %s" % (
            success, owner.name, filepath))

    def export_bucketconfiguration(self, owner, save_to, filename):
        """Export buckets into dbf."""
        filepath = self.file_path(save_to, filename)
        buckets = Bucket.objects.filter(data_set=owner, deleted=False)
        success = self.create_dbf('bucket', buckets, filepath)
        self.logger.debug("Status export buckets is '%s' for %s into %s" % (
            success, owner.name, filepath))

    def export_structureconfiguration(self, owner, save_to, filename):
        """Export structures into dbf."""
        filepath = self.file_path(save_to, filename)
        structures = Structure.objects.filter(data_set=owner, deleted=False)
        success = self.create_dbf('structure', structures, filepath)
        self.logger.debug("Status export structure is '%s' for %s into %s" % (
            success, owner.name, filepath))

    def export_configuration_to_dbf(self, object_id):
        """Export the water balance configuration of the passed area into
        3 dbf files (AreaConfiguration, Bucket, Structure).

        Returns True only when all three files were created.
        """
        if object_id is None:
            return False
        area_configurations = AreaConfiguration.objects.filter(
            ident=object_id)
        if not area_configurations.exists():
            self.logger.debug('Water Balance configuration of area "%s" %s',
                              object_id, 'is NOT exists.')
            return False
        area_configuration = area_configurations[0]
        self.logger.debug("Export area configuration.")
        filename = self.create_filename('areaconfiguration')
        is_created_1 = self.create_dbf('areaconfiguration',
                                       [area_configuration],
                                       filename)
        buckets = Bucket.objects.filter(area=area_configuration)
        self.logger.debug("Export bucket.")
        filename = self.create_filename('bucket')
        is_created_2 = self.create_dbf('bucket', buckets, filename)
        structures = Structure.objects.filter(area=area_configuration)
        self.logger.debug("Export structure.")
        filename = self.create_filename('structure')
        is_created_3 = self.create_dbf('structure', structures, filename)
        return is_created_1 and is_created_2 and is_created_3

    def file_path(self, save_to, filename):
        """Return the absolute '<save_to>/<filename>.dbf' path, or a
        default fallback path when save_to/filename are invalid."""
        success = True
        if not os.path.exists(save_to):
            self.logger.error("Path %s not exists" % save_to)
            success = False
        if filename is None or len(filename) < 1:
            self.logger.error("File name is not exists")
            success = False
        if success:
            filename = ".".join((filename, 'dbf'))
            filepath = os.path.abspath(os.path.join(save_to, filename))
        else:
            filepath = self.create_filename('')
        return filepath

    def create_filename(self, modul_name):
        """Return a default dbf path for the given model name, located in
        the package's media directory when available."""
        default_filename = 'not_configured.dbf'
        default_dbfdir = 'media/lizard_wbconfiguration/dbf'
        default_package = 'lizard_wbconfiguration'
        filenames = {
            'areaconfiguration': 'area_configuration.dbf',
            'structure': 'structures.dbf',
            'bucket': 'buckets.dbf'}
        if pkg_resources.resource_isdir(default_package, default_dbfdir):
            dbf_dir_path = pkg_resources.resource_filename(default_package,
                                                           default_dbfdir)
            filename = '%s/%s' % (dbf_dir_path, filenames.get(
                modul_name, 'not_configured.dbf'))
            self.logger.info("File to save %s.", filename)
            return filename
        else:
            self.logger.debug('Location to write .dbf files is not defined.')
            self.logger.debug(
                'Used default file name "%s".' % default_filename)
            return default_filename

    def create_dbf(self, model_name, area_objects, filename):
        """Create a dbf file for the given objects.

        Returns True on success, False when any step raised.
        """
        success = False
        mapping = WBConfigurationDBFMapping.objects.filter(
            model_name__iexact=model_name).order_by('index')
        try:
            self.logger.info("Create en open dbf file='%s'." % filename)
            self.create_out(filename)
            self.logger.info("Add fields.")
            self.fields_to_dbf(mapping)
            self.logger.info("Store '%s' '%s'." % (
                len(area_objects), model_name))
            self.store_data(area_objects, mapping)
            self.logger.info("Close file.")
            self.close_out()
            success = True
        except Exception as ex:
            self.logger.error(','.join(map(str, ex.args)))
        return success

    def fields_to_dbf(self, mapping):
        """Add fields into the dbf file from the mapping definitions."""
        for item in mapping:
            field_options = [str(item.dbffield_name),
                             str(item.dbffield_type)]
            if item.dbffield_length is not None:
                field_options.append(item.dbffield_length)
            if item.dbffield_decimals is not None:
                field_options.append(item.dbffield_decimals)
            self.add_field_out(field_options)

    def store_data(self, area_objects, mapping):
        """Store one record per object into the dbf file."""
        for area_object in area_objects:
            rec = self.new_record()
            for item in mapping:
                value = self.retrieve_value(area_object,
                                            item.wbfield_name.lower())
                if value is not None:
                    dbffield_name = item.dbffield_name.lower()
                    # Geometry values arrive as a Point (see retrieve_value);
                    # split it over the 'x'/'y' dbf columns.
                    if dbffield_name == 'x' and isinstance(value, Point):
                        value = value.x
                    if dbffield_name == 'y' and isinstance(value, Point):
                        value = value.y
                    rec[dbffield_name] = value
            self.store_record(rec)

    def retrieve_value(self, area_object, field_name):
        """Return a dbf-storable value of area_object.<field_name>.

        Arguments:
        area_object -- the instance object of a model
        field_name -- field name

        Returns None for missing attributes or None values.
        """
        if not hasattr(area_object, field_name):
            self.logger.debug("%s has not attribute %s" % (
                area_object._meta.module_name, field_name))
            return None
        value = getattr(area_object, field_name)
        if value is None:
            self.logger.debug("Value of %s.%s is None." % (
                area_object._meta.module_name, field_name))
            return None
        # Map related objects / geometries onto plain dbf-compatible values.
        if isinstance(value, Area):
            if field_name.lower() == 'parent':
                value = value.ident
            else:
                value = value.id
        elif isinstance(value, AreaConfiguration):
            value = value.area.ident
        elif isinstance(value, BucketsType):
            value = value.code
        elif isinstance(value, StructureInOut):
            value = bool(value.index)
        elif isinstance(value, MultiPolygon):
            value = self.get_centrpoint(value)
        elif isinstance(value, Polygon):
            value = self.get_centrpoint(value)
        elif isinstance(value, DataSet):
            value = str(value.name)
        # Strings and other scalar values pass through unchanged.
        return value

    def get_centrpoint(self, geometry):
        """Return the center point of the geometry in srid=28992,
        transforming first when it is stored in another srid."""
        srid = 28992
        if geometry.srid != srid:
            # Transform a clone so the original geometry is untouched.
            geometry = geometry.transform(srid, clone=True)
        # Previously the centroid was only returned after a transform,
        # so geometries already in srid 28992 yielded None.
        return geometry.centroid

    def create_out(self, file_path):
        self.out = Dbf(file_path, new=True)

    def add_field_out(self, field_options):
        self.out.addField(tuple(field_options))

    def close_out(self):
        self.out.close()

    def new_record(self):
        return self.out.newRecord()

    def store_record(self, rec):
        rec.store()
def create_out(self, file_path):
    """Create a fresh dbf file at ``file_path`` and keep its handle on ``self.out``."""
    out_handle = Dbf(file_path, new=True)
    self.out = out_handle
def parse_swdb(file, options):
    """Parse swdb file.

    "file" can be a file, url, or string suitable for openAnything().
    Also needs a source of the "codes" to annotate the choice names.
    """
    # Contest code prefixes: single statewide contests vs. districted ones.
    one_contest_prefixes = ('PRS', 'SEN', 'PR_')
    dist_contest_prefixes = ('CNG', 'ASS')
    contest_prefixes = one_contest_prefixes + dist_contest_prefixes
    # Disabled code-annotation pass, kept as an inert string literal:
    """
    choices = {}
    totals = {}
    codes_name = "003.codes"
    codes = openanything.openAnything(codes_name)
    for l in codes:
        (code, choice, total) = l.rstrip().split('\t')
        if code.startswith(contest_prefixes):
            choices[code] = choice
            totals[code] = total
        elif code.endswith(('VOTE', 'REG', 'DIST')):
            # FIXME - deal with this later
            continue
        else:
            print "unrecognized code: %s in line %s" % (code, l)
    """
    reader = Dbf(file)
    au = util.AuditUnit(options.election)
    #for r in reader:
    reader_iter = iter(reader)
    rec = 0
    # Index-based read loop so that a corrupt record can be skipped
    # instead of aborting the whole iteration.
    while True:
        try:
            r = reader[rec]
        except (IndexError, StopIteration):
            # Past the last record: normal loop exit.
            break
        except:
            # NOTE(review): bare except; also 'r' may be unbound here if the
            # very first read fails, making the logging call itself raise.
            import traceback
            traceback.print_exc(1)
            logging.error("Dbf error: %s\nrecord %d" % (r, rec))
            rec = rec + 1
            continue
        rec = rec + 1
        #batch = r["SRPREC"]
        batch = r["SRPREC_KEY"]
        #batch = r["SVPREC"]
        #batch = r["SVPREC_KEY"]
        # Skip summary rows (state-of-vote and totals).
        if batch.startswith('SOV') or batch.endswith('TOT'):
            continue
        # state-wide data marks absentee with trailing "A",
        # county data marks them with "_A"
        if batch.endswith('A'):
            type = "AB"
            if batch.endswith('_A'):
                batch = batch[0:-2]
            else:
                batch = batch[0:-1]
        else:
            type = "BA"
        addist = r['ADDIST']
        cddist = r['CDDIST']
        #sddist = r['SDDIST']
        for code in reader.fieldNames:
            # Non-contest columns (precinct keys, registration, etc.).
            if code.endswith(('PREC', 'VOTE', 'REG', 'DIST', 'SVPREC_KEY')):
                continue
            code_full = code
            contest = code[:3]
            # Districted contests embed the district number in the code.
            if code.startswith('ASS'):
                code_full = code[:3] + ("%02d" % addist) + code[-3:]
                contest = code_full[:5]
            elif code.startswith('CNG'):
                code_full = code[:3] + ("%02d" % cddist) + code[-3:]
                contest = code_full[:5]
            elif code.startswith('PR_'):
                contest = code[:-1]
            else:
                contest = code[:3]
            if options.contest != None and options.contest != contest:
                continue
            # until we fully figure out how to get the district numbers...
            # contest = contests[code]
            try:
                au = util.AuditUnit(options.election, contest, type, [batch])
                au.update(code_full[len(contest):], str(r[code]))
                util.pushAuditUnit(au, min_ballots = options.min_ballots)
            except:
                print "Error looking up code %s (%s) for %s-%s" % (code, code_full, batch, type)
                continue
psyco.full() except ImportError: pass from dbfpy.dbf import Dbf from constants.extensions import CSV import sys import argv import parser input = argv.input(sys.argv) output = argv.output(sys.argv) page_size = argv.page_size(sys.argv) db = Dbf() db.openFile(input, readOnly=1) # TODO: Real error handling #try: record_count = db.recordCount() # If no record number is specified write everything if (page_size == 0): page_size = record_count pages = record_count / page_size for page in xrange(pages):
from dbfpy.dbf import Dbf
import string, glob, os

# Extract one value per record from every .dbf in the work directory and
# append them to a single tab-separated text file.
dirbase = "D:\Workspace\PiuraTumbes\_extract_CRU3_1_tmp"

dbfList = sorted(glob.glob(dirbase + "\\*.dbf"))
outtxtFile = dirbase + "\\__extract_CRU3_1_tmp.txt"

# Append when the output exists already; otherwise create it with a header.
if os.path.isfile(outtxtFile):
    outFile = open(outtxtFile, "a")
else:
    outFile = open(outtxtFile, "w")
    outFile.write("Variable\tMonth\tTumbes\tPiura\n")

for dbf in dbfList:
    dbfFile = Dbf(dbf, True)
    # File names look like 'xxxx<var>_<month>.dbf'; write variable + month.
    outFile.write(
        os.path.basename(dbf)[4:-4].split("_")[0] + "\t" +
        os.path.basename(dbf)[4:-4].split("_")[1] + "\n")
    for rec in dbfFile:
        outFile.write("a" + str(rec[3]) + "\n")
##    for fldName in dbfFile.fieldNames:
##        outFile.write(fldName + "\t" + str(rec[fldName]) + "\n")
    # Close each dbf as soon as it is processed; previously only the
    # last opened dbf was closed, leaking one handle per file.
    dbfFile.close()

outFile.close()
from dbfpy.dbf import Dbf # #przystanki = Dbf("C:\dane\PRZYSTANKI_AUTOBUSOWE.dbf") #linie = Dbf("C:\dane\LINIE_PRZEWOZNIK.dbf") # #from shapefile import Reader as shpr # #przystanki =shpr("C:\dane\PRZYSTANKI_AUTOBUSOWE.shp") #Przystanki= przystanki.shapeRecords() # # #for przystanek in Przystanki: # print przystanek td = Dbf( "D:\\Dropbox\\i2\\Prace\\___Nie Visumowe\\2012, Malopolska\\Dane Wejsciowe\\PBS_styczen\\a.dbf" )
from dbfpy.dbf import Dbf
import string, glob, os

# Extract one value per record from every .dbf in the work directory and
# append them to a single tab-separated text file.
dirbase = "D:\Workspace\PiuraTumbes\_extract_CRU3_1_tmp"

dbfList = sorted(glob.glob(dirbase + "\\*.dbf"))
outtxtFile = dirbase + "\\__extract_CRU3_1_tmp.txt"

# Append when the output exists already; otherwise create it with a header.
if os.path.isfile(outtxtFile):
    outFile = open(outtxtFile, "a")
else:
    outFile = open(outtxtFile, "w")
    outFile.write("Variable\tMonth\tTumbes\tPiura\n")

for dbf in dbfList:
    dbfFile = Dbf(dbf, True)
    # File names look like 'xxxx<var>_<month>.dbf'; write variable + month.
    outFile.write(os.path.basename(dbf)[4:-4].split("_")[0] + "\t" +
                  os.path.basename(dbf)[4:-4].split("_")[1] + "\n")
    for rec in dbfFile:
        outFile.write("a" + str(rec[3]) + "\n")
##    for fldName in dbfFile.fieldNames:
##        outFile.write(fldName + "\t" + str(rec[fldName]) + "\n")
    # Close each dbf as soon as it is processed; previously only the
    # last opened dbf was closed, leaking one handle per file.
    dbfFile.close()

outFile.close()
class DBFExporter(object):
    """Export esf configurations into dbf files."""

    def __init__(self, logger=None):
        # Fall back to a module-named logger when none is injected.
        if logger is not None:
            self.logger = logger
        else:
            self.logger = logging.getLogger(__name__)

    def export_esf_configurations(self, owner, save_to, dbf_file, filename):
        """Export esf configurations into dbf.

        Arguments:
        owner -- instance of lizard_security DataSet object
        save_to -- location to save dbf file as string, ex. '/tmp/'
        dbf_file -- instance of DbfFile object
        filename -- output file name without extension
        """
        if owner is not None:
            areas = Area.objects.filter(data_set=owner)
        else:
            areas = Area.objects.exclude(data_set=None)
        areas = areas.exclude(area_class=Area.AREA_CLASS_KRW_WATERLICHAAM)
        configurations = Configuration.objects.filter(
            dbf_file=dbf_file).order_by('dbf_index')
        if not configurations.exists():
            self.logger.warning('NO configurations for dbf_file %s' %
                                dbf_file)
            return
        filepath = self.file_path(save_to, filename, 'dbf')
        if filepath is None:
            self.logger.error("File path='%s' does NOT exist." % save_to)
            return
        self.logger.debug("Creating dbf file: %s." % filepath)
        self.create_out(filepath)
        self.fields_to_dbf(configurations)
        counter = 0
        for area in areas:
            areaconfigurations = AreaConfiguration.objects.filter(
                configuration__dbf_file=dbf_file, area=area)
            counter = counter + len(areaconfigurations)
            if areaconfigurations.exists():
                self.store_data(area, areaconfigurations)
        self.logger.debug("Processed %d areaconfigurations." % counter)
        self.close_out()

    def field_to_dbf(
        self, f_name, f_type, f_length=None, f_decimals=None):
        """Add a field into passed dbf.

        Arguments:
        f_name -- field name as string where len(f_name)<= 10
        f_type -- field type as string (C, D, N)
        f_length -- field length as integer
        f_decimals -- decimals as integer
        """
        field_options = [f_name, f_type]
        if f_length is not None:
            field_options.append(f_length)
        # Decimals are only meaningful when a length is also given.
        if f_length is not None and f_decimals is not None:
            field_options.append(f_decimals)
        try:
            self.add_field_out(field_options)
        except Exception as ex:
            self.logger.error(','.join(map(str, ex.args)))

    def fields_to_dbf(self, mapping):
        """Add fields into dbf file.

        Avoid fields with None or empty value.
        """
        self.field_to_dbf(u'GAFIDENT', 'C', 24)
        self.field_to_dbf(u'GAFNAAM', 'C', 100)
        for item in mapping:
            if self.is_nonempty_value(item.dbf_valuefield_name):
                self.field_to_dbf(item.dbf_valuefield_name,
                                  item.dbf_valuefield_type,
                                  item.dbf_valuefield_length,
                                  item.dbf_valuefield_decimals)
            if self.is_nonempty_value(item.dbf_manualfield_name):
                # Manual flags are stored as logical ('L') fields.
                self.field_to_dbf(item.dbf_manualfield_name, 'L')

    def store_data(self, area, areaconfigurations):
        """Store one record for the area into the dbf file."""
        rec = self.new_record()
        rec['GAFIDENT'] = area.ident
        rec['GAFNAAM'] = area.name
        for item in areaconfigurations:
            dbf_manualfield_name = item.configuration.dbf_manualfield_name
            dbf_valuefield_name = item.configuration.dbf_valuefield_name
            manual_text_value = item.manual_text_value
            manual_value = item.manual_value
            value = item.manual
            if self.is_nonempty_value(dbf_manualfield_name) and \
                    self.is_nonempty_value(value):
                rec[dbf_manualfield_name] = value
            if self.is_nonempty_value(dbf_valuefield_name):
                if item.configuration.dbf_valuefield_type == 'C':
                    if self.is_nonempty_value(manual_text_value):
                        rec[dbf_valuefield_name] = manual_text_value
                else:
                    if self.is_nonempty_value(manual_value):
                        if item.configuration.dbf_valuefield_type == 'L':
                            # Logical fields: store positive numbers as True.
                            manual_value = manual_value > 0.0
                        rec[dbf_valuefield_name] = manual_value
        self.store_record(rec)

    def file_path(self, save_to, filename, extension):
        """Create absolute filepath.

        Arguments:
        save_to -- pathname as string, example. '/tmp/share/'
        filename -- filename as string, example. 'aanafvoergebied'
        extension -- file extension as string, example. 'dbf'

        Returns None when save_to does not exist or filename is empty.
        """
        success = True
        if not os.path.exists(save_to):
            self.logger.error("Path %s not exists" % save_to)
            success = False
        if filename is None or len(filename) < 1:
            self.logger.error("File name is not exists")
            success = False
        if success:
            filename = ".".join((filename, extension))
            filepath = os.path.abspath(os.path.join(save_to, filename))
        else:
            filepath = None
        return filepath

    def is_nonempty_value(self, value):
        """True when value is neither None nor the empty string."""
        return ((value is not None) and (value != ''))

    def create_out(self, file_path):
        self.out = Dbf(file_path, new=True)

    def add_field_out(self, field_options):
        self.out.addField(tuple(field_options))

    def close_out(self):
        self.out.close()

    def new_record(self):
        return self.out.newRecord()

    def store_record(self, rec):
        rec.store()
def __openFile(self):
    """Context-manager generator yielding an open Dbf for self.__path,
    guaranteed to be closed afterwards."""
    # Open BEFORE entering the try block: if Dbf() itself raises, the
    # old code hit a NameError on 'db' in the finally clause, masking
    # the original error.
    db = Dbf(self.__path)
    try:
        yield db
    finally:
        db.close()
do_terrobj = 1 do_tobjhnr = 1 do_huisnr = 1 postal_code = 0 if(len(args.filter-postcode) > 0): postal_code = int(args.filter-postcode) print 'Filtering on postalcode: ' + str(postal_code) # parse & index pkancode huisnr_dic = dict() pkancode_set = set() print 'Extracting pkancode' db = Dbf() db.openFile(pkancode_dbf, readOnly = 1) record_count = db.recordCount() for i in range(0, record_count): rec = db[i] if(i % (record_count / 50) is 0 and not i is 0): sys.stdout.write('.') sys.stdout.flush() huisnr_id = rec['HUISNRID'] pkancode = rec['PKANCODE'] if(pkancode == postal_code or postal_code is 0): huisnr_dic[huisnr_id] = dict()
do_terrobj = 1 do_tobjhnr = 1 do_huisnr = 1 postal_code = 0 if (len(sys.argv) > 2): postal_code = int(sys.argv[2]) print 'Filtering on postalcode: ' + str(postal_code) # parse & index pkancode huisnr_dic = dict() pkancode_set = set() print 'Extracting pkancode' db = Dbf() db.openFile(pkancode_dbf, readOnly=1) record_count = db.recordCount() for i in range(0, record_count): rec = db[i] if (i % (record_count / 50) is 0 and not i is 0): sys.stdout.write('.') sys.stdout.flush() huisnr_id = rec['HUISNRID'] pkancode = rec['PKANCODE'] if (pkancode == postal_code or postal_code is 0): huisnr_dic[huisnr_id] = dict()
from dbfpy.dbf import Dbf import os folder = "E:\\dropbox\\pk\\phd\\dane" for plik in os.listdir(folder): if plik.endswith(".DBF"): baza = Dbf(os.path.join(folder, plik)) i = 0 for row in baza: i += 1 print row if i == 10: wswsw
def copyDbf(dbf_file, file = 'tst.dbf', first=None, last=None, verbose=False): from dbfpy.dbfnew import dbf_new from dbfpy.dbf import Dbf, DbfRecord # Leemos la base de datos inicial db = Dbf() db.openFile(dbf_file) num = db.header.recordCount # Creamos la cabecera de la nueva dbfn = dbf_new() headers = {} for f in db.header.fieldDefs: headers[f.name] = f.typeCode() dbfn.add_field(f.name, f.typeCode(), f.length, f.decimalCount) dbfn.write(file) db.close() # Leemos los datos viejos rec = readDbf(dbf_file) # Insertamos todos los datos first = 0 if not first else int(first) last = num if not last else int(last) if first < 0 or first > num - 2: first = 0 if last > num - 1 or last < 0 or last < first: last = num - 1 dbft = Dbf() dbft.openFile(file, readOnly=0) # Mostramos header if verbose: dbft.reportOn() # Limitamos las inserciones a los parametros for j in rec[first:last]: new_rec = DbfRecord(dbft) for k,v in j.items(): if headers[k] == 'N': v = float(v) new_rec[k] = v new_rec.store() dbft.close() # Numero de filas copiadas if verbose: print "%d raws copied" % last - first return True
psyco.full() except ImportError: pass from dbfpy.dbf import Dbf from constants.extensions import CSV import sys import argv import parser input = argv.input(sys.argv) output = argv.output(sys.argv) page_size = argv.page_size(sys.argv) db = Dbf() db.openFile(input, readOnly=1) # TODO: Real error handling # try: record_count = db.recordCount() # If no record number is specified write everything if page_size == 0: page_size = record_count pages = record_count / page_size for page in xrange(pages):