import os
import sys
import csv
import time
import shutil
import urllib
import zipfile
import tempfile
from datetime import datetime

import transaction
from osgeo import ogr, osr
from pyramid.response import Response
from pyramid.paster import get_appsettings, setup_logging
from sqlalchemy import engine_from_config

# DBSession, Base and the model classes live in the application's models package;
# the exact import path is assumed here. The helpers try_encode() and
# _update_card_attributes() are defined elsewhere in the project.
from nextgisbio.models import (
    DBSession, Base,
    Annotation, Cards, Person, Taxa_scheme, Museum, Coord_type, Anthr_press,
    Vitality, Abundance, Footprint, Pheno, Inforesources, Legend, Area_type,
    Key_area, RedBook, User, Squares, Taxon, Synonym, Images, CardsImages,
)


def dump_data():
    # Export every table to a timestamped directory of CSV files and return its path.
    dir_name = 'nextgisbio/initial_data/csv/' + time.strftime("%Y_%m_%d_%H_%M_%S")
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    else:
        raise Exception('Directory for exported csv files already exists')
    get_path_name = lambda name: os.path.join(dir_name, name)
    with transaction.manager:
        Annotation.export_to_file(get_path_name('annotation.csv'))
        Cards.export_to_file(get_path_name('cards.csv'))
        Person.export_to_file(get_path_name('person.csv'))
        Taxa_scheme.export_to_file(get_path_name('taxa_scheme.csv'))
        Museum.export_to_file(get_path_name('museum.csv'))
        Coord_type.export_to_file(get_path_name('coord_type.csv'))
        Anthr_press.export_to_file(get_path_name('anthr_press.csv'))
        Vitality.export_to_file(get_path_name('vitality.csv'))
        Abundance.export_to_file(get_path_name('abundance.csv'))
        Footprint.export_to_file(get_path_name('footprint.csv'))
        Pheno.export_to_file(get_path_name('pheno.csv'))
        Inforesources.export_to_file(get_path_name('inforesources.csv'))
        Legend.export_to_file(get_path_name('legend.csv'))
        Area_type.export_to_file(get_path_name('area_type.csv'))
        Key_area.export_to_file(get_path_name('key_area.csv'))
        RedBook.export_to_file(get_path_name('redbooks.csv'))
        User.export_to_file(get_path_name('user.csv'))
        Squares.export_to_file(get_path_name('square_karea_association.csv'))
        Taxon.export_to_file(get_path_name('taxon.csv'))
        Synonym.export_to_file(get_path_name('synonym.csv'))
        Images.export_to_file(get_path_name('images.csv'))
        CardsImages.export_to_file(get_path_name('cards_images.csv'))
    return dir_name
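

# A minimal sketch (not part of the original module) of how dump_data() could be
# wired up as a console script; it mirrors the engine/session setup done in main()
# below. The function name and argument handling here are hypothetical.
def dump_main(argv=sys.argv):
    if len(argv) != 2:
        print 'usage: %s <config_uri>' % os.path.basename(argv[0])
        sys.exit(1)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    print 'CSV files written to %s' % dump_data()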


def new_card(request):
    with transaction.manager:
        dbsession = DBSession()
        card = Cards()
        _update_card_attributes(card, dict(request.POST))
        card.added_date = datetime.now()
        card.edited_date = card.added_date
        dbsession.add(card)
    return {}
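

# _update_card_attributes() is a project helper defined elsewhere and not shown in
# this section; the sketch below only illustrates the kind of mapping it performs
# (assumed behaviour -- the real helper may also coerce types and validate values).
def _update_card_attributes_sketch(card, params):
    for key, value in params.items():
        # copy only keys that correspond to Cards attributes, treating '' as NULL
        if hasattr(card, key):
            setattr(card, key, value if value != '' else None)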


def cards_download(request):
    format = request.matchdict['format']
    if format not in ['csv', 'shp']:
        return Response()

    try:
        # taxon_list is a list analogous to the querystring used by points_text:
        # (taxon_id1,taxon_id2)
        taxons = request.params['taxon_list']
        if taxons != '':
            taxons = urllib.unquote(taxons)
            taxons = taxons.split(',')
            taxons = [t.split('_') for t in taxons]
            taxon_list = [id for (t, id) in taxons]
            if any('root' in s for s in taxon_list):
                taxon_list = None
        else:
            taxon_list = None
    except KeyError:
        taxon_list = None

    cards = Cards.as_join_list(taxon_list)

    if format == 'csv':
        fname = tempfile.mktemp()
        try:
            file = open(fname, 'w')
            writer = csv.writer(file, delimiter='\t')
            # Write the cards to the file
            for card in cards:
                x = [try_encode(v) for v in card]
                writer.writerow(x)
            file.close()
            # It would probably be worth compressing before sending; no compression here.
            file = open(fname, 'r')
            data = file.read()
            file.close()
            resname = 'cards.csv'
        finally:
            # remove the temporary file in any case
            os.remove(fname)
    elif format == 'shp':
        workdir = tempfile.mkdtemp()
        try:
            driver = ogr.GetDriverByName('ESRI Shapefile')
            sr = osr.SpatialReference()
            sr.ImportFromProj4("+init=epsg:4326")
            ds = driver.CreateDataSource(workdir)
            lyr = ds.CreateLayer('point_out', sr, ogr.wkbPoint)
            # Create the DBF fields, truncating the field names to 10 characters
            # as required by the DBF format
            fieldnames = [name[:10] for name in cards[0]]
            fieldsize = 254
            for name in fieldnames:
                field_defn = ogr.FieldDefn(name, ogr.OFTString)
                field_defn.SetWidth(fieldsize)
                if lyr.CreateField(field_defn) != 0:
                    print "Creating field '%s' failed.\n" % name
            # Fill the layer with data
            lon_idx, lat_idx = 36, 37  # column indexes of lon and lat in cards
            for row in cards[1:]:  # skip the header row
                row = [try_encode(v, 'cp1251') for v in row]
                x = row[lon_idx]
                y = row[lat_idx]
                if x and y:
                    x = float(row[lon_idx])
                    y = float(row[lat_idx])
                    feat = ogr.Feature(lyr.GetLayerDefn())
                    for i, name in enumerate(fieldnames):
                        if row[i]:
                            feat.SetField(name, row[i])
                    pt = ogr.Geometry(ogr.wkbPoint)
                    pt.SetPoint_2D(0, x, y)
                    feat.SetGeometry(pt)
                    if lyr.CreateFeature(feat) != 0:
                        print "Failed to create feature in shapefile.\n"
                    feat.Destroy()
            ds = None

            zipfd = tempfile.NamedTemporaryFile(delete=False, suffix='.zip', prefix='')
            zipa = zipfile.ZipFile(zipfd, 'w')
            for dirname, dirnames, filenames in os.walk(workdir):
                for filename in filenames:
                    zipa.write(os.path.join(dirname, filename),
                               os.path.join(dirname, filename).replace(workdir + os.sep, ''),
                               zipfile.ZIP_DEFLATED)
            zipa.close()
            file = open(zipa.filename, 'rb')
            data = file.read()
            file.close()
            os.remove(zipa.filename)
            resname = 'cards.zip'
        finally:
            # clean up the working directory with the collected data in any case
            shutil.rmtree(workdir)

    return Response(content_type="application/octet-stream",
                    content_disposition="attachment; filename=%s" % (resname,),
                    body=data)
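

# Example requests against this view (the route path and taxon_list values below are
# assumed; adjust them to the application's actual routing):
#   GET /cards/download/csv?taxon_list=kingdom_1,species_123
#   GET /cards/download/shp?taxon_list=
# Both return the data as an attachment: cards.csv (tab-separated) or cards.zip
# (a zipped ESRI Shapefile).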


def main(argv=sys.argv):
    if len(argv) != 2 and len(argv) != 3:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    md5_pass = False
    if len(argv) == 3 and argv[2] == '--md5-pass':
        md5_pass = True

    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    # Populate the tables with data:

    # Taxa
    taxons_file = 'nextgisbio/initial_data/csv/taxon.csv'
    Taxon.add_from_file(taxons_file)
    synonym_file = 'nextgisbio/initial_data/csv/synonym.csv'
    Synonym.add_from_file(synonym_file)

    # Reference (lookup) tables
    person_file = 'nextgisbio/initial_data/csv/person.csv'
    Person.add_from_file(person_file)
    taxa_file = 'nextgisbio/initial_data/csv/taxa_scheme.csv'
    Taxa_scheme.add_from_file(taxa_file)
    museum_file = 'nextgisbio/initial_data/csv/museum.csv'
    Museum.add_from_file(museum_file)
    coord_type_file = 'nextgisbio/initial_data/csv/coord_type.csv'
    Coord_type.add_from_file(coord_type_file)
    ant_file = 'nextgisbio/initial_data/csv/anthr_press.csv'
    Anthr_press.add_from_file(ant_file)
    vital_file = 'nextgisbio/initial_data/csv/vitality.csv'
    Vitality.add_from_file(vital_file)
    abundance_file = 'nextgisbio/initial_data/csv/abundance.csv'
    Abundance.add_from_file(abundance_file)
    footprint_file = 'nextgisbio/initial_data/csv/footprint.csv'
    Footprint.add_from_file(footprint_file)
    pheno_file = 'nextgisbio/initial_data/csv/pheno.csv'
    Pheno.add_from_file(pheno_file)
    infores_file = 'nextgisbio/initial_data/csv/inforesources.csv'
    Inforesources.add_from_file(infores_file)
    area_type_file = 'nextgisbio/initial_data/csv/area_type.csv'
    Area_type.add_from_file(area_type_file)
    legend_file = 'nextgisbio/initial_data/csv/legend.csv'
    Legend.add_from_file(legend_file)
    key_area_file = 'nextgisbio/initial_data/csv/key_area.csv'
    Key_area.add_from_file(key_area_file)

    # Load the shapefile and populate the many-to-many association table
    # (square_keyarea_association) between Squares and Key_area
    shp_file = 'nextgisbio/initial_data/shp/key_areas_25km.shp'
    association_file = 'nextgisbio/initial_data/csv/square_karea_association.csv'
    Squares.add_from_file(association_file, shp_file)

    # Cards and annotations
    cards_file = 'nextgisbio/initial_data/csv/cards.csv'
    Cards.add_from_file(cards_file)
    ann_file = 'nextgisbio/initial_data/csv/annotation.csv'
    Annotation.add_from_file(ann_file)

    # Users
    users_file = 'nextgisbio/initial_data/csv/user.csv'
    User.add_from_file(users_file, md5_pass)

    red_books_csv = 'nextgisbio/initial_data/csv/redbooks.csv'
    RedBook.import_from_csv(red_books_csv)
    images_csv = 'nextgisbio/initial_data/csv/images.csv'
    Images.import_from_csv(images_csv)
    cards_images_csv = 'nextgisbio/initial_data/csv/cards_images.csv'
    CardsImages.import_from_csv(cards_images_csv)
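

# The usage() helper called at the top of main() is not shown in this section; a
# minimal sketch of what it might look like (assumed, modelled on the standard
# Pyramid initialize-db script pattern):
def usage(argv):
    cmd = os.path.basename(argv[0])
    print 'usage: %s <config_uri> [--md5-pass]\n(example: "%s development.ini")' % (cmd, cmd)
    sys.exit(1)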