def sync(self):
    """Create the target table if needed and add any missing TEXT columns.

    For every ``(column, label)`` pair in ``self.headers`` that is not yet
    present in the table, a TEXT column is added and its label stored as a
    column comment.  Commits on success.
    """
    with self.conn.cursor() as cursor:
        stmt = SQL("CREATE TABLE IF NOT EXISTS {} ();")
        stmt = stmt.format(Identifier(self.table))
        cursor.execute(stmt)

        stmt = SQL("SELECT column_name FROM "
                   "information_schema.columns "
                   "WHERE table_name = %s;")  # noqa
        cursor.execute(stmt, (self.table,))
        columns = [c[0] for c in cursor.fetchall()]
        # psycopg2 returns ``str`` on Python 3 (``bytes`` only on Python 2 /
        # binary protocols); the original decoded unconditionally, which
        # raises AttributeError on str.  Decode only when actually bytes.
        columns = [
            c.decode(self.conn.encoding) if isinstance(c, bytes) else c
            for c in columns
        ]

        for column, label in self.headers.items():
            if column not in columns:
                stmt = SQL("ALTER TABLE {} ADD COLUMN {} TEXT;")
                stmt = stmt.format(Identifier(self.table), Identifier(column))
                cursor.execute(stmt)

                stmt = SQL("COMMENT ON COLUMN {}.{} IS {};")
                stmt = stmt.format(Identifier(self.table),
                                   Identifier(column),
                                   Literal(label))
                cursor.execute(stmt)
        self.conn.commit()
def getSiteConfigKey(conn, key, site_id):
    """Fetch a config value for a site, falling back to the global default.

    Returns ``''`` when neither a site-specific row nor a default
    (``site_id is null``) row exists for ``key``.
    """
    value = ''
    with conn.cursor() as cursor:
        site_query = SQL("""
            select value
            from config
            where key = {}
            and site_id = {}
            """).format(Literal(key), Literal(site_id))
        print(site_query.as_string(conn))
        cursor.execute(site_query)
        for record in cursor:
            value = record[0]
        conn.commit()

        # Fall back to the site-independent default when nothing was found.
        if value == '':
            default_query = SQL("""
                select value
                from config
                where key = {}
                and site_id is null
                """).format(Literal(key))
            print(default_query.as_string(conn))
            cursor.execute(default_query)
            for record in cursor:
                value = record[0]
            conn.commit()
    return value
def _rename_if_exists(self, name, suffix=""):
    """
    Rename an index or constraint if it exists, appending ``_depN`` if so.

    INPUT:

    - ``name`` -- a string, the name of an index or constraint
    - ``suffix`` -- a suffix to append to the name
    """
    if self._relation_exists(name + suffix):
        # First we determine its type
        kind = None
        tablename = self._constraint_exists(name + suffix)
        if tablename:
            kind = "Constraint"
            # Constraints are renamed and dropped through their owning
            # table, so both statements carry an ALTER TABLE prefix.
            begin_renamer = SQL(
                "ALTER TABLE {0} RENAME CONSTRAINT").format(
                    Identifier(tablename))
            end_renamer = SQL("{0} TO {1}")
            begin_command = SQL("ALTER TABLE {0}").format(
                Identifier(tablename))
            end_command = SQL("DROP CONSTRAINT {0}")
        elif self._index_exists(name + suffix):
            kind = "Index"
            # Indexes are renamed/dropped directly; the "begin" parts are
            # empty so the same concatenation code path works for both kinds.
            begin_renamer = SQL("")
            end_renamer = SQL("ALTER INDEX {0} RENAME TO {1}")
            begin_command = SQL("")
            end_command = SQL("DROP INDEX {0}")
        else:
            raise ValueError(
                "Relation with name " + name + suffix +
                " already exists. And it is not an index or a constraint")
        # Find a new name for the existing index
        # (truncate the base name so that base + "_depN" + suffix stays
        # within the identifier length budget, bumping N until free).
        depsuffix = "_dep0" + suffix
        i = 0
        deprecated_name = name[:64 - len(depsuffix)] + depsuffix
        while self._relation_exists(deprecated_name):
            i += 1
            depsuffix = "_dep" + str(i) + suffix
            deprecated_name = name[:64 - len(depsuffix)] + depsuffix
        self._execute(begin_renamer + end_renamer.format(
            Identifier(name + suffix), Identifier(deprecated_name)))
        command = begin_command + end_command.format(
            Identifier(deprecated_name))
        # Warn the operator and hand them the exact SQL needed to delete
        # the renamed relation by hand.
        logging.warning(
            "{} with name {} ".format(kind, name + suffix) +
            "already exists. " +
            "It has been renamed to {} ".format(deprecated_name) +
            "and it can be deleted with the following SQL command:\n" +
            self._db.cursor().mogrify(command))
def move_rows(
    base_model: Type[Model],
    raw_query: SQL,
    *,
    src_db_table: Optional[str] = None,
    returning_id: bool = False,
    **kwargs: Composable,
) -> List[int]:
    """Core helper for bulk moving rows between a table and its archive table"""
    if src_db_table is None:
        # Use base_model's db_table unless otherwise specified.
        src_db_table = base_model._meta.db_table
    fields = [f for f in base_model._meta.fields if f not in EXCLUDE_FIELDS]
    # Source columns are table-qualified; destination columns are bare.
    format_args = {
        "src_fields": SQL(",").join(
            Identifier(src_db_table, f.column) for f in fields
        ),
        "dst_fields": SQL(",").join(Identifier(f.column) for f in fields),
        **kwargs,  # caller-supplied fragments may override the defaults
    }
    with connection.cursor() as cursor:
        cursor.execute(raw_query.format(**format_args))
        if not returning_id:
            return []
        # Each returned row is a 1-tuple holding a moved row's id.
        return [row_id for (row_id,) in cursor.fetchall()]
def apply_anonymized_data(connection, temp_table, source_table, primary_key,
                          definitions):
    """Copy anonymized column values from ``temp_table`` into ``source_table``.

    Joins the two tables on ``primary_key`` and overwrites every column
    named in ``definitions``.  An index is created on the temp table first
    to speed up the join.
    """
    logging.info('Applying changes on table {}'.format(source_table))
    # Context-manage the cursor so it is closed even when an execute()
    # raises; the original created it by hand and leaked it on error.
    with connection.cursor() as cursor:
        create_index_sql = SQL('CREATE INDEX ON {temp_table} ({primary_key})')
        sql = create_index_sql.format(temp_table=Identifier(temp_table),
                                      primary_key=Identifier(primary_key))
        cursor.execute(sql.as_string(connection))

        column_names = get_column_names(definitions)
        columns_identifiers = [
            SQL('{column} = s.{column}').format(column=Identifier(column))
            for column in column_names
        ]
        set_columns = SQL(', ').join(columns_identifiers)

        sql_args = {
            "table": Identifier(source_table),
            "columns": set_columns,
            "source": Identifier(temp_table),
            "primary_key": Identifier(primary_key),
        }
        sql = SQL('UPDATE {table} t '
                  'SET {columns} '
                  'FROM {source} s '
                  'WHERE t.{primary_key} = s.{primary_key}').format(**sql_args)
        cursor.execute(sql.as_string(connection))
def drop(self):
    """Drop the backing table (a no-op when it does not exist) and commit."""
    drop_stmt = SQL('DROP TABLE IF EXISTS {};').format(Identifier(self.table))
    with self.conn.cursor() as cursor:
        cursor.execute(drop_stmt)
        self.conn.commit()
def exportFilterIdsFile(config, conn, pg_path, site_id, out_file):
    """Export the sequence ids ("NewID") of the site's non-deleted declared
    parcels to ``out_file`` via ogr2ogr."""
    lpis_table = "decl_{}_{}".format(config.site_short_name, config.year)
    srid = get_srid(conn, lpis_table)
    with conn.cursor() as cursor:
        query = SQL("""
            select lpis."NewID" as "SEQ_ID"
            from l4c_practices ap
            inner join {} lpis on lpis.ori_id = ap.orig_id
            where ap.site_id = {}
            and ap.year = {}
            and not lpis.is_deleted
            order by lpis."NewID"
            """).format(Identifier(lpis_table), Literal(site_id),
                        Literal(config.year))
        query_str = query.as_string(conn)
        print(query_str)

        # The export table is named after the output file (lowercased stem).
        base_name = os.path.basename(out_file)
        table_name = os.path.splitext(base_name)[0].lower()
        command = get_export_table_command(
            config.ogr2ogr_path, out_file, pg_path,
            "-nln", table_name,
            "-sql", query_str,
            "-a_srs", "EPSG:" + str(srid),
            "-gt", 100000)
        run_command(command)
def save(self, knowl, who):
    """who is the ID of the user, who wants to save the knowl"""
    # Load the stored copy (if any, excluding deleted knowls) so authorship
    # can be accumulated across saves.
    most_recent = self.get_knowl(knowl.id, ['id'] + self._default_fields,
                                 allow_deleted=False)
    new_knowl = most_recent is None
    if new_knowl:
        authors = []
    else:
        authors = most_recent.pop('authors', [])

    if who and who not in authors:
        authors = authors + [who]

    search_keywords = make_keywords(knowl.content, knowl.id, knowl.title)
    cat = extract_cat(knowl.id)
    # When renaming, source is set explicitly on the knowl
    if knowl.type == 0 and knowl.source is not None:
        typ, source, name = 0, knowl.source, knowl.source_name
    else:
        typ, source, name = extract_typ(knowl.id)
    links = extract_links(knowl.content)
    defines = extract_defines(knowl.content)
    # id, authors, cat, content, last_author, timestamp, title, status, type, links, defines, source, source_name
    values = (knowl.id, authors, cat, knowl.content, who, knowl.timestamp,
              knowl.title, knowl.status, typ, links, defines, source, name,
              search_keywords)
    with DelayCommit(self):
        # Build "INSERT INTO kwl_knowls (id, <fields>, _keywords) VALUES (...)"
        # with one Placeholder per value: the default fields plus the two
        # extra columns (id, _keywords), hence len(...) + 2.
        inserter = SQL("INSERT INTO kwl_knowls (id, {0}, _keywords) VALUES ({1})")
        inserter = inserter.format(
            SQL(', ').join(map(Identifier, self._default_fields)),
            SQL(", ").join(Placeholder() * (len(self._default_fields) + 2)))
        self._execute(inserter, values)
    # Keep the in-memory title cache in sync with the database.
    self.cached_titles[knowl.id] = knowl.title
def save(self, knowl, who):
    """who is the ID of the user, who wants to save the knowl"""
    # Fetch the stored copy together with its history so the current
    # version can be appended to the trail and the author list extended.
    stored = self.get_knowl(knowl.id,
                            ['id'] + self._default_fields + ['history'])
    if stored is None:
        history = []
        authors = []
    else:
        history = stored.pop('history')
        history = history + [stored] if history is not None else []
        authors = stored.pop('authors', []) or []

    if who and who not in authors:
        authors = authors + [who]

    search_keywords = make_keywords(knowl.content, knowl.id, knowl.title)
    cat = extract_cat(knowl.id)
    values = (authors, cat, knowl.content, who, knowl.quality,
              knowl.timestamp, knowl.title, history, search_keywords)
    with DelayCommit(self):
        # Upsert: insert a new row, or overwrite every listed field when a
        # row with this id already exists.  One placeholder per value:
        # the default fields plus history and _keywords.
        upserter = SQL("INSERT INTO kwl_knowls (id, {0}, history, _keywords) VALUES (%s, {1}) ON CONFLICT (id) DO UPDATE SET ({0}, history, _keywords) = ({1})")
        field_list = SQL(', ').join(map(Identifier, self._default_fields))
        placeholders = SQL(", ").join(
            Placeholder() * (len(self._default_fields) + 2))
        upserter = upserter.format(field_list, placeholders)
        self._execute(upserter, (knowl.id,) + values + values)
    self.save_history(knowl, who)
def save(self, knowl, who):
    """who is the ID of the user, who wants to save the knowl"""
    # Load the stored copy plus its accumulated history (None for new knowls).
    new_history_item = self.get_knowl(
        knowl.id, ['id'] + self._default_fields + ['history'])
    new_knowl = new_history_item is None
    if new_knowl:
        history = []
        authors = []
    else:
        history = new_history_item.pop('history')
        if history is not None:
            # Append the currently stored version to the history trail.
            history += [new_history_item]
        else:
            history = []
        authors = new_history_item.pop('authors', [])
        if authors is None:
            authors = []

    if who and who not in authors:
        authors = authors + [who]

    search_keywords = make_keywords(knowl.content, knowl.id, knowl.title)
    cat = extract_cat(knowl.id)
    values = (authors, cat, knowl.content, who, knowl.quality,
              knowl.timestamp, knowl.title, history, search_keywords)
    with DelayCommit(self):
        # Upsert: insert, or overwrite all listed fields when the id exists.
        # One Placeholder per value: default fields plus history and
        # _keywords, hence len(...) + 2.
        insterer = SQL(
            "INSERT INTO kwl_knowls (id, {0}, history, _keywords) VALUES (%s, {1}) ON CONFLICT (id) DO UPDATE SET ({0}, history, _keywords) = ({1})"
        )
        insterer = insterer.format(
            SQL(', ').join(map(Identifier, self._default_fields)),
            SQL(", ").join(Placeholder() * (len(self._default_fields) + 2)))
        # Values are passed twice: once for INSERT, once for the UPDATE arm.
        self._execute(insterer, (knowl.id, ) + values + values)
    self.save_history(knowl, who)
def get_tile_footprints(conn, site_id):
    """Return a mapping ``tile_id -> (OGR geometry, EPSG code)`` covering
    every Sentinel-2 tile that belongs to the given site."""
    query = SQL(
        """
        select shape_tiles_s2.tile_id,
               shape_tiles_s2.epsg_code,
               ST_AsBinary(shape_tiles_s2.geog) as geog
        from shape_tiles_s2
        where shape_tiles_s2.tile_id in (
            select tile_id
            from sp_get_site_tiles({} :: smallint, 1 :: smallint)
        );
        """
    ).format(Literal(site_id))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        rows = cursor.fetchall()
        conn.commit()

    footprints = {}
    for tile_id, epsg_code, wkb in rows:
        # The geography comes back as a WKB buffer; parse it into OGR form.
        geometry = ogr.CreateGeometryFromWkb(bytes(wkb))
        footprints[tile_id] = (geometry, epsg_code)
    return footprints
def save(self, knowl, who, most_recent=None, minor=False):
    """who is the ID of the user, who wants to save the knowl"""
    # ``most_recent`` may be supplied by the caller to avoid a second
    # lookup; otherwise load the stored copy (excluding deleted knowls).
    if most_recent is None:
        most_recent = self.get_knowl(knowl.id, ['id'] + self._default_fields,
                                     allow_deleted=False)
    new_knowl = most_recent is None
    if new_knowl:
        authors = []
    else:
        authors = most_recent.pop('authors', [])

    # Minor edits do not add the editor to the author list.
    if not minor and who and who not in authors:
        authors = authors + [who]

    search_keywords = make_keywords(knowl.content, knowl.id, knowl.title)
    cat = extract_cat(knowl.id)
    # When renaming, source is set explicitly on the knowl
    if knowl.type == 0 and knowl.source is not None:
        typ, source, name = 0, knowl.source, knowl.source_name
    else:
        typ, source, name = extract_typ(knowl.id)
    links = extract_links(knowl.content)
    defines = extract_defines(knowl.content)
    # id, authors, cat, content, last_author, timestamp, title, status, type, links, defines, source, source_name
    values = (knowl.id, authors, cat, knowl.content, who, knowl.timestamp,
              knowl.title, knowl.status, typ, links, defines, source, name,
              search_keywords)
    with DelayCommit(self):
        # One Placeholder per value: the default fields plus the two extra
        # columns (id, _keywords), hence len(...) + 2.
        inserter = SQL("INSERT INTO kwl_knowls (id, {0}, _keywords) VALUES ({1})")
        inserter = inserter.format(
            SQL(', ').join(map(Identifier, self._default_fields)),
            SQL(", ").join(Placeholder() * (len(self._default_fields) + 2)))
        self._execute(inserter, values)
    # Keep the in-memory title cache in sync with the database.
    self.cached_titles[knowl.id] = knowl.title
def get_updates(config, conn):
    """Return ``[(product_id, tiles)]`` for downloader_history rows that
    are missing tile information.

    Products whose tile list comes back empty are recorded with ``[]``;
    products raising unexpected errors are reported and skipped.
    """
    update_list = []
    with conn.cursor() as cursor:
        query = SQL("""
            select id, satellite_id, full_path
            from downloader_history
            where tiles is null
            and status_id in (2, 5, 6, 7)
            {}
            """)
        if config.site_id is not None:
            # Restrict to one site via a bound parameter.
            query = query.format(SQL("and site_id = {}").format(Placeholder()))
            cursor.execute(query, (config.site_id,))
        else:
            query = query.format(SQL(""))
            cursor.execute(query)
        products = cursor.fetchall()
        conn.commit()

    for (product_id, satellite_id, full_path) in products:
        try:
            if satellite_id == 1:
                tiles = get_sentinel_tiles(full_path)
            elif satellite_id == 2:
                tiles = get_landsat_tiles(full_path)
            else:
                print("Unknown satellite id {} for product {}".format(
                    satellite_id, full_path))
                continue
            # NOTE: the original had a third "Unable to recognize product"
            # branch after checking len(tiles) > 0 and len(tiles) == 0; a
            # length is never negative, so that branch was unreachable and
            # has been removed.
            if tiles:
                update_list.append((product_id, tiles))
            else:
                print("No tiles found for product {}".format(full_path))
                update_list.append((product_id, []))
        except Exception:
            print("While checking product {}".format(full_path))
            traceback.print_exc()
    return update_list
def get_l2a_products_from_db(config, conn):
    """Return ``L2AProduct`` entries (relative + absolute HDR paths) for
    every L2A product of the configured site within
    ``[config.start_date, config.end_date]``.

    Products present in the DB but missing on disk are reported and
    skipped.
    """
    with conn.cursor() as cursor:
        query = SQL("""
            with products as (
                select product.site_id,
                       product.name,
                       product.created_timestamp as date,
                       product.processor_id,
                       product.product_type_id,
                       product.full_path,
                       product.tiles
                from product
                where product.site_id = {}
                and product.product_type_id = 1
            )
            select products.date,
                   products.tiles,
                   products.name,
                   products.full_path
            from products
            where date between {} and {}
            order by date;
            """)
        query = query.format(Literal(config.site_id),
                             Literal(config.start_date),
                             Literal(config.end_date))
        cursor.execute(query)
        results = cursor.fetchall()
        conn.commit()

    products = []
    for (dt, tiles, prdName, full_path) in results:
        try:
            files = os.listdir(full_path)
        except OSError:
            # Was a bare ``except:`` — narrowed so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            print("Product {} found in DB but not on disk".format(full_path))
            continue
        # Sentinel-2 MACCS HDRs first, then the Landsat-8 naming fallback.
        l2aTilePaths = fnmatch.filter(files, "S2*_OPER_SSC_L2VALD_*.HDR")
        if len(l2aTilePaths) == 0:
            l2aTilePaths = fnmatch.filter(files, "L8_TEST_L8C_L2VALD_*.HDR")
        for file in l2aTilePaths:
            relHdrFilePath = os.path.join(prdName, file)
            fullHdrFilePath = os.path.join(full_path, file)
            products.append(L2AProduct(relHdrFilePath, fullHdrFilePath))
    return products
def get_radar_products(config, conn, site_id):
    """Return the site's radar (satellite_id = 3) products that intersect
    its S2 tiles within the configured season, optionally restricted to
    the product-name whitelist ``config.products``."""
    query = SQL("""
        select *
        from (
            select greatest(substr(split_part(product.name, '_', 4), 2),
                            split_part(product.name, '_', 5)) :: date as date,
                   site_tiles.tile_id,
                   product.orbit_type_id,
                   split_part(product.name, '_', 6) as polarization,
                   product.product_type_id,
                   product.name,
                   product.full_path
            from sp_get_site_tiles({} :: smallint, 1 :: smallint) as site_tiles
            inner join shape_tiles_s2 on shape_tiles_s2.tile_id = site_tiles.tile_id
            inner join product on ST_Intersects(product.geog, shape_tiles_s2.geog)
            where product.satellite_id = 3
            and product.site_id = {}
        ) products
        where date between {} and {}
        order by date;
        """).format(Literal(site_id), Literal(site_id),
                    Literal(config.season_start), Literal(config.season_end))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        rows = cursor.fetchall()
        conn.commit()

    products = []
    for (dt, tile_id, orbit_type_id, polarization,
         radar_product_type, name, full_path) in rows:
        # Keep every product unless an explicit whitelist was configured.
        if config.products is None or name in config.products:
            products.append(RadarProduct(dt, tile_id, orbit_type_id,
                                         polarization, radar_product_type,
                                         full_path))
    return products
def get_radar_products(config, conn, site_id):
    """Return ``RadarProduct`` entries for the site's radar products within
    the configured season, filtered by ``config.file_type`` and (when set)
    ``config.polarisation``."""
    query = SQL("""
        with products as (
            select product.site_id,
                   site_tiles.tile_id,
                   product.name,
                   case
                       when substr(product.name, 17, 8) > substr(product.name, 33, 8)
                       then substr(product.name, 17, 8)
                       else substr(product.name, 33, 8)
                   end :: date as date,
                   coalesce(product.orbit_type_id, 1) as orbit_type_id,
                   substr(product.name, 49, 2) as polarization,
                   product.processor_id,
                   product.product_type_id,
                   substr(product.name, length(product.name) - strpos(reverse(product.name), '_') + 2) as radar_product_type,
                   product.orbit_id,
                   product.full_path
            from sp_get_site_tiles({} :: smallint, 1 :: smallint) as site_tiles
            inner join shape_tiles_s2 on shape_tiles_s2.tile_id = site_tiles.tile_id
            inner join product on ST_Intersects(product.geog, shape_tiles_s2.geog)
            where product.satellite_id = 3
            and product.site_id = {}
        )
        select products.date,
               products.tile_id,
               products.orbit_type_id,
               products.polarization,
               products.radar_product_type,
               products.full_path
        from products
        where date between {} and {}
        order by date;
        """).format(Literal(site_id), Literal(site_id),
                    Literal(config.season_start), Literal(config.season_end))
    with conn.cursor() as cursor:
        cursor.execute(query)
        rows = cursor.fetchall()
        conn.commit()

    products = []
    for (dt, tile_id, orbit_type_id, polarization,
         radar_product_type, full_path) in rows:
        # Keep only the requested product kind; an empty config.polarisation
        # accepts any polarisation.
        wanted = (config.file_type == radar_product_type
                  and (config.polarisation == ""
                       or config.polarisation == polarization))
        if wanted:
            products.append(RadarProduct(dt, tile_id, orbit_type_id,
                                         polarization, radar_product_type,
                                         full_path))
    return products
def load(self):
    """COPY the proxy stream (CSV with header, '' treated as NULL) into
    the backing table."""
    column_list = Composed(
        [Identifier(name) for name in self.headers.keys()]
    ).join(', ')
    stmt = SQL("COPY {} ({}) FROM STDIN WITH CSV HEADER NULL AS ''").format(
        Identifier(self.table), column_list)
    with self.conn.cursor() as cursor:
        cursor.copy_expert(stmt, self.proxy)
        self.conn.commit()
def _grant(self, action, table_name, users):
    """
    Utility function for granting permissions on tables.

    INPUT:

    - ``action`` -- one of ``"SELECT"``, ``"INSERT"``, ``"UPDATE"``,
      ``"DELETE"`` (case-insensitive)
    - ``table_name`` -- the table on which to grant
    - ``users`` -- an iterable of role names receiving the grant

    Raises ``ValueError`` for any other action.
    """
    action = action.upper()
    if action not in ["SELECT", "INSERT", "UPDATE", "DELETE"]:
        raise ValueError("%s is not a valid action" % action)
    # Compose the verb with SQL() instead of %-interpolating it into the
    # template string.  The action is whitelisted above, so behavior is
    # identical, but this keeps all query construction inside psycopg2's
    # composition API.
    grantor = SQL("GRANT {0} ON TABLE {1} TO {2}")
    for user in users:
        self._execute(grantor.format(SQL(action),
                                     Identifier(table_name),
                                     Identifier(user)),
                      silent=True)
def get_srid(conn, lpis_table):
    """Return the SRID of ``lpis_table``'s ``wkb_geometry`` column."""
    query = SQL("select Find_SRID('public', {}, 'wkb_geometry')").format(
        Literal(lpis_table))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        srid = cursor.fetchone()[0]
        conn.commit()
    return srid
def process_db_ndvi_products(config, conn, site_id):
    """Check, tile by tile, every NDVI (product_type_id = 3) product of
    ``site_id`` created within the configured season.

    For each product the ``TILES/`` sub-directory is scanned for the
    matching ``S2AGRI_L3B_A<acq>_T<tile>`` directory, which is handed to
    ``checkTileDir()``.  Products missing on disk are reported and skipped.
    """
    with conn.cursor() as cursor:
        query = SQL("""
            with products as (
                select product.site_id,
                       product.name,
                       product.created_timestamp as date,
                       product.processor_id,
                       product.product_type_id,
                       product.full_path,
                       product.tiles
                from product
                where product.site_id = {}
                and product.product_type_id = 3
            )
            select products.date,
                   products.tiles,
                   products.full_path
            from products
            where date between {} and {}
            order by date;
            """)
        query = query.format(Literal(site_id),
                             Literal(config.season_start),
                             Literal(config.season_end))
        cursor.execute(query)
        results = cursor.fetchall()
        conn.commit()

    print("Extracted a number of {} products to check".format(len(results)))
    for (dt, tiles, full_path) in results:
        tilesPath = os.path.join(full_path, "TILES")
        try:
            tilesDirs = os.listdir(tilesPath)
        except OSError:
            # Was a bare ``except:`` — narrowed to the expected listing
            # error so interrupts are not swallowed.
            print("Product {} found in DB but not on disk".format(full_path))
            continue
        for tile in tiles:
            tilePaths = fnmatch.filter(tilesDirs,
                                       "S2AGRI_L3B_A*_T{}".format(tile))
            if len(tilePaths) == 1:
                subPath = tilePaths[0]
                # Raw string so \d is a regex class, not a (deprecated)
                # string escape; {{ }} survive .format() as literal braces.
                m = re.match(
                    r"S2AGRI_L3B_A(\d{{8}}T\d{{6}})_T{}".format(tile),
                    subPath)
                checkTileDir(config, tilesPath, subPath, tile, m.group(1))
def get_product_id_by_name(conn, prd_name):
    """Return the id of the product named ``prd_name``; -1 when absent."""
    product_id = -1
    query = SQL("""
        select id
        from product
        where name = {}
        """).format(Literal(prd_name))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        for record in cursor:
            product_id = record[0]
        conn.commit()
    return product_id
def load(self, fh, delimiter):
    """COPY the CSV stream ``fh`` into the backing table using
    ``delimiter``; empty strings are treated as NULL."""
    column_list = Composed(
        [Identifier(name) for name in self.headers.keys()]
    ).join(', ')
    stmt = SQL("COPY {} ({}) FROM STDIN "
               "WITH CSV HEADER DELIMITER AS {} NULL AS ''").format(
                   Identifier(self.table), column_list, Literal(delimiter))
    with self.conn.cursor() as cursor:
        print(stmt.as_string(cursor))
        cursor.copy_expert(stmt, fh)
        self.conn.commit()
def get_product_path(conn, prd_id):
    """Return the full_path of product ``prd_id``; '' when it is absent."""
    product_path = ''
    query = SQL("""
        select full_path
        from product
        where id = {}
        """).format(Literal(prd_id))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        for record in cursor:
            product_path = record[0]
        conn.commit()
    return product_path
def _get_shared_cols(downstream, upstream, table: str):
    """
    Given two database connections and a table name, return the list of columns
    that the two tables have in common. The upstream table has the "_view"
    suffix attached to it.

    :param downstream: an open connection to the downstream PostgreSQL database
    :param upstream: an open connection to the upstream PostgreSQL database
    :param table: the name of the downstream table
    :return: a list of the column names that are common to both databases
    """
    # A LIMIT 0 probe fetches no rows but still populates cursor.description.
    probe = SQL("SELECT * FROM {table} LIMIT 0;")
    with downstream.cursor() as down_cur, upstream.cursor() as up_cur:
        down_cur.execute(probe.format(table=Identifier(table)))
        downstream_cols = {desc[0] for desc in down_cur.description}
        up_cur.execute(probe.format(table=Identifier(f"{table}_view")))
        upstream_cols = {desc[0] for desc in up_cur.description}
        shared = list(downstream_cols & upstream_cols)
        log.info(f"Shared columns: {shared}")
    return shared
def exportPracticesFile(config, conn, pg_path, practice, site_id, out_file):
    """Export, via ogr2ogr, one practice's parcel table for a site.

    Joins the yearly declarations (decl_*) and LUT (lut_*) tables with
    l4c_practices and writes the selected rows to ``out_file``, tagged
    with the declarations table's SRID.
    """
    lpis_table = "decl_{}_{}".format(config.site_short_name, config.year)
    lut_table = "lut_{}_{}".format(config.site_short_name, config.year)
    srid = get_srid(conn, lpis_table)
    with conn.cursor() as cursor:
        query = SQL("""
            select lpis."NewID" as "SEQ_ID",
                   lpis.ori_id as "FIELD_ID",
                   ap.country as "COUNTRY",
                   ap.year as "YEAR",
                   ap.main_crop as "MAIN_CROP",
                   ap.veg_start as "VEG_START",
                   ap.h_start as "H_START",
                   ap.h_end as "H_END",
                   ap.practice as "PRACTICE",
                   ap.p_type as "P_TYPE",
                   ap.p_start as "P_START",
                   ap.p_end as "P_END",
                   lpis."GeomValid",
                   lpis."Duplic",
                   lpis."Overlap",
                   lpis."Area_meters" as "Area_meter",
                   lpis."ShapeInd",
                   lut.ctnum as "CTnum",
                   lut.ct as "CT",
                   lut.lc as "LC",
                   lpis."S1Pix",
                   lpis."S2Pix"
            from l4c_practices ap
            inner join {} lpis on lpis.ori_id = ap.orig_id
            natural join {} lut
            where ap.site_id = {}
            and ap.year = {}
            and ap.practice_short_name = {}
            and not lpis.is_deleted
            order by lpis."NewID"
            """)
        query = query.format(Identifier(lpis_table), Identifier(lut_table),
                             Literal(site_id), Literal(config.year),
                             Literal(practice))
        query_str = query.as_string(conn)
        print(query_str)
        # TODO: change natural join with inner join lut_nld_2019_2019 lut using (ori_crop) or something like this

        # The export table is named after the output file (lowercased stem).
        name = os.path.basename(out_file)
        table_name = os.path.splitext(name)[0].lower()
        command = get_export_table_command(config.ogr2ogr_path, out_file,
                                           pg_path, "-nln", table_name,
                                           "-sql", query_str,
                                           "-a_srs", "EPSG:" + str(srid),
                                           "-gt", 100000)
        run_command(command)
def get_ndvi_products_from_db(config, conn, site_id):
    """Build ``NdviProduct`` entries (date, tile, SNDVI tif path) for every
    NDVI (product_type_id = 3) product of the site within the configured
    season.

    Paths are derived from the product layout; they are not checked for
    existence on disk."""
    query = SQL("""
        with products as (
            select product.site_id,
                   product.name,
                   product.created_timestamp as date,
                   product.processor_id,
                   product.product_type_id,
                   product.full_path,
                   product.tiles
            from product
            where product.site_id = {}
            and product.product_type_id = 3
        )
        select products.date,
               products.tiles,
               products.full_path
        from products
        where date between {} and {}
        order by date;
        """).format(Literal(site_id), Literal(config.season_start),
                    Literal(config.season_end))
    with conn.cursor() as cursor:
        cursor.execute(query)
        rows = cursor.fetchall()
        conn.commit()

    products = []
    for (dt, tiles, full_path) in rows:
        acq_date = dt.strftime("%Y%m%dT%H%M%S")
        for tile in tiles:
            tif_path = os.path.join(
                full_path,
                "TILES",
                "S2AGRI_L3B_A{}_T{}".format(acq_date, tile),
                "IMG_DATA",
                "S2AGRI_L3B_SNDVI_A{}_T{}.TIF".format(acq_date, tile))
            products.append(NdviProduct(dt, tile, tif_path))
    return products
def get_site_name(conn, site_id):
    """Return the short_name of the site with id ``site_id``.

    NOTE(review): raises IndexError when no such site exists — same as the
    original behavior."""
    query = SQL("""
        select short_name
        from site
        where id = {}
        """).format(Literal(site_id))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        rows = cursor.fetchall()
        conn.commit()
    return rows[0][0]
def getLpisProduct(conn, site_id):
    """Return the full_path of the site's LPIS product, or '' when none
    exists.  When several rows match, the last one fetched wins."""
    lpis_prod = ''
    query = SQL("""
        select full_path
        from product
        where product_type_id in (select id from product_type where name = 'lpis')
        and site_id = {}
        """).format(Literal(site_id))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        for record in cursor:
            lpis_prod = record[0]
        conn.commit()
    return lpis_prod
def create_temporary_table(connection, definitions, source_table, temp_table,
                           primary_key):
    """Create an empty TEMP table mirroring the primary key plus the
    columns named in ``definitions`` of ``source_table``.

    Falls back to ``DEFAULT_PRIMARY_KEY`` when ``primary_key`` is falsy.
    """
    primary_key = primary_key if primary_key else DEFAULT_PRIMARY_KEY
    column_names = get_column_names(definitions)
    sql_columns = SQL(', ').join([
        Identifier(column_name)
        for column_name in [primary_key] + column_names
    ])
    # WITH NO DATA copies only the column structure, no rows.
    ctas_query = SQL("""CREATE TEMP TABLE {temp_table} AS
                        SELECT {columns}
                        FROM {source_table}
                        WITH NO DATA""")
    # Context-manage the cursor so it is closed even when execute() raises;
    # the original created it by hand and leaked it on error.
    with connection.cursor() as cursor:
        cursor.execute(
            ctas_query.format(temp_table=Identifier(temp_table),
                              source_table=Identifier(source_table),
                              columns=sql_columns).as_string(connection))
def getSiteId(conn, siteShortName):
    """Return the id of the site with the given short name; -1 if absent."""
    site_id = -1
    query = SQL("""
        select id
        from site
        where short_name = {}
        """).format(Literal(siteShortName))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        for record in cursor:
            site_id = record[0]
        conn.commit()
    return site_id
def get_product_info(conn, product_id):
    """Return the ``(product_type_id, full_path)`` row for ``product_id``,
    or None when the product does not exist."""
    query = SQL(
        """
        select product_type_id, full_path
        from product
        where id = {}
        """
    ).format(Literal(product_id))
    with conn.cursor() as cursor:
        print(query.as_string(conn))
        cursor.execute(query)
        row = cursor.fetchone()
        conn.commit()
    return row
def get_aux_files_from_ndvi_products_from_db(config, conn):
    """Collect the S2 HDR files referenced by the IPP xml of every NDVI
    (product_type_id = 3) product of the configured site within
    ``[config.start_date, config.end_date]``.
    """
    with conn.cursor() as cursor:
        query = SQL("""
            with products as (
                select product.site_id,
                       product.name,
                       product.created_timestamp as date,
                       product.processor_id,
                       product.product_type_id,
                       product.full_path,
                       product.tiles
                from product
                where product.site_id = {}
                and product.product_type_id = 3
            )
            select products.date,
                   products.tiles,
                   products.full_path
            from products
            where date between {} and {}
            order by date;
            """)
        query = query.format(Literal(config.site_id),
                             Literal(config.start_date),
                             Literal(config.end_date))
        cursor.execute(query)
        results = cursor.fetchall()
        conn.commit()

    products = []
    # Raw string so \d stays a regex class (non-raw \d is a deprecated
    # string escape in newer Pythons).
    patternIppFile = re.compile(r"S2AGRI_L3B_IPP_A\d{8}T\d{6}\.xml$")
    for (dt, tiles, full_path) in results:
        aux_dir = os.path.join(full_path, "AUX_DATA")
        for file in os.listdir(aux_dir):
            if patternIppFile.match(file):
                # BUG FIX: the original reassigned the directory variable
                # to the matched file's path, so any second matching file
                # would have been joined onto the first file's full path.
                ipp_path = os.path.join(aux_dir, file)
                products += getFilesFromIPPFile(config, ipp_path)
    return products