Example #1
    def upgrade(self, db_conn):
        """Overrides AbstractMigrator upgrade method."""
        logger = logging.getLogger('dirbs.db')
        with db_conn.cursor() as cursor:
            logger.info('Creating historic tables...')
            cursor.execute(
                sql.SQL("""CREATE TABLE historic_stolen_list (
                                          imei_norm text NOT NULL,
                                          reporting_date DATE DEFAULT NULL,
                                          start_date TIMESTAMP NOT NULL,
                                          end_date TIMESTAMP DEFAULT NULL
                                      );
                                      CREATE UNIQUE INDEX
                                                 ON historic_stolen_list
                                              USING btree (imei_norm)
                                              WHERE (end_date IS NULL);

                                      CREATE TABLE historic_pairing_list (
                                          imei_norm text NOT NULL,
                                          imsi text NOT NULL,
                                          start_date TIMESTAMP NOT NULL,
                                          end_date TIMESTAMP DEFAULT NULL
                                      );
                                      CREATE UNIQUE INDEX
                                                ON historic_pairing_list
                                             USING btree (imei_norm, imsi)
                                             WHERE (end_date IS NULL);

                                      CREATE TABLE historic_golden_list (
                                          hashed_imei_norm UUID NOT NULL,
                                          start_date TIMESTAMP NOT NULL,
                                          end_date TIMESTAMP DEFAULT NULL
                                      );
                                      CREATE UNIQUE INDEX
                                                 ON historic_golden_list
                                              USING btree (hashed_imei_norm)
                                              WHERE (end_date IS NULL);

                                      CREATE TABLE historic_registration_list (
                                          imei_norm text NOT NULL,
                                          start_date TIMESTAMP NOT NULL,
                                          end_date TIMESTAMP DEFAULT NULL
                                      );
                                      CREATE UNIQUE INDEX
                                                 ON historic_registration_list
                                              USING btree (imei_norm)
                                              WHERE (end_date IS NULL);"""))
            logger.info('Created historic tables')

            logger.info('Start migrating import tables to historic tables...')
            logger.info(
                'Migrating stolen_list table to historic_stolen_list table...')
            stolen_job_start_time = most_recent_job_start_time_by_command(
                db_conn,
                'dirbs-import',
                subcommand='stolen_list',
                successful_only=True)
            if not stolen_job_start_time:
                stolen_job_start_time = datetime.datetime.now()
            cursor.execute(
                sql.SQL(
                    """INSERT INTO historic_stolen_list(imei_norm, reporting_date, start_date, end_date)
                                           SELECT imei_norm, reporting_date, %s, NULL
                                             FROM stolen_list;"""),
                [stolen_job_start_time])

            logger.info(
                'Migrating pairing_list table to historic_pairing_list table...'
            )
            pairing_job_start_time = most_recent_job_start_time_by_command(
                db_conn,
                'dirbs-import',
                subcommand='pairing_list',
                successful_only=True)
            if not pairing_job_start_time:
                pairing_job_start_time = datetime.datetime.now()
            cursor.execute(
                sql.SQL(
                    """INSERT INTO historic_pairing_list(imei_norm, imsi, start_date, end_date)
                                           SELECT imei_norm, imsi, %s, NULL
                                             FROM pairing_list;"""),
                [pairing_job_start_time])

            logger.info(
                'Migrating registration_list table to historic_registration_list table...'
            )
            registration_job_start_time = most_recent_job_start_time_by_command(
                db_conn,
                'dirbs-import',
                subcommand='registration_list',
                successful_only=True)
            if not registration_job_start_time:
                registration_job_start_time = datetime.datetime.now()
            cursor.execute(
                sql.SQL(
                    """INSERT INTO historic_registration_list(imei_norm, start_date, end_date)
                                           SELECT imei_norm, %s, NULL
                                             FROM registration_list;"""),
                [registration_job_start_time])

            logger.info(
                'Migrating golden_list table to historic_golden_list table...')
            golden_job_start_time = most_recent_job_start_time_by_command(
                db_conn,
                'dirbs-import',
                subcommand='golden_list',
                successful_only=True)
            if not golden_job_start_time:
                golden_job_start_time = datetime.datetime.now()
            cursor.execute(
                sql.SQL(
                    """INSERT INTO historic_golden_list(hashed_imei_norm, start_date, end_date)
                                           SELECT hashed_imei_norm, %s, NULL
                                             FROM golden_list;"""),
                [golden_job_start_time])
            logger.info('Migrated all the import tables to historic tables')

            logger.info('Dropping old import tables...')
            cursor.execute(
                sql.SQL("""DROP TABLE pairing_list;
                                      DROP TABLE stolen_list;
                                      DROP TABLE golden_list;
                                      DROP TABLE registration_list;"""))
            logger.info('Dropped old import tables')

            logger.info(
                'Creating views to keep compatibility with the previous importers...'
            )
            cursor.execute(
                sql.SQL("""CREATE VIEW pairing_list AS
                                          SELECT imei_norm, imsi
                                            FROM historic_pairing_list
                                           WHERE end_date IS NULL WITH CHECK OPTION;

                                      CREATE VIEW stolen_list AS
                                          SELECT imei_norm, reporting_date
                                            FROM historic_stolen_list
                                           WHERE end_date IS NULL WITH CHECK OPTION;

                                      CREATE VIEW golden_list AS
                                          SELECT hashed_imei_norm
                                            FROM historic_golden_list
                                           WHERE end_date IS NULL WITH CHECK OPTION;

                                      CREATE VIEW registration_list AS
                                          SELECT imei_norm
                                            FROM historic_registration_list
                                           WHERE end_date IS NULL WITH CHECK OPTION;"""
                        ))
            logger.info('Created views')

            logger.info('Granting privileges on views and historic tables...')
            cursor.execute(
                sql.SQL("""GRANT SELECT ON historic_pairing_list TO
                                          dirbs_core_listgen,
                                          dirbs_core_report,
                                          dirbs_core_api;
                                      GRANT SELECT ON pairing_list TO
                                          dirbs_core_listgen,
                                          dirbs_core_report,
                                          dirbs_core_api,
                                          dirbs_core_import_pairing_list;
                                      GRANT SELECT, INSERT, UPDATE ON historic_pairing_list TO
                                          dirbs_core_import_pairing_list;
                                      GRANT SELECT ON historic_stolen_list TO dirbs_core_classify;
                                      GRANT SELECT ON stolen_list TO
                                          dirbs_core_classify,
                                          dirbs_core_import_stolen_list;
                                      GRANT SELECT, INSERT, UPDATE ON historic_stolen_list TO
                                          dirbs_core_import_stolen_list;
                                      GRANT SELECT ON historic_golden_list TO dirbs_core_listgen;
                                      GRANT SELECT ON golden_list TO
                                          dirbs_core_listgen,
                                          dirbs_core_import_golden_list;
                                      GRANT SELECT, INSERT, UPDATE ON historic_golden_list TO
                                          dirbs_core_import_golden_list;
                                      GRANT SELECT ON historic_registration_list TO
                                          dirbs_core_classify,
                                          dirbs_core_api;
                                      GRANT SELECT ON registration_list TO
                                          dirbs_core_classify,
                                          dirbs_core_api,
                                          dirbs_core_import_registration_list;
                                      GRANT SELECT, INSERT, UPDATE ON historic_registration_list TO
                                          dirbs_core_import_registration_list;"""
                        ))
            logger.info('Granted privileges')
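
Note the role of the partial unique indexes (WHERE end_date IS NULL): each historic table may hold any number of closed rows per key, but at most one open row. A minimal sketch of the invariant, assuming the historic_stolen_list definition above (the IMEI value is illustrative):

cursor.execute("""INSERT INTO historic_stolen_list(imei_norm, start_date)
                       VALUES ('35000000000001', now());""")
cursor.execute("""INSERT INTO historic_stolen_list(imei_norm, start_date)
                       VALUES ('35000000000001', now());""")
# the second insert fails with a unique violation: only one row with
# end_date IS NULL is allowed per imei_norm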
Example #2
 def test_compose_empty(self):
     s = sql.SQL("select foo;").format()
     s1 = s.as_string(self.conn)
     self.assertEqual(s1, "select foo;")
Example #3
 def test_braces_escape(self):
     s = sql.SQL("{{{0}}}").format(sql.Literal(7))
     self.assertEqual(s.as_string(self.conn), "{7}")
     s = sql.SQL("{{1,{0}}}").format(sql.Literal(7))
     self.assertEqual(s.as_string(self.conn), "{1,7}")
Example #4
 def test_seq(self):
     l = [sql.SQL('foo'), sql.Literal('bar'), sql.Identifier('baz')]
     self.assertEqual(sql.Composed(l).seq, l)
Example #5
 def test_unicode(self):
     s = sql.SQL("select {0} from {1}").format(sql.Identifier('field'),
                                               sql.Identifier('table'))
     s1 = s.as_string(self.conn)
     self.assertTrue(isinstance(s1, str))
     self.assertEqual(s1, 'select "field" from "table"')
Example #6
 def test_repr(self):
     self.assertEqual(repr(sql.SQL("foo")), "SQL('foo')")
     self.assertEqual(str(sql.SQL("foo")), "SQL('foo')")
     self.assertEqual(sql.SQL("foo").as_string(self.conn), "foo")
Example #7
 def test_sum_inplace(self):
     obj = sql.SQL("foo")
     obj += sql.SQL("bar")
     self.assertTrue(isinstance(obj, sql.Composed))
     self.assertEqual(obj.as_string(self.conn), "foobar")
Example #8
def drop_database(connection, database):
    with connection:
        with connection.cursor() as cursor:
            cursor.execute(
                sql.SQL('DROP DATABASE IF EXISTS {name};').format(
                    name=sql.Identifier(database)))
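
PostgreSQL refuses to run DROP DATABASE inside a transaction block, so this helper assumes the caller's connection is in autocommit mode. A minimal sketch of the intended call pattern (connection parameters and database name are illustrative):

import psycopg2

conn = psycopg2.connect(dbname='postgres')
conn.autocommit = True  # DROP DATABASE cannot run inside a transaction block
drop_database(conn, 'scratch_db')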
Example #9
def create_views(db,
                 schema_tiles,
                 table_index,
                 fields_index,
                 table_centroid,
                 fields_centroid,
                 table_footprint,
                 fields_footprint,
                 prefix_tiles='t_'):
    """Creates PostgreSQL Views for the footprint tiles.

    Parameters
    ----------
    db : db Class instance
    schema_tiles : str
        Name of the schema where to create the footprint tiles.
    table_index : list of str
        [schema, table] of the tile index.
    fields_index : list of str
        [ID, geometry, unit]
        ID: Name of the ID field.
        geometry: Name of the geometry field.
        unit: Name of the field in table_index that contains the index unit names.
        These values are used for the tile names in schema_tiles.
    table_centroid : list of str
        [schema, table] of the footprint centroids.
    fields_centroid : list of str
        [ID, geometry]
        Name of the ID field in table_centroid that can be joined on table_footprint.
        There must be an identical value in fields_footprint.
        Name of the geometry field.
    table_footprint : list of str
        [schema, table] of the footprints (e.g. building footprints) that will be extruded.
    fields_footprint : list of str
        [ID, geometry, ...]
        Names of the fields that should be selected into the View. Must contain
        at least an ID and a geometry field, where ID is the field that can be joined on
        table_centroid.
    prefix_tiles : str or None
        Prefix to prepend to the view names. If None, the views are named as
        the values in fields_index.

    Returns
    -------
    str
        Message reporting how many views were created in schema_tiles.

    """
    schema_tiles_q = sql.Identifier(schema_tiles)

    schema_idx_q = sql.Identifier(table_index[0])
    table_idx_q = sql.Identifier(table_index[1])
    field_idx_unit_q = sql.Identifier(fields_index[2])
    field_idx_geom_q = sql.Identifier(fields_index[1])

    schema_ctr_q = sql.Identifier(table_centroid[0])
    table_ctr_q = sql.Identifier(table_centroid[1])
    field_ctr_id = fields_centroid[0]
    field_ctr_id_q = sql.Identifier(field_ctr_id)
    field_ctr_geom_q = sql.Identifier(fields_centroid[1])

    table_poly = table_footprint[1]
    schema_poly_q = sql.Identifier(table_footprint[0])
    table_poly_q = sql.Identifier(table_poly)

    assert isinstance(fields_footprint, list)
    assert len(fields_footprint) > 1,\
        "You must provide at least two fields (e.g. id, geometry)"
    assert field_ctr_id in fields_footprint,\
        "There must be a join field for table_centroid and table_footprint."
    # prepare SELECT FROM table_footprint
    s = []
    for f in fields_footprint:
        s.append(
            sql.SQL('.').join([sql.Identifier(table_poly),
                               sql.Identifier(f)]))
    sql_fields_footprint = sql.SQL(', ').join(s)

    field_poly_id_q = sql.Identifier(fields_footprint[0])
    #     print(sql_fields_footprint.as_string(dbs.conn))

    # Create schema to store the tiles
    query = sql.SQL("CREATE SCHEMA IF NOT EXISTS {};").format(schema_tiles_q)
    db.sendQuery(query)

    # Get footprint index unit names
    tiles = db.getQuery(
        sql.SQL("SELECT {} FROM {}.{};").format(field_idx_unit_q, schema_idx_q,
                                                table_idx_q))
    tiles = [str(i[0]) for i in tiles]

    if not prefix_tiles:
        prefix_tiles = ""
    assert isinstance(prefix_tiles, str)
    # Create a BAG tile with equivalent area of an AHN tile
    for tile in tiles:
        # !!! the 't_' prefix is hard-coded in config.call3dfier() !!!
        n = prefix_tiles + str(tile)
        view = sql.Identifier(n)

        tile = sql.Literal(tile)
        query = sql.SQL("""CREATE OR REPLACE VIEW {schema_tiles}.{view} AS
                        SELECT
                            {fields_poly}
                        FROM
                            {schema_poly}.{table_poly}
                        INNER JOIN {schema_ctr}.{table_ctr} ON
                            {table_poly}.{field_poly_id} = {table_ctr}.{field_ctr_id},
                            {schema_idx}.{table_idx}
                        WHERE
                            {table_idx}.{field_idx} = {tile}
                            AND(
                                st_containsproperly(
                                    {table_idx}.{field_idx_geom},
                                    {table_ctr}.{field_ctr_geom}
                                )
                                OR st_contains(
                                    {table_idx}.geom_border,
                                    {table_ctr}.{field_ctr_geom}
                                )
                        );""").format(schema_tiles=schema_tiles_q,
                                      view=view,
                                      fields_poly=sql_fields_footprint,
                                      schema_poly=schema_poly_q,
                                      table_poly=table_poly_q,
                                      schema_ctr=schema_ctr_q,
                                      table_ctr=table_ctr_q,
                                      field_poly_id=field_poly_id_q,
                                      field_ctr_id=field_ctr_id_q,
                                      schema_idx=schema_idx_q,
                                      table_idx=table_idx_q,
                                      field_idx=field_idx_unit_q,
                                      tile=tile,
                                      field_idx_geom=field_idx_geom_q,
                                      field_ctr_geom=field_ctr_geom_q)
        db.sendQuery(query)

    return ("%s Views created in schema '%s'." % (len(tiles), schema_tiles))
Example #10
File: filters.py Project: as8709/camstat
 def coarse_pass(self):
     '''
     '''
     return sql.SQL("(class in {})").format(
         sql.Literal(tuple(self.allowed_classes)))
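
psycopg2 adapts a Python tuple to a parenthesized SQL list, which is what makes sql.Literal(tuple(...)) work here. A sketch of the rendered fragment, assuming a psycopg2 connection conn and illustrative class names:

frag = sql.SQL("(class in {})").format(sql.Literal(('car', 'truck')))
print(frag.as_string(conn))  # -> (class in ('car', 'truck'))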
Example #11
def get_existing_databases(connection):
    with connection.cursor() as cursor:
        cursor.execute(sql.SQL('SELECT datname FROM pg_database'))
        return {result[0] for result in cursor.fetchall()}
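
This pairs naturally with drop_database from Example #8, e.g. to drop a database only when it actually exists (a sketch; the name is illustrative):

if 'scratch_db' in get_existing_databases(conn):
    drop_database(conn, 'scratch_db')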
Example #12
def loginpostgis():
    mytoken = session.get('mytoken')

    projectname = session.get('projectname')
    datainicio = session.get('inicio')
    datafim = session.get('fim')

    arc = gis.GIS(username=session.get('arcuser'),
                  password=session.get('arcsenha'))

    items = arc.content.search(
        query="NOT title: %stakeholder% AND NOT title: %fieldworker% AND " +
        "owner:" + arc.users.me.username + " AND Survey",
        item_type="Feature Layer",
        max_items=500)

    item_to_add = [
        temp_item for temp_item in items
        if temp_item.title == session.get('projectname')
    ]
    project = item_to_add[0].layers[0].properties['serviceItemId']
    session['project'] = project

    registros = []
    if item_to_add[0].layers[0].properties[
            'geometryType'] == 'esriGeometryPoint':
        registrosbruto = pd.DataFrame.spatial.from_layer(
            item_to_add[0].layers[0])

        for i in range(0, len(registrosbruto)):
            ano = int(str(registrosbruto.iloc[i]['CreationDate'])[0:4])
            mes = int(str(registrosbruto.iloc[i]['CreationDate'])[5:7])
            dia = int(str(registrosbruto.iloc[i]['CreationDate'])[8:10])
            dataobjeto = datetime.date(ano, mes, dia)
            if dataobjeto < datafim:
                registros.append(registrosbruto.iloc[i])

    ano = datafim.year
    mes = datafim.month
    if mes < 10:
        dataref = str(ano) + '0' + str(mes)
    else:
        dataref = str(ano) + str(mes)
    session['dataref'] = dataref

    form = LoginFormPostgis()
    if form.validate_on_submit():
        session['hostinput'] = str(form.hostinput.data)
        session['dbnameinput'] = str(form.dbnameinput.data)
        session['userinput'] = str(form.userinput.data)
        session['senhainput'] = str(form.senhainput.data)
        #Define our connection string
        conn_string = "host=" + str(
            session.get('hostinput')) + " dbname=" + str(
                session.get('dbnameinput')) + " user=" + str(
                    session.get('userinput')) + " password=" + str(
                        session.get('senhainput'))
        # the published snippet masks the connection block; this reconstruction
        # assumes a plain psycopg2 connect plus a success check
        try:
            conn = psycopg2.connect(conn_string)
        except psycopg2.OperationalError:
            conn = None
        if conn is not None:
            cur = conn.cursor()
            # assumed: the numeric project id compared against '10762' below
            projectid = str(session.get('project'))

            dropdbgenerica = """CREATE EXTENSION IF NOT EXISTS postgis;
			DROP TABLE IF EXISTS {}"""

            nometabela = dataref + '_' + projectid

            createdbgenerica = """CREATE UNLOGGED TABLE IF NOT EXISTS {}(
			id integer PRIMARY KEY,
			created_at DATE,
			updated_at DATE,
			latitude real,
			longitude real,
			geom geometry(Point, 4326)
			);"""

            cur.execute(
                sql.SQL(dropdbgenerica).format(sql.Identifier(nometabela)))
            cur.execute(
                sql.SQL(createdbgenerica).format(sql.Identifier(nometabela)))
            tabelagerada = dataref + '_' + projectid

            conn.commit()

            for item in registros:
                genericfields = [
                    str(item['objectid']),
                    str(item['CreationDate']),
                    str(item['EditDate']),
                    float(item['SHAPE']['y']),
                    float(item['SHAPE']['x'])
                ]
                my_data = [field for field in genericfields]
                cur.execute(
                    sql.SQL(
                        "INSERT INTO {} VALUES (%s, %s, %s, %s, %s)").format(
                            sql.Identifier(nometabela)), tuple(my_data))

            conn.commit()

            nomeindex = tabelagerada + 'index'
            cur.execute(
                sql.SQL(
                    "UPDATE {} SET geom = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326); CREATE INDEX {} ON {} USING GIST(geom)"
                ).format(sql.Identifier(tabelagerada),
                         sql.Identifier(nomeindex),
                         sql.Identifier(tabelagerada)))

            conn.commit()

            dbsegmentos = """DROP TABLE IF EXISTS {};
			CREATE UNLOGGED TABLE IF NOT EXISTS {}(
			nome TEXT PRIMARY KEY,
			geom geometry(MultiPolygon, 4326)
			);"""
            segmentnome = "egrfauna_segmentos"
            cur.execute(
                sql.SQL(dbsegmentos).format(sql.Identifier(segmentnome),
                                            sql.Identifier(segmentnome)))
            conn.commit()

            segmentos = requests.get(
                'https://raw.githubusercontent.com/guilhermeiablo/survey2infoambiente/master/dados/ERS_segmentos_rodoviarios.geojson'
            )

            for feature in segmentos.json()['features']:
                geom = (json.dumps(feature['geometry']))
                nome = feature['properties']['nome']
                cur.execute(
                    sql.SQL(
                        "INSERT INTO {} (nome, geom) VALUES (%s, ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326));"
                    ).format(sql.Identifier(segmentnome)), (nome, geom))
            cur.execute(
                sql.SQL(
                    "CREATE INDEX sp_index_segmentos ON {} USING GIST(geom)").
                format(sql.Identifier(segmentnome)))

            conn.commit()

            intersecta = '''DROP TABLE IF EXISTS {nome0}; SELECT {nome1}.*, {nome2}.nome INTO {nome0} FROM {nome2} INNER JOIN {nome1} ON ST_Intersects({nome2}.geom, {nome1}.geom) AND {nome2}.nome=%s;'''

            for feature in segmentos.json()['features']:
                nomedosegmento = feature['properties']['nome']
                if projectid == '10762':
                    nomecompleto = str(feature['properties']['nome'] +
                                       '_PMF_' + tabelagerada)
                else:
                    nomecompleto = str(feature['properties']['nome'] + '_' +
                                       tabelagerada)
                cur.execute(
                    sql.SQL(intersecta).format(
                        nome0=sql.Identifier(nomecompleto),
                        nome1=sql.Identifier(tabelagerada),
                        nome2=sql.Identifier(segmentnome)), [
                            nomedosegmento,
                        ])

            conn.commit()
            session['tabelagerada'] = str(tabelagerada)

            return redirect(
                url_for('logingeoserver',
                        mytoken=session['mytoken'],
                        project=session['project']))
        else:
            flash('Erro ao conectar a base de dados. Tente novamente.',
                  'danger')
    return render_template('loginpostgis.html',
                           title='LoginPostgis',
                           form=form,
                           mytoken=mytoken,
                           project=session['project'])
Example #13
def read_shp_file(number_of_previous_days, url, purpose):

	page = requests.get(url).text
	soup = BeautifulSoup(page, 'html.parser')
	ext = '.shp.zip'
	ext_second = '.zip'
	ext_forbidden = '.tif.zip'
	all_files = [node.get('href') for node in soup.find_all('a')
		if node.get('href') is not None
		and (node.get('href').endswith(ext) or node.get('href').endswith(ext_second))
		and not node.get('href').endswith(ext_forbidden)]
	print(str(all_files))
	if(purpose == 'global_hazards'):	
		all_files = [(url + curr_file[0:]) for curr_file in all_files if 'hazards' in curr_file]
	else:
		all_files = [('http' + curr_file[3:]) for curr_file in all_files]

	try:
		params = config()
		conn = psycopg2.connect(**params)
		conn.autocommit = True
		cur = conn.cursor()
		if(purpose == 'global_hazards'):
			cur.execute(sql.SQL("DROP TABLE IF EXISTS international_hazards;"))
			cur.execute(sql.SQL("CREATE TABLE international_hazards(region varchar(20) NOT NULL, date_hazard date NOT NULL, type integer NOT NULL, geom geometry);"))
		elif(purpose == 'global_surface_temperatures'):
			cur.execute(sql.SQL("DROP TABLE IF EXISTS global_surface_temperatures;"))
			cur.execute(sql.SQL("CREATE TABLE global_surface_temperatures(temperature integer NOT NULL, number_of_vertices integer NOT NULL, unit varchar(20) NOT NULL, geom geometry(Polygon));"))
		elif(purpose == 'drought'):
			cur.execute(sql.SQL("DROP TABLE IF EXISTS drought;"))
			cur.execute(sql.SQL("CREATE TABLE drought(improvement integer NOT NULL, persistent integer NOT NULL, development integer NOT NULL, date_drought date NOT NULL, removal integer NOT NULL, geom geometry);"))

		for file in all_files:
			result = requests.get(file)
			z = zipfile.ZipFile(io.BytesIO(result.content))

			all_types = [y for y in sorted(z.namelist()) for ending in ['dbf', 'prj', 'shp', 'shx'] if y.endswith(ending)]
			dbf, prj, shp, shx = [io.BytesIO(z.read(types)) for types in all_types]

			try:
				r = shapefile.Reader(shp=shp, shx=shx, prj=prj, dbf=dbf)

				print(r.numRecords)
				geom = []
				field_names = [properties[0] for properties in r.fields[1:]]

				for curr_row in r.shapeRecords():  
					geom.append(shape(curr_row.shape.__geo_interface__))

					if(purpose == 'global_hazards'):
						
						region = curr_row.record[1]
						date = curr_row.record[2]
						type_category = curr_row.record[3]
						geometric_shape = curr_row.shape.__geo_interface__	
						cur.execute(sql.SQL("INSERT INTO international_hazards VALUES (%s, %s, %s, ST_GeomFromText(%s));"),
							(region, date, type_category, shape(curr_row.shape.__geo_interface__).wkt))

					elif(purpose == 'global_surface_temperatures'):
						
						temp = curr_row.record[0]
						num_vertices = curr_row.record[1]
						unit_c = "Celsius"
						geometric_shape = curr_row.shape.__geo_interface__
						cur.execute(sql.SQL("INSERT INTO global_surface_temperatures VALUES (%s, %s, %s, ST_GeomFromText(%s));"),
							(temp, num_vertices, unit_c, shape(curr_row.shape.__geo_interface__).wkt))
					elif(purpose == 'drought'):
						improvement = curr_row.record[0]
						persistent = curr_row.record[1]
						development = curr_row.record[2]
						date_drought = curr_row.record[3]
						removal = curr_row.record[5]

						geometric_shape = curr_row.shape.__geo_interface__

						cur.execute(sql.SQL("INSERT INTO drought VALUES (%s, %s, %s, %s, %s, ST_GeomFromText(%s));"),
							(improvement, persistent, development, date_drought, removal, shape(geometric_shape).wkt))



			except Exception as e:
				print("ERROR: " + str(e))
				continue

	except(Exception, psycopg2.DatabaseError) as error:
		print("ERROR: " + str(error))
	finally:
		if conn is not None:
			conn.close()
			print('Database connection closed.')
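
A hypothetical invocation (the URL is illustrative): the function scrapes the page for .shp.zip/.zip links, downloads each archive, and loads its records into the table selected by purpose.

read_shp_file(7, 'https://example.com/shapefiles/', 'global_hazards')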
Example #14
 def ana_report(self, stk, start_date, end_date):
     res = '<table><tr>'
     jl_start_date = stxcal.move_busdays(end_date, -8)
     # add the A/D setups table
     res += '<td><table>'
     qad = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join(
             [sql.Literal('Gap'),
              sql.Literal('SC'),
              sql.Literal('RDay')]),
         sql.SQL(') and abs(score) >= 100 and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup')
     ])
     df_ad = pd.read_sql(qad, stxdb.db_get_cnx())
     for _, row in df_ad.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td>'\
             '</tr>'.format(row['dt'].strftime('%b %d'), row['setup'],
                            row['direction'], row['score'])
     res += '</table></td>'
     # add the JL setups table
     res += '<td><table>'
     qjl = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(jl_start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join([
             sql.Literal('JL_B'),
             sql.Literal('JL_P'),
             sql.Literal('JL_SR')
         ]),
         sql.SQL(') and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup, factor')
     ])
     df_jl = pd.read_sql(qjl, stxdb.db_get_cnx())
     for _, row in df_jl.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td>'\
             '<td>{}</td></tr>'.format(row['dt'].strftime('%b %d'),
                                       row['setup'], row['direction'],
                                       row['factor'], row['score'])
     res += '</table></td>'
     # add the candlesticks setups table
     res += '<td><table>'
     qcs = sql.Composed([
         sql.SQL('select * from jl_setups where dt between '),
         sql.Literal(start_date),
         sql.SQL(' and '),
         sql.Literal(end_date),
         sql.SQL(' and setup in ('),
         sql.SQL(',').join([
             sql.Literal('EngHarami'),
             sql.Literal('Cbs'),
             sql.Literal('3out'),
             sql.Literal('3'),
             sql.Literal('Kicking'),
             sql.Literal('Piercing'),
             sql.Literal('Engulfing'),
             sql.Literal('Star')
         ]),
         sql.SQL(') and stk='),
         sql.Literal(stk),
         sql.SQL(' order by dt, direction, setup')
     ])
     df_cs = pd.read_sql(qcs, stxdb.db_get_cnx())
     for _, row in df_cs.iterrows():
         res += '<tr><td>{}</td><td>{}</td><td>{}</td></tr>'.format(
             row['dt'].strftime('%b %d'), row['setup'], row['direction'])
     res += '</table></td>'
     res += '</tr></table>'
     return res
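
For reference, a sql.Composed simply concatenates its parts, with every sql.Literal safely quoted, which is why the queries above can be handed to pd.read_sql as-is. A minimal sketch, assuming a psycopg2 connection conn (the ticker value is illustrative):

q = sql.Composed([
    sql.SQL('select * from jl_setups where stk = '),
    sql.Literal('AAPL'),
])
print(q.as_string(conn))  # -> select * from jl_setups where stk = 'AAPL'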
Example #15
 def test_init(self):
     self.assertTrue(isinstance(sql.SQL('foo'), sql.SQL))
     self.assertTrue(isinstance(sql.SQL('foo'), sql.SQL))
     self.assertRaises(TypeError, sql.SQL, 10)
     self.assertRaises(TypeError, sql.SQL, dt.date(2016, 12, 31))
Example #16
def update_tile_index(db, table_index, fields_index):
    """Update the tile index to include the lower/left boundary of each polygon.

    The function is mainly relevant for the tile index of the footprints.
    The tile edges are then used for checking centroid containment in a tile polygon.

    Parameters
    ----------
    db : db Class instance
    table_index : list of str
        [schema, table] that contains the tile index polygons.
    fields_index: list of str
        [ID, geometry, unit] field names of the ID, geometry, tile unit name fields in table_index.

    Returns
    -------
    nothing

    """
    schema = table_index[0]
    table = table_index[1]
    id_col = fields_index[0]
    geom_col = fields_index[1]

    schema_q = sql.Identifier(schema)
    table_q = sql.Identifier(table)
    geom_col_q = sql.Identifier(geom_col)
    id_col_q = sql.Identifier(id_col)

    db.sendQuery(
        sql.SQL("""ALTER TABLE {}.{}
             ADD COLUMN IF NOT EXISTS geom_border geometry;""").format(
            schema_q, table_q))

    db.sendQuery(
        sql.SQL("""
                UPDATE
                    {schema}.{table}
                SET
                    geom_border = b.geom::geometry(linestring,28992)
                FROM
                    (
                        SELECT
                            {id_col},
                            st_setSRID(
                                st_makeline(
                                    ARRAY[st_makepoint(
                                        st_xmax({geom_col}),
                                        st_ymin({geom_col})
                                    ),
                                    st_makepoint(
                                        st_xmin({geom_col}),
                                        st_ymin({geom_col})
                                    ),
                                    st_makepoint(
                                        st_xmin({geom_col}),
                                        st_ymax({geom_col})
                                    ) ]
                                ),
                                28992
                            ) AS geom
                        FROM
                            {schema}.{table}
                    ) b
                WHERE
                    {schema}.{table}.{id_col} = b.{id_col};
                """).format(schema=schema_q,
                            table=table_q,
                            geom_col=geom_col_q,
                            id_col=id_col_q))

    sql_query = sql.SQL("""
            CREATE INDEX IF NOT EXISTS {idx_name} ON {schema}.{table} USING gist (geom_border);
            SELECT populate_geometry_columns({name}::regclass);
            """).format(idx_name=sql.Identifier(table + "_" + geom_col +
                                                "_border_idx"),
                        schema=schema_q,
                        table=table_q,
                        name=sql.Literal(schema + '.' + table))
    db.sendQuery(sql_query)

    db.vacuum(schema, table)
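
A hypothetical call, matching the parameter descriptions above (names illustrative):

update_tile_index(db, ['indexes', 'tile_index'], ['gid', 'geom', 'unit'])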
Example #17
 def test_string(self):
     self.assertEqual(sql.SQL('foo').string, 'foo')
Example #18
def insert_story(story, commit=True):
    conn, cur = get_connection_and_cursor()

    query = sql.SQL("""INSERT INTO \"Stories\" (\"title\", \"byline\",
        \"summary\", \"top_story\", \"thumbnail\", \"url\", \"num_related\")
        VALUES({0}, {1}, {2}, {3}, {4}, {5}, {6})
        ON CONFLICT DO NOTHING;
        """).format(
        sql.SQL("\'" + story.title + "\'"),
        sql.SQL(("\'" + story.byline + "\'") if story.byline else "NULL"),
        sql.SQL(("\'" + story.summary + "\'") if story.summary else "NULL"),
        sql.SQL(str(story.top_story)),
        sql.SQL(("\'" + story.thumbnail +
                 "\'") if story.thumbnail else "NULL"),
        sql.SQL(("\'" + story.url + "\'") if story.url else "NULL"),
        sql.SQL(str(story.num_related)))
    query_string = query.as_string(conn)
    cur.execute(query_string)

    query = sql.SQL("""SELECT \"ID\" FROM \"Stories\"
        WHERE \"title\"={0}""").format(sql.SQL("\'" + story.title + "\'"))
    query_string = query.as_string(conn)
    cur.execute(query_string)
    story_id = cur.fetchall()[0]['ID']

    if story.tagged:
        for tag in story.tags:
            query = sql.SQL("""INSERT INTO \"Tags\" (\"tag\")
                    VALUES({0})
                    ON CONFLICT DO NOTHING;
                    """).format(sql.SQL("\'" + tag + "\'"))
            query_string = query.as_string(conn)
            cur.execute(query_string)

            query = sql.SQL("""SELECT \"ID\" FROM \"Tags\"
                    WHERE \"tag\"={0};""").format(sql.SQL("\'" + tag + "\'"))
            query_string = query.as_string(conn)
            cur.execute(query_string)
            tag_id = cur.fetchall()[0]['ID']

            query = sql.SQL("""INSERT INTO \"Stories_Tags\"
                    (\"story_ID\", \"tag_ID\")
                    VALUES({0}, {1})
                    ON CONFLICT DO NOTHING;
                    """).format(sql.SQL(str(story_id)), sql.SQL(str(tag_id)))
            query_string = query.as_string(conn)
            cur.execute(query_string)

    if commit:
        conn.commit()
Example #19
 def test_eq(self):
     self.assertTrue(sql.SQL('foo') == sql.SQL('foo'))
     self.assertTrue(sql.SQL('foo') != sql.SQL('bar'))
     self.assertTrue(sql.SQL('foo') != 'foo')
     self.assertTrue(sql.SQL('foo') != sql.Literal('foo'))
Example #20
import logging
from typing import Generator

from kafka import KafkaConsumer
from psycopg2 import sql

from .gp import get_pg_connection
from .checker import CheckResult
from .settings import build_settings, cert_files

logger = logging.getLogger(__name__)


SELECT_RULE = sql.SQL('select id from monitoring.rules where url = %s and method = %s and regexp = %s')


INSERT_CHECK_RESULT = sql.SQL(
    'insert into monitoring.checks '
    '(utc_time, rule_id, response_time, status_code, regexp_result, failed) '
    'values (%s, %s, %s, %s, %s, %s)'
)


def read_kafka_records(consumer: KafkaConsumer) -> Generator[CheckResult, None, None]:
    raw_msgs = consumer.poll(timeout_ms=1000)
    for tp, msgs in raw_msgs.items():
        for msg in msgs:
            yield CheckResult.deserialize(msg.value)


# With code snippets from https://help.aiven.io/en/articles/489572-getting-started-with-aiven-kafka
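
A sketch of how these module-level statements might be executed, assuming get_pg_connection() (imported above) returns an ordinary psycopg2 connection; the rule values are illustrative:

with get_pg_connection() as conn:
    with conn.cursor() as cur:
        cur.execute(SELECT_RULE, ('https://example.com/health', 'GET', 'OK'))
        rule_id = cur.fetchone()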
Example #21
 def test_multiply(self):
     obj = sql.SQL("foo") * 3
     self.assertTrue(isinstance(obj, sql.Composed))
     self.assertEqual(obj.as_string(self.conn), "foofoofoo")
Example #22
else:
    args['entry_type'] = 'UNKNOWN'

#Open the connection.
conn = pg.connect('dbname={0} user={1} password={2}'.format(
    DB_NAME, DB_USER, DB_PASS))

#Open a cursor.
cur = conn.cursor()

if args['entry_type'] != 'UNKNOWN':
    #Read query file.
    newTransaction = sql.SQL(open(
        'sql/add-transaction.sql',
        'r').read()).format(sql.Literal(args['entry_type']),
                            sql.Literal(args['category']),
                            sql.Literal(args['date'] + ' ' + args['time']),
                            sql.Literal(args['amount']),
                            sql.Literal(args['vendor']))

    #Execute the insert.
    cur.execute(newTransaction)

#Print HTML headers and content.
print('Content-Type: text/html\n')

page_body = open('html/index.html', 'r').read()

print(page_body)

#Commit transactions and clean up.
Example #23
 def test_sum(self):
     obj = sql.Composed([sql.SQL("foo ")])
     obj = obj + sql.Literal("bar")
     self.assertTrue(isinstance(obj, sql.Composed))
     self.assertQuotedEqual(obj.as_string(self.conn), "foo 'bar'")
Example #24
 def test_must_be_composable(self):
     self.assertRaises(TypeError, sql.SQL("select {0};").format, 'foo')
     self.assertRaises(TypeError, sql.SQL("select {0};").format, 10)
Example #25
 def test_compose_literal(self):
     s = sql.SQL("select {0};").format(sql.Literal(dt.date(2016, 12, 31)))
     s1 = s.as_string(self.conn)
     self.assertEqual(s1, "select '2016-12-31'::date;")
Example #26
 def test_no_modifiers(self):
     self.assertRaises(ValueError, sql.SQL("select {a!r};").format, a=10)
     self.assertRaises(ValueError, sql.SQL("select {a:<};").format, a=10)
Example #27
 def test_percent_escape(self):
     s = sql.SQL("42 % {0}").format(sql.Literal(7))
     s1 = s.as_string(self.conn)
     self.assertEqual(s1, "42 % 7")
Example #28
 def test_eq(self):
     self.assertTrue(sql.Identifier('foo') == sql.Identifier('foo'))
     self.assertTrue(sql.Identifier('foo') != sql.Identifier('bar'))
     self.assertTrue(sql.Identifier('foo') != 'foo')
     self.assertTrue(sql.Identifier('foo') != sql.SQL('foo'))
Example #29
 def test_compose_badnargs(self):
     self.assertRaises(IndexError, sql.SQL("select {0};").format)
Example #30
def add_favorit_product():
    """Add product in favorite for user"""

    user = check_auth(request.headers, __name__)
    if user != True:
        return user
    user = authorize.get(request.headers.get('UserToken'))

    vozvrat = {}
    try:
        database = Database(config)
    except TypeError:
        vozvrat["messageError"] = "Нет подключения к БД"
        return jsonify(vozvrat)

    file = request.get_json(silent=True)
    if file != None:
        if file.get("users_product_id") == None or type(
                file.get("users_product_id")) != int:
            return jsonify({
                "messageError":
                "Выберете товар, который нужно добавить в избранное"
            })
        favorite = {
            "user_id": user.get_id(),
            "users_product_id": int(file.get("users_product_id"))
        }

        query = sql.SQL(
            "SELECT {column} FROM {table} WHERE {condition}").format(
                table=sql.Identifier("public", "favorit_products"),
                column=sql.SQL(',').join(sql.Identifier(i) for i in ["id"]),
                condition=sql.SQL(
                    'user_id={user_id} and users_product_id={users_product_id}'
                ).format(user_id=sql.Literal(favorite['user_id']),
                         users_product_id=sql.Literal(
                             favorite['users_product_id'])))

        vozvrat = database.select_data(query)

        if type(vozvrat) != list:
            return vozvrat

        if len(vozvrat) == 0:
            query = sql.SQL(
                "INSERT INTO {table}({column}) VALUES({value})").format(
                    table=sql.Identifier("public", "favorit_products"),
                    column=sql.SQL(',').join(
                        sql.Identifier(i)
                        for i in ["user_id", "users_product_id"]),
                    value=sql.SQL(',').join(
                        sql.Literal(i) for i in
                        [user.get_id(),
                         int(file.get("users_product_id"))]))

            vozvrat = database.insert_data(query)
            if vozvrat != True:
                return vozvrat
            vozvrat = {"is_favorit": True}
        else:
            query = sql.SQL("DELETE FROM {table} WHERE id={id}").format(
                table=sql.Identifier("public", "favorit_products"),
                id=sql.Literal(vozvrat[0][0]))

            vozvrat = database.insert_data(query)
            if vozvrat != True:
                return vozvrat
            vozvrat = {"is_favorit": False}
    else:
        vozvrat["messageError"] = "JSON отсутсвует"

    return jsonify(vozvrat)
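
One detail worth noting: a multi-argument sql.Identifier (psycopg2 2.8+) renders as a dotted, individually quoted name, which is how the schema-qualified table is addressed above. A sketch, assuming a psycopg2 connection conn:

print(sql.Identifier("public", "favorit_products").as_string(conn))
# -> "public"."favorit_products"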