Example #1
def move_data(dbset):
    '''Move rows out of bluetooth.raw_data and load them into the king_pilot tables in one transaction.'''
    try:
        db = DB(**dbset)
        db.begin()
        query = db.query("SELECT bluetooth.move_raw_data();")
        if query.getresult()[0][0] != 1:
            db.rollback()
            raise DatabaseError(
                'bluetooth.move_raw_data did not complete successfully')
        query = db.query("TRUNCATE bluetooth.raw_data;")
        query = db.query("SELECT king_pilot.load_bt_data();")
        if query.getresult()[0][0] != 1:
            db.rollback()
            raise DatabaseError(
                'king_pilot.load_bt_data did not complete successfully')
        db.query(
            'DELETE FROM king_pilot.daily_raw_bt WHERE measured_timestamp < now()::DATE;'
        )
        db.commit()
    except DatabaseError as dberr:
        LOGGER.error(dberr)
        db.rollback()
    except IntegrityError:
        LOGGER.critical(
            'Moving data failed due to violation of a constraint. Data will have to be moved manually'
        )
    finally:
        db.close()
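For context, move_data expects dbset to be a dictionary of PyGreSQL DB() connection keyword arguments (the same dictionary documented in Example #2 below). A minimal usage sketch; the connection values are placeholders, not settings from the original project:

# Hedged usage sketch: placeholder connection settings for pg.DB(**dbset).
dbset = {'dbname': 'bt_db', 'host': 'localhost', 'user': 'etl_user', 'passwd': 'secret'}
move_data(dbset)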
Example #2
def update_configs(all_analyses, dbset):
    '''
    Sync analysis configurations from the blip server with the database and
    return the configurations to pull data from.

    :param all_analyses:
        List of blip configurations
    :param dbset:
        Dictionary of connection parameters for the PostgreSQL database
    '''

    db = DB(**dbset)
    db.begin()
    db.query('''TRUNCATE bluetooth.all_analyses_day_old;
                INSERT INTO bluetooth.all_analyses_day_old
                SELECT * FROM bluetooth.all_analyses;''')
    db.commit()
    analyses_pull_data = {}
    for report in all_analyses:
        report.outcomes = [outcome.__json__() for outcome in report.outcomes]
        report.routePoints = [
            route_point.__json__() for route_point in report.routePoints
        ]
        row = dict(device_class_set_name=report.deviceClassSetName,
                   analysis_id=report.id,
                   minimum_point_completed=db.encode_json(
                       report.minimumPointCompleted.__json__()),
                   outcomes=report.outcomes,
                   report_id=report.reportId,
                   report_name=report.reportName,
                   route_id=report.routeId,
                   route_name=report.routeName,
                   route_points=report.routePoints)
        # If the upsert fails, log the error and continue; don't add this analysis to the set to pull
        try:
            upserted = db.upsert('bluetooth.all_analyses',
                                 row,
                                 pull_data='included.pull_data')
            analyses_pull_data[upserted['analysis_id']] = {
                'pull_data': upserted['pull_data'],
                'report_name': upserted['report_name']
            }
        except IntegrityError as err:
            LOGGER.error(err)

    db.close()

    analyses_to_pull = {
        analysis_id: analysis
        for (analysis_id, analysis) in analyses_pull_data.items()
        if analysis['pull_data']
    }
    return analyses_to_pull
Example #3
def db_input(tweet):
	tweet = tweet.split(" ")
	hashtags = []
	words = []
	for word in tweet:
		if len(word) > 3 and special_match(word, plaintext):
			word = word.lower()
			if "#" == word[0]:
				hashtags.append(word)
			else:
				words.append(word)
	insert_update = ("with w as (insert into words (word) values ('{}') on conflict (word) do update set word = words.word returning (wid)), h as (insert into hashtags (hashtag) values ('{}') on conflict (hashtag) do update set hashtag = hashtags.hashtag returning (hid)) insert into links (wid, hid, weight) values ((select * from w), (select * from h), 1) on conflict (wid, hid) do update set weight = links.weight + 1")
	db = DB(dbname=database, host=host, port=int(port), user=user, passwd=password)
	db.begin()
	for word in words:
		for hashtag in hashtags:
			db.query(insert_update.format(word, hashtag))
	db.commit()
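Because db_input builds its statement with str.format, any word or hashtag containing a single quote breaks the SQL, and the query is open to injection. Below is a hedged alternative sketch, not part of the original example, that passes the values as positional query parameters ($1/$2), the same mechanism Example #4 uses; link_words_to_hashtags is a name introduced here purely for illustration.

# Hedged sketch: the same upsert chain as db_input, but word and hashtag are
# supplied as query parameters instead of being formatted into the SQL string.
INSERT_UPDATE_PARAM = (
    "with w as (insert into words (word) values ($1) "
    "on conflict (word) do update set word = words.word returning (wid)), "
    "h as (insert into hashtags (hashtag) values ($2) "
    "on conflict (hashtag) do update set hashtag = hashtags.hashtag returning (hid)) "
    "insert into links (wid, hid, weight) "
    "values ((select * from w), (select * from h), 1) "
    "on conflict (wid, hid) do update set weight = links.weight + 1")

def link_words_to_hashtags(db, words, hashtags):
    # db is an already-open pg.DB connection; words and hashtags are the lists
    # that db_input builds above.
    db.begin()
    for word in words:
        for hashtag in hashtags:
            db.query(INSERT_UPDATE_PARAM, (word, hashtag))
    db.commit()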
Example #4
# db.query("ALTER TABLE "+config.get('summary_stats', 'shp_table_name')+" ADD column attributes TEXT")
#
# print "importing raster"
# db.query("DROP TABLE IF EXISTS "+config.get('summary_stats', 'raster_table_name'))
# p1 = subprocess.Popen(['/usr/bin/raster2pgsql', '-d', '-s', config.get('summary_stats', 'raster_file_srs'), '-t', '100x100',  config.get('summary_stats', 'raster_file'),
#                        config.get('summary_stats', 'raster_table_name')], stdout=subprocess.PIPE)
# p2 = subprocess.Popen(['/usr/bin/psql', '-p', '5432', config.get('database', 'dbname'), '-U', config.get('database', 'user'), '-h', '127.0.0.1'],
#                       stdin=p1.stdout, stdout=subprocess.PIPE, env=psql_env)
# p1.stdout.close()  # Allow p1 to receive a SIGPIPE if p2 exits.
# output,err = p2.communicate()
#
# db.query('CREATE INDEX dem_st_convexhull_idx ON '+config.get('summary_stats', 'raster_table_name')+' '
#                                'USING gist ((st_convexhull(rast)) public.gist_geometry_ops_2d)')

print("updating attributes")
q = db.query("SELECT count(*) as count_all from " +
             config.get('summary_stats', 'shp_table_name'))
count_all = q.dictresult()[0]["count_all"]
steps = count_all // 500  # integer division so range() gets an int; any final partial batch of < 500 rows is skipped
for s in range(0, steps):
    try:
        db.begin()
        db.query('select raster.updtae_attributes($1, $2, $3, $4)',
                 (config.get('summary_stats', 'raster_table_name'),
                  config.get('summary_stats', 'shp_table_name'), 500, s * 500))
        db.commit()
        print("processed " + str((s * 500)))
    except Exception as e:
        print(str(e))