Example #1
def db_init():

    connection_url = ('postgresql://' + os.environ['db_user'] + ':' +
                      os.environ['db_password'] + '@postgres:' +
                      os.environ['db_port'] + '/' + os.environ['db_database'])

    print('Initializing Postgres Object...')
    db = postgres.Postgres(url=connection_url)
    print('Ensuring timescaledb ext. is enabled')
    db.run("CREATE EXTENSION IF NOT EXISTS timescaledb;")
    print("Ensuring tables are setup properly")
    db.run("""
           CREATE TABLE IF NOT EXISTS can (
               time timestamptz NOT NULL,
               can_interface text NOT NULL,
               can_id text NOT NULL,
               can_data text NOT NULL);""")

    print("Ensuring can data table is a timescaledb hypertable")
    db.run("""
           SELECT create_hypertable('can', 'time', if_not_exists => TRUE,  
           migrate_data => TRUE);""")

    print("Finished setting up tables")

    return db
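
Not part of the original example, but a minimal usage sketch: assuming the postgres.py library, whose run() accepts a parameters argument passed through to psycopg2, a CAN frame could be inserted into the table created above like this. The interface, id, and data values are made up.

from datetime import datetime, timezone

db = db_init()
# Parameterized insert into the hypertable; the sample values are illustrative only.
db.run(
    """INSERT INTO can (time, can_interface, can_id, can_data)
       VALUES (%(time)s, %(iface)s, %(id)s, %(data)s);""",
    {"time": datetime.now(timezone.utc), "iface": "can0",
     "id": "0x123", "data": "DEADBEEF"})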
Example #2
def create_tileset(city_code, layer_name):
    # Wait for a free slot in the upload queue
    while True:
        if check_queue_status() == 'OK':
            break
        else:
            time.sleep(60)

    # Create the tileset
    tile_id = userName + "." + city_code + "-" + layer_name
    create_cmd = "tilesets create " + tile_id + " --recipe recipe.json --name " + city_code + "-" + layer_name
    result = run_cmd(create_cmd)

    # Add the new layer to the queue
    tiles_queue.append(tile_id)

    # If the tileset already exists, delete it and recreate it
    if "already exists" in json.loads(result)["message"]:
        del_cmd = "tilesets delete " + tile_id + " -f"
        run_cmd(del_cmd)
        run_cmd(create_cmd)

    # Publish the tileset
    pub_cmd = "tilesets publish " + tile_id
    run_cmd(pub_cmd)

    # Record the link between the city and the vector layer in the DB
    p = postgres.Postgres()
    p.upload_layer(layer_name)

    # Delete the recipe file
    #remove_recipe()

    return tile_id
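
The run_cmd helper is not shown in this example. A plausible sketch, purely an assumption, that shells out to the Mapbox Tilesets CLI and returns its JSON output as text:

import subprocess

def run_cmd(cmd):
    # Hypothetical helper: run the CLI command and return its stdout so the
    # caller can json.loads() the response.
    completed = subprocess.run(cmd.split(), capture_output=True, text=True)
    return completed.stdout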
Example #3
def summarize_events():
    db = postgres.Postgres(DB)
    for suffix in TABLE_SUFFIXES:
        db.run(Q_DAILY_DEVICES_CREATE_TABLE.format(suffix=suffix))
        db.run(Q_MD_USERS_CREATE_TABLE.format(suffix=suffix))
        day_first = db.one(Q_GET_FIRST_AVAILABLE_DAY.format(suffix=suffix))
        # Summarize the latest days that are not yet summarized.
        day_from = db.one(Q_GET_FIRST_UNPROCESSED_DAY.format(suffix=suffix))
        if day_from is None:
            day_from = day_first
            if day_from is None:
                raise RuntimeError('no events in db')
        day_until = db.one(Q_GET_LAST_AVAILABLE_DAY.format(suffix=suffix))
        days = {"day_from": day_from, "day_until": day_until, "suffix": suffix}
        print "SUMMARIZING FROM", day_from, "UNTIL", day_until, "FOR SUFFIX", suffix
        # Update daily device activity.
        print "  UPDATING DAILY ACTIVE DEVICES SUMMARY"
        db.run(Q_DAILY_DEVICES_CLEAR.format(**days))
        db.run(Q_DAILY_DEVICES_SUMMARIZE.format(**days))
        # Update multi-device-user assessments.
        print "  UPDATING MULTI-DEVICE USERS SUMMARY"
        db.run(Q_MD_USERS_CLEAR.format(**days))
        db.run(Q_MD_USERS_SUMMARIZE.format(**days))
        # Expire old data
        print "EXPIRING", day_first, "FOR SUFFIX", suffix
        db.run(
            Q_DAILY_DEVICES_EXPIRE.format(suffix=suffix, day_first=day_first))
        db.run(Q_MD_USERS_EXPIRE.format(suffix=suffix, day_first=day_first))

        print "VACUUMING daily_activity_per_device{suffix} AND daily_multi_device_users{suffix}".format(
            suffix=suffix)
        db.run(Q_VACUUM_TABLES.format(suffix=suffix))
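
The Q_* constants used above are not included in the example; they are SQL templates whose {suffix}, {day_from}, and {day_until} placeholders are filled via str.format. A purely hypothetical illustration of the pattern (the real column layout is not shown in the source):

# Hypothetical templates, for illustration only.
Q_DAILY_DEVICES_CREATE_TABLE = """
    CREATE TABLE IF NOT EXISTS daily_activity_per_device{suffix} (
        day DATE NOT NULL,
        uid TEXT NOT NULL,
        device_id TEXT NOT NULL
    );"""

Q_DAILY_DEVICES_CLEAR = """
    DELETE FROM daily_activity_per_device{suffix}
    WHERE day >= '{day_from}' AND day <= '{day_until}';"""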
Example #4
def import_events(force_reload=False):
    s3 = boto.s3.connect_to_region(S3_REGION).get_bucket(S3_BUCKET)
    db = postgres.Postgres(DB_URI)
    db.run(Q_DROP_CSV_TABLE)
    db.run(Q_CREATE_COUNTS_TABLE)
    days = []
    for key in s3.list(prefix=S3_PREFIX):
        filename = path.basename(key.name)
        day = "-".join(filename[:-4].split("-")[-3:])
        date = datetime.strptime(day, "%Y-%m-%d")
        if date >= COUNTS_BEGIN:
            if force_reload:
                days.append(day)
            else:
                if not db.one(Q_CHECK_FOR_DAY.format(day=day)):
                    days.append(day)
    days.sort(reverse=True)
    print "FOUND", len(days), "DAYS"
    for day in days:
        print day
        print "  COPYING CSV"
        db.run(Q_CREATE_CSV_TABLE)
        db.run(Q_COPY_CSV.format(day=day))
        print "  CLEARING"
        db.run(Q_CLEAR_DAY.format(day=day))
        print "  INSERTING"
        db.run(Q_INSERT_COUNTS)
        db.run(Q_DROP_CSV_TABLE)
    print "VACUUMING"
    db.run(Q_VACUUM_COUNTS)
Example #5
def geocode_addresses_in_threads(city_code, houses, threads_num):
	print("Start get houses location. Time:",datetime.now())
	p = postgres.Postgres()

	bbox_list = p.send_query("select bbox from cities where code='"+city_code+"'")[0]
	bbox = ','.join([str(i) for i in bbox_list])
	
	manager = multiprocessing.Manager()
	results = manager.list()
	chunks = numpy.array_split(houses,threads_num)
	processes = []

	for i in range(0,threads_num):
		print("Houses location. Part: "+str(i+1)+" len: "+str(len(chunks[i])))
		p = multiprocessing.Process(target=geocode_addresses, args=(chunks[i], results, str(i+1), bbox))
		processes.append(p)
		p.start()

	for process in processes:
		process.join()
		process.close()

	print("Finish get houses location. Time:",datetime.now())

	return list(results)
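
A note on the bbox lookup above: the city code is concatenated directly into the SQL string. send_query is a project-specific wrapper, so this is only a sketch under the assumption that it forwards parameters to psycopg2, but a parameterized form could look like:

# Sketch: pass the city code as a bound parameter instead of concatenating it.
bbox_list = p.send_query(
    "select bbox from cities where code = %(code)s",
    {"code": city_code})[0]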
Example #6
    def run(m, cfg):
        host = cfg.primaryHost
        loadgen = cfg.postgresClient

        m += host
        m += loadgen
        m += HostInfo(host)
        # Creating the db takes time, so we don't clean the file
        # system.  We avoid any cruft that may be there already by
        # putting the DB in a subdirectory.
        fs = FileSystem(host, cfg.fs, clean = False)
        m += fs

        dbdir = fs.path + "0/postgres"
        pgPath = os.path.join(cfg.benchRoot, "postgres")
        pgBuild = getBuild(cfg)
        pgOpts = {"shared_buffers": postgres.PGVal(cfg.bufferCache, "MB")}
        log2NumLockPartitions = int(math.log(cfg.lockPartitions, 2))
        if cfg.lockPartitions != 1 << log2NumLockPartitions:
            raise ValueError("lockPartitions must be a power of 2, got %r" %
                             cfg.lockPartitions)
        pgOpts["log2_num_lock_partitions"] = log2NumLockPartitions
        if cfg.sleep == "sysv":
            pgOpts["semas_per_set"] = cfg.semasPerSet
        pg = postgres.Postgres(host, pgPath, pgBuild, dbdir,
                               malloc = cfg.malloc, **pgOpts)
        m += postgres.InitDB(host, pg).addTrust(loadgen)
        m += pg

        if cfg.hotplug:
            # Because the number of cores and the number of clients is
            # the same, we don't strictly need hotplug
            m += SetCPUs(host = host, num = cfg.cores)
        # XXX Make configurable (at least iface name)
#        m += IXGBE(host, "eth0", queues = "n*NCPU/(NRX if rx else NTX)")
        # The ixgbe driver assigns flows to queues sequentially.
        # Since we only have cfg.cores flows, make sure a sequential
        # assignment spans all the online cores.  However, this does
        # not spread things out if we have more queues than cores.
        m += IXGBE(host, "eth0", queues = "n%min(NCPU, NRX if rx else NTX)")

        sysmon = ExplicitSystemMonitor(host)
        m += sysmon
        for trial in range(cfg.trials):
            m += PostgresLoad(loadgen, trial, pg, cfg.cores, cfg.cores,
                              cfg.rows, cfg.partitions, cfg.batchSize,
                              cfg.randomWritePct, sysmon)
        m.run()
Example #7
    def connect_to_database(self) -> None:
        """
        Create a New Database.
        """

        logger.info("connecting to database")
        self.db = postgres.Postgres(config.database)

        logger.info("creating missing tabels")
        self.db.run(
            """
            CREATE TABLE IF NOT EXISTS settings (
                server_id BIGINT PRIMARY KEY,
                prefix text,
                modlog BIGINT
            )
        """
        )
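
A hedged follow-up sketch, not in the original class: reading and updating a row in the settings table created above, using postgres.py's parameterized one()/run() calls. The method names are made up.

    def get_prefix(self, server_id: int):
        # Return the stored prefix for a server, or None if there is no row.
        return self.db.one(
            "SELECT prefix FROM settings WHERE server_id = %(id)s",
            {"id": server_id})

    def set_prefix(self, server_id: int, prefix: str) -> None:
        # Upsert keyed on the server_id primary key.
        self.db.run(
            """
            INSERT INTO settings (server_id, prefix) VALUES (%(id)s, %(prefix)s)
            ON CONFLICT (server_id) DO UPDATE SET prefix = EXCLUDED.prefix
            """,
            {"id": server_id, "prefix": prefix},
        )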
Example #8
def connect_db():

    import postgres
    import os
    import sys
    from time import sleep
    from psycopg2 import OperationalError

    # Try to connect to the DB. It may still be starting up, so try a few times
    # before failing. Currently trying every second, 60 times.
    tries = 0
    maxtries = 60
    sleeptime = 1
    db_connected = False
    connectionurl = 'postgresql://' + os.environ['db_user'] + ':' + os.environ['db_password'] + \
        '@postgres:' + os.environ['db_port'] + '/' + os.environ['db_database']
    while (not db_connected):
        try:
            print("Attempting connection to database")
            db = postgres.Postgres(url=connectionurl)
        except OperationalError as e:
            if (tries < maxtries):
                print("Database connection attempt", tries,
                      "failed. Database may still be starting. Sleeping",
                      sleeptime, "s and trying again")
                print(e)
                tries = tries + 1
                sleep(sleeptime)
            else:
                print("FATAL: Could not connect to db after", tries,
                      "tries. Exiting")
                sys.exit(-1)
        else:
            print("Connection successful")
            db_connected = True

    setup_db_tables(db)
    sys.stdout.flush()

    return db
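
The same retry pattern, written as a bounded loop rather than a while flag; a compact sketch under the same assumptions (a reachable Postgres URL and a setup_db_tables helper defined elsewhere):

import postgres
from time import sleep
from psycopg2 import OperationalError

def connect_db_with_retries(url, maxtries=60, sleeptime=1):
    # Retry the initial connection while the database container finishes starting up.
    for attempt in range(1, maxtries + 1):
        try:
            return postgres.Postgres(url=url)
        except OperationalError as e:
            print("Connection attempt", attempt, "failed:", e)
            sleep(sleeptime)
    raise RuntimeError("Could not connect to the database after %d tries" % maxtries)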
Example #9
#Initialize postgres database if "CELL_LOG" environment variable is set to log
#to database.

if((log_env == 'DB') or (log_env == 'DB,CSV') or (log_env == 'CSV,DB')):

    import postgres

    global db

    connection_url = ('postgresql://' + os.environ['db_user'] + ':' + 
                     os.environ['db_password'] + '@postgres:' + 
                     os.environ['db_port'] + '/' + os.environ['db_database'] )

    print('Initializing Postgres Object...')
    db = postgres.Postgres(url = connection_url)

    print('Ensuring timescaledb ext. is enabled')
    db.run("CREATE EXTENSION IF NOT EXISTS timescaledb;")
    print("Ensuring tables are setup properly")
    db.run("""
            CREATE TABLE IF NOT EXISTS cell (
                time timestamptz UNIQUE NOT NULL,
                signal int2 NOT NULL,
                cell_tech text NOT NULL);""")

    print("Ensuring cell data table is a timescaledb hypertable")
    db.run("""
            SELECT create_hypertable('cell', 'time', if_not_exists => TRUE,  
            migrate_data => TRUE);""")
Example #10
def main(arguments: argparse.Namespace) -> ExitCode:
    logger.remove()
    logger.add(sys.stderr, level=arguments.logging_level)
    logger.debug("Called with {!r}", arguments)

    if not (arguments.postgres_connection_string
            and arguments.synapse_auth_token):
        logger.error(
            "Postgres connection string and Synapse auth token can not be empty"
        )
        return ExitCode.Failure

    try:
        db = postgres.Postgres(arguments.postgres_connection_string)
    except (psycopg2.OperationalError, psycopg2.InternalError):
        logger.exception(
            "Connecting to database using {!r} failed:",
            arguments.postgres_connection_string,
        )
        return ExitCode.Failure

    session = requests.session()
    session.headers["Authorization"] = "Bearer " + arguments.synapse_auth_token

    before_date = get_delta_date(int(arguments.delta))
    before_date_string = before_date.isoformat(sep=" ", timespec="seconds")
    logger.info("Purging events up to: {} UTC", before_date_string)

    # Purge room history for all rooms
    rooms = get_room_record_ids(db)
    room_count = len(rooms)
    for index, room_id in enumerate(rooms, start=1):
        logger.info("({}/{}) Processing room: {!r}...", index, room_count,
                    room_id)
        event_id = get_last_event_id(db, room_id, before_date)
        if event_id is None:
            logger.info(
                "No event ID before: {} UTC for room: {!r}, skipping",
                before_date_string,
                room_id,
            )
            continue

        logger.info(
            "Last event ID before: {} UTC for room {!r}: {!r}",
            before_date_string,
            room_id,
            event_id,
        )
        purge_id = purge_history(session, arguments.api_url, room_id, event_id)
        if not purge_id:
            logger.warning("Failed to purge room: {!r}: received no purge ID",
                           room_id)
            continue

        logger.info("Purging room: {!r} in progress: {!r}...", room_id,
                    purge_id)
        result = wait_for_purge(session, arguments.api_url, purge_id)
        logger.info("Purged room: {!r} with status: {!r}", room_id, result)

    logger.info("Purging local media older than: {} UTC...",
                before_date_string)
    local_media = get_local_media_record_ids(db, before_date)
    important_files = get_important_media_ids(db)

    # Purge local media manually
    old_media = set(local_media) - important_files
    old_media_count = len(old_media)
    logger.info("{} media to be cleaned...", len(old_media))
    for index, media_id in enumerate(old_media, start=1):
        logger.info("({}/{}) Processing media: {!r}...", index,
                    old_media_count, media_id)
        paths = get_local_media_paths(arguments.media_store, media_id)
        for path in paths:
            if not os.path.isfile(path):
                logger.debug("{!r} could not be found or is not a file", path)
                continue
            os.remove(path)

        delete_local_media_record(db, media_id)

    # Purge remote media
    logger.info("Purging cached remote media older than: {} UTC...",
                before_date_string)
    result = purge_remote_media(session, arguments.api_url, before_date)
    logger.info("Purged cached remote media: {!r}", result)
    return ExitCode.Success
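
get_delta_date is not shown in this example; a minimal sketch of what it plausibly computes, given that its result is used as a UTC cutoff a fixed number of days in the past (an assumption, not the original helper):

from datetime import datetime, timedelta

def get_delta_date(delta_days: int) -> datetime:
    # Hypothetical helper: "now minus N days", naive UTC to match the isoformat() usage above.
    return datetime.utcnow() - timedelta(days=delta_days)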
Example #11
import imp

import odo
import pandas as pd
import postgres

import pycomic
imp.reload(pycomic)

'''Uploads from .csv to comics objects, then goes into db'''

# get list of comics objects from .csv

df1=pd.read_csv('~/Desktop/Comics/comics_db.csv')
df=df1.where((pd.notnull(df1)),None) # replace nan with Nones
library=pycomic.df_to_objects(df)

# initialize Database

db=postgres.Postgres('postgresql://*****:*****@localhost/test')

# DB table already created, you have been warned
#db.run('CREATE TABLE comics (title varchar(80), volume int, issue int, issue_end int, year int, arc varchar(80), publisher varchar(30), size real, comments varchar(120), read bool, story_rank real, art_rank bool, filename varchar(80), path varchar(120));')	

#db.run('CREATE TABLE stupid (x varchar(90), y INT);')

#db.run('COPY comics (title) FROM /Users/ksakamoto/Destop/Comics/comics_db.csv WITH csv')

fil='/Users/ksakamoto/Documents/comics.csv'
f=odo.resource(fil)
dshape=odo.discover(f)

t=odo.odo(fil, 'postgresql://*****:*****@localhost/test::comics', dshape=dshape)

Example #12
    ';sban <UserID> <Reason (optional)> - Bans the given user without a message.\n'
    ';tempban <UserID> <Ban Length> <Reason (optional)> - Bans the given user with a message for the given amount of time. Ban length can be in hours or of the form "XwXdXh"\n'
    ';banstatus <UserID> - returns how long a tempbanned user will remain banned.\n'
    ';unban <UserID> - Unbans the given user.\n'
    ';locate <Regex> - Searches the user list for users with Names and Nicknames that match the given Regex. If the Regex given is a number, it also searches discriminators.\n'
    ';clear <MessageID (optional)> - Marks each message above this or the given message as read, until hitting a marked message, or a message not sent by me.'
)

warninglist = []
removelist = []
mutelist = []
watchlist = {}
exceptionlist = []
mention_dict = loadMentions()
keywordsFile = loadKeywords()
db = postgres.Postgres(url=os.environ.get('DATABASE_URL'))
db.run("CREATE TABLE IF NOT EXISTS forbidden (words text)")
db.run("CREATE TABLE IF NOT EXISTS vile (words text)")
db.run("CREATE TABLE IF NOT EXISTS automute (words text)")
db.run("CREATE TABLE IF NOT EXISTS tempbans (id bigint PRIMARY KEY, time int)")
db.run(
    "CREATE TABLE IF NOT EXISTS usernotes (id bigint PRIMARY KEY, linkedact text, notes text)"
)
db.run("CREATE TABLE IF NOT EXISTS exempteds (id bigint)")

client = discord.Client()
mainServer = None
commandChn = None
reportChn = None
removeChn = None
logChn = None
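
A hedged usage sketch for the tempbans table created above, using a parameterized upsert; the helper name and the exact semantics of the time column are assumptions:

def add_tempban(user_id: int, unban_time: int) -> None:
    # Upsert so re-banning an already-tempbanned user simply updates the expiry.
    db.run(
        """
        INSERT INTO tempbans (id, time) VALUES (%(id)s, %(time)s)
        ON CONFLICT (id) DO UPDATE SET time = EXCLUDED.time
        """,
        {"id": user_id, "time": unban_time},
    )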
Example #13
    def initialize(self):

        self.args = None
        logger.debug("connecting to postgres")
        self.pg = postgres.Postgres()
Example #14
    stations_file.close()

    # Write the result to a file
    os.system("mkdir ../out/stops_distance")
    geopandas.GeoDataFrame(features).to_file(
        "../out/stops_distance/stops_distance.geojson", driver='GeoJSON')


# ===========================
if __name__ == '__main__':

    print("Start", datetime.now())

    # Read the parameters from the parameters file
    init()
    postgres = postgres.Postgres()

    #== Step 1: Load route and stop data

    # # Step 1.1: Load routes and stops
    # load_route_and_stations()

    # # Step 1.2: Export stops and routes to geojson
    # generate_stations_geojson()
    # generate_routes_geojson()

    # # Step 1.4: Load the data into the DB
    # postgres.upload_city()
    # postgres.upload_stations()
    # postgres.upload_routes()
    # postgres.upload_lnk_station_routes()
Example #15
async def run(loop):

    print('Connecting to the database')
    connectionurl = 'postgresql://' + os.environ['db_user'] + ':' + os.environ['db_password'] + \
        '@postgres:' + os.environ['db_port'] + '/' + os.environ['db_database']

    # Try to connect to the DB. It may be starting up so we should try a few times
    # before failing. Currently trying every second 60 times
    tries = 0
    maxtries = 60
    sleeptime = 1
    db_connected = False
    while not db_connected:
        try:
            db = postgres.Postgres(url=connectionurl)
        except OperationalError as e:
            if tries < maxtries:
                print('Database connection attempt', tries,
                      'failed. Database may still be starting. Sleeping',
                      sleeptime, 's and trying again')
                print(e)
                tries = tries + 1
                time.sleep(sleeptime)
            else:
                print('FATAL: Could not connect to db after', tries,
                      'tries. Exiting')
                sys.exit(-1)
        else:
            print('DB successfully connected')
            db_connected = True

    # Sleep for a few seconds to allow gps2tsdb to create the gps table
    time.sleep(5)

    # This should be replaced with a query that deletes all rows from the 'fake gps' device,
    # but until the gps device is recorded, this is what we've got
    print('Truncating GPS database')
    db.run('TRUNCATE gps;')

    print('Setting up NATS')
    # Connect to NATS server
    nc = NATS()
    await nc.connect('nats://nats:4222')

    # Notifies the non-callback function that we are finished
    # Yeah I know it's not pythonic. Open a PR smart guy
    fin = asyncio.Future()

    async def message_handler(msg):
        nats_gps_point = json.loads(msg.data.decode())
        nonlocal fin

        # Debugging
        #print('NATS type:', type(nats_gps_point), '\nmessage:', nats_gps_point)

        # Check if this is a control message signifying the end of the transmission
        if nats_gps_point == 'END':
            print('End control message received. No more gps points to check')
            fin.set_result(message_handler.points_received)
            return

        # Check for the NATS point
        print('Verifying that nats', nats_gps_point['time'],
              ' is in the database')
        rst = db.one('SELECT * FROM gps where time = %s;',
                     (nats_gps_point['time'], ))
        if rst is not None and len(rst) == 3 and rst.lat == nats_gps_point[
                'lat'] and rst.lng == nats_gps_point['lon']:
            print('nats timestamp', nats_gps_point['time'],
                  'successfully entered into the database')
        else:
            print('nats GPS point', nats_gps_point,
                  '\nwas not successfully entered into database!\ndb entry:',
                  rst)
            fin.set_result(-1)
            sys.exit(-1)

        # Counter just for fun
        if not hasattr(message_handler, 'points_received'):
            message_handler.points_received = 0  # it doesn't exist yet, so initialize it
        message_handler.points_received = message_handler.points_received + 1
        sys.stdout.flush()
        return

    print('setting up gps message callback')
    sid = await nc.subscribe('gps', cb=message_handler)
    print('gps message callback setup')
    sys.stdout.flush()

    await fin
    # If fin is a positive number, the test was successful and the number is the number of points
    # verified. If it is negative, the test failed
    if fin.result() > 0:
        print('End message received. Exiting')
        print('Successfully verified %d points' % fin.result())
    else:
        print('Test failed')
    await nc.close()
Example #16
def run(s3_prefix, event_type, temp_schema, temp_columns, perm_schema, perm_columns, id_column="uid",
        before_import=nop, after_day=nop, after_import=nop, day_from=None, day_until=None):

    def drop_temporary_table():
        db.run(Q_DROP_TEMPORARY_TABLE.format(event_type=event_type))

    def create_events_tables():
        for rate in SAMPLE_RATES:
            db.run(Q_CREATE_EVENTS_TABLE.format(event_type=event_type,
                                                suffix=rate["suffix"],
                                                schema=perm_schema))

    def get_max_day():
        result = db.one(Q_GET_MAX_DAY.format(event_type=event_type))
        if result:
            return datetime.strftime(result, "%Y-%m-%d")
        return result

    def is_candidate_day(day):
        return (not day_from or day_from <= day) and (not day_until or day_until >= day)

    def is_day_populated(day):
        return bool(db.one(Q_CHECK_FOR_DAY.format(event_type=event_type, day=day)))

    def get_unpopulated_days():
        days = []
        message = "FINDING UNPOPULATED DAYS"
        if day_from:
            message += " FROM {day_from}".format(day_from=day_from)
        if day_until:
            message += " UNTIL {day_until}".format(day_until=day_until)
        print message
        for key in s3.list(prefix=s3_prefix):
            filename = path.basename(key.name)
            # Ignore the last four characters (".csv") then join the last
            # three parts ("YYYY-MM-DD") of the hyphen-split string.
            day = "-".join(filename[:-4].split("-")[-3:])
            if is_candidate_day(day) and not is_day_populated(day):
                days.append(day)
        return days

    def get_timestamp(which):
        return db.one(Q_GET_TIMESTAMP.format(which=which, event_type=event_type))

    def print_timestamp(which):
        print "  {which} timestamp".format(which=which), get_timestamp(which)

    def import_day(day):
        print day
        print "  COPYING CSV"
        db.run(Q_CREATE_CSV_TABLE.format(event_type=event_type, schema=temp_schema))
        s3_path = s3_uri.format(day=day)
        db.run(Q_COPY_CSV.format(event_type=event_type,
                                 columns=temp_columns,
                                 s3_path=s3_path,
                                 CREDENTIALS=CREDENTIALS))
        print_timestamp("MIN")
        print_timestamp("MAX")
        for rate in SAMPLE_RATES:
            print " ", TABLE_NAMES["perm"].format(event_type=event_type, suffix=rate["suffix"])
            print "    CLEARING"
            db.run(Q_CLEAR_DAY.format(event_type=event_type,
                                      suffix=rate["suffix"],
                                      day=day))
            print "    INSERTING"
            db.run(Q_INSERT_EVENTS.format(event_type=event_type,
                                          columns=perm_columns,
                                          id_column=id_column,
                                          suffix=rate["suffix"],
                                          percent=rate["percent"],
                                          day=day,
                                          max_day=max_day,
                                          months=rate["months"]))
        after_day(db, day,
                  TABLE_NAMES["temp"].format(event_type=event_type),
                  TABLE_NAMES["perm"].format(event_type=event_type, suffix="{suffix}"),
                  SAMPLE_RATES)
        drop_temporary_table()

    def expire_events():
        for rate in SAMPLE_RATES:
            table_name = TABLE_NAMES["perm"].format(event_type=event_type, suffix=rate["suffix"])
            print "EXPIRING", table_name, "FOR", max_day, "+", rate["months"], "MONTHS"
            db.run(Q_DELETE_EVENTS.format(event_type=event_type,
                                          suffix=rate["suffix"],
                                          day=max_day,
                                          months=rate["months"]))
            print "VACUUMING AND ANALYZING", table_name
            db.run(Q_VACUUM_TABLES.format(event_type=event_type,
                                          suffix=rate["suffix"]))

    s3 = boto.s3.connect_to_region("us-west-2").get_bucket(S3_BUCKET)
    db = postgres.Postgres(DB_URI)
    s3_uri = "s3://" + S3_BUCKET + "/" + s3_prefix + "-{day}.csv"

    before_import(db, SAMPLE_RATES)
    drop_temporary_table()
    create_events_tables()
    max_extant_day = get_max_day()
    if not day_from:
        day_from = max_extant_day
    unpopulated_days = get_unpopulated_days()
    unpopulated_days.sort(reverse=True)
    if max_extant_day > unpopulated_days[0]:
        max_day = max_extant_day
    else:
        max_day = unpopulated_days[0]
    print "FOUND", len(unpopulated_days), "DAYS"
    for day in unpopulated_days:
        import_day(day)
    expire_events()
    after_import(db, SAMPLE_RATES, max_day)
Example #17
SMS_SEND_TEMPLATE = """<request>
    <authentication apikey="{}" />
    <data>
        <message>
            <sendername>CoronaKram</sendername>
            <text encoding="utf-8"><![CDATA[{}]]></text>
            <recipients>
                <msisdn>45{}</msisdn>
            </recipients>
        </message>
    </data>
</request>"""

db = postgres.Postgres("host={} user={} password={}".format(
    os.getenv("DB_HOST", "db"),
    os.getenv("DB_USER", "postgres"),
    os.getenv("DB_PASSWORD"),
))


class Receiver(BaseModel):
    phone_number: str
    timestamp: str = None


class Message(BaseModel):
    name: str = None
    text: str
    flag: bool = False
    receiver: str = None
    time: str = None
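
A hedged sketch of how an incoming Message might be persisted through the db handle above; the messages table and its columns are assumptions, not part of the original:

def store_message(message: Message) -> None:
    # Hypothetical table/columns; parameterized insert via postgres.py.
    db.run(
        "INSERT INTO messages (name, text, flag, receiver) "
        "VALUES (%(name)s, %(text)s, %(flag)s, %(receiver)s)",
        {"name": message.name, "text": message.text,
         "flag": message.flag, "receiver": message.receiver},
    )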