Example #1
def fill_default_wake_word():
    query1 = 'insert into device.wake_word (' \
             'id,' \
             'setting_name,' \
             'display_name,' \
             'engine)' \
             'values (%s, %s, %s, %s)'
    query2 = 'insert into device.wake_word_settings(' \
             'wake_word_id,' \
             'sample_rate,' \
             'channels,' \
             'pronunciation,' \
             'threshold,' \
             'threshold_multiplier,' \
             'dynamic_energy_ratio)' \
             'values (%s, %s, %s, %s, %s, %s, %s)'
    wake_words = [
        (hey_mycroft, 'Hey Mycroft', 'Hey Mycroft', 'precise'),
        (christopher, 'Christopher', 'Christopher', 'precise'),
        (ezra, 'Hey Ezra', 'Hey Ezra', 'precise'),
        (jarvis, 'Hey Jarvis', 'Hey Jarvis', 'precise')
    ]
    wake_word_settings = [
        (hey_mycroft, '16000', '1', 'HH EY . M AY K R AO F T', '1e-90', '1', '1.5'),
        (christopher, '16000', '1', 'K R IH S T AH F ER .', '1e-25', '1', '1.5'),
        (ezra, '16000', '1', 'HH EY . EH Z R AH', '1e-10', '1', '2.5'),
        (jarvis, '16000', '1', 'HH EY . JH AA R V AH S', '1e-25', '1', '1.5')
    ]
    with db.cursor() as cur:
        execute_batch(cur, query1, wake_words)
        execute_batch(cur, query2, wake_word_settings)
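
The snippets on this page share one calling convention: build a sequence of parameter tuples (or dicts), then hand the cursor, the SQL, and that sequence to execute_batch. A minimal, self-contained sketch of that pattern, with a hypothetical DSN, table, and rows (page_size defaults to 100):

import psycopg2
from psycopg2.extras import execute_batch

conn = psycopg2.connect("dbname=example user=example")  # hypothetical DSN
rows = [(1, 'hey_mycroft'), (2, 'christopher')]          # sequence of parameter tuples

with conn, conn.cursor() as cur:
    # execute_batch sends the INSERTs in pages (default page_size=100),
    # which cuts server round-trips compared to cursor.executemany().
    execute_batch(
        cur,
        "insert into wake_word (id, setting_name) values (%s, %s)",
        rows,
    )
conn.close()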
Example #2
def fill_subscription_table():
    query = 'insert into account.account_membership(' \
            'account_id, ' \
            'membership_id, ' \
            'membership_ts_range, ' \
            'payment_account_id,' \
            'payment_method,' \
            'payment_id) ' \
            'values (%s, %s, %s, %s, %s, %s)'

    def map_subscription(user_uuid):
        subscr = subscription[user_uuid]
        stripe_customer_id = subscr['stripe_customer_id']
        start = format_timestamp(subscr['last_payment_ts'])
        subscription_ts_range = '[{},)'.format(start)
        subscription_type = subscr['type']
        if subscription_type == 'MonthlyAccount':
            subscription_type = 'month'
        elif subscription_type == 'YearlyAccount':
            subscription_type = 'year'
        subscription_uuid = get_subscription_uuid(subscription_type)
        return user_uuid, subscription_uuid, subscription_ts_range, stripe_customer_id, 'Stripe', 'subscription_id'
    with db.cursor() as cur:
        account_subscriptions = (map_subscription(user_uuid) for user_uuid in subscription)
        execute_batch(cur, query, account_subscriptions, page_size=1000)
Example #3
 def search_hmd(self,f):
     h=hmd()
     h.read_hmd(f)
     with self.con:
         labs=[{'xsp':xsp,'sec':sec.vals['LABEL'],'hmd':f} for sec in h.sects.values() for xsp in sec.xsp_vals()]#dict for each sec+xsp in hmd
         #array_append(null,val) gives null. added not null constraint to hmds col.
         execute_batch(self.cur,'update gtest.requested set hmds=array_append(hmds,%(hmd)s) where sec=%(sec)s and xsp=%(xsp)s and not %(hmd)s=any(hmds);',labs)
Example #4
def csv_to_db():

    with open('books.csv', newline='') as f:
        filereader = csv.reader(f)
        data = [tuple(row) for row in filereader]

    conn = None
    try:
        conn = psycopg2.connect(
            dbname='djhp6mrfhj0rf',
            user='******',
            password=
            '******',
            host='ec2-54-217-235-87.eu-west-1.compute.amazonaws.com',
            port='5432')

        cur = conn.cursor()

        sql_query = """INSERT INTO books(isbn, title, author, year)
                       VALUES (%s, %s, %s, %s)"""

        execute_batch(cur, sql_query, data)
        conn.commit()
        print("Table created successfully...")

    except (Exception, psycopg2.Error) as error:
        print("Error has occurred: ", error)
    finally:
        if conn:
            cur.close()
            conn.close()
            print("Connection closed")

    return data
Example #5
 def _add_subgenre(self, subgenre):
     query = """
         INSERT INTO subgenre (name) VALUES (%s) ON CONFLICT DO NOTHING
         """
     with self._db_conn.cursor() as cursor:
         execute_batch(cursor, query, subgenre)
     self._db_conn.commit()
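
Note that execute_batch expects a sequence of parameter sequences even when the statement has a single placeholder, so the subgenre argument above is presumably an iterable of one-element tuples. A hedged usage sketch (the caller name and genre values are made up):

# Hypothetical call: each entry is itself a one-element tuple.
importer._add_subgenre([('shoegaze',), ('dream pop',), ('post-rock',)])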
Example #6
def write_points_timescale(n):
    initial_time = time.time()
    empresas = ['fcetina', 'megabit', 'megabit2']
    db_conn = psycopg2.connect(dbname='trafico-wisphub',
                               user='******',
                               host='3.132.97.208',
                               password='******')
    cursor = db_conn.cursor()
    insert_query = "INSERT INTO ip_visited (time, id_empresa, empresa, cliente_id, ip_visited, download) VALUES (%s, %s, %s, %s, %s, %s)"
    my_data = []
    for i in range(n):
        ip_random = socket.inet_ntoa(
            struct.pack('>I', random.randint(1, 0xffffffff)))
        dowload_bits = random.randrange(20000)
        upload_bits = random.randrange(1000)
        cliente_id = random.randrange(1000, 1100)
        empresa_id = random.randrange(1, 3)
        empresa_slug = empresas[empresa_id - 1]

        record_to_insert = ("now()", empresa_id, empresa_slug, cliente_id,
                            ip_random, dowload_bits)
        my_data.append(record_to_insert)

    # cursor.executemany(insert_query, my_data)
    extras.execute_batch(cursor, insert_query, my_data, 10000)
    db_conn.commit()
    cursor.close()
    db_conn.close()
    end_time = time.time()
    time_elapsed = end_time - initial_time

    print "Se escribieron {0} puntos en {1} s".format(str(n), time_elapsed)
Example #7
    def run(self):

        cur = self.con.cursor()

        try:

            with self.con:  #with makes con commit here

                for i, v in enumerate(self.queries):

                    if self.isCanceled():

                        return False

                    execute_batch(cur, v, self.args[i])

                    self.setProgress(
                        100 * float(i) / len(self.queries)
                    )  #setProgress takes float from 0 to 100 and emits progressChanged signal

            return True

        except Exception as e:

            self.err = e

            return False
Example #8
    def execute_sql(self,
                    sql,
                    args=None,
                    commit=False,
                    fetch=False,
                    batch_insert=False):

        db_conn = self.get_db_connection()
        cursor = db_conn.cursor()

        if batch_insert:
            ext.execute_batch(cursor, sql, args)
        else:
            if args:
                cursor.execute(sql, args)
            else:
                cursor.execute(sql)

        retval = None
        if fetch:
            retval = cursor.fetchall()

        if commit:
            db_conn.commit()

        cursor.close()
        db_conn.close()

        return retval
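
A hedged usage sketch of this helper in batch mode (the object, table, and rows are made up): passing batch_insert=True routes args through psycopg2.extras.execute_batch, while commit=True commits before the connection is closed.

# Hypothetical usage of the helper above.
dao.execute_sql(
    "INSERT INTO events (name, payload) VALUES (%s, %s)",
    args=[('created', '{}'), ('updated', '{"id": 1}')],
    commit=True,
    batch_insert=True,
)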
Example #9
def upsert(city, resource_type, resources_info, google_sheet_id):

    cursor = None

    try:

        city = city.strip().lower()
        resource_type = resource_type.strip().lower()

        connection = get_connection()
        cursor = connection.cursor()

        delete(city, resource_type=resource_type, cursor=cursor)

        database_values = []
        insert_query = f"INSERT INTO {RESOURCE_TABLE_NAME} (city, resource_type, google_sheet_id, location, raw_obj) VALUES (%s, %s, %s, ST_MakePoint(%s, %s), %s)"
        for resource_info in resources_info:

            latitude, longitude = get_lat_long(resource_info)

            database_values.append((city, resource_type, google_sheet_id,
                                    longitude, latitude, resource_info))

        execute_batch(cursor, insert_query, database_values)

        connection.commit()

    finally:

        if cursor is not None:
            cursor.close()
Example #10
    def read(self, filename):
        from numpy import ndarray
        from psycopg2.extras import execute_batch, Json
        from ase.io import read

        def info(frame):
            frame_info = frame.info.copy()
            frame_info['total_energy'] = frame.get_total_energy()

            return {
                k: Json({
                    kk: (vv.tolist() if isinstance(vv, ndarray) else vv)
                    for kk, vv in v.items()
                })
                for k, v in {
                    'info': frame_info,
                    'atom': frame.arrays
                }.items()
            }

        frames = [info(f) for f in read(filename, ':')]
        with self.db, self.db.cursor() as cursor:
            execute_batch(
                cursor,
                "insert into frame_raw (info, atom) values (%(info)s, %(atom)s)",
                frames,
                page_size=500)
        return len(frames)
Example #11
 def _insertHRSample(sqlcmd1, sqlcmd2, signals):
     """
     Insert HR into database
     :param sqlcmd1:
     :param sqlcmd2:
     :param signals:
     :return:
     """
     logger.warn('fxn _insert_sample')
     for signal in signals:
         _sqlcmd1 = sqlcmd1.format(a, signal[0], datetime.now())
         try:
             conn = psycopg2.connect(host=postgres_config['host'],
                                     database=postgres_config['database'],
                                     port=postgres_config['port'],
                                     user=postgres_config['user'],
                                     password=postgres_config['password'])
             cur = conn.cursor()
             logger.warn(_sqlcmd1)
             cur.execute(_sqlcmd1)
             extras.execute_batch(cur, sqlcmd2, signal[1])
             cur.execute("DEALLOCATE inserts")
             conn.commit()
             cur.close()
             conn.close()
         except Exception as e:
             logger.warn('Exception %s' % e)
Example #12
 def save_projects(self, projects: List[Project]):
     with self._conn, self._conn.cursor() as cursor:
         values = [(p.name.name, p.downloads.value) for p in projects]
         execute_batch(
             cursor,
             "INSERT INTO projects(name, downloads) VALUES (%s, %s) ON CONFLICT DO NOTHING",
             values)
Example #13
def process_song_file(cur, filepath):
    """Parse and insert relevant song data from song files into the database.

    Arguments:
    cur -- database cursor used to insert data
    filepath -- path to JSON file containing song data
    """
    # open song file
    df = pd.read_json(filepath, lines=True)

    # efficiently insert song records in batches by minimizing server round-trips
    song_data = [
        tuple(x) for x in df[
            ["song_id", "title", "artist_id", "year", "duration"]].values
    ]
    execute_batch(cur, song_table_insert, song_data)

    # efficiently insert artist records in batches by minimizing server round-trips
    artist_data = [
        tuple(x) for x in df[[
            "artist_id", "artist_name", "artist_location", "artist_latitude",
            "artist_longitude"
        ]].values
    ]
    execute_batch(cur, artist_table_insert, artist_data)
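
The comments above refer to execute_batch's main benefit: statements are sent in pages (page_size defaults to 100), so fewer server round-trips are needed than with executemany. For large song files a larger page can be requested explicitly; a sketch assuming the same cursor and data as above:

# Same call as above, but with an explicit page size (default is 100).
execute_batch(cur, song_table_insert, song_data, page_size=1000)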
Example #14
def fill_wake_word_settings_table():
    query = 'insert into device.wake_word_settings(' \
            'wake_word_id,' \
            'sample_rate,' \
            'channels,' \
            'pronunciation,' \
            'threshold,' \
            'threshold_multiplier,' \
            'dynamic_energy_ratio)' \
            'values (%s, %s, %s, %s, %s, %s, %s)'

    def map_wake_word_settings(user_uuid):
        user_setting = user_settings[user_uuid]
        wake_word_id = users[user_uuid]['wake_word_id']
        sample_rate = user_setting['sample_rate']
        channels = user_setting['channels']
        pronunciation = user_setting['pronunciation']
        threshold = user_setting['threshold']
        threshold_multiplier = user_setting['threshold_multiplier']
        dynamic_energy_ratio = user_setting['dynamic_energy_ratio']
        return wake_word_id, sample_rate, channels, pronunciation, threshold, threshold_multiplier, dynamic_energy_ratio
    with db.cursor() as cur:
        account_wake_word_settings = (map_wake_word_settings(user_uuid) for user_uuid in users if user_uuid in user_settings)
        account_wake_word_settings = (wks for wks in account_wake_word_settings if wks[0] not in (hey_mycroft, christopher, ezra, jarvis))
        execute_batch(cur, query, account_wake_word_settings, page_size=1000)
Example #15
    def save_to_database(self, songs):
        sql_song = "INSERT INTO songs_info(title,\
                      artist, source) VALUES \
                      (%s,%s,%s)"
        sql_fp = "INSERT INTO fingerprints(song_id, \
                    pair_key, pair_value) VALUES\
                    (%s, %s, %s)"
        
        try:
            conn = psycopg2.connect(host=self.host, dbname=self.database, \
                                    user=self.user, password=self.password)
            cur = conn.cursor()
            for song in songs:
                # insert the song's info into the songs_info table
                cur.execute(sql_song, (song.title, song.artist, song.path))
                # retrieve the added song's id 
                cur.execute("SELECT LASTVAL();")
                last_id = cur.fetchone()[0]
               
                # insert the hash key and values of the pairs generated from the 
                # constellation map
                hash_values = song.hash_values
                fingerprints = [(last_id, "({},{}):{}".format(h[0], h[1], h[2]),\
                                h[3]) for h in hash_values]
                # execute groups of statements in fewer server roundtrips.
                # for faster insertion speed
                # cur.executemany(sql_fp, fingerprints)
                execute_batch(cur, sql_fp, fingerprints)
                conn.commit()
            cur.close()
            conn.close()

        except(Exception, psycopg2.DatabaseError) as error:
            print(error)
Example #16
    def fill_commit_parents(self, commit_info_list, autocommit=True):
        '''
		Creating table if necessary.
		Filling commit parenthood in table.
		'''

        tracked_data = {'latest_commit_time': 0, 'empty': True}

        def transformed_list(orig_gen):
            for c in orig_gen:
                tracked_data['last_commit'] = c
                tracked_data['empty'] = False
                tracked_data['latest_commit_time'] = max(
                    tracked_data['latest_commit_time'], c['time'])
                c_id = c['sha']
                for r, p_id in enumerate(c['parents']):
                    yield (c_id, p_id, r)

        if self.db.db_type == 'postgres':
            extras.execute_batch(
                self.db.cursor, '''
				INSERT INTO commit_parents(child_id,parent_id,rank)
					VALUES(
							(SELECT id FROM commits WHERE sha=%s),
							(SELECT id FROM commits WHERE sha=%s),
							%s)
				ON CONFLICT DO NOTHING;
				''', transformed_list(commit_info_list))

        else:
            self.db.cursor.executemany(
                '''
				INSERT OR IGNORE INTO commit_parents(child_id,parent_id,rank)
					VALUES(
							(SELECT id FROM commits WHERE sha=?),
							(SELECT id FROM commits WHERE sha=?),
							?);
				''', transformed_list(commit_info_list))

        if not tracked_data['empty']:
            repo_id = tracked_data['last_commit']['repo_id']
            latest_commit_time = datetime.datetime.fromtimestamp(
                tracked_data['latest_commit_time'])
            if self.db.db_type == 'postgres':
                self.db.cursor.execute(
                    '''INSERT INTO table_updates(repo_id,table_name,latest_commit_time) VALUES(%s,'commit_parents',%s) ;''',
                    (repo_id, latest_commit_time))
                self.db.cursor.execute(
                    '''UPDATE repositories SET latest_commit_time=%s WHERE id=%s;''',
                    (latest_commit_time, repo_id))
            else:
                self.db.cursor.execute(
                    '''INSERT INTO table_updates(repo_id,table_name,latest_commit_time) VALUES(?,'commit_parents',?) ;''',
                    (repo_id, latest_commit_time))
                self.db.cursor.execute(
                    '''UPDATE repositories SET latest_commit_time=? WHERE id=?;''',
                    (latest_commit_time, repo_id))

        if autocommit:
            self.db.connection.commit()
Example #17
def load_to_staging(parsed_rows, table, print_only=False, local=True):
    """ loads parsed rows from ODS into Aurora db """

    with open(os.path.join(config["sql"]["staged"], f'{table}.sql'),
              'r',
              encoding='utf') as f:
        query = f.read()
        if print_only:
            print(query)
            return

    cred = config['destination']
    conn = psycopg2.connect(dbname=cred['dbname'],
                            user=cred['username'],
                            password=cred['password'],
                            host=cred['host'],
                            port=cred['port'])

    schema = config['sql']['schema']

    print(f'Inserting data into {schema}.{table}')
    with conn.cursor() as cursor:
        delete_command = f'DELETE FROM {schema}."{table}"'

        cursor.execute(delete_command)
        try:
            extras.execute_batch(cursor, query, parsed_rows)
        except:
            print(cursor.query)
            raise

    print(f'Done inserting into {schema}.{table}')
    conn.commit()
    conn.close()
Example #18
def consume(cursor, kafka_consumer: KafkaConsumer, table: str, batch_size: int)->None:
    """Poll kafka_counsumer continously and write to DB

    Args:
        cursor: A psycopg2 cursor previously initialized
        kafka_consumer (kafka.KafkaConsumer): A previously initialized KafkaConsumer obj.

    """
    while True:
        try:
            extras.execute_batch(cursor,
                f"""
                    INSERT INTO {table}(address, latency, is_valid, status_code)
                    VALUES(
                        %(address)s,
                        %(latency)s,
                        %(is_valid)s,
                        %(status_code)s
                    )
                """,
                message_iter(kafka_consumer),
                page_size=batch_size
            )
            kafka_consumer.commit()
        except psycopg2.DataError as ex:
            logger.error(ex)
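
message_iter is not shown in this example; since the INSERT uses named placeholders, it presumably yields one dict per Kafka record with address, latency, is_valid, and status_code keys. A minimal sketch of such a helper, assuming kafka-python and JSON-encoded message values (only the field names are taken from the INSERT above, everything else is an assumption):

import json

def message_iter(kafka_consumer):
    # Hypothetical helper: poll once and yield one parameter dict per record,
    # matching the named placeholders in the INSERT statement above.
    records = kafka_consumer.poll(timeout_ms=1000)
    for batch in records.values():
        for record in batch:
            payload = json.loads(record.value)
            yield {
                'address': payload['address'],
                'latency': payload['latency'],
                'is_valid': payload['is_valid'],
                'status_code': payload['status_code'],
            }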
Example #19
def add_tracks(client, cursor, tracks, user_id=None, ratings=False):
    """Adds a list of tracks to the database

    :param client: A Spotipy client that is authenticated
    :param cursor: Postgres database connection that tracks should be added to
    :param tracks: List of dictionaries with track information to add to the
        database
    :param user_id: The user ID if ratings are being added for a user
    :param ratings: Default False, if True the order of the tracks in the list
        will be added as a ranking for the current user
    :return: None
    """
    if not tracks:
        return None
    execute_batch(
        cursor, """
        INSERT INTO tracks(id, name, popularity)
        VALUES(%(id)s, %(name)s, %(popularity)s)
        ON CONFLICT (id)
            DO UPDATE
            SET popularity = %(popularity)s
        """, tracks)
    add_audio_features(client, cursor, tracks)
    if ratings:
        add_ratings(cursor, tracks, user_id)
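
A hedged sketch of the tracks payload this function expects: a list of dicts keyed to the named placeholders (%(id)s, %(name)s, %(popularity)s); extra keys in the dicts would simply be ignored by psycopg2. The ids and values below are placeholders:

# Hypothetical input and call for add_tracks above.
tracks = [
    {'id': 'track-id-1', 'name': 'First Track', 'popularity': 63},
    {'id': 'track-id-2', 'name': 'Second Track', 'popularity': 71},
]
add_tracks(client, cursor, tracks, user_id='some-user-id', ratings=True)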
Example #20
    def insert_stars(self, stars_list, commit=True, db=None):
        '''
		Inserts starring events.
		commit defines the behavior at the end: whether or not the transaction is committed. Committing externally makes it possible to commit only once all stars for a repo have been added.
		'''
        if db is None:
            db = self.db
        if db.db_type == 'postgres':
            extras.execute_batch(
                db.cursor, '''
				INSERT INTO stars(starred_at,login,repo_id,identity_type_id,identity_id)
				VALUES(%s,
						%s,
						%s,
						(SELECT id FROM identity_types WHERE name='github_login'),
						(SELECT id FROM identities WHERE identity=%s AND identity_type_id=(SELECT id FROM identity_types WHERE name='github_login'))
					)
				ON CONFLICT DO NOTHING
				;''', ((s['starred_at'], s['login'], s['repo_id'], s['login'])
            for s in stars_list))
        else:
            db.cursor.executemany(
                '''
					INSERT OR IGNORE INTO stars(starred_at,login,repo_id,identity_type_id,identity_id)
					VALUES(?,
							?,
							?,
							(SELECT id FROM identity_types WHERE name='github_login'),
							(SELECT id FROM identities WHERE identity=? AND identity_type_id=(SELECT id FROM identity_types WHERE name='github_login'))
						);''', ((s['starred_at'], s['login'], s['repo_id'], s['login'])
              for s in stars_list))

        if commit:
            db.connection.commit()
Example #21
def person_gen():
    cur.execute('SELECT id FROM film_work')
    film_works_ids = []
    for data in cur.fetchall():
        film_works_ids.append(data[0])

    persons_ids = [str(uuid.uuid4()) for _ in range(600_000)]

    print("insert persons")
    execute_batch(cur,
                  "INSERT INTO person (id) VALUES (%s)",
                  [(i, ) for i in persons_ids],
                  page_size=5_000)
    conn.commit()
    print("persons has been inserted")
    film_work_person_data = []

    for film_work_id in film_works_ids:
        for person_id in random.sample(persons_ids, 5):
            film_work_person_data.append(
                (str(uuid.uuid4()), film_work_id, person_id), )

    print("insert relations")
    execute_batch(
        cur,
        "INSERT INTO person_film_work (id, film_work_id, person_id) VALUES (%s, %s, %s)",
        film_work_person_data,
        page_size=5000)
    conn.commit()
    cur.close()
    conn.close()
Example #22
def execute_batch(conn, df, table, page_size=100):
    """ Queries database to get new user data to predict on.
    Args:
        conn (psycopg2 Connection): An open connection to a PostgreSQL database
        df (Pandas Dataframe): The Pandas DataFrame to load into the PostgreSQL database where the columns
                               match and conform to the data in create_table.sql
        table (Str): The table name to store the data in the PostgreSQL database
        page_size (Int, default 100): The number of rows to include when updating the table.

    Returns:
        X (Pandas DataFrame): The predictor (X) columns with the object columns converted to integers 
                              using ordinal encoding.
        churn_data.df (Pandas DataFrame): The original dataframe of predictors without any conversions
        churn_data.target (Series of Bools): The target (y) column of whether or not a user has churned
    """
    # Code example adapted from:
    # https://naysan.ca/2020/05/09/pandas-to-postgresql-using-psycopg2-bulk-insert-performance-benchmark/

    tuples = [tuple(x) for x in df.to_numpy()]
    cols = ','.join(list(df.columns))
    query = """INSERT INTO %s(%s) VALUES(%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s
                                         ,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s
                                         ,%%s,%%s,%%s,%%s,%%s)""" % (table,
                                                                     cols)
    cursor = conn.cursor()
    try:
        extras.execute_batch(cursor, query, tuples, page_size)
        conn.commit()
    except (Exception, pg2.DatabaseError) as error:
        print("Error: %s" % error)
        conn.rollback()
        cursor.close()
        return 1
    cursor.close()
Example #23
    def save(self, messages: List[Message]):
        """
        Save the messages to the data store. The messages must all be of the same message type.

        :param List[Message] messages: a list of Message objects
        :return: None
        """
        if messages:
            statement = Message.insert_statement()
            try:
                with self.connection as conn:
                    with conn.cursor() as cursor:

                        all_messages = [{
                            **vars(message)
                        } for message in messages]

                        try:
                            logger.info(
                                f"Inserting {len(all_messages)} messages. "
                                f"(from: {all_messages[0]['device_timestamp'].isoformat()} "
                                f"to: {all_messages[-1]['device_timestamp'].isoformat()})"
                            )
                            logger.debug(pformat(all_messages))
                            execute_batch(cursor, statement, all_messages)
                        except Exception as e:
                            logger.warning(
                                "Unable to insert row, checking to ensure table exists."
                            )
                            if not self.table_exists():
                                self.create_table()
                                execute_batch(cursor, statement, all_messages)
            except Exception as e:
                raise StorageError(
                    f"Unable to store messages [{messages}]") from e
Example #24
def execute_in_db(query,
                  return_results=False,
                  return_first_only=False,
                  args=None,
                  batch_insert=False):
    conn, cursor = get_db()

    if args and batch_insert:
        ext.execute_batch(cursor, query, args)
    elif args:
        cursor.execute(query, args)
    else:
        cursor.execute(query)

    if return_first_only:
        results = [row[0] for row in cursor.fetchall()]
    elif return_results:
        results = cursor.fetchall()
    else:
        results = None

    conn.commit()

    cursor.close()
    conn.close()

    return results
Example #25
def populate_actor(conn, *, count=500):
    print('populate: `actor`...', end=' ', flush=True)
    actors = range(count)

    # keep things simple here
    first_names = (f'actor {i} first' for i in actors)
    last_names = (f'actor {i} last' for i in actors)
    # 	infos = (f'some info about actor {i}' for i in actors) # wound up not including this
    genders = np.random.choice(['m', 'f'], size=count, replace=True).tolist()
    ages = np.random.randint(20, 81, count).tolist()

    actor_insert_list = list(zip(first_names, last_names, genders, ages))

    del ages, genders
    with conn.cursor() as cur:
        try:
            execute_batch(
                cur, """
			INSERT INTO actor
				(first_name, last_name, gender, age)
			VALUES (%s, %s, %s, %s);
			""", actor_insert_list)
            conn.commit()
            printc('g', f'successfully inserted {count} actors')
        except Exception as e:
            print('populate actor: insert actors: exception occurred:',
                  repr(e))
            conn.rollback()
Example #26
def populate_director(conn, *, count=500):
    print('populate: `director`...', end=' ', flush=True)
    directors = range(count)

    # keep things simple here
    first_names = (f'director {i} first' for i in directors)
    last_names = (f'director {i} last' for i in directors)
    ages = np.random.randint(30, 81, count).tolist()
    director_insert_list = list(zip(first_names, last_names, ages))

    del ages

    with conn.cursor() as cur:
        try:
            execute_batch(
                cur, """
			INSERT INTO director
				(first_name, last_name, age)
			VALUES (%s, %s, %s);
			""", director_insert_list)
            conn.commit()
            printc('g', f'successfully inserted {count} directors')
        except Exception as e:
            print('populate director: insert directors: exception occurred:',
                  repr(e))
            conn.rollback()
Example #27
def insert_stg(file_input, query_input):
    conn = con.connect_db().postgres
    cursor = conn.cursor()
    try:
        file_path = './output_file/{}'.format(file_input)
        data_parquet = pd.read_parquet(file_path, engine='pyarrow')
        sql = pd.read_sql_query(
            '''select max(num_exec) num_exec from aux.log_control where ind_status  <> 'success';''',
            conn)
        data_parquet['num_exec'] = sql['num_exec'][0]
        print('inserting file {}'.format(file_input))
        log.logger.info('inserting file {}'.format(file_input))
        query_insert = query_input.format(
            columns=(', '.join('"' + item + '"'
                               for item in data_parquet.columns)),
            values=', '.join(
                ['%s' for i in range(0, len(data_parquet.columns))]))
        try:
            pse.execute_batch(cursor,
                              query_insert,
                              data_parquet.loc[:].values,
                              page_size=10000)
            conn.commit()
            print('inserted <{}> rows'.format(data_parquet.shape[0]))
            log.logger.info('inserted <{}> rows'.format(data_parquet.shape[0]))
        except Exception as erro:
            log.logger.error(erro)
            log.logger.error(list(data_parquet.values))
            sys.exit()
    except IOError as erro:
        log.logger.error(erro)
        conn.close()
    conn.close()
Example #28
 def extractManufacturingOrders(self):
     """
 Export manufacturing orders from frePPle.
 We export:
   - approved manufacturing orders.
   - proposed manufacturing orders that start within the next day.
 """
     print("Start exporting manufacturing orders")
     self.cursor_frepple.execute("""
   select
     item_id, location_id, operation_id, quantity, startdate, enddate
   from operationplan
   where type = 'MO'
     and (
       status = 'approved'
       or (status = 'proposed' and startdate < now() + interval '1 day')
       )
   order by operation_id
   """)
     output = [i for i in self.cursor_frepple.fetchall()]
     execute_batch(
         self.cursor_erp,
         """
         insert into test
         (item, location, location2, quantity, startdate, enddate)
         values (%s, %s, %s, %s, %s, %s)
         """,
         output,
     )
Example #29
 def extractDistributionOrders(self):
     """
     Export distribution orders from frePPle.
     We export:
       - approved distribution orders.
       - proposed distribution orders that start within the next day and with a total cost less than $500.
     """
     print("Start exporting distribution orders")
     self.cursor_frepple.execute("""
   select
     item_id, destination_id, origin_id, quantity, startdate, enddate
   from operationplan
   inner join item on item_id = item.name
   where type = 'DO'
     and (
       status = 'approved' 
       or (status = 'proposed' and quantity * cost < 500 and startdate < now() + interval '1 day')
       )
   order by origin_id, destination_id
   """)
     output = [i for i in self.cursor_frepple.fetchall()]
     execute_batch(
         self.cursor_erp,
         """
         insert into test
         (item, location, location2, quantity, startdate, enddate)
         values (%s, %s, %s, %s, %s, %s)
         """,
         output,
     )
Example #30
    def exportPegging(self):
        def getDemandPlan():
            for i in frepple.demands():
                if self.cluster != -1 and self.cluster != i.cluster:
                    continue
                if i.hidden or not isinstance(i, frepple.demand_default):
                    continue
                peg = []
                for j in i.pegging:
                    peg.append(
                        {
                            "level": j.level,
                            "opplan": j.operationplan.reference,
                            "quantity": j.quantity,
                        }
                    )
                yield (json.dumps({"pegging": peg}), i.name)

        logger.info("Exporting demand pegging...")
        starttime = time()
        with transaction.atomic(using=self.database, savepoint=False):
            cursor = connections[self.database].cursor()
            execute_batch(
                cursor,
                "update demand set plan=%s where name=%s",
                getDemandPlan(),
                page_size=200,
            )
        logger.info("Exported demand pegging in %.2f seconds" % (time() - starttime))
Example #31
    def rotate_stored_data(self, wait=True):
        """Re-encrypt all the sensitive information stored in our database.

        This function is a special kind of "cron job" that returns one of two
        constants from the `liberapay.cron` module: `CRON_ENCORE`, indicating
        that the function needs to be run again to continue its work, or
        `CRON_STOP`, indicating that all the ciphertexts are up-to-date (or that
        it isn't time to rotate yet).

        Rows are processed in batches of 50. Timestamps are used to keep track of
        progress and to avoid overwriting new data with re-encrypted old data.

        The update only starts one week after the new key was generated, unless
        `wait` is set to `False`. This delay is to "ensure" that the previous
        key is no longer being used to encrypt new data.
        """
        update_start = self.fernet_rotation_start + self.KEY_ROTATION_DELAY
        if wait:
            if utcnow().date() < update_start:
                return CRON_STOP

        with website.db.get_cursor() as cursor:
            batch = cursor.all("""
                SELECT id, info
                  FROM identities
                 WHERE (info).ts <= %s
              ORDER BY (info).ts ASC
                 LIMIT 50
            """, (update_start,))
            if not batch:
                return CRON_STOP

            sql = """
                UPDATE identities
                   SET info = ('fernet', %s, current_timestamp)::encrypted
                 WHERE id = %s
                   AND (info).ts = %s;
            """
            args_list = [
                (self.rotate_message(r.info.payload), r.id, r.info.ts)
                for r in batch
            ]
            execute_batch(cursor, sql, args_list)

        return CRON_ENCORE
Example #32
 def do_executemany(self, cursor, statement, parameters, context=None):
     if self.psycopg2_batch_mode:
         extras = self._psycopg2_extras()
         extras.execute_batch(cursor, statement, parameters)
     else:
         cursor.executemany(statement, parameters)
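
This last snippet appears to be SQLAlchemy's psycopg2 dialect hook; psycopg2_batch_mode is normally switched on through the engine configuration rather than set directly. A hedged sketch, assuming SQLAlchemy 1.3/1.4 with the psycopg2 driver (the URL is a placeholder; older releases used use_batch_mode=True instead):

from sqlalchemy import create_engine

# Hypothetical engine configuration: executemany_mode='batch' makes the
# psycopg2 dialect route executemany() calls through execute_batch().
engine = create_engine(
    "postgresql+psycopg2://user:password@localhost/example",
    executemany_mode="batch",
)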