Example #1
def apify(filename, tablename=None):
    tablename = tablename or utils.get_name(filename)
    logger.info('Importing {0} to table {1}'.format(filename, tablename))
    utils.drop_table(tablename)
    utils.load_table(filename, tablename)
    utils.index_table(tablename, config.CASE_INSENSITIVE)
    logger.info('Finished importing {0}'.format(filename))
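A quick usage sketch for the task above (the file and table names here are hypothetical; `apify` and the `utils` helpers come from the surrounding project):

apify('data/earthquakes.csv')            # table name derived from the file name
apify('data/earthquakes.csv', 'quakes')  # table name given explicitly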
Example #2
def handle_notification(self, data):
    client = boto3.client('s3')
    message = json.loads(data['Message'])
    for record in message.get('Records', []):
        bucket = record['s3']['bucket']
        key = record['s3']['object']
        path = urllib.parse.unquote_plus(key['key'])
        name, ext = os.path.splitext(path)
        if ext.lstrip('.') not in csvkit.convert.SUPPORTED_FORMATS:
            continue
        if record['eventName'].startswith('ObjectCreated'):
            fetch_key(client, bucket['name'], path)
        elif record['eventName'].startswith('ObjectRemoved'):
            utils.drop_table(name, metadata=db.metadata, engine=db.engine)
Example #3
def handle_notification(self, data):
    client = boto3.client('s3')
    message = json.loads(data['Message'])
    for record in message.get('Records', []):
        bucket = record['s3']['bucket']
        key = record['s3']['object']
        path = urllib.parse.unquote_plus(key['key'])
        name, ext = os.path.splitext(path)
        name = name.replace('/', '-')  # flatten nested key paths into a single table-name token
        if ext.lstrip('.') not in csvkit.convert.SUPPORTED_FORMATS:
            continue
        if record['eventName'].startswith('ObjectCreated'):
            fetch_key(client, bucket['name'], path)
        elif record['eventName'].startswith('ObjectRemoved'):
            utils.drop_table(name, metadata=db.metadata, engine=db.engine)
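Both variants walk the Amazon S3 event notification format as delivered through SNS: `data['Message']` is a JSON string whose `Records` entries carry the bucket, the URL-encoded object key, and an `eventName` such as `ObjectCreated:Put`. A minimal sketch of such a payload (bucket and key are hypothetical):

import json

s3_event = {
    'Records': [{
        'eventName': 'ObjectCreated:Put',
        's3': {
            'bucket': {'name': 'autoapi-demo-bucket'},    # hypothetical bucket
            'object': {'key': 'reports/q4+figures.csv'},  # URL-encoded key ('+' decodes to a space)
        },
    }]
}
notification = {'Message': json.dumps(s3_event)}
# handler.handle_notification(notification) would then fetch the object
# and import it; an ObjectRemoved event would drop the matching table.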
Example #4
File: tasks.py Project: 18F/autoapi
def apify(filename, tablename=None):
    try:
        filenames = glob.glob(filename, recursive=True)
    except TypeError:  # recursive glob in Python 3.5+ only
        filenames = glob.glob(filename)
    if len(filenames) > 1 and tablename:
        raise Exception("Can't specify a `tablename` for >1 file")
    for filename in filenames:
        _tablename = tablename or utils.get_name(filename)
        logger.info('Importing {0} to table {1}'.format(filename, _tablename))
        try:
            utils.drop_table(_tablename)
        except sa.exc.OperationalError as e:
            logger.debug('DROP TABLE {} failed, may not exist?'.format(
                _tablename))
            logger.debug(str(e))
        try:
            utils.load_table(filename, _tablename)
        except Exception as e:
            logger.error('Failed to load table from file {}'.format(filename))
            logger.error(str(e))
        logger.info('Finished importing {0}'.format(filename))
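A hedged usage sketch for this glob-aware version (paths hypothetical):

# Import every CSV under data/ into its own table; with more than one
# match, passing an explicit `tablename` would raise.
apify('data/**/*.csv')

# A single file can still get an explicit table name.
apify('data/budget.csv', tablename='budget')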

table_name = "action_20161001_20161231"

par = 20

# In[2]:

print(block_start)
print(block_end)

# In[3]:

recreate = True
if recreate:
    drop_table(table_name)
    create_action_table(table_name)

# In[4]:


def parse_action_create(dict_obj):
    if 'error' in dict_obj:
        return None
    directive = dict_obj['type']
    tx = dict_obj['transactionHash']
    block_num = dict_obj['blockNumber']
    tx_seq = dict_obj['transactionPosition']
    act_seq = 0
    dict_action = dict_obj['action']
    dict_result = dict_obj['result']
    # ASSUMED completion of this cell: for a creation trace the sender and
    # value live in `action`, and the new contract address in `result`.
    return [dict_action['from'], dict_result['address'], dict_action['value'],
            tx, str(block_num), str(tx_seq), str(act_seq), directive]
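For context, a contract-creation trace as returned by parity's trace API (which this notebook appears to consume) has roughly the following shape; the addresses and hashes below are hypothetical:

sample_trace = {
    'type': 'create',
    'transactionHash': '0x' + 'f' * 64,   # hypothetical tx hash
    'blockNumber': 3000000,
    'transactionPosition': 2,
    'action': {'from': '0x' + 'a' * 40, 'value': '0x0', 'gas': '0x76c0'},
    'result': {'address': '0x' + 'b' * 40, 'gasUsed': '0x5208'},
}
parse_action_create(sample_trace)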

def create_txs_table(table_name):
    create_txs_sql = """CREATE TABLE {} (
        `id` int(11) NOT NULL AUTO_INCREMENT,
        `source` char(42) NOT NULL,
        `target` char(42) NOT NULL,
        `amount` varchar(32) NOT NULL,
        `tx` char(66) NOT NULL,
        `block_num` int(11) NOT NULL,
        `tx_seq` int(11) NOT NULL,
        PRIMARY KEY (`id`),
        KEY `block_num_index` (`block_num`),
        FULLTEXT `target_index` (`target`),
        FULLTEXT `source_index` (`source`),
        FULLTEXT `tx_index` (`tx`)
    ) ENGINE=InnoDB""".format(table_name)
    exeSQL(create_txs_sql, True)
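`exeSQL` is defined elsewhere in the notebook; a minimal sketch of what it presumably does, assuming a MySQL connection through PyMySQL (all connection parameters below are hypothetical):

import pymysql

conn = pymysql.connect(host='localhost', user='eth', password='secret',
                       database='ethdata', charset='utf8mb4')

def exeSQL(sql, commit=False):
    # Run one statement on the shared connection; commit when the caller
    # asks (DDL such as the CREATE TABLE above passes commit=True).
    with conn.cursor() as cur:
        cur.execute(sql)
    if commit:
        conn.commit()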


# In[140]:

recreate = True
if recreate:
    drop_table(table_name)
    create_txs_table(table_name)

# In[109]:


def fetch_fields_from_txs(txs):
    entries = []
    for tx in txs:
        # blockNumber and transactionIndex are hex strings from the
        # JSON-RPC API; convert them to decimal strings for the table.
        tmp_en = [
            tx['from'], tx['to'], tx['value'], tx['hash'],
            str(int(tx['blockNumber'], 16)),
            str(int(tx['transactionIndex'], 16))
        ]
        entries.append(tmp_en)
    return entries
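A quick check with a hypothetical transaction object, showing the base-16 conversions at work:

sample_txs = [{
    'from': '0x' + 'a' * 40,
    'to': '0x' + 'b' * 40,
    'value': '0xde0b6b3a7640000',  # 1 ether in wei, hex-encoded
    'hash': '0x' + 'c' * 64,
    'blockNumber': '0x2dc6c0',     # 3000000
    'transactionIndex': '0x5',
}]
fetch_fields_from_txs(sample_txs)
# -> [[..., '3000000', '5']]: block number and index come back as decimal strings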