Example #1
from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Assumes module-level AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, a configured
# log, and the sendToSqs helper (see Example #3).
def uploadToS3(fobj, filename):
    log.debug('Filename is: {0}'.format(filename))
    conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket('0nk38gf20cct6vytbg02_tfl_data')
    try:
        key = Key(bucket)
        key.key = 'xml/' + filename
        log.debug('Sending file {0} with key {1}'.format(filename, key.key))
        # Store the file under the xml/ prefix with reduced-redundancy storage.
        key.set_contents_from_file(fobj, reduced_redundancy=True)
        # Notify the parse queue that the file is ready.
        sendToSqs([filename], 'tfl_queue_parse')
    except Exception:
        log.exception('Unable to upload file {0}'.format(filename))
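
Both examples call a sendToSqs helper that is not shown on this page. Below is a minimal sketch of what such a helper might look like with boto's SQS API; the region, the reuse of the module-level credentials, and the JSON message body are all assumptions.

import json

import boto.sqs
from boto.sqs.message import Message

def sendToSqs(filenames, name):
    # Hypothetical helper: push the list of filenames onto the named queue.
    conn = boto.sqs.connect_to_region(
        'eu-west-1',  # assumed region
        aws_access_key_id=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
    queue = conn.get_queue(name)
    message = Message()
    message.set_body(json.dumps(filenames))
    return queue.write(message)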
Example #2
import datetime
import os
import subprocess

# Assumes module-level HDF_DIR, a configured log, and the sendToSqs helper.
def midnightRollHDF():
    # Despite the names, olddir and newdir are full file paths, not directories.
    olddir = os.path.join(HDF_DIR, 'store.h5')
    newdir = os.path.join(HDF_DIR, datetime.datetime.now().strftime('%Y%m%d-%H%M%S.h5'))
    # Recompress the store with ptrepack, propagating the existing indexes.
    log.info('Compressing hdf file {0} to new hdf file {1}'.format(olddir, newdir))
    process = subprocess.Popen(
        ('ptrepack', '--chunkshape=auto', '--propindexes',
         '--complevel=9', '--complib=bzip2', olddir, newdir),
        stdout=subprocess.PIPE)
    log.info('compression is complete with message {0}'.format(process.communicate()[0]))
    sendToSqs([newdir], 'tfl_queue_upload')
    log.info('New compressed hdf file sent to upload, deleting old hdf')
    try:
        if os.path.isfile(olddir):
            os.unlink(olddir)
    except OSError:
        log.exception('Delete of old hdf failed!')
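
The name suggests this runs at midnight, but the scheduling mechanism is not shown here. One standard-library way to drive it, as a sketch; runAtMidnight is a hypothetical driver, not part of the original code.

import datetime
import time

def runAtMidnight(task):
    # Sleep until the next local midnight, then invoke the task once.
    now = datetime.datetime.now()
    midnight = (now + datetime.timedelta(days=1)).replace(
        hour=0, minute=0, second=0, microsecond=0)
    time.sleep((midnight - now).total_seconds())
    task()

# runAtMidnight(midnightRollHDF)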
Example #3
    # Thin instance method that delegates to the module-level sendToSqs
    # helper (a sketch appears after Example #1).
    def sendToSqs(self, filenames, name):
        return sendToSqs(filenames, name)
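
For context, a hypothetical call showing how uploadToS3 from Example #1 would be invoked; the file path and name are made up for illustration.

with open('/tmp/arrivals.xml', 'rb') as fobj:
    uploadToS3(fobj, 'arrivals.xml')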