def get_uploaded():
    """Return the volumes whose upload stage has completed.

    Delegates to the db_data helper; equivalent to selecting mediaid
    from media where the comment JSON has stage='Upload' and
    status='Completed'.
    """
    d = glacier.db_data()
    uploaded = d.get_completed_volumes()
    return uploaded
def update_status(mediaId): stage = 'cleanedup' d = glacier.db_data() clause = "mediaid={0}".format(mediaId) row = d.select_volumes(clause) print row[0] if row[0] != None: row[0][1]['state']="cleanedup" s = json.dumps(row[1]) glacier.update_comment(s, mediaId)
def complete_single():
    """Finalize single-part uploads whose celery task has finished.

    For every pending single upload, copy the archive id from the task
    result into the comment, flag the volume complete, and persist the
    updated comment JSON.
    """
    db = glacier.db_data()
    for vol in db.get_single_uploads():
        comment = vol[1]
        task = AsyncResult(comment['celery_id'])
        if not task.ready():
            continue
        comment['archiveId'] = task.result
        comment['status'] = 'complete'
        db.update_comment(json.dumps(comment), vol[0])
def get_hash():
    """Collect finished treehash results into each volume's comment.

    For every volume awaiting a hash, copy the computed hash from the
    task result, flag the volume complete, and persist the comment JSON.
    """
    db = glacier.db_data()
    for vol in db.get_vol_hash():
        comment = vol[1]
        task = AsyncResult(comment['celery_id'])
        if not task.ready():
            continue
        comment['hash'] = task.result
        comment['status'] = "complete"
        db.update_comment(json.dumps(comment), vol[0])
def complete_multi():
    """Finalize multi-part uploads whose celery task has finished.

    On success record the archive id and mark the volume complete; on
    failure record the failed parts. Either way, persist the updated
    comment JSON.
    """
    d = glacier.db_data()
    volumes = d.get_multi_uploads()
    for vol in volumes:
        r = AsyncResult(vol[1]['celery_id'])
        if r.ready():
            if 'archiveId' in r.result:
                # BUG FIX: original wrote the lowercase key 'archiveid';
                # every sibling function uses 'archiveId'.
                vol[1]['archiveId'] = r.result
                vol[1]['status'] = 'complete'
            else:
                vol[1]['failed_parts'] = r.result
            # BUG FIX: original never persisted the updated comment,
            # unlike complete_single/get_hash, so results were lost.
            s = json.dumps(vol[1])
            d.update_comment(s, vol[0])
def submit_hash():
    """Queue a treehash computation for every volume in the current job.

    Relies on module-level `jobid` and `storage` (assumed defined at
    file scope -- TODO confirm). Writes a fresh comment record for each
    volume with the treehash task id.
    """
    d = glacier.db_data()
    # Original wrapped this call in try/except: raise -- a no-op
    # re-raise, removed.
    v = d.job_vols(jobid)
    for vol in v:
        fname = storage + '/' + vol[0]
        thash = glacier.hash_file.delay(fname)
        data = {'state': 'treehash',
                'status': 'in-progress',
                'celery_id': thash.task_id,
                'path': fname,
                'hash': '',
                'failed_parts': '',
                'upload_id': '',
                'job_id': '',
                'error_id': '',
                # NOTE(review): `time` looks like a module-level name,
                # possibly the time module itself (not JSON-serializable)
                # -- confirm what callers expect here.
                'date': time,
                'archiveId': ''}
        s = json.dumps(data)
        d.update_comment(s, vol[0])
def delete_disk_vol(mediaId): # This will be SQL="""select comment->>description,volume from media where mediaid={0};""".format(mediaId) # SQL="""select comment->>'celery_id',volumename from media where mediaid={0};""".format(mediaId) #SQL="""select comment->>'celery_id',volumename from media where mediaid={0};""".format(mediaId) #row = glacier.update_db(SQL, 'select') d = glacier.db_data() clause = "mediaid={0}".format(mediaId) row = d.select_volumes(clause) if row[0] != None: print "2",row[1] full_path = '{0}/{1}'.format (row[0], row[1]) print full_path try: os.remove(full_path) except OSError: pass else: print 'Incorrect file/path, please check if this volume {0} has been uploaded correctly'.format (row[0][1])
def upload_volume(): d = glacier.db_data() volumes = d.get_upload_vols() for vol in volumes: fname = vol[1]['path'] vol[1]['status'] = 'in-progress' vol[1]['state'] = 'multi-upload' fsize = os.stat(fname).st_size if fsize > chunksize: print "Working on %s" % fname r = 0 try: init_multi = glacier.upload_multi_init(fname,vault,str(chunksize)) vol[1]['upload_id'] = init_multi s = json.dumps(vol[1]) d.update_comment(s, vol[0]) except: raise try: up = glacier.upload_multi_exec.delay(fname,fsize,vault,init_multi,chunksize) except: raise else: try: u = glacier.upload_glacier.delay(fname,vault,fname) vol[1]['status'] = 'in-progress' vol[1]['state'] = 'single-upload' vol[1]['single_upload_id'] = u.id s = json.dumps(vol[1]) d.update_comment(s, vol[0]) except: raise