# --- Database connection ----------------------------------------------------
print("-" * 80)
print("Conectando ao banco de dados...")
# NOTE(review): assumes CONFIG['DB'] carries USER/PASS/ENDPOINT/DATABASE —
# confirm against the config loader (outside this view).
db = mysql.connector.connect(user=CONFIG['DB']['USER'],
                             password=CONFIG['DB']['PASS'],
                             host=CONFIG['DB']['ENDPOINT'],
                             database=CONFIG['DB']['DATABASE'])

# --- Object-store connections (one per configured storage site) --------------
if CONFIG['DOBUCKETUPLOAD']:
    s3Upload = []  # open connections, index-parallel to upBucket
    upBucket = []  # bucket handles, one per storage site
    print("Estabelecendo conexao com os provedores de armazenamento...")
    for i in range(CONFIG['storage']['numsites']):
        print("    %s... " % (CONFIG['storage'][i]['PROVIDER']))
        tmpS3Connection = getObjectStoreConnection(CONFIG['storage'][i], debug=False)
        tmpUpBucket = createAndGetObjectStoreBucket(CONFIG['storage'][i], tmpS3Connection, debug=False)
        s3Upload.append(tmpS3Connection)
        upBucket.append(tmpUpBucket)

print("")
print("-" * 80)
print("Reparando os arquivos")
print("-" * 80)

# Count how many parts are registered. The try/finally guarantees the cursor
# is closed even when execute()/fetchall() raises (the original leaked it on
# any database error).
cursor = db.cursor()
try:
    query = "SELECT COUNT(sha1parte) from partes;"
    cursor.execute(query)
    rs = cursor.fetchall()
finally:
    cursor.close()
# COUNT(...) always yields exactly one row with one column.
numPartes = rs[0][0]
# NOTE(review): this fragment starts mid-stream — `buf`, `hasher`,
# `hashergeral`, `chunk` and `fd_input` are defined earlier, outside this
# view. The first four statements look like the tail of a per-chunk read
# loop; confirm the enclosing loop before editing.
hashergeral.update(buf)
hasher.update(buf)
# Record the per-chunk digest under a numbered metadata key.
ifshare_propriedades['hashchunk' + str(chunk)] = hasher.hexdigest()
# Whole-file digest, accumulated across all chunks processed so far.
ifshare_propriedades['hashgeral'] = hashergeral.hexdigest()
fd_input.close()
showMetadata(ifshare_propriedades)
# Persist the metadata to the output (".ifshare") file.
fd_output = open(arquivo_saida, "w")
writeMetadata(fd_output, ifshare_propriedades)
fd_output.close()
# Upload the raw payload, then its metadata sidecar, to the incoming bucket.
s3connection = getObjectStoreConnection(CONFIG['Incoming'], debug=False)
bucket = createAndGetObjectStoreBucket(CONFIG['Incoming'], s3connection, debug=False)
uploader = MultipartUploader(arquivo_raw, ifshare_propriedades['tamanho'], ifshare_propriedades['arquivo'], bucket)
uploader.put()
uploader = MultipartUploader(arquivo_raw + ".ifshare", os.path.getsize(arquivo_raw + ".ifshare"), ifshare_propriedades['arquivo'] + ".ifshare", bucket, message="Subindo metadados")
uploader.put()
s3connection.close()
# --- Initial connections ------------------------------------------------------
print("")
print("-" * 80)
print("Estabelecendo conexoes iniciais")
print("-" * 80)

if CONFIG['DOBUCKETUPLOAD']:
    # One open connection and one bucket handle per configured storage site,
    # kept in two index-parallel lists.
    s3Upload, upBucket = [], []
    print("Estabelecendo conexao com os provedores de armazenamento...")
    for site in range(CONFIG['storage']['numsites']):
        site_cfg = CONFIG['storage'][site]
        print("    %s... " % (site_cfg['PROVIDER'],))
        conn = getObjectStoreConnection(site_cfg, debug=False)
        s3Upload.append(conn)
        upBucket.append(createAndGetObjectStoreBucket(site_cfg, conn, debug=False))

# --- Serve requests -----------------------------------------------------------
print("")
print("-" * 80)
print("Atendendo pedidos")
print("-" * 80)

# Blocks here until the Flask application stops.
app.run(host="0.0.0.0", port=8008, use_reloader=True)

# --- Shutdown -----------------------------------------------------------------
print("")
print("-" * 80)
print("Encerrando as conexoes")
# NOTE(review): fragment starts mid-stream — `fd_input`, `hasher`,
# `hashergeral` and `chunk` come from earlier, outside this view; the first
# statements appear to be the tail of a per-chunk read loop. Confirm the
# enclosing loop before relying on this flat layout.
buf = fd_input.read(ifshare_propriedades['chunksize'])
hashergeral.update(buf)
hasher.update(buf)
# Record the per-chunk digest under a numbered metadata key.
ifshare_propriedades['hashchunk' + str(chunk)] = hasher.hexdigest()

# Whole-file digest accumulated across every chunk read above.
ifshare_propriedades['hashgeral'] = hashergeral.hexdigest()
fd_input.close()
showMetadata(ifshare_propriedades)

# Write the ".ifshare" metadata file. `with` guarantees the handle is closed
# even if writeMetadata() raises (the original leaked it on error).
with open(arquivo_saida, "w") as fd_output:
    writeMetadata(fd_output, ifshare_propriedades)

# Upload the raw payload, then its metadata sidecar, to the incoming bucket.
# try/finally guarantees the object-store connection is closed even when one
# of the uploads fails (the original leaked it on error).
s3connection = getObjectStoreConnection(CONFIG['Incoming'], debug=False)
try:
    bucket = createAndGetObjectStoreBucket(CONFIG['Incoming'], s3connection, debug=False)
    uploader = MultipartUploader(arquivo_raw,
                                 ifshare_propriedades['tamanho'],
                                 ifshare_propriedades['arquivo'],
                                 bucket)
    uploader.put()
    uploader = MultipartUploader(arquivo_raw + ".ifshare",
                                 os.path.getsize(arquivo_raw + ".ifshare"),
                                 ifshare_propriedades['arquivo'] + ".ifshare",
                                 bucket,
                                 message="Subindo metadados")
    uploader.put()
finally:
    s3connection.close()