def __init__(self, host=None, authstr=None, user=None, passw=None,
             service=None, version=None):
    """Initialize upload-client settings, falling back to the 'fileload' config section.

    Any argument left as None is resolved from ``config`` at call time.
    The original evaluated ``config.get(...)`` in the default expressions,
    which runs once at class-definition time: config changes made after
    import were ignored, and a missing section crashed at import rather
    than at construction.

    Args:
        host: Upload host; defaults to config 'fileload'/'host'.
        authstr: Auth token; defaults to config 'fileload'/'token-ext'.
        user: Username; defaults to config 'fileload'/'user'.
        passw: Password; defaults to config 'fileload'/'passw'.
        service: Service name; defaults to config 'fileload'/'service'.
        version: API version; defaults to config 'fileload'/'version'.
    """
    # Resolve each unsupplied argument from the config file now (call time).
    self.host = host if host is not None else config.get('fileload', 'host')
    self.authstr = authstr if authstr is not None else config.get('fileload', 'token-ext')
    self.user = user if user is not None else config.get('fileload', 'user')
    self.passw = passw if passw is not None else config.get('fileload', 'passw')
    self.service = service if service is not None else config.get('fileload', 'service')
    self.version = version if version is not None else config.get('fileload', 'version')
"""Module-level setup for the chunked file-upload script: imports, crypto
material, the processed-files pickle DB, logging, and the CLI parser."""

# Stdlib imports. os/logging/argparse were used below but never imported
# in the original (NameError at runtime) — added here.
import argparse
import logging
import os
import sys

# Third-party / project imports (unchanged from the original).
from Crypto import Random
from Crypto.PublicKey import RSA
from file_encrypt import encrypt_file
from filechunkio import FileChunkIO
from fileupload import config
from fileupload.client import uploadfile
import key_value_db as pickle

host = uploadfile()

# RSA keypair for this session.
# NOTE(review): 1024-bit RSA is considered weak; 2048+ is the accepted
# minimum — left unchanged because callers may depend on the key size.
random_generator = Random.new().read
key = RSA.generate(1024, random_generator)

# NOTE(review): despite the name, this is a raw 16-byte binary blob, not a
# public key in any standard encoding — looks like key material; confirm intent.
public_key = 'QZNy\x13m4-\xcc\xae@\xe4\x01$\x95\x1f'

# Path of the pickle DB tracking already-processed files, next to this module.
processeddb = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           config.get('pickle', 'pickledb'))
db = pickle.set_db(processeddb)

# NOTE(review): single space, not empty string — presumably a sentinel for
# "no file yet"; confirm downstream comparisons before changing.
gfilename = ' '
uploadstat = {}  # per-chunk upload status, keyed elsewhere in the script

logger = logging.getLogger("File-Upload-Chunks")

# CLI definition. Typo "chinks" -> "chunks" fixed in the user-facing text;
# Python-2-only `type=file` replaced with the argparse equivalent.
parser = argparse.ArgumentParser(
    description="Transfer large files in chunks to FTP",
    prog="File-Upload-Using-Chunks")
parser.add_argument("src", type=argparse.FileType('r'),
                    help="The file to transfer")
parser.add_argument("ContainerName",
                    help="The name of the folder where this should reside")
parser.add_argument("-np", "--num-processes", type=int, default=2,
                    help="Number of processors to use")
parser.add_argument("-s", "--split", type=int, default=50,
                    help="Split size, in Mb")
parser.add_argument("-t", "--max-tries", type=int, default=5,
                    help="Max allowed retries for http timeout")
parser.add_argument("-v", "--verbose", default=False, action="store_true",
                    help="Be more verbose")
parser.add_argument("-q", "--quiet", default=False, action="store_true",
                    help="Be less verbose (for use in cron jobs)")