示例#1
0
def abort_helper(r, args):
    # Bail out with a credentials-specific message when the API says 401.
    # A missing key means the user never authenticated; a present-but-rejected
    # key gets the "bad API key" variant.
    if r.status_code == 401:
        message = BAD_API_KEY_MSG if args.api_key else BAD_AUTH_MSG
        stderr(message)
        sys.exit(1)
示例#2
0
def get_update_message():
    """Ask the One Codex API whether a CLI update notice should be shown.

    Returns the server-provided message string, or None if the request
    failed or no message was supplied. The caller (_check_for_update)
    is responsible for displaying it.
    """
    r = requests.post(BASE_API + "check_for_cli_update",
                      data={"version": version.VERSION,
                            "api_version": version.API_VERSION})
    if r.status_code == 200:
        # Bug fix: previously this printed the message via stderr() and
        # always returned None, so the caller's `if msg: stderr(msg)`
        # never fired. Return the message instead, as the name implies.
        return r.json().get("message")
示例#3
0
def download_file_helper(url, input_path, auth=None):
    """Stream the file at `url` to disk.

    If `input_path` is an existing directory, the file is saved inside it
    under its server-side name (taken from the final, post-redirect URL);
    otherwise `input_path` is used as the destination path. Exits the
    process if the request fails.
    """
    r = requests.get(url, stream=True, auth=auth)
    if r.status_code != 200:
        stderr("Failed to download file: %s" % r.json()["message"])
        # Bug fix: previously execution fell through here, wrote the error
        # response body to disk, and printed a success message.
        sys.exit(1)
    # r.url is the final URL after redirects, so this is the canonical name
    original_filename = urlparse.urlparse(r.url).path.split("/")[-1]
    if os.path.isdir(input_path):
        local_full_path = os.path.join(input_path, original_filename)
    else:
        local_full_path = input_path
    with open(local_full_path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
                f.flush()
    print("Successfully downloaded %s to %s" % (original_filename, local_full_path))
示例#4
0
    def _check_for_update(self, args, fp):
        """Show the CLI update notice at most once per day.

        Uses the `updated_at` timestamp in the saved credentials to
        rate-limit the HTTP check. After checking, persists a fresh
        timestamp to `fp` -- unless the user logged in with a temporary
        --api-key (args.api_key is set), in which case nothing is saved.
        """
        time_diff = None
        if args.credentials["updated_at"] is not None:
            last_update = datetime.datetime.strptime(args.credentials["updated_at"],
                                                     DATE_FORMAT)
            time_diff = datetime.datetime.now() - last_update

        # No recorded check yet, or the last one was a day or more ago
        if time_diff is None or time_diff.days >= 1:
            msg = get_update_message()
            if msg:
                stderr(msg)

            if args.api_key is None:
                args.credentials["updated_at"] = datetime.datetime.now().strftime(DATE_FORMAT)
                # Context manager guarantees the credentials file is flushed
                # and closed (the old code leaked the open file handle).
                with open(fp, mode='w') as creds_fh:
                    json.dump(args.credentials, creds_fh)
示例#5
0
def upload_multipart(args, f):
    """
    Note, for large files we upload them one at a time
    using a special API.

    Initializes a multipart upload with the One Codex API, shells out to
    the AWS CLI to copy the file to S3, then POSTs a JSON callback to
    register the upload. Exits the process on any failure.
    """
    creds = (args.credentials['api_key'], '')
    r0 = requests.get(BASE_API + "init_multipart_upload", auth=creds)
    if r0.status_code != 200:
        stderr("Failed to initiate large multipart upload (>5GB).")
        sys.exit(1)

    j0 = r0.json()  # parse the response once instead of three times
    s3_bucket = j0["s3_bucket"]
    callback_url = BASE_URL.rstrip("/") + j0['callback_url']
    file_id = j0["file_id"]

    # Upload to s3 using boto
    try:
        import awscli  # noqa
        import subprocess
    except ImportError:
        stderr("You must install the awscli package for files >5GB in size. "
               "On most systems, it can be installed with `pip install awscli`.")
        sys.exit(1)

    s3_path = "s3://" + s3_bucket + "/" + file_id
    print("Starting large (>5GB) file upload. Please be patient while the file transfers...")
    p = None  # so the interrupt handler is safe even if Popen itself is interrupted
    try:
        # Argument list with shell=False: filenames containing spaces or
        # shell metacharacters can no longer break (or inject into) the command.
        p = subprocess.Popen(["aws", "s3", "cp", f, s3_path],
                             stderr=subprocess.STDOUT)
        p.wait()
    except KeyboardInterrupt:
        print("Upload successfully cancelled. Quitting.")
        if p is not None:
            # Bug fix: Popen has no .sigterm() method; .terminate() sends SIGTERM
            p.terminate()
        sys.exit(1)

    if p.returncode != 0:
        stderr("Failed to upload %s" % f)
        sys.exit(1)

    r1 = requests.post(callback_url, auth=creds,
                       headers={"Content-Type": "application/json"},
                       data=json.dumps({"s3_path": s3_path,
                                        "filename": os.path.basename(f)}))
    if r1.status_code != 200:
        stderr("Upload of %s failed. Please contact [email protected] "
               "if you experience further issues." % f)
        sys.exit(1)
    print("Successfully uploaded: %s\n" % f)
    print("    ###########################################################\n"
          "    ### Please note: Large file uploads may take several    ###\n"
          "    ### minutes to appear on the One Codex website. If a    ###\n"
          "    ### file does not appear after a longer period of time, ###\n"
          "    ### however, please contact us at [email protected].    ###\n"
          "    ###########################################################\n")
示例#6
0
    def __init__(self, args, check_for_update=True, creds_file=None):
        """Load, create, or remove saved One Codex credentials.

        Four paths: a temporary --api-key login (validated, never saved),
        the `login` subcommand (refuses to clobber an existing file), the
        `logout` subcommand (removes the file and exits), and the normal
        path of loading -- or prompting for and saving -- ~/.onecodex.
        """
        args.credentials = {}
        if creds_file is None:
            fp = os.path.expanduser('~/.onecodex')
        else:
            fp = creds_file
        if args.api_key is not None:
            if len(args.api_key) != 32:
                stderr("Invalid API key length (should be 32 characters)")
                sys.exit(1)

            args.credentials["api_key"] = args.api_key
            args.credentials["saved_at"] = None
            args.credentials["updated_at"] = None
            self._check_for_update(args, fp)
            return  # temp login; don't save credentials

        if args.which == 'login':
            if os.path.exists(fp):
                stderr("Credentials file already exists (~/.onecodex)")
                sys.exit(1)

        if args.which == 'logout':
            if os.path.exists(fp):
                os.remove(fp)
                print("Successfully removed One Codex credentials.")
                sys.exit(0)
            else:
                stderr("No One Codex API keys found.")
                sys.exit(1)

        if os.path.exists(fp):
            try:
                # Context manager closes the handle (previously leaked)
                with open(fp, mode='r') as creds_fh:
                    args.credentials = json.load(creds_fh)
            except ValueError:
                stderr("Your ~/.onecodex credentials file appears to be corrupted. "
                       "Please delete it and re-authorize.")
                sys.exit(1)
        else:
            args.credentials["api_key"] = get_api_key()
            args.credentials["saved_at"] = datetime.datetime.now().strftime(DATE_FORMAT)
            args.credentials["updated_at"] = None
            # Context manager guarantees the new credentials hit disk closed
            with open(fp, mode='w') as creds_fh:
                json.dump(args.credentials, creds_fh)

        # Finally perform a version check as needed
        if check_for_update:
            self._check_for_update(args, fp)
示例#7
0
def upload_direct(args, files):
    """
    Directly POST to S3. This does not use s3cmd or multipart uploads.

    Fetches presigned-upload routes from the API once, then uploads each
    file via upload_helper -- in parallel threads (bounded by a semaphore)
    when args.threads is set and more than one file was given, serially
    otherwise.
    """
    creds = (args.credentials['api_key'], '')

    if args.threads:
        semaphore = BoundedSemaphore(args.max_threads)
        if args.max_threads != DEFAULT_THREADS:
            print("Uploading with up to %d threads." % args.max_threads)

    # Get the initially needed routes
    r0 = requests.get(BASE_API + 'presign_upload', auth=creds)
    # Consistency: reuse the shared 401 handler instead of duplicating
    # its exact logic inline (it exits on a 401).
    abort_helper(r0, args)
    if r0.status_code != 200:
        stderr("Failed to get upload signing credentials")
        sys.exit(1)

    j0 = r0.json()
    s3_url = j0['url']
    signing_url = BASE_URL.rstrip("/") + j0['signing_url']
    callback_url = BASE_URL.rstrip("/") + j0['callback_url']

    upload_threads = []
    upload_progress_bytes = Value('L', 0)
    upload_progress_lock = Lock()
    # Generator expression: no need to materialize a temporary list
    total_bytes = sum(os.path.getsize(f) for f in files)
    total_files = Value('i', len(files))
    for f in files:
        if args.threads and len(files) > 1:  # parallel uploads
            # Multi-threaded uploads
            t = Thread(target=upload_helper,
                       args=(f, s3_url, signing_url, callback_url,
                             creds, upload_progress_bytes, upload_progress_lock,
                             total_bytes, total_files, semaphore))
            upload_threads.append(t)
            t.start()
        else:  # serial uploads
            upload_helper(f, s3_url, signing_url, callback_url, creds,
                          upload_progress_bytes, upload_progress_lock,
                          total_bytes, total_files)

    if args.threads:
        for ut in upload_threads:
            ut.join()
示例#8
0
def analyses(args):
    """Dispatch the `analyses` subcommand: plain listing, --raw, or --table."""
    # The two flags are mutually exclusive -- reject the combination first.
    if args.raw and args.table:
        stderr("Can only request raw or table data at the same time.")
        sys.exit(1)

    if args.raw:
        n_selected = len(args.analyses)
        if n_selected == 0:
            stderr("No analysis specified. Please note the first argument "
                   "following --raw is an optional path for storing the raw "
                   "download.\nIf you do not want to specify this, append --raw "
                   "to the end of your command, e.g., `onecodex analyses <id> --raw`.")
            sys.exit(1)
        if n_selected != 1:
            stderr("Can only request raw data on one Analysis at a time.")
            sys.exit(1)
        download_file_helper(BASE_API + "analyses/" + args.analyses[0] + "/raw",
                             input_path=args.raw,
                             auth=(args.credentials['api_key'], ''))
    elif args.table:
        if len(args.analyses) != 1:
            stderr("Can only request table data on one Analysis at a time.")
            sys.exit(1)
        api_helper(args, route="analyses", supplement="/table")
    else:
        # Neither flag: plain listing via the generic API helper
        api_helper(args, route="analyses")
示例#9
0
def upload_helper(f, s3_url, signing_url, callback_url, creds,
                  upload_progress_bytes, upload_progress_lock,
                  total_bytes, total_files,
                  semaphore=None):
    """Upload a single file to S3 via a presigned POST, then notify the API.

    Fetches signing form data from `signing_url`, streams the file to
    `s3_url` with a progress monitor (retrying connection errors up to 3
    times), and finally POSTs the S3 location to `callback_url`. Exits the
    process on any unrecoverable failure. `semaphore`, when given, bounds
    the number of concurrent uploads across threads.
    """
    # First get the signing form data
    if semaphore is not None:
        semaphore.acquire()

    stripped_filename = os.path.basename(f)
    r1 = requests.post(signing_url, data={"filename": stripped_filename, "via_api": "true"},
                       auth=creds)
    if r1.status_code != 200:
        try:
            stderr("Failed upload: %s" % r1.json()["msg"])
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; any JSON/key failure falls back here.
            stderr("Upload failed. Please contact [email protected] for "
                   "assistance if you continue to experience problems.")
        sys.exit(1)
    signing_fields = r1.json()  # parse once; reused for the form fields below
    file_uuid = signing_fields['key'].split("/")[-2][5:]

    # Coerce to str or MultipartEncoder fails
    # Need a list to preserve order for S3
    fields = [(str(k), str(v)) for k, v in signing_fields.items()]

    # Keep a reference so the handle can be closed after the upload
    # (previously it was opened inline and leaked).
    fh = open(f, mode='rb')
    fields.append(("file", (stripped_filename, fh, "text/plain")))
    e = MultipartEncoder(fields)
    m = MultipartEncoderMonitor(e, lambda x: upload_callback(x, upload_progress_bytes,
                                                             upload_progress_lock,
                                                             total_bytes=(total_bytes + 8192),
                                                             n_files=total_files))
    max_retries = 3
    n_retries = 0
    try:
        while n_retries < max_retries:
            try:
                # NOTE(review): on a retry the encoder/file handle is not
                # rewound, so a second attempt may send a truncated body --
                # pre-existing behavior, deliberately left unchanged here.
                r2 = requests.post(s3_url, data=m, headers={"Content-Type": m.content_type})
                if r2.status_code != 201:
                    stderr("Upload failed. Please contact [email protected] for assistance.")
                    sys.exit(1)
                break
            except requests.exceptions.ConnectionError:
                n_retries += 1
                if n_retries == max_retries:
                    stderr("The command line client is experiencing connectivity issues and "
                           "cannot complete the upload of %s at this time. Please try again "
                           "later. If the problem persists, contact us at [email protected] "
                           "for assistance." % stripped_filename)
                    sys.exit(1)
    finally:
        fh.close()

    # Finally, issue a callback
    r3 = requests.post(callback_url, auth=creds, data={
        "location": r2.headers['location'],
        "size": os.path.getsize(f)
    })
    if r3.status_code == 200:
        success_msg = "Successfully uploaded: %s. File ID is: %s." % (f, file_uuid)
        if upload_progress_bytes.value == -1:  # == -1 upon completion
            print(success_msg)
        else:
            # Clear the in-place progress line before printing the result
            sys.stderr.write("\r")
            sys.stderr.flush()
            print(success_msg)
        with upload_progress_lock:
            total_files.value -= 1
    else:
        print("Failed to upload: %s" % f)
        sys.exit(1)

    if semaphore is not None:
        semaphore.release()