def upload(filename, bucketName, clientEmail, keyFile, **kwargs):
    service = Google_Service_Builder.buildService(clientEmail, keyFile, 
                                                  domain="devstorage.read_write",
                                                  service="storage", 
                                                  version="v1", **kwargs)

    print 'Building upload request...'
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucketName, name=filename,
                                                            media_body=media)

    print 'Uploading file: %s to: %s/%s' % (filename, bucketName, filename)

    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                print 'Upload progress: %.2f%%' % (100.0 * progress.progress())
        except HttpError, err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS, err:
            error = err
def loadData(tarball, params):
    sourceFile, formattedFile = extractFile(tarball)
        
    #open and format input file
    print "<DataLoader> formatting file: %s" % sourceFile
    try:
        inFile = open(sourceFile)
    except:
        print "<DataLoader> Could not retrieve data from file: %s" % sourceFile
        return
        
    DataFormatter.formatFile(inFile, formattedFile)
    
    inFile.close()
    
    print "<DataLoader> Successfully formatted file: %s" % sourceFile
            
    #attempt to upload file
    uploaded = DataUploader.upload(formattedFile, params["bucket"], **params)
                        
    if not uploaded:
        print "<DataLoader> Failed to upload file, exiting"
        return
    
    #create bigquery object
    try:
        bigquery = Google_Service_Builder.buildBigQuery(**params)
    except Exception as e:
        print "<DataLoader>", e
        return
        
    source = "gs://"+params["bucket"]+"/"+formattedFile
    BigQuery_Append.appendData(bigquery, source, **params)
def loadData(tarball, params):
    sourceFile, formattedFile = extractFile(tarball)

    #open and format input file
    print "<DataLoader> formatting file: %s" % sourceFile
    try:
        inFile = open(sourceFile)
    except:
        print "<DataLoader> Could not retrieve data from file: %s" % sourceFile
        return

    DataFormatter.formatFile(inFile, formattedFile)

    inFile.close()

    print "<DataLoader> Successfully formatted file: %s" % sourceFile

    #attempt to upload file
    uploaded = DataUploader.upload(formattedFile, params["bucket"], **params)

    if not uploaded:
        print "<DataLoader> Failed to upload file, exiting"
        return

    #create bigquery object
    try:
        bigquery = Google_Service_Builder.buildBigQuery(**params)
    except Exception as e:
        print "<DataLoader>", e
        return

    source = "gs://" + params["bucket"] + "/" + formattedFile
    BigQuery_Append.appendData(bigquery, source, **params)
def main():
    # CLI entry point.  Usage: <script> configFile action [args...]
    # NOTE(review): this function appears to continue past the visible end
    # of this chunk — the "upload" branch below looks truncated.
    if len(sys.argv) <= 2:
        doHelp()
        return

    elif len(sys.argv) >= 3:
        configFile = sys.argv[1]
        action = sys.argv[2].lower()
        args = sys.argv[3:]

    else:
        # NOTE(review): unreachable — the two branches above already cover
        # every possible len(sys.argv).  Dead code; confirm and remove.
        return

    # "configure" needs no existing config/service, so handle it first.
    if action == "configure":
        BigQuery_Configuration.configureBigquery(configFile, args)
        return

    defaults = BigQuery_Configuration.loadConfig(configFile)

    # Command-line params are parsed out of args; leftover args stay in `args`.
    args, params = BigQuery_Configuration.loadParams(args)

    # Config-file defaults fill in anything not given on the command line.
    for key in defaults:
        if key not in params:
            params[key] = defaults[key]

    try:
        bigquery = Google_Service_Builder.buildBigQuery(**params)
    except Exception as e:
        print e
        print "failed to build bigquery"
        return

    # Dispatch on the requested action.
    if action == "list":
        try:
            BigQuery_Basics.listTables(bigquery, **params)
        except Exception as e:
            print e
            print "failed to list tables in: %s.%s" % (params["projectId"], params["datasetId"])
    elif action == "create":
        try:
            # args[0] is a path to a JSON schema file for the new table.
            with open(args[0]) as f:
                schema = json.load(f)
            BigQuery_Basics.createTable(bigquery, schema, **params)
        except Exception as e:
            print e
            print "failed to create table: %s" % params["tableId"]
    elif action == "status":
        # args[0] is a job id to poll.
        BigQuery_Basics.getStatus(bigquery, params["projectId"], args[0])
    elif action == "upload":
        # args[0] is the local file to push to the configured GCS bucket.
        if not DataUploader.upload(args[0], params["bucket"]):
            print "<DataLoader> Failed to upload file, exiting"