Example #1
0
def upload_to_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, inputLocation, filepaths):
    """Create a timestamped S3 bucket and upload the given local files into it.

    Args:
        AWS_ACCESS_KEY_ID: AWS access key for both the boto and boto3 clients.
        AWS_SECRET_ACCESS_KEY: matching AWS secret key.
        inputLocation: region name; one of the boto Location constants below.
            Any other value falls back to '' (US Standard), as the original
            if/elif chain did.
        filepaths: iterable of local file paths; each is uploaded under its
            basename as the S3 key.

    Exits the process (after printing/logging) on connection or upload failure.
    """
    try:
        conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        print("Connected to S3")
    except Exception:  # was a bare except: narrow to Exception so SystemExit/KeyboardInterrupt pass through
        logging.info("Amazon keys are invalid!!")
        print("Amazon keys are invalid!!")
        exit()

    # Region names the original if/elif chain recognized; lazy getattr so a
    # boto build missing one constant only fails if that name is requested.
    supported_regions = {
        'APNortheast', 'APSoutheast', 'APSoutheast2', 'CNNorth1',
        'EUCentral1', 'EU', 'SAEast', 'USWest', 'USWest2',
    }
    if inputLocation in supported_regions:
        loc = getattr(boto.s3.connection.Location, inputLocation)
    else:
        loc = ''

    try:
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts)
        # Strip separators so the timestamp forms a legal bucket-name suffix.
        bucket_name = 'adsassignment1part2' + str(st).replace(" ", "").replace("-", "").replace(":", "").replace(".", "")
        bucket = conn.create_bucket(bucket_name, location=loc)
        print("bucket created")
        s3 = boto3.client('s3',
                          aws_access_key_id=AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

        print('s3 client created')

        for f in filepaths:
            try:
                s3.upload_file(f, bucket_name, os.path.basename(f),
                               Callback=ProgressPercentage(os.path.basename(f)))
                print("File successfully uploaded to S3", f, bucket)
            except Exception as detail:
                print(detail)
                print("File not uploaded")
                exit()

    except Exception:  # was a bare except: keep the original's exit-on-failure behavior
        logging.info("Amazon keys are invalid!!")
        print("Amazon keys are invalid!!")
        exit()
Example #2
0
def upload_all(directory):
    """Upload every file in *directory* to the 'daphniavideo' S3 bucket.

    Credentials are resolved by boto3 from the default chain (~/.aws);
    transfers run with up to 20 concurrent requests. When the module-level
    `debug` flag is True, progress messages are printed per file.
    """
    transfer_cfg = TransferConfig(max_concurrency=20)
    client = boto3.client('s3')

    for entry in os.listdir(directory):
        if debug == True:
            print('upload start')
        local_path = directory + '/' + entry
        # Each file keeps its own name as the S3 key.
        client.upload_file(local_path, 'daphniavideo', entry, Config=transfer_cfg)
        if debug == True:
            print('image upload complete')
Example #3
0
def upload_to_aws(local_file, bucket, s3_file):
    """Upload *local_file* to S3 *bucket* under key *s3_file*.

    Returns:
        True on success; False when the local file is missing or
        credentials are unavailable (a message is printed either way).
    """
    client = boto3.client(
        's3',
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY,
    )

    try:
        client.upload_file(local_file, bucket, s3_file)
    except FileNotFoundError:
        print("The file was not found")
        return False
    except NoCredentialsError:
        print("Credentials not available")
        return False
    print("Upload Successful")
    return True
def create_output(model, users_to_recommend, n_rec, print_csv=True):
    """Build a per-customer table of '|'-joined recommended products.

    Args:
        model: recommender whose .recommend(users=..., k=...) result exposes
            .to_dataframe().
        users_to_recommend: users to generate recommendations for.
        n_rec: number of recommendations per user.
        print_csv: when True, also write the table to
            data/output/option1_recommendation.csv and upload it to S3.

    Returns:
        DataFrame indexed by 'customerId' with a 'recommendedProducts' column.
    """
    recs = model.recommend(users=users_to_recommend, k=n_rec)
    recs_df = recs.to_dataframe()
    # Collapse each user's recommended items into one '|'-separated string.
    # NOTE(review): grouping uses the module-level `user_id`/`item_id` column
    # names while the output selects hard-coded 'customerId' — presumably
    # user_id == 'customerId'; verify against the module's globals.
    recs_df['recommendedProducts'] = (
        recs_df.groupby([user_id])[item_id]
        .transform(lambda items: '|'.join(items.astype(str)))
    )
    df_output = (
        recs_df[['customerId', 'recommendedProducts']]
        .drop_duplicates()
        .sort_values('customerId')
        .set_index('customerId')
    )
    if print_csv:
        df_output.to_csv('data/output/option1_recommendation.csv')
        s3.upload_file('data/output/option1_recommendation.csv', bucketName,
                       'output/option_recommendations.csv')
        print(
            "An output file can be found in 'output' folder with name 'option1_recommendation.csv'"
        )
    return df_output
Example #5
0
def uploadFileToS3Bucket(bucketName, fileName):
    """Upload *fileName* to *bucketName* and return its public S3 URL.

    The S3 key is the file name with its final extension stripped; the
    returned URL, however, uses the full file name (original behavior).
    """
    # Key = file name minus its last '.'-suffix (no-op if there is none).
    keyName = fileName.rsplit(".", 1)[0]
    print("Key Name of File Object: " + keyName)

    client = boto3.client('s3')
    client.upload_file(fileName, bucketName, keyName)

    pathToS3File = "".join([PATH_TO_AMAZON_S3, bucketName, "/", fileName])
    print("path to s3 file: " + pathToS3File)
    return pathToS3File
Example #6
0
File: models.py  Project: shaan06/ADS_Team8
    filename_p1 = ("lr_model.pckl")
    filename_p2 =("rf_model.pckl")
    filename_p3 =("knn_model.pckl")
    filename_p4 = ("bnb_model.pckl")
    filename_p5 =("extra_tree_model.pckl")
    filename_csv = ("Accuracy_error_metrics.csv")
    ts = time.time()
    st = datetime.datetime.fromtimestamp(ts)    
    bucket_name = "assignment3adsteam8"
    #bucket = conn.create_bucket(bucket_name, location=loc)
    s3 = boto3.client(
   			"s3",
   			aws_access_key_id=AWS_ACCESS_KEY_ID,
   			aws_secret_access_key=AWS_SECRET_ACCESS_KEY
   			)
    s3.upload_file(filename_p1, bucket_name , filename_p1)
    s3.upload_file(filename_p2, bucket_name , filename_p2)
    s3.upload_file(filename_p3, bucket_name , filename_p3)
    s3.upload_file(filename_p4, bucket_name , filename_p4)
    s3.upload_file(filename_p5, bucket_name , filename_p5)
    s3.upload_file(filename_csv, bucket_name , filename_csv)

	#key = boto.s3.key.Key(bucket, 'some_file.zip')
	#with open('some_file.zip') as f:
   	#key.send_file(f)
    #filename_p1 = ("lr_model.pckl")
    #filename_p2 =("rf_model.pckl")
    #filename_p3 =("knn_model.pckl")
    #filename_p4 = ("bnb_model.pckl")
    #filename_p5 =("extra_tree_model.pckl")
    #filname_csv = (os.getcwd() + "\Accuracy_error_metrics.csv")
def connect():
    """Upload the trained model pickles and the metrics CSV to the
    'finalprojectteam8' S3 bucket.

    Reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from module scope.
    Any upload error is caught and printed rather than raised.
    """
    # Legacy boto connection kept as a connectivity check / side effect;
    # the actual uploads below go through the boto3 client.
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    print("Connected to S3")

    try:
        bucket_name = "finalprojectteam8"
        # Each artifact is uploaded from the CWD under its own name as key.
        artifacts = [
            "lr_model.pckl",
            "rf_model.pckl",
            "knn_model.pckl",
            "bnb_model.pckl",
            "extra_tree_model.pckl",
            "Accuracy_error_metrics.csv",
        ]
        s3 = boto3.client("s3",
                          aws_access_key_id=AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
        for name in artifacts:
            s3.upload_file(name, bucket_name, name)

        # (Removed the misleading "S3 bucket successfully created" message:
        # no bucket is created here — create_bucket was commented out.)
        print("Model successfully uploaded to S3")
    except Exception as e:
        print(e)
Example #8
0
def save_file(username, file, filename):
    """Save an uploaded image locally (original, object-detected, thumbnail),
    mirror all three copies to S3, and record the image in the database.

    Args:
        username: owner of the upload; also names the per-user directory and
            keys the `user` table lookup.
        file: uploaded file object exposing .save(path) (e.g. Flask FileStorage).
        filename: name to store the image under in every location.

    Returns:
        Short status string: 'Uploaded successfully' or 'Failed to Save'.
    """
    # Per-user directory layout under static/<username>/{original,obj,thumbnails}.
    app_path = os.path.dirname(__file__)
    stat_path = os.path.join(app_path, 'static')
    USER_PATH = os.path.join(stat_path, username)

    # Save the original upload.
    ROOT_PATH = os.path.join(USER_PATH, 'original')
    ORIGINAL_PATH = os.path.join(ROOT_PATH, filename)
    file.save(ORIGINAL_PATH)

    # Run object detection and write the annotated copy.
    OBJ_PATH = os.path.join(USER_PATH, 'obj')
    OBJ_PATH = os.path.join(OBJ_PATH, filename)
    detection_img = object_detection.ap()
    detection_img.img = ORIGINAL_PATH
    detection_img.path = OBJ_PATH
    detection_img.rectangle_image()

    # Generate the thumbnail.
    THUMBNAILS_PATH = os.path.join(USER_PATH, 'thumbnails')
    THUMBNAILS_PATH = os.path.join(THUMBNAILS_PATH, filename)
    thumbnails(ORIGINAL_PATH, THUMBNAILS_PATH)

    # Mirror all three files to S3. upload_file() takes a local *path* string:
    # the original passed the upload object and the `thumbnails` function
    # here, which boto3 rejects.
    s3 = boto3.client('s3')
    s3.upload_file(ORIGINAL_PATH, S3_BUCKET_NAME, ORIGINAL_PATH)
    # Annotated file is written to detection_img.path (OBJ_PATH) by
    # rectangle_image() — presumably; confirm against object_detection.
    s3.upload_file(OBJ_PATH, S3_BUCKET_NAME, OBJ_PATH)
    s3.upload_file(THUMBNAILS_PATH, S3_BUCKET_NAME, THUMBNAILS_PATH)

    # Record the image row for this user.
    cnx = get_db()
    cursor = cnx.cursor()
    query = 'SELECT id FROM user WHERE (username) = %s'
    cursor.execute(query, (username,))
    row = cursor.fetchone()
    user_id = int(row[0])

    try:
        query = '''INSERT INTO images (id, imagename, original,obj,thumbnails) VALUES (%s,%s,%s, %s, %s)'''
        # The original never executed this INSERT and called rollback() on
        # the success path; execute and commit so the row is actually saved.
        cursor.execute(query, (user_id, filename, ORIGINAL_PATH, OBJ_PATH, THUMBNAILS_PATH))
        cnx.commit()
        return 'Uploaded successfully'
    except mysql.connector.Error:
        cnx.rollback()
        return 'Failed to Save'
    # (Removed the unreachable second return and os.remove() cleanup that
    # followed the returns — and passed the file object / function to remove.)