def s3_upload_results():
    # AWS S3 Storage
    # Creating simple connections for the usernames and metadata buckets
    s3_conn = tinys3.Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
                                default_bucket='cupidscrape-usernames',
                                endpoint='s3-us-west-2.amazonaws.com', tls=True)
    s3_conn_meta = tinys3.Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
                                     default_bucket='okscrape-metadata',
                                     endpoint='s3.amazonaws.com', tls=True)
    # Uploading the usernames file
    with open(ouput_file_deduped, 'rb') as open_usernames:
        s3_conn.upload(ouput_file_deduped, open_usernames)
    # Uploading weekly metadata
    with open(ouput_file_metadata1, 'rb') as open_metadata:
        s3_conn_meta.upload(ouput_file_metadata1, open_metadata)
    # Uploading monthly metadata
    with open(ouput_file_metadata2, 'rb') as open_metadata2:
        s3_conn_meta.upload(ouput_file_metadata2, open_metadata2)
    with open(ouput_file_metadata3, 'rb') as open_metadata3:
        s3_conn_meta.upload(ouput_file_metadata3, open_metadata3)
    with open(ouput_file_metadata4, 'rb') as open_metadata4:
        s3_conn_meta.upload(ouput_file_metadata4, open_metadata4)
    print("Upload to S3 Complete")
    # Remove the local copies once everything is uploaded
    # (the "ouput_*" spelling matches the module-level globals)
    os.remove(ouput_file_deduped)
    os.remove(ouput_file)
    os.remove(ouput_file_metadata1)
    os.remove(ouput_file_metadata2)
    os.remove(ouput_file_metadata3)
    os.remove(ouput_file_metadata4)

def save_json_to_cloud(self):
    """ Upload json file to Amazon S3 """
    # import gevent.monkey
    # gevent.monkey.patch_socket()
    print('Starting upload json to cloud: ' + self.local_filename())
    # Divide into subdirectories like git:
    # http://www.quora.com/File-Systems-Why-does-git-shard-the-objects-folder-into-256-subfolders
    shard_folder = self.filename()[:2]
    bucket_folder = ''.join([settings.AMAZON_S3_BUCKET,
                             "/quote/",
                             settings.HASH_ALGORITHM, '/',
                             settings.VERSION_NUM, "/",
                             shard_folder])
    conn = tinys3.Connection(settings.AMAZON_ACCESS_KEY,
                             settings.AMAZON_SECRET_KEY,
                             tls=True,
                             endpoint=settings.AMAZON_S3_ENDPOINT)
    with open(self.local_filename(), 'rb') as f:
        conn.upload(self.filename(), f,
                    bucket=bucket_folder,
                    content_type='application/json',
                    expires='max')
    print("json upload succeeded: " + self.filename())
    return self.filename()

def textFile(self, file_name):
    import io
    if file_name.startswith('s3'):
        if has_tinys3:
            file_name = file_name.split('://')[1]
            bucket_name = file_name.split('/')[0]
            key_name = file_name.replace(bucket_name, '')[1:]
            access_key = self.hc.get('fs.s3n.awsAccessKeyId')
            secret_key = self.hc.get('fs.s3n.awsSecretAccessKey')
            region = self.hc.get('fs.s3n.endpoint')
            if region is None:
                region = 's3.amazonaws.com'
            conn = tinys3.Connection(access_key, secret_key, endpoint=region)
            file = conn.get(key_name, bucket_name)
            if file_name.endswith('.gz'):
                # gzip needs a file-like object, so wrap the raw bytes
                compressed = io.BytesIO(file.content)
                gzipper = gzip.GzipFile(fileobj=compressed)
                return gzipper.readlines()
            return file.content.decode('utf-8').split('\n')
        else:
            raise Exception('Need TinyS3 to use s3 files')
    else:
        if file_name.endswith('.gz'):
            opener = gzip.open
        else:
            opener = open
        with opener(file_name, 'r') as f:
            return f.readlines()

def upload_zip_to_s3(self, filetoupload):
    print("Upload to s3")
    S3_ACCESS_KEY = input("Enter S3_ACCESS_KEY : ")
    S3_SECRET_KEY = input("Enter S3_SECRET_KEY : ")
    try:
        bucket = input("Enter BUCKET_NAME : ")
        my_endpoint = "s3-us-west-1.amazonaws.com"
        conn = tinys3.Connection(S3_ACCESS_KEY, S3_SECRET_KEY,
                                 tls=True, endpoint=my_endpoint)
        with open(filetoupload, 'rb') as f:
            conn.upload(filetoupload, f, bucket)
        log.info("Data zipped and loaded on S3")
        print("Upload to s3 successful. Proceeding to Analysis")
    except Exception:
        print("INVALID keys")
        choice = input("Proceed without uploading to s3? Y/N : (Select N to try again)")
        if choice in ("Y", "y"):
            print("Folder not uploaded to S3. Proceeding to Analysis")
        elif choice in ("N", "n"):
            self.upload_zip_to_s3(filetoupload)
        else:
            print("Invalid input. Try again.")
            self.upload_zip_to_s3(filetoupload)

def upload(profile_urls):
    """Uploads list of links to user profiles

    params  profile_urls  list of user profile urls
    returns status code response from AWS S3 - 200 OK = successful write
    """
    datetimestamp = '{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now())
    f = open('profiles_without_names', 'w+')
    for item in profile_urls:
        f.write("%s\n" % item)
    # Rewind so the upload reads the file from the beginning
    f.seek(0)
    conn = tinys3.Connection(
        config.AWS_ACCESS_KEY_ID,
        config.AWS_SECRET_ACCESS_KEY,
        tls=True,
        endpoint='s3-us-west-2.amazonaws.com')
    try:
        response = conn.upload(datetimestamp + ' profiles_without_names',
                               f, 'github-api-demo')
    except requests.exceptions.HTTPError:
        print("\nUnable to connect to AWS S3 while storing user list\n"
              "Check AWS S3 credentials in config.py and\n"
              "verify AWS S3 user is in S3FullAccess security group in IAM\n")
        raise SystemExit
    f.close()
    return response

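# A minimal usage sketch for upload() above; the profile URLs are hypothetical
# placeholders and config.py must supply real AWS credentials. tinys3's
# upload() returns the underlying requests.Response, so status_code is checkable.
def example_upload_profiles():
    profile_urls = [
        'https://github.com/octocat',
        'https://github.com/defunkt',
    ]
    response = upload(profile_urls)
    if response.status_code == 200:
        print("User list stored in S3")
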
def save2S3(path):
    conn = tinys3.Connection(os.environ['AWS_ACCESS_KEY_ID'],
                             os.environ['AWS_SECRET_ACCESS_KEY'],
                             tls=True,
                             endpoint='s3-eu-west-1.amazonaws.com')
    with open(path, 'rb') as f:
        conn.upload('emojifiedimages/' + path, f, 'emojimosaic')

def load_vocab(request, format='xls'):
    from vocabulary.load_xls import load_xls
    from vocabulary.load_skos import SKOSLoader
    form = UploadFileForm()
    if request.method == 'POST' and request.user.is_authenticated:
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['file']
            fn = file.name.split('.')[0].split('/')[-1]
            f = file.read()
            # save the raw file into an S3 bucket
            if form.cleaned_data.get('permit', False):
                conn = tinys3.Connection(settings.S3_ACCESS_KEY,
                                         settings.S3_SECRET_KEY,
                                         tls=True)
                # rewind: read() above consumed the upload stream
                file.seek(0)
                conn.upload(file.name, file, settings.S3_BUCKET)
            # parse and load into the DB
            if format == 'xls':
                goto = load_xls(request, f, fn)
            if format == 'skos':
                loader = SKOSLoader(request)
                goto = loader.load_skos_vocab(f)
                loader.save_relationships()
                messages.success(request, loader)
            return redirect(goto)
    return render(request, 'vocabulary/upload.html', {
        'form': form,
        'format': format
    })

def s3_upload_results():
    # AWS S3 Storage
    s3_conn_meta = tinys3.Connection(AWS_ACCESS_KEY_ID,
                                     AWS_SECRET_ACCESS_KEY,
                                     default_bucket='okscrape-metadata-sfo',
                                     endpoint='s3.amazonaws.com',
                                     tls=True)
    # Uploading weekly metadata
    with open(ouput_file_metadata1, 'rb') as open_metadata:
        s3_conn_meta.upload(ouput_file_metadata1, open_metadata)
    # Uploading monthly metadata
    with open(ouput_file_metadata2, 'rb') as open_metadata2:
        s3_conn_meta.upload(ouput_file_metadata2, open_metadata2)
    with open(ouput_file_metadata3, 'rb') as open_metadata3:
        s3_conn_meta.upload(ouput_file_metadata3, open_metadata3)
    with open(ouput_file_metadata4, 'rb') as open_metadata4:
        s3_conn_meta.upload(ouput_file_metadata4, open_metadata4)
    print("Upload to S3 Complete")
    # Remove the local copies after a successful upload
    os.remove(ouput_file_metadata1)
    os.remove(ouput_file_metadata2)
    os.remove(ouput_file_metadata3)
    os.remove(ouput_file_metadata4)

def upload_new(budget_data):
    conn = tinys3.Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_KEY)
    print('S3 Client Initiated')
    # f = bytes(json.dumps(budget_data))
    # Windows has no /tmp, so write next to the script there instead
    path = "latest.json" if os.name == 'nt' else "/tmp/latest.json"
    with open(path, 'w') as json_dumper:
        json.dump(budget_data, json_dumper)
    with open(path, 'rb') as to_upload:
        try:
            conn.upload('latest.json', to_upload, bucket=bucket)
            return True
        except Exception as e:
            print("Failure: {}".format(e))
            return False

def eliminarOperario(request, id=None):
    if request.method == 'DELETE':
        if User.objects.filter(pk=request.user.id, groups__name=GRUPO1).exists():
            usu = User.objects.get(id=id)
            operario = Operario.objects.get(id=id)
            try:
                conn = tinys3.Connection(AWS_ACCESS_KEY_ID,
                                         AWS_SECRET_ACCESS_KEY,
                                         AWS_STORAGE_BUCKET_NAME,
                                         tls=True)
                # Delete every file under the operator's media folder
                lista = conn.list('media/operarios/' + str(id),
                                  AWS_STORAGE_BUCKET_NAME)
                for fichero in lista:
                    conn.delete(fichero['key'])
                conn.delete('media/operarios/' + str(id))
            except OSError as e:
                print(e)
            operario.delete()
            registrarLogs(
                request.user.first_name + " " + request.user.last_name,
                'ELIMINAR', 'Operarios',
                'Operario Eliminado Exitosamente',
                usu.first_name + " " + usu.last_name)
            usu.delete()
            messages.success(request, 'Borrado')
            return HttpResponse(status=200)
        else:
            return HttpResponseRedirect("/")
    else:
        return HttpResponseRedirect("/")

def upload_tos3(localFilePath, S3Bucket):
    """Upload a local image to S3 under images/<year>/<month>/ and log its URL."""
    fileName = os.path.basename(localFilePath)
    # Determine the current month and year to create the upload path
    today = date.today()
    datePath = today.strftime("/%Y/%m/")
    # Connect to S3
    s3 = boto.connect_s3()
    bucket = s3.get_bucket(S3Bucket)
    # Set the folder name based on the content type image\slideshow
    # if localFileType == 'slideshow':
    #     key = bucket.new_key('slideshows/' + fileName)
    # else:
    key = bucket.new_key('images' + datePath + fileName)
    # Upload file to S3
    key.set_contents_from_filename(localFilePath)
    key.set_acl('public-read')
    # Upload tmp.txt to bucket-name at key-name
    upload = boto3.client('s3')
    # Log the url of the hosted file
    logfile = open(logFilePath, "a")
    region = 'eu-west-1'
    address = 's3-' + region + '.amazonaws.com'
    # Create the URL for the image
    imageLink = 'http://' + address + '/' + S3Bucket + '/' + key.name
    try:
        # encode the file name and append the URL to the log file
        logfile.write(nowTime + ' ' + imageLink + '\n')
        # upload.upload_file(fileName, S3Bucket, key.name)
        # Upload a file-like object to bucket-name at key-name
        with open(fileName, "rb") as f:
            logfile.write(nowTime + ' ' + 'fileName: ' + fileName + '\n')
            logfile.write(nowTime + ' ' + 'S3Bucket: ' + S3Bucket + '\n')
            logfile.write(nowTime + ' ' + 'key.name: ' + key.name + '\n')
        # # Get the service client
        # s3 = boto3.client('s3')
        # # Upload tmp.txt to bucket-name at key-name
        # s3.upload_file(
        #     'paris.jpg', S3Bucket, 'images',
        #     Callback=ProgressPercentage('paris.jpg'))
        conn = tinys3.Connection('AKIAJOMU7MFC6LHQEVMQ',
                                 '5yqhxCt7iZD2buR20nnSFqESjPguxISJZQd3cVwV',
                                 tls=True)
        with open('paris.jpg', 'rb') as g:
            conn.upload('paris.jpg', g, S3Bucket)
    finally:
        logfile.close()

def s3_connect(key, secret_key, endpoint, default_bucket):
    conn = tinys3.Connection(key, secret_key, tls=True,
                             endpoint=endpoint,
                             default_bucket=default_bucket)
    return conn

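# A usage sketch for s3_connect() above. The credentials, endpoint, and bucket
# name are placeholder assumptions; with default_bucket set on the connection,
# the upload call needs no explicit bucket argument.
def example_s3_connect_usage():
    conn = s3_connect('MY_ACCESS_KEY', 'MY_SECRET_KEY',
                      endpoint='s3-us-west-2.amazonaws.com',
                      default_bucket='my-example-bucket')
    with open('report.csv', 'rb') as f:
        conn.upload('reports/report.csv', f)
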
def main(argv):
    accessKey = ''
    secretKey = ''
    file = ''
    try:
        opts, args = getopt.getopt(argv, "ha:s:f:",
                                   ["access_key=", "secret_key=", "file="])
    except getopt.GetoptError:
        print('uploads3.py -a <access_key> -s <secret_key> -f <file>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('uploads3.py -a <access_key> -s <secret_key> -f <file>')
            sys.exit()
        elif opt in ("-a", "--access_key"):
            accessKey = arg
        elif opt in ("-s", "--secret_key"):
            secretKey = arg
        elif opt in ("-f", "--file"):
            file = arg
    ## Creating a simple connection
    conn = tinys3.Connection(accessKey, secretKey)
    date = time.strftime("%Y%m%d%H%M")
    ## Uploading a single file with a five-day cache expiry
    with open(file, 'rb') as f:
        t = timedelta(days=5)
        conn.upload(date + '-backup.sql.gz', f, bucket='bixgitlab', expires=t)

def clickpic():
    now = datetime.now()
    now = now.replace(tzinfo=tz)
    now = now.astimezone(tz)
    # assuming now contains a timezone aware datetime
    your_now = now.astimezone(tz)
    your_now = str(your_now)
    print(your_now)
    # Strip the timestamp down to a compact, filename-safe form
    your_now = your_now[0:10] + your_now[11:19]
    your_now = your_now.replace(":", "")
    print(your_now)
    print("Clicked")
    # Build filename string
    filepath = image_folder + '/' + your_now + file_extension
    if cfg['debug']:
        print('[debug] Taking photo and saving to path ' + filepath)
    camera.capture(filepath)
    if cfg['debug']:
        print('[debug] Uploading ' + filepath + ' to s3')
    conn = tinys3.Connection(cfg['s3']['access_key_id'],
                             cfg['s3']['secret_access_key'])
    with open(filepath, 'rb') as f:
        conn.upload(filepath, f, cfg['s3']['bucket_name'],
                    headers={'x-amz-meta-cache-control': 'max-age=60'})
    if os.path.exists(filepath):
        os.remove(filepath)
    print("uploaded")
    dynamodbTable = dynamodb.Table('photostorage')
    dynamodbTable.put_item(
        Item={
            'dateandtime': your_now,
            'url': "https://s3.amazonaws.com/busphoto/images/" + your_now + ".jpg"
        })

def upload(bid):
    if not session or 'uid' not in session:
        return abort(403)
    else:
        photo_file = request.files['file']
        bid = int(bid)
        if photo_file and allowed_file(photo_file.filename):
            filename = secure_filename(photo_file.filename)
            photo_file.save(
                os.path.join(application.config['UPLOAD_FOLDER'], filename))
            f = open(
                os.path.join(application.config['UPLOAD_FOLDER'], filename),
                'rb')
            conn = tinys3.Connection(S3_ACCESS_KEY, S3_SECRET_KEY,
                                     tls=True,
                                     endpoint='s3-us-west-2.amazonaws.com')
            conn.upload(filename, f, 'bike-share-comse6998')
            f.close()
            url = S3_BUCKET_URL + filename
            bda = BikeDataAccess(g.conn)
            output = bda.add_photo(url, bid)
            return jsonify(output)
        else:
            output = {'message': 'Unsupported file format', 'status': False}
            return jsonify(output)

def edit_collection_route():
    options = {"year": datetime.datetime.now().year}
    options = authenticate(options)
    collection = request.args.get('collection')
    if request.method == 'GET':
        img_data = query("SELECT * from Images WHERE collection = '" +
                         collection + "'")
        options['images'] = img_data
        return render_template("editCollection.html", **options)
    elif request.method == 'POST':
        if request.form['op'] == 'add':
            file = request.files['file']
            comment = request.form['comment']
            if file.filename != '':
                if file and allowed_file(file.filename):
                    # Hash filename + collection + timestamp for a unique name
                    m = hashlib.md5(
                        (file.filename + collection +
                         str(datetime.datetime.now())).encode('utf-8'))
                    hashed = m.hexdigest()
                    get_extension = file.filename.rsplit('.', 1)[1].lower()
                    new_filename = hashed + "." + get_extension
                    filename = secure_filename(new_filename)
                    carousel = '1'
                    if request.form.getlist('carousel'):
                        carousel = '0'
                    conn = tinys3.Connection(s3_key, s3_skey, tls=True)
                    conn.upload(new_filename, file, 'janehardystudio')
                    query(
                        "INSERT INTO Images(id, format, caption, collection, carousel) VALUES \
                        ('" + hashed + "', '" + get_extension + "','" + comment +
                        "','" + collection + "','" + carousel + "')")
        return redirect(
            url_for('collection.edit_collection_route', collection=collection))

def lambda_handler(event, context):
    import io
    conn = tinys3.Connection("AKIAJTDGI6SEZTFNVAQQ",
                             "vGqdq36MMSqzpRzxadRglWICu4LWymuYLMTnOq8K")
    tokenId = "Basic 87d910883bc9446ed7922880efd21836"
    print(event)
    buildLink = event["links"]["api_self"]["href"]
    # buildLink = "/api/orgs/sina-yeganeh/projects/roller-baller/buildtargets/webgl-build/builds/2"
    authPayload = {"Authorization": tokenId}
    buildData = requests.get("https://build-api.cloud.unity3d.com" + buildLink,
                             headers=authPayload)
    primaryLink = json.loads(buildData.text)["links"]["download_primary"]["href"]
    print(primaryLink)
    results = requests.get(primaryLink)
    # Unzip the downloaded build archive into /tmp
    zf = zipfile.ZipFile(io.BytesIO(results.content))
    zf.extractall("/tmp/")
    with open("/tmp/WebGL build/index.html", 'rb') as f:
        conn.upload('index.html', f, 'rollerballer')
    files = os.listdir("/tmp/WebGL build/Build")
    for filename in files:
        with open("/tmp/WebGL build/Build/" + filename, 'rb') as f:
            conn.upload("Build/" + filename, f, 'rollerballer')
    files = os.listdir("/tmp/WebGL build/TemplateData")
    for filename in files:
        with open("/tmp/WebGL build/TemplateData/" + filename, 'rb') as f:
            conn.upload("TemplateData/" + filename, f, 'rollerballer')
    return "Done"

def eliminarBeneficiario(request, id=None):
    if request.method == 'DELETE':
        s = Salud.objects.filter(beneficiario=id)
        n = Nutricion.objects.filter(beneficiario=id)
        c = Cabeza_Nucleo.objects.filter(beneficiario=id)
        f = Familiar.objects.filter(beneficiario=id)
        v = CaracteristicasVivienda.objects.filter(beneficiario=id)
        b = Beneficiario.objects.get(id=id)
        try:
            conn = tinys3.Connection(AWS_ACCESS_KEY_ID,
                                     AWS_SECRET_ACCESS_KEY,
                                     AWS_STORAGE_BUCKET_NAME,
                                     tls=True)
            # Delete every file under the beneficiary's media folder
            lista = conn.list('media/beneficiarios/' + str(id),
                              AWS_STORAGE_BUCKET_NAME)
            for fichero in lista:
                conn.delete(fichero['key'])
            conn.delete('media/beneficiarios/' + str(id))
        except OSError as e:
            print(e)
        registrarLogs(
            request.user.first_name + " " + request.user.last_name,
            'ELIMINAR', 'Beneficiarios',
            'Beneficiario Eliminado Exitosamente',
            b.primer_nombre + " " + b.segundo_nombre + " " +
            b.primer_apellido + " " + b.segundo_apellido)
        s.delete()
        n.delete()
        c.delete()
        f.delete()
        b.delete()
        messages.success(request, 'Borrado')
        return HttpResponse(status=200)
    else:
        return HttpResponseRedirect("/")

def post(self, request, filename='default_file', format=None):
    from datetime import datetime
    timestamp = str(datetime.now().strftime('%Y_%m_%d_%H_%M_%S'))
    file_obj = request.FILES['file']
    user_name = request.POST.get("username", "")
    source_path = 'uploaded_files/'
    s3_key = ''
    user = request.user
    if not user.is_active:
        return Response({'error': 'You are not activated.'},
                        status=status.HTTP_401_UNAUTHORIZED)
    source_path = source_path + user.username + "/"
    bucket_name = settings.BUCKET_NAME
    if bucket_name == "":
        pass
    s3_key = source_path + filename
    conn = tinys3.Connection(settings.AWS_ACCESS_KEY_ID,
                             settings.AWS_SECRET_ACCESS_KEY)
    # Uploading a single file
    try:
        s = conn.upload(s3_key, file_obj, bucket_name)
        # conn.update_metadata(s3_file_path, {'x-amz-meta-redshift-status': 'False'}, bucket_name)
    except requests.HTTPError as e:
        # if httpe.response.status_code == 404:
        return Response({'error': str(e)}, status=status.HTTP_303_SEE_OTHER)
    profile = Profile.objects.get(user=user)
    profile.image = s.url
    profile.save()
    return Response({'details': 'Uploaded Successfully'},
                    status=status.HTTP_200_OK)

def conn(self):
    """ Establishes connection to S3 bucket """
    return tinys3.Connection(config.S3_ACCESS_KEY,
                             config.S3_SECRET_KEY,
                             default_bucket=self.name)

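# Note: conn() above builds a fresh tinys3.Connection on every call. If the
# connection should be reused across calls, a cached variant might look like
# this sketch; the _conn attribute name is a hypothetical choice, not part of
# the original code.
def conn_cached(self):
    """ Establishes the S3 connection once and reuses it """
    if getattr(self, '_conn', None) is None:
        self._conn = tinys3.Connection(config.S3_ACCESS_KEY,
                                       config.S3_SECRET_KEY,
                                       default_bucket=self.name)
    return self._conn
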
def upload_to_s3(filename):
    conn = tinys3.Connection(os.environ['S3_ACCESS_KEY'],
                             os.environ['S3_SECRET_KEY'],
                             tls=True)
    with open(filename, 'rb') as f:
        conn.upload(filename, f, s3_bucket)

def download_upload_image_s3(url, img_name):
    """Download an image from the given URL and upload it to the S3 bucket"""
    try:
        http = urllib3.PoolManager()
        response_image = http.request('GET', url, preload_content=False)
        print("status", response_image.status, url)
        # Stream the download to disk in 1 KB chunks
        with open('images/' + img_name, 'wb') as out:
            while True:
                data = response_image.read(1024)
                if not data:
                    break
                out.write(data)
        conn = tinys3.Connection(conf.AWS_ACCESS_KEY, conf.AWS_SECRET_KEY,
                                 tls=True)
        with open("images/" + img_name, 'rb') as myimg:
            conn.upload(conf.FOLDER + img_name, myimg, 'drugs-catalog')
        return True
    except Exception as e:
        logger.error("[ERROR] downloading or uploading images {}".format(e))
        return False

def upload(file_name):
    conn = tinys3.Connection(os.getenv("S3_ACCESS_KEY"),
                             os.getenv("S3_SECRET_KEY"),
                             tls=True)
    with open(file_name, "rb") as f:
        conn.upload("media/" + file_name, f, os.getenv("S3_BUCKET_NAME"))

def take_photo_and_upload_img():
    conn = tinys3.Connection("AKIAIJV4A2VRU7RAALSA",
                             "/OwQ9zmhaefHmoSiqLJxwB56K4qUWIucAZxOpm5P")
    # Name the shot after the current Unix timestamp
    fileNameShort = str(int(time.time())) + ".jpg"
    fileName = "../Image_Description/static/uploads/" + fileNameShort
    take_photo(fileName)
    with open(fileName, 'rb') as img_file:
        conn.upload(fileNameShort, img_file, 'pisight')
    return fileNameShort

def s3fileupload(resourcepath, accesskey, secretkey, bucketname):
    s3 = tinys3.Connection(accesskey, secretkey, tls=True,
                           endpoint='s3-us-west-2.amazonaws.com')
    with open(resourcepath + '/Output/Prediction-new.csv', 'rb') as f:
        s3.upload('Prediction-new.csv', f, bucketname)
    print("Uploaded")

def connect_s3():
    global s3
    if not s3:
        s3 = tinys3.Connection(
            os.environ.get("AWS_ACCESS_KEY_ID"),
            os.environ.get("AWS_SECRET_ACCESS_KEY"),
        )
    return s3

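# Usage sketch for connect_s3() above: the module-level s3 handle is created
# on the first call and reused afterwards. The bucket and file names here are
# placeholder assumptions.
def example_connect_s3_usage():
    with open('backup.tar.gz', 'rb') as f:
        connect_s3().upload('backups/backup.tar.gz', f,
                            bucket='my-backup-bucket')
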
def push_to_s3(filepath):
    s3 = tinys3.Connection(os.environ['AWS_ACCESS_KEY_ID'],
                           os.environ['AWS_SECRET_KEY'],
                           tls=True)
    with open(filepath, 'rb') as f:
        s3.upload(filepath, f, 'darkmattersheep.uk/strictly/')

def __init__(self, app=None):
    super().__init__(app)
    username, api_key, endpoint = self.read_from_config()
    self.conn = tinys3.Connection(username, api_key, tls=True,
                                  endpoint=endpoint)
    self.user_metadata_header = 'x-amz-meta-'

def upload_checkpoint_to_s3(source_file, current_step, bucket, random_id):
    conn = tinys3.Connection(os.environ["AWS_ACCESS_KEY_ID"],
                             os.environ["AWS_SECRET_ACCESS_KEY"],
                             tls=True)
    # Upload the checkpoint data, its .meta file, and the checkpoint index
    upload_files = [source_file + "-" + str(current_step),
                    source_file + "-" + str(current_step) + ".meta",
                    source_file.rsplit('/', 1)[0] + "/checkpoint"]
    for uf in upload_files:
        # Prefix each uploaded name with the run's random id
        uploaded_name = random_id + "-" + uf.rsplit('/', 1)[1]
        print("file name: %s uploaded named: %s" % (uf, uploaded_name))
        with open(uf, 'rb') as f:
            conn.upload(uploaded_name, f, bucket)

def __init__(self, *args, **kwargs):
    """ Overrides LocalStorage's __init__ and initializes a shared S3 connection """
    super(S3Storage, self).__init__(*args, **kwargs)
    self.conn = tinys3.Connection(self.S3_ACCESS_KEY,
                                  self.S3_SECRET_KEY,
                                  default_bucket=self.S3_BUCKET,
                                  tls=True)