def add_bucket(self, bucket_name, access, zonename, create_date):
    """Create *bucket_name* and record its metadata in a 'create_info' key.

    Returns True when the bucket was created (account still under
    ``self.bucket_limit``); False when the limit is reached or any S3
    call fails.
    """
    try:
        bucket_count = len(self.conn.get_all_buckets())
        # Guard clause: account already holds the maximum number of buckets.
        if bucket_count >= self.bucket_limit:
            return False
        self.conn.create_bucket(bucket_name)
        b = self.conn.get_bucket(bucket_name)
        try:
            k1 = Key(b)
            k1.key = 'create_info'
            # NOTE: metadata names must not contain underscores --
            # e.g. 'Bucket_Name' makes the request fail with a 403.
            k1.set_metadata('BucketName', bucket_name)
            k1.set_metadata('ZoneName', zonename)
            k1.set_metadata('Access', access)
            k1.set_metadata('CreateDate', create_date)
            k1.set_contents_from_string('')
        except Exception as e:
            # Metadata upload is best-effort: the bucket itself was created,
            # so still report success (replaces the old backslash debug print).
            print('failed to write create_info metadata:', e)
        return True
    except Exception:
        # Any connection/creation failure is reported as "not created".
        return False
def add_bucket(self, bucket_name, access, zonename, create_date):
    """Create *bucket_name* and record its metadata in a 'create_info' key.

    Returns True on creation, False when ``self.bucket_limit`` is reached
    or any S3 call fails.
    """
    try:
        if len(self.conn.get_all_buckets()) >= self.bucket_limit:
            # Account is at its bucket quota -- refuse to create another.
            return False
        self.conn.create_bucket(bucket_name)
        new_bucket = self.conn.get_bucket(bucket_name)
        try:
            info_key = Key(new_bucket)
            info_key.key = 'create_info'
            # NOTE: metadata names must not contain underscores; a name
            # such as 'Bucket_Name' causes the request to fail with 403.
            info_key.set_metadata('BucketName', bucket_name)
            info_key.set_metadata('ZoneName', zonename)
            info_key.set_metadata('Access', access)
            info_key.set_metadata('CreateDate', create_date)
            info_key.set_contents_from_string('')
        except Exception as e:
            # Best-effort metadata write: the bucket exists, so keep the
            # success result (the old code printed a row of backslashes).
            print('failed to write create_info metadata:', e)
        return True
    except Exception:
        # Quota lookup or bucket creation failed.
        return False
def uploadStrToS3(self, destDir, filename, contents):
    """Upload a string to an S3 file at <destDir>/<filename> in self.bucket."""
    # print() function for consistency with the Python 3 variant of this
    # helper elsewhere in the codebase (which uses f-strings).
    print('Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name))
    k2 = Key(self.bucket)
    k2.key = os.path.join(destDir, filename)
    # reduced_redundancy selects the cheaper, lower-durability storage class.
    k2.set_contents_from_string(contents, reduced_redundancy=True)
    print()  # This newline is needed to get the path of the compiled binary printed on a newline.
def WriteDataStringtoS3(string, game, msg_type, S3_bucket):
    """Write *string* to /data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt
    in *S3_bucket* (a boto Bucket), using reduced-redundancy storage.
    """
    # Take a single timestamp: the original called datetime.now() twice,
    # so the date and time parts could disagree across a boundary.
    now = datetime.now()
    today_YYYMMDD = now.strftime('%Y%m%d')
    today_hhmmss = now.strftime('%H-%M-%S')
    S3_path = '/data/' + game + '/' + msg_type + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    k = Key(S3_bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
def add(bkt, key, img, form='JPEG'):
    """Serialize PIL image *img* as *form*, store it in bucket *bkt* under
    *key*, and make the object publicly readable.
    """
    bucket = conn.get_bucket(bkt)
    newKeyObj = Key(bucket)
    newKeyObj.key = key
    # Fix: the registered MIME type is 'image/jpeg'; 'image/jpg' is not a
    # valid IANA media type and some clients reject it.
    newKeyObj.set_metadata('Content-Type', 'image/jpeg')
    buf = s.StringIO()
    img.save(buf, form)  # render the image into an in-memory buffer
    newKeyObj.set_contents_from_string(buf.getvalue())
    newKeyObj.set_acl('public-read')  # world-readable object
def uploadStrToS3(self, destDir, filename, contents):  # pylint: disable=invalid-name,missing-param-doc
    # pylint: disable=missing-type-doc
    """Upload a string to an S3 file."""
    # Fix: report the actual file name; the f-string previously contained
    # the literal text "(unknown)" instead of the {filename} placeholder.
    print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")
    k2 = Key(self.bucket)  # pylint: disable=invalid-name
    k2.key = os.path.join(destDir, filename)
    k2.set_contents_from_string(contents, reduced_redundancy=True)
    print()  # This newline is needed to get the path of the compiled binary printed on a newline.
def upload(self):
    """Push every entry produced by get_files() into the bucket, applying
    the shared headers plus a gzip content-encoding for compressed data.
    """
    for destination, payload, mime, is_gzipped in self.get_files():
        entry = Key(self.bucket)
        entry.key = destination
        entry.content_type = mime
        if is_gzipped:
            entry.set_metadata('content-encoding', 'gzip')
        for header, value in self.headers:
            entry.set_metadata(header, value)
        entry.set_contents_from_string(payload)
def WriteStringtoS3(string, game, msg_type):
    """Upload *string* to <env>/data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt
    in the account's 'dailydosegames-gamedata-<key>' bucket.
    """
    env, game, msgtype = g_env, game, msg_type
    # Single timestamp: the original invoked datetime.now() twice, so the
    # date and time components could straddle a boundary and disagree.
    now = datetime.now()
    today_YYYMMDD, today_hhmmss = now.strftime('%Y%m%d'), now.strftime('%H-%M-%S')
    S3_path = env + '/data/' + game + '/' + msgtype + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    S3_bucket = 'dailydosegames-gamedata-' + g_AWSAccessKeyId.lower()
    conn = S3Connection(g_AWSAccessKeyId, g_AWSSecretKey)
    bucket = conn.get_bucket(S3_bucket)
    k = Key(bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
def WriteStringtoS3(string, game, msg_type):
    """Upload *string* to <env>/data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt
    in the account's 'dailydosegames-gamedata-<key>' bucket.
    """
    env, game, msgtype = g_env, game, msg_type
    # Capture one timestamp so the date and time parts are consistent
    # (two separate datetime.now() calls can disagree at a boundary).
    now = datetime.now()
    today_YYYMMDD = now.strftime('%Y%m%d')
    today_hhmmss = now.strftime('%H-%M-%S')
    S3_path = env + '/data/' + game + '/' + msgtype + '/' + \
        today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    S3_bucket = 'dailydosegames-gamedata-' + g_AWSAccessKeyId.lower()
    conn = S3Connection(g_AWSAccessKeyId, g_AWSSecretKey)
    bucket = conn.get_bucket(S3_bucket)
    k = Key(bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
def upload_content(bucket=None, key_name=None, data_type=kUploadContentType.String, data=None):
    """Upload *data* to *bucket* under *key_name*, dispatching on *data_type*
    (String / File / FileName / Stream).

    Returns True on success, False on any upload failure.
    """
    bucket = get_bucket(bucket)
    bucketKey = Key(bucket)
    bucketKey.key = key_name
    try:
        if data_type == kUploadContentType.String:
            bucketKey.set_contents_from_string(data)
        elif data_type == kUploadContentType.File:
            bucketKey.set_contents_from_file(data)
        # Fix: compare against the FileName member itself. The original wrote
        # `kUploadContentType.FileName(data)`, which *called* FileName with
        # the payload and compared data_type to the result, so the
        # filename branch could never be selected correctly.
        elif data_type == kUploadContentType.FileName:
            bucketKey.set_contents_from_filename(data)
        elif data_type == kUploadContentType.Stream:
            bucketKey.set_contents_from_stream(data)
        return True
    except Exception:
        # Python 3 syntax (the original used the removed "except E, e" form);
        # failure is reported via the boolean result rather than re-raised.
        return False
def save_tree(self, target, new_tree):
    """Serialize *new_tree* and store it in S3 under
    <s3-pf-prefix>/trees/<target>.<YYYYMMDDTHHMM>, tagging the object with
    its target name and save timestamp.
    """
    self.connect()
    stamp = datetime.utcnow().strftime('%Y%m%dT%H%M')
    # Serialize the tree into an in-memory buffer first.
    buffer = StringIO()
    tree.save_tree(new_tree, buffer)
    # Save to S3
    print("Saving tree to S3")
    prefix = self.cp.get('options', 's3-pf-prefix')
    obj = Key(self.s3_bucket)
    obj.key = '{}/trees/{}.{}'.format(prefix, target, stamp)
    obj.set_metadata('pf:target', target)
    obj.set_metadata('pf:saved_dt', stamp)
    obj.set_contents_from_string(buffer.getvalue())
def s3_put_string(key, content):
    """Store *content* under *key* in the module-level aws_bucket and
    return whatever boto's upload call reports.
    """
    return Key(aws_bucket, key).set_contents_from_string(content)
# Join every per-entity partial table into one wide frame, matching rows on
# the shared `entity` column, then append the derived columns.
entityInfo = reduce(lambda df, tbl: df.join(tbl, entity, how="outer"), partTbls)
entityInfo = add_computed_cols(entityInfo)

# COMMAND ----------

print "writing entityInfo to s3: %s" % destEntityInfoPath
# repartition(1) forces a single CSV part file in the output directory.
entityInfo.repartition(1).write.format('com.databricks.spark.csv') \
    .options(header='true', nullValue='') \
    .save('/mnt/' + AWS_BUCKET_NAME + '/' + destEntityInfoPath, mode='overwrite')
print "written entityInfo to S3."

# COMMAND ----------

# compute and write out simMatrix
sim = compute_sim_mat(orderInfoDF.groupby(entity, uID).count())
print "sim length: %d" % len(sim)
# Pickle with the highest protocol available (-1).
binary_dump = dumps(sim, -1)
print "writing simMat to s3 Key: %s/%s" % (AWS_BUCKET_NAME, destSimMatKey)

# Open a connection to S3 and write the contents
conn = S3Connection(ACCESS_KEY, SECRET_KEY)
bucket = conn.get_bucket(AWS_BUCKET_NAME)
k = Key(bucket)
k.key = destSimMatKey
# Tag the object so downstream readers know how it was encoded and produced.
k.set_metadata("encoder", "pickle-version-2")
k.set_metadata("generated-by", "mappr-etl")
k.set_metadata("generated-by-source", srcFilePath)
k.set_contents_from_string(binary_dump)
print "written simMat to S3."

# Drop cached Spark tables now that both outputs are written.
sqlContext.clearCache()
#!/usr/bin/env python import boto from os import getenv from boto.s3.connection import Key from time import ctime s3con = boto.connect_s3(getenv("PHOENIX_COMMERCE_AWS_ACCESS_KEY_ID"), getenv("PHOENIX_COMMERCE_AWS_SECRET_ACCESS_KEY")) s3con.connection.connect() b = s3con.get_bucket("binarytemple-phoenix-commerce") keys = b.list() for k in keys: print k.name k = Key(b) k.key = "test.txt" k.set_contents_from_string("last updated - %s" % ctime())
def createVideo():
    """Flask endpoint: create a video record, optionally uploading its
    content and thumbnail (base64 in the request) to S3, and return a JSON
    success/failure response.

    Expects a JSON body with a 'params' object containing user_id,
    access_token, date, timeline_id and description; optional keys are
    public_feed, video_content and video_thumbnail.
    """
    req = flask.request.get_json()['params']
    session = database.DBSession()
    # Check if the user is allowed to access this method
    allowed = authorized.authorized(req['user_id'], req['access_token'], session)
    if allowed is not True:
        session.close()
        return allowed  # authorized() already produced an error response
    # Local import -- presumably deferred to avoid a circular import; verify.
    from user import Connection
    # Create the video and send back a response
    video_date = int(req['date'])
    user_id = req['user_id']
    timeline_id = req['timeline_id']
    if timeline_id == '':
        # An empty timeline id means "post to the public feed".
        timeline_id = string_constants.kServerVideoPublicFeedKey
    description = req['description']
    public_feed = False
    if 'public_feed' in req:
        public_feed = bool(req['public_feed'])
    video_content = None
    if 'video_content' in req:
        video_content = base64.b64decode(req['video_content'])
    video_thumbnail = None
    if 'video_thumbnail' in req:
        video_thumbnail = base64.b64decode(req['video_thumbnail'])
    if video_date is None or user_id is None or timeline_id is None or description is None:
        session.close()
        return authorized.wrongParams()
    # Add video_id to the playlist in the relationship
    timeline = None
    if timeline_id != string_constants.kServerVideoPublicFeedKey:
        # Only approved, non-disabled connections may receive timeline videos.
        timeline = session.query(Timeline).filter(Timeline.timeline_id == timeline_id).join(Timeline.connection).filter(Connection.approved == 1).filter(Connection.disabled == 0).first()
        if timeline is None:
            response = jsonify(message=string_constants.kServerVideoTimelineIDDoesntExist,
                               status=False,
                               HTTP_CODE=200
                               )
            response.status_code = 200
            session.close()
            return response
    # Deterministic id: hash of (date, user, timeline) -- duplicate posts
    # collide on purpose and are rejected below.
    video_filename = hashlib.sha256(str(video_date) + user_id + timeline_id).hexdigest()
    # Check if video already exists
    video_check = session.query(VideoModel).filter(VideoModel.video_id == video_filename).first()
    if video_check is not None:
        response = jsonify(message=string_constants.kServerVideoAlreadyExistsError,
                           status=False,
                           HTTP_CODE=200
                           )
        response.status_code = 200
        session.close()
        return response
    try:
        video_path = str(Video.getVideoObjectPath(video_filename, user_id, timeline_id, str(video_date))+".m4v")
        thumbnail_path = str(Video.getVideoThumbnailObjectPath(video_filename, user_id, timeline_id, str(video_date))+".jpg")
        if app.config["AWS_S3"]:
            # Upload both blobs only when both were supplied in the request.
            if video_content is not None and video_thumbnail is not None:
                aws_s3_connection = S3Connection(app.config['AWS_ACCESS_KEY'], app.config['AWS_SECRET_KEY'])
                aws_s3_bucket = Bucket(aws_s3_connection, app.config['AWS_BUCKET_NAME'])
                aws_s3_video_key = Key(aws_s3_bucket)
                aws_s3_video_key.key = video_path
                aws_s3_video_key.content_type = app.config['AWS_KEY_CONTENT_TYPE']
                aws_s3_video_key.set_contents_from_string(video_content, replace=True)
                aws_s3_thumb_key = Key(aws_s3_bucket)
                aws_s3_thumb_key.key = thumbnail_path
                aws_s3_thumb_key.content_type = app.config['AWS_KEY_CONTENT_TYPE']
                aws_s3_thumb_key.set_contents_from_string(video_thumbnail, replace=True)
        # Create new video object and save it to the database
        new_video = VideoModel(video_date, user_id, timeline_id, video_filename + '_thumb.jpg', video_filename, description, public_feed)
        if timeline is not None:
            timeline.video_count += 1
            from user import UserModel
            userDisplayName = session.query(UserModel.display_name).filter(UserModel.user_id == int(user_id)).first()
            userDisplayName = userDisplayName[0]
            # Add the notification for the new video
            # NOTE(review): RegisteredNotificationUserModel is imported but unused here.
            from notification import NotificationModel, RegisteredNotificationUserModel
            # The and/or idiom picks "the other" user in the connection as
            # the notification recipient.
            notification = NotificationModel(
                user_id,
                (int(timeline.connection.user1) == int(user_id)) and timeline.connection.user2 or timeline.connection.user1,
                {
                    string_constants.kServerNotificationsType: string_constants.kServerNotificationsTypeNewVideo,
                    string_constants.kServerNotificationsTimeline_idKey: timeline_id,
                    string_constants.kServerNotificationsUser_NameKey: userDisplayName
                },
                calendar.timegm(datetime.utcnow().timetuple()))
            session.add(notification)
        session.add(new_video)
        session.commit()
    except exc.SQLAlchemyError as e:
        response = jsonify(message=string_constants.kServerVideoIssueMakingVideo,
                           status=False,
                           HTTP_CODE=200
                           )
        response.status_code = 200
        session.close()
        return response
    else:
        # try/else: only reached when the commit succeeded.
        response = jsonify(message=string_constants.kServerVideoCreatedVideoSuccess,
                           status=True,
                           HTTP_CODE=200,
                           Video={
                               "video_path": video_path,
                               "thumbnail_path": thumbnail_path
                           }
                           )
        response.status_code = 200
        session.close()
        return response
def s3_put_string(key, content):
    """Upload *content* under *key* to the module-level aws_bucket."""
    target = Key(aws_bucket, key)
    result = target.set_contents_from_string(content)
    return result