def update_video_youtubeid_s3(vid):
    """Sync a dashboard Video with its YouTube source (GData feed).

    Locates the video on YouTube based on the ``youtubeid`` field,
    creates/updates its thumbnails (raw + 16:9 crop) on S3, and
    creates/updates the ``duration`` field.

    Args:
        vid: dashboard Video instance exposing ``id``, ``youtubeid`` and
             ``duration``; saved as a side effect when fields change.
    """
    # S3 connection; the bucket holds publicly readable thumbnails.
    bucket_name = 'digitalgreen'
    bucket = S3Connection(ACCESS_KEY, SECRET_KEY).create_bucket(bucket_name)
    bucket.set_acl('public-read')
    location_raw_images = 'video_thumbnail/raw/'
    location_16by9_images = 'video_thumbnail/16by9/'
    file_save_dir = os.path.join(MEDIA_ROOT, 'youtube')
    cleaned_id = cleanup_youtubeid(vid.youtubeid)
    # BUGFIX: was `==`, which saved only when the id was already clean and
    # never persisted a genuinely cleaned id (the v3-API version of this
    # function uses `!=`, confirming the intended behavior).
    if cleaned_id != vid.youtubeid:
        vid.youtubeid = cleaned_id
        vid.save()
    # Fetch the video entry from YouTube.
    entry = get_youtube_entry(cleaned_id)
    if entry:
        # Upload thumbnails only when the raw key is not already on S3.
        key = "".join([location_raw_images, str(vid.id), '.jpg'])
        if not bucket.get_key(key):
            img = ProcessedImage()
            found_thumbnail = False
            # Try the advertised thumbnails in order until one downloads.
            for thumbnail in entry.media.thumbnail:
                try:
                    url = thumbnail.url
                    filepath = os.path.join(file_save_dir,
                                            thumbnail.url.split("/")[-1])
                    img.set_image_from_url(url, filepath)
                    found_thumbnail = True
                    break
                except Exception:
                    # Narrowed from a bare `except:` so KeyboardInterrupt /
                    # SystemExit are no longer swallowed while probing URLs.
                    continue
            if found_thumbnail:
                # Store the border-stripped original under the raw prefix...
                img_borderless = img.remove_border()
                filepath_borderless = os.path.join(file_save_dir, "raw.jpg")
                img_borderless.save(filepath_borderless)
                add_to_s3(bucket, key, filepath_borderless)
                print(key)
                # ...and a 217x124 (16:9) crop under its own prefix.
                img_cropped = img_borderless.crop(217, 124)
                filepath_16by9 = os.path.join(file_save_dir, "16.jpg")
                img_cropped.save(filepath_16by9)
                key = "".join([location_16by9_images, str(vid.id), '.jpg'])
                add_to_s3(bucket, key, filepath_16by9)
                print(key)
            else:
                logger = logging.getLogger('social_website')
                logger.info('Image does not exist for youtubeID')
        # Update duration (stored as the str() of a timedelta).
        duration = timedelta(seconds=int(entry.media.duration.seconds))
        if vid.duration != str(duration):
            vid.duration = str(duration)
            vid.save()
def update_video_youtubeid_s3(vid):
    """Sync a dashboard Video with its YouTube source (Data API v3).

    Locates the video on YouTube based on the ``youtubeid`` field,
    creates/updates its thumbnails (raw + 16:9 crop) on S3, and
    creates/updates the ``duration`` field from the ISO-8601 duration.

    Args:
        vid: dashboard Video instance exposing ``id``, ``youtubeid`` and
             ``duration``; saved as a side effect when fields change.
    """
    # S3 connection; the bucket holds publicly readable thumbnails.
    bucket_name = 'digitalgreen'
    bucket = S3Connection(ACCESS_KEY, SECRET_KEY).create_bucket(bucket_name)
    bucket.set_acl('public-read')
    location_raw_images = 'video_thumbnail/raw/'
    location_16by9_images = 'video_thumbnail/16by9/'
    file_save_dir = os.path.join(MEDIA_ROOT, 'youtube')
    cleaned_id = cleanup_youtubeid(vid.youtubeid)
    # Persist the cleaned id only when it actually differs.
    if cleaned_id != vid.youtubeid:
        vid.youtubeid = cleaned_id
        vid.save()
    # v3 videos endpoint; snippet gives thumbnails, contentDetails duration.
    url = 'https://www.googleapis.com/youtube/v3/videos?part=snippet%2C+contentDetails&id='
    entry = get_youtube_entry(cleaned_id, url)
    if entry:
        # Upload thumbnails only when the raw key is not already on S3.
        key = "".join([location_raw_images, str(vid.id), '.jpg'])
        if not bucket.get_key(key):
            img = ProcessedImage()
            try:
                url = entry['items'][0]['snippet']['thumbnails']['high']['url']
                filepath = os.path.join(file_save_dir, url.split("/")[-1])
                img.set_image_from_url(url, filepath)
                found_thumbnail = True
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed; any lookup/download
                # failure still falls through to the logging branch below.
                found_thumbnail = False
            if found_thumbnail:
                # Store the border-stripped original under the raw prefix...
                img_borderless = img.remove_border()
                filepath_borderless = os.path.join(file_save_dir, "raw.jpg")
                img_borderless.save(filepath_borderless)
                add_to_s3(bucket, key, filepath_borderless)
                print(key)
                # ...and a 217x124 (16:9) crop under its own prefix.
                img_cropped = img_borderless.crop(217, 124)
                filepath_16by9 = os.path.join(file_save_dir, "16.jpg")
                img_cropped.save(filepath_16by9)
                key = "".join([location_16by9_images, str(vid.id), '.jpg'])
                add_to_s3(bucket, key, filepath_16by9)
                print(key)
            else:
                logger = logging.getLogger('social_website')
                logger.info('Image does not exist for youtubeID')
        # Update duration (ISO-8601 string -> timedelta -> str()).
        duration = isodate.parse_duration(
            entry['items'][0]['contentDetails']['duration'])
        if vid.duration != str(duration):
            vid.duration = str(duration)
            vid.save()
def _sync_thumbnails_to_s3(bucket, vid, entry, file_save_dir):
    """Download the video's high-res YouTube thumbnail and push the raw
    and 217x124 (16:9) cropped versions to S3; no-op if the raw key
    already exists, logs and returns if no thumbnail can be fetched."""
    location_raw_images = 'video_thumbnail/raw/'
    location_16by9_images = 'video_thumbnail/16by9/'
    key = "".join([location_raw_images, str(vid.id), '.jpg'])
    if bucket.get_key(key):
        return
    img = ProcessedImage()
    try:
        url = entry['items'][0]['snippet']['thumbnails']['high']['url']
        filepath = os.path.join(file_save_dir, url.split("/")[-1])
        img.set_image_from_url(url, filepath)
    except Exception:
        # Narrowed from a bare `except:`; missing key or failed download
        # both mean there is nothing to upload.
        logger = logging.getLogger('social_website')
        logger.info('Image does not exist for youtubeID')
        return
    # Raw, border-stripped image under its prefix...
    img_borderless = img.remove_border()
    filepath_borderless = os.path.join(file_save_dir, "raw.jpg")
    img_borderless.save(filepath_borderless)
    add_to_s3(bucket, key, filepath_borderless)
    print(key)
    # ...and the 16:9 crop under its own prefix.
    img_cropped = img_borderless.crop(217, 124)
    filepath_16by9 = os.path.join(file_save_dir, "16.jpg")
    img_cropped.save(filepath_16by9)
    key = "".join([location_16by9_images, str(vid.id), '.jpg'])
    add_to_s3(bucket, key, filepath_16by9)
    print(key)


def update_video_youtubeid_s3(vid):
    """Sync a dashboard Video with its YouTube source (Data API v3).

    Locates the video on YouTube based on the ``youtubeid`` field,
    creates/updates its thumbnails on S3 (via ``_sync_thumbnails_to_s3``),
    and creates/updates the ``duration`` field.

    Args:
        vid: dashboard Video instance exposing ``id``, ``youtubeid`` and
             ``duration``; saved as a side effect when fields change.
    """
    # S3 connection; the bucket holds publicly readable thumbnails.
    bucket = S3Connection(ACCESS_KEY, SECRET_KEY).create_bucket('digitalgreen')
    bucket.set_acl('public-read')
    file_save_dir = os.path.join(MEDIA_ROOT, 'youtube')
    cleaned_id = cleanup_youtubeid(vid.youtubeid)
    # Persist the cleaned id only when it actually differs.
    if cleaned_id != vid.youtubeid:
        vid.youtubeid = cleaned_id
        vid.save()
    # v3 videos endpoint; snippet gives thumbnails, contentDetails duration.
    url = 'https://www.googleapis.com/youtube/v3/videos?part=snippet%2C+contentDetails&id='
    entry = get_youtube_entry(cleaned_id, url)
    if not entry:
        return
    _sync_thumbnails_to_s3(bucket, vid, entry, file_save_dir)
    # Update duration (ISO-8601 string -> timedelta -> str()).
    duration = isodate.parse_duration(
        entry['items'][0]['contentDetails']['duration'])
    if vid.duration != str(duration):
        vid.duration = str(duration)
        vid.save()
try: obj = Animator.objects.get(old_coco_id=animator_id) new_key_name = "".join([dir_name, '/', str(obj.id), '.jpg']) except Animator.DoesNotExist: print "Animator Does Not Exist" elif(dir_name == 'village'): if(file_name.find('.') != -1): village_id = file_name.split('.')[0] if village_id.isdigit(): try: obj = Village.objects.get(old_coco_id=village_id) new_key_name = "".join([dir_name, '/', str(obj.id), '.jpg']) except Village.DoesNotExist: print "Village Does Not Exist" if(len(dir_name) == 11 or len(dir_name) == 14): try: obj = Person.objects.get(old_coco_id=dir_name) new_key_name = "".join([str(obj.id), '/', file_name.replace('JPG', 'jpg')]) except Person.DoesNotExist: print "Person Does Not Exist" if new_key_name != '': if not bucket_new.get_key(new_key_name): print new_key_name save_name = key.name.replace('/', '_') key.get_contents_to_filename(folder_path + save_name) add_to_s3(bucket_new, new_key_name, folder_path + save_name) else: if not os.path.exists(folder_path + key.name): os.makedirs(folder_path + key.name)
except Animator.DoesNotExist: print "Animator Does Not Exist" elif (dir_name == 'village'): if (file_name.find('.') != -1): village_id = file_name.split('.')[0] if village_id.isdigit(): try: obj = Village.objects.get(old_coco_id=village_id) new_key_name = "".join( [dir_name, '/', str(obj.id), '.jpg']) except Village.DoesNotExist: print "Village Does Not Exist" if (len(dir_name) == 11 or len(dir_name) == 14): try: obj = Person.objects.get(old_coco_id=dir_name) new_key_name = "".join( [str(obj.id), '/', file_name.replace('JPG', 'jpg')]) except Person.DoesNotExist: print "Person Does Not Exist" if new_key_name != '': if not bucket_new.get_key(new_key_name): print new_key_name save_name = key.name.replace('/', '_') key.get_contents_to_filename(folder_path + save_name) add_to_s3(bucket_new, new_key_name, folder_path + save_name) else: if not os.path.exists(folder_path + key.name): os.makedirs(folder_path + key.name)
from libs.s3_utils import add_to_s3

# Source bucket on the old S3 account.
bucket_name = 'dg_farmerbook'
conn = S3Connection(S3_ACCESS_KEY, S3_SECRET_KEY)
bucket = conn.get_bucket(bucket_name)

# Destination bucket on the new S3 account, publicly readable.
bucket_name_new = 'dg-farmerbook'
bucket_new = S3Connection(ACCESS_KEY, SECRET_KEY).create_bucket(bucket_name_new)
bucket_new.set_acl('public-read')

# Local scratch directory used to stage each object during the copy.
folder_path = MEDIA_ROOT + 'farmerbook/'
if not os.path.exists(folder_path):
    os.makedirs(folder_path)

buck_list = bucket.list()
for key in buck_list:
    if '.' in key.name:
        # File-like key: copy across, lower-casing the JPG suffix, unless
        # the destination already has it.
        print(key.name)
        target_name = key.name.replace("JPG", "jpg")
        if not bucket_new.get_key(target_name):
            key.get_contents_to_filename(folder_path + key.name)
            add_to_s3(bucket_new, target_name, folder_path + key.name)
    elif not os.path.exists(folder_path + key.name):
        # Folder-like key (no dot): mirror the directory locally so later
        # downloads into it succeed.
        os.makedirs(folder_path + key.name)
from dg.settings import ACCESS_KEY, SECRET_KEY
from libs.s3_utils import add_to_s3

# Old S3 account: source of the farmerbook images.
bucket_name = 'dg_farmerbook'
conn = S3Connection(S3_ACCESS_KEY, S3_SECRET_KEY)
bucket = conn.get_bucket(bucket_name)

# New S3 account: destination bucket, made publicly readable.
bucket_name_new = 'dg-farmerbook'
bucket_new = S3Connection(ACCESS_KEY, SECRET_KEY).create_bucket(bucket_name_new)
bucket_new.set_acl('public-read')

# Staging area on local disk for each object being migrated.
folder_path = MEDIA_ROOT + 'farmerbook/'
if not os.path.exists(folder_path):
    os.makedirs(folder_path)

buck_list = bucket.list()
for key in buck_list:
    local_path = folder_path + key.name
    if '.' in key.name:
        # Treat dotted names as files: download, then re-upload under the
        # lower-cased extension if the destination lacks it.
        print(key.name)
        dest_key = key.name.replace("JPG", "jpg")
        if not bucket_new.get_key(dest_key):
            key.get_contents_to_filename(local_path)
            add_to_s3(bucket_new, dest_key, local_path)
    elif not os.path.exists(local_path):
        # Dotless names are folder markers: create the matching local dir.
        os.makedirs(local_path)