def generate_and_upload(gauge_factory, config, logger):
    """Aggregate every tracked metric, render a JSONP report, upload it.

    Parameters:
        gauge_factory: callable mapping a metric name (and optional
            ``gauge_type``) to a gauge object exposing ``aggregate``.
        config: mapping with 'azure.account', 'azure.key',
            'azure.blob.container' and 'azure.blob.name' entries.
        logger: receives a single timing message when the upload is done.
    """
    start = datetime.datetime.now()

    twitter_followers = gauge_factory('twitter.followers')
    twitter_tweets = gauge_factory('twitter.tweets')
    fb_friends = gauge_factory('facebook.friends')
    foursq_checkins = gauge_factory('foursquare.checkins')
    klout_score = gauge_factory('klout.score')
    runkeeper_activities = gauge_factory('runkeeper.activities')
    runkeeper_calories = gauge_factory('runkeeper.calories_burned')
    runkeeper_weight = gauge_factory('runkeeper.weight')
    # BUG FIX: the gauge name contained a stray space
    # ('tmp102.te mperature'), so the wrong/nonexistent gauge was requested.
    tmp102_celsius = gauge_factory('tmp102.temperature', gauge_type='hourly')
    lastfm_listened = gauge_factory('lastfm.listened')
    jawbone_sleeps = gauge_factory('jawbone.sleeps')
    jawbone_steps = gauge_factory('jawbone.steps')
    jawbone_caffeine = gauge_factory('jawbone.caffeine')

    data = {}
    data_sources = [
        # (output key, gauge, days back, aggregator, postprocessors)
        ('twitter.followers', twitter_followers, 30, None,
         [zero_fill_daily, interpolators.linear]),
        ('twitter.tweets', twitter_tweets, 20, None, [zero_fill_daily]),
        ('facebook.friends', fb_friends, 180, monthly_max, None),
        ('foursquare.checkins', foursq_checkins, 14, None, [zero_fill_daily]),
        ('lastfm.listened', lastfm_listened, 14, None, [zero_fill_daily]),
        ('klout.score', klout_score, 30, weekly_max,
         [zero_fill_weekly, interpolators.linear]),
        ('runkeeper.calories', runkeeper_calories, 60, weekly_sum,
         [zero_fill_weekly]),
        ('runkeeper.activities', runkeeper_activities, 60, weekly_sum,
         [zero_fill_weekly]),
        ('runkeeper.weight', runkeeper_weight, 180, weekly_min,
         [zero_fill_weekly, interpolators.linear]),
        ('sleeps', jawbone_sleeps, 14, None,
         [zero_fill_daily, interpolators.linear]),
        ('steps', jawbone_steps, 14, None,
         [zero_fill_daily, interpolators.linear]),
        ('caffeine', jawbone_caffeine, 30, None, [zero_fill_daily]),
        ('tmp102.temperature', tmp102_celsius, 2.5, None, None)
    ]

    for ds in data_sources:
        data[ds[0]] = ds[1].aggregate(today_utc() - timedelta(days=ds[2]),
                                      aggregator=ds[3],
                                      post_processors=ds[4])

    report = {
        'generated': str(now_utc()),
        'data': data,
        'took': (datetime.datetime.now() - start).seconds
    }
    report_json = json.dumps(report, indent=4, default=json_date_serializer)
    report_content = '{0}({1})'.format(JSONP_CALLBACK_NAME, report_json)

    blob_service = BlobService(config['azure.account'], config['azure.key'])
    blob_service.create_container(config['azure.blob.container'])
    blob_service.set_container_acl(config['azure.blob.container'],
                                   x_ms_blob_public_access='container')
    blob_service.put_blob(config['azure.blob.container'],
                          config['azure.blob.name'], report_content,
                          'BlockBlob')

    took = (datetime.datetime.now() - start).seconds
    logger.info('Report generated and uploaded. Took {0} s.'.format(took))
import glob
import os
import time

from azure.storage import BlobService


def log_message(message):
    """Prepend a timestamped line to the device log file.

    NOTE(review): keeps the original sed-based prepend; a message containing
    sed metacharacters (/, &, newline) would break the command — confirm
    log entries stay plain ASCII, or switch to rewriting the file in Python.
    """
    logtime = time.strftime("%Y%m%d-%H%M%S")
    command = ("sed -i '1s/^/" + logtime + " " + message +
               "\\n/' /home/pi/selcuk/log.txt")
    os.system(command)


log_message("uploadmp3.py started")

blob_service = BlobService(account_name='account_name',
                           account_key='account_key')
# Create the container once and make its blobs publicly readable.
# BUG FIX: the original called create_container twice; the second call
# (with x_ms_blob_public_access) was redundant since set_container_acl
# already applies the public ACL.
blob_service.create_container('record')
blob_service.set_container_acl('record', x_ms_blob_public_access='container')

directory = "/home/pi/selcuk/mp3"
os.chdir(directory)
for mp3_name in glob.glob("*.mp3"):
    full_path = os.path.join(directory, mp3_name)
    blob_service.put_block_blob_from_path(
        'record',
        mp3_name,
        full_path,
        x_ms_blob_content_type='audio/mpeg3'
    )
    # BUG FIX: os.remove replaces os.system("rm " + name), which broke on
    # filenames containing spaces and was a shell-injection hazard.
    os.remove(full_path)
    # BUG FIX: the original built this log message but never wrote it.
    log_message(mp3_name + " uploaded to cloud and deleted from device")
class Command(BaseCommand): help = "Synchronizes static media to cloud files." option_list = BaseCommand.option_list + ( optparse.make_option('-w', '--wipe', action='store_true', dest='wipe', default=False, help="Wipes out entire contents of container first."), optparse.make_option('-t', '--test-run', action='store_true', dest='test_run', default=False, help="Performs a test run of the sync."), optparse.make_option('-c', '--container', dest='container', help="Override STATIC_CONTAINER."), ) # settings from azurite.settings ACCOUNT_NAME = AZURITE['ACCOUNT_NAME'] ACCOUNT_KEY = AZURITE['ACCOUNT_KEY'] STATIC_CONTAINER = AZURITE['STATIC_CONTAINER'] # paths DIRECTORY = os.path.abspath(settings.STATIC_ROOT) STATIC_URL = settings.STATIC_URL if not DIRECTORY.endswith('/'): DIRECTORY = DIRECTORY + '/' if STATIC_URL.startswith('/'): STATIC_URL = STATIC_URL[1:] local_object_names = [] create_count = 0 upload_count = 0 update_count = 0 skip_count = 0 delete_count = 0 service = None def handle(self, *args, **options): self.wipe = options.get('wipe') self.test_run = options.get('test_run') self.verbosity = int(options.get('verbosity')) if hasattr(options, 'container'): self.STATIC_CONTAINER = options.get('container') self.sync_files() def sync_files(self): self.service = BlobService(account_name=self.ACCOUNT_NAME, account_key=self.ACCOUNT_KEY) try: self.service.get_container_properties(self.STATIC_CONTAINER) except WindowsAzureMissingResourceError: self.service.create_container(self.STATIC_CONTAINER, x_ms_blob_public_access='blob') self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob') # if -w option is provided, wipe out the contents of the container if self.wipe: blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER)) if self.test_run: print "Wipe would delete %d objects." % blob_count else: print "Deleting %d objects..." 
% blob_count for blob in self.service.list_blobs(self.STATIC_CONTAINER): self.service.delete_blob(self.STATIC_CONTAINER, blob.name) # walk through the directory, creating or updating files on the cloud os.path.walk(self.DIRECTORY, self.upload_files, "foo") # remove any files on remote that don't exist locally self.delete_files() # print out the final tally to the cmd line self.update_count = self.upload_count - self.create_count print if self.test_run: print "Test run complete with the following results:" print "Skipped %d. Created %d. Updated %d. Deleted %d." % ( self.skip_count, self.create_count, self.update_count, self.delete_count) def upload_files(self, arg, dirname, names): # upload or skip items for item in names: file_path = os.path.join(dirname, item) if os.path.isdir(file_path): continue # Don't try to upload directories object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1] self.local_object_names.append(object_name) try: properties = self.service.get_blob_properties(self.STATIC_CONTAINER, object_name) except WindowsAzureMissingResourceError: properties = {} self.create_count += 1 cloud_datetime = None if 'last-modified' in properties: cloud_datetime = (properties['last-modified'] and datetime.datetime.strptime( properties['last-modified'], "%a, %d %b %Y %H:%M:%S %Z" ) or None) local_datetime = datetime.datetime.utcfromtimestamp( os.stat(file_path).st_mtime) if cloud_datetime and local_datetime < cloud_datetime: self.skip_count += 1 if self.verbosity > 1: print "Skipped %s: not modified." 
% object_name continue if not self.test_run: file_contents = open(file_path, 'r').read() content_type, encoding = mimetypes.guess_type(file_path) print "content-type", content_type print "encoding", encoding self.service.put_blob(self.STATIC_CONTAINER, object_name, file_contents, x_ms_blob_type='BlockBlob', x_ms_blob_content_type=content_type, content_encoding=encoding) # sync_headers(cloud_obj) self.upload_count += 1 if self.verbosity > 1: print "Uploaded", object_name def delete_files(self): # remove any objects in the container that don't exist locally for blob in self.service.list_blobs(self.STATIC_CONTAINER): if blob.name not in self.local_object_names: self.delete_count += 1 if self.verbosity > 1: print "Deleted %s" % blob.name if not self.test_run: self.service.delete_blob(self.STATIC_CONTAINER, blob.name)
class AzureBlobStorage(Storage):
    """Django storage backend that stores files in an Azure blob container."""

    def __init__(self, account='nyxstorage', container='pxo'):
        # Public base URI used to build the returned URL in _save().
        self.base_storage_uri = 'http://%s.blob.core.windows.net/%s/' % (
            account, container)
        self.blob_service = BlobService(
            account, get_env_variable('AZURE_BLOB_STORAGE_KEY'))
        self.container = container

    def _open(self, name, mode='rb'):
        # `mode` is accepted for Storage API compatibility; the blob is
        # always fetched whole and wrapped in a ContentFile.
        data = self.blob_service.get_blob(self.container, name)
        return ContentFile(data)

    def _save(self, name, content):
        # NOTE(review): only the last 35 characters of content.name are
        # kept; long names sharing a suffix will collide — confirm intended.
        _file = content.read()
        file_name = content.name[-35:]
        self.blob_service.put_blob(
            self.container, file_name, _file, x_ms_blob_type='BlockBlob')
        return self.base_storage_uri + file_name

    def create_container(self, container_name):
        """Create a container whose blobs are publicly readable."""
        result = self.blob_service.create_container(
            container_name, x_ms_blob_public_access='container')
        return result

    def delete(self, name):
        self.blob_service.delete_blob(self.container, name)

    def exists(self, name):
        """Return True when the named blob exists in the container.

        BUG FIX: the original used a bare ``except:``, which also swallowed
        KeyboardInterrupt/SystemExit; narrowed to ``Exception``.
        """
        try:
            self.blob_service.get_blob_properties(self.container, name)
        except Exception:
            return False
        else:
            return True

    def get_available_name(self, name):
        # Names are used as-is; an existing blob with the same name is
        # overwritten rather than renamed.
        return name

    def get_blobs(self):
        """Return the blob listing for the backend's container."""
        blobs = self.blob_service.list_blobs(self.container)
        return blobs

    def get_valid_name(self, name):
        return name

    def modified_time(self, name):
        # NOTE(review): raises TypeError if the 'x-ms-meta-modified_time'
        # metadata key is absent — confirm every blob is written with it.
        metadata = self.blob_service.get_blob_metadata(self.container, name)
        modified_time = float(metadata.get('x-ms-meta-modified_time'))
        return datetime.fromtimestamp(modified_time)

    def set_public_container(self, container_name):
        """Make an existing container's blobs publicly readable."""
        result = self.blob_service.set_container_acl(
            container_name, x_ms_blob_public_access='container')
        return result

    def size(self, name):
        properties = self.blob_service.get_blob_properties(
            self.container, name)
        return properties.get('content-length')

    def url(self, name):
        # List with the name as prefix and return the first match's URL.
        blob = self.blob_service.list_blobs(self.container, prefix=name)
        return blob.blobs[0].url
def generate_and_upload(gauge_factory, config, logger):
    """Collect every tracked metric, aggregate it and publish the report.

    Each gauge is obtained through ``gauge_factory``, aggregated over its
    own look-back window, serialized to JSON, wrapped in a JSONP callback
    and uploaded to the Azure blob named in ``config``. ``logger`` receives
    a single timing message at the end.
    """
    started_at = datetime.datetime.now()

    followers = gauge_factory('twitter.followers')
    tweets = gauge_factory('twitter.tweets')
    friends = gauge_factory('facebook.friends')
    checkins = gauge_factory('foursquare.checkins')
    klout = gauge_factory('klout.score')
    activities = gauge_factory('runkeeper.activities')
    calories = gauge_factory('runkeeper.calories_burned')
    weight = gauge_factory('runkeeper.weight')
    temperature = gauge_factory('tmp102.temperature', gauge_type='hourly')
    listened = gauge_factory('lastfm.listened')
    sleeps = gauge_factory('jawbone.sleeps')
    heartrate = gauge_factory('jawbone.resting_heartrate')
    steps = gauge_factory('jawbone.steps')
    caffeine = gauge_factory('jawbone.caffeine')

    # (output key, gauge, days back, aggregator, post-processors)
    specs = [
        ('twitter.followers', followers, 30, None,
         [zero_fill_daily, interpolators.linear]),
        ('twitter.tweets', tweets, 20, None, [zero_fill_daily]),
        ('facebook.friends', friends, 180, monthly_max, None),
        ('foursquare.checkins', checkins, 14, None, [zero_fill_daily]),
        ('lastfm.listened', listened, 14, None, [zero_fill_daily]),
        ('klout.score', klout, 30, weekly_max,
         [zero_fill_weekly, interpolators.linear]),
        ('runkeeper.calories', calories, 60, weekly_sum,
         [zero_fill_weekly]),
        ('runkeeper.activities', activities, 60, weekly_sum,
         [zero_fill_weekly]),
        ('runkeeper.weight', weight, 180, weekly_min,
         [zero_fill_weekly, interpolators.linear]),
        ('sleeps', sleeps, 14, None,
         [zero_fill_daily, interpolators.linear]),
        ('heartrate', heartrate, 21, None,
         [zero_fill_daily, interpolators.linear]),
        ('steps', steps, 14, None,
         [zero_fill_daily, interpolators.linear]),
        ('caffeine', caffeine, 30, None, [zero_fill_daily]),
        ('tmp102.temperature', temperature, 2.5, None, None),
    ]

    data = {}
    for key, gauge, days_back, aggregator, post in specs:
        since = today_utc() - timedelta(days=days_back)
        data[key] = gauge.aggregate(since, aggregator=aggregator,
                                    post_processors=post)

    report = {
        'generated': str(now_utc()),
        'data': data,
        'took': (datetime.datetime.now() - started_at).seconds
    }
    report_json = json.dumps(report, indent=4, default=json_date_serializer)
    report_content = '{0}({1})'.format(JSONP_CALLBACK_NAME, report_json)

    blob_service = BlobService(config['azure.account'], config['azure.key'])
    blob_service.create_container(config['azure.blob.container'])
    blob_service.set_container_acl(config['azure.blob.container'],
                                   x_ms_blob_public_access='container')
    blob_service.put_blob(config['azure.blob.container'],
                          config['azure.blob.name'], report_content,
                          'BlockBlob')

    took = (datetime.datetime.now() - started_at).seconds
    logger.info('Report generated and uploaded. Took {0} s.'.format(took))
class Command(BaseCommand): help = "Synchronizes static media to cloud files." option_list = BaseCommand.option_list + ( optparse.make_option( '-w', '--wipe', action='store_true', dest='wipe', default=False, help="Wipes out entire contents of container first."), optparse.make_option('-t', '--test-run', action='store_true', dest='test_run', default=False, help="Performs a test run of the sync."), optparse.make_option('-c', '--container', dest='container', help="Override STATIC_CONTAINER."), ) # settings from azurite.settings ACCOUNT_NAME = AZURITE['ACCOUNT_NAME'] ACCOUNT_KEY = AZURITE['ACCOUNT_KEY'] STATIC_CONTAINER = AZURITE['STATIC_CONTAINER'] # paths DIRECTORY = os.path.abspath(settings.STATIC_ROOT) STATIC_URL = settings.STATIC_URL if not DIRECTORY.endswith('/'): DIRECTORY = DIRECTORY + '/' if STATIC_URL.startswith('/'): STATIC_URL = STATIC_URL[1:] local_object_names = [] create_count = 0 upload_count = 0 update_count = 0 skip_count = 0 delete_count = 0 service = None def handle(self, *args, **options): self.wipe = options.get('wipe') self.test_run = options.get('test_run') self.verbosity = int(options.get('verbosity')) if hasattr(options, 'container'): self.STATIC_CONTAINER = options.get('container') self.sync_files() def sync_files(self): self.service = BlobService(account_name=self.ACCOUNT_NAME, account_key=self.ACCOUNT_KEY) try: self.service.get_container_properties(self.STATIC_CONTAINER) except WindowsAzureMissingResourceError: self.service.create_container(self.STATIC_CONTAINER, x_ms_blob_public_access='blob') self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob') # if -w option is provided, wipe out the contents of the container if self.wipe: blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER)) if self.test_run: print "Wipe would delete %d objects." % blob_count else: print "Deleting %d objects..." 
% blob_count for blob in self.service.list_blobs(self.STATIC_CONTAINER): self.service.delete_blob(self.STATIC_CONTAINER, blob.name) # walk through the directory, creating or updating files on the cloud os.path.walk(self.DIRECTORY, self.upload_files, "foo") # remove any files on remote that don't exist locally self.delete_files() # print out the final tally to the cmd line self.update_count = self.upload_count - self.create_count print if self.test_run: print "Test run complete with the following results:" print "Skipped %d. Created %d. Updated %d. Deleted %d." % ( self.skip_count, self.create_count, self.update_count, self.delete_count) def upload_files(self, arg, dirname, names): # upload or skip items for item in names: file_path = os.path.join(dirname, item) if os.path.isdir(file_path): continue # Don't try to upload directories object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1] self.local_object_names.append(object_name) try: properties = self.service.get_blob_properties( self.STATIC_CONTAINER, object_name) except WindowsAzureMissingResourceError: properties = {} self.create_count += 1 cloud_datetime = None if 'last-modified' in properties: cloud_datetime = ( properties['last-modified'] and datetime.datetime.strptime(properties['last-modified'], "%a, %d %b %Y %H:%M:%S %Z") or None) local_datetime = datetime.datetime.utcfromtimestamp( os.stat(file_path).st_mtime) if cloud_datetime and local_datetime < cloud_datetime: self.skip_count += 1 if self.verbosity > 1: print "Skipped %s: not modified." 
% object_name continue if not self.test_run: file_contents = open(file_path, 'r').read() content_type, encoding = mimetypes.guess_type(file_path) self.service.put_blob(self.STATIC_CONTAINER, object_name, file_contents, x_ms_blob_type='BlockBlob', x_ms_blob_content_type=content_type, content_encoding=encoding) # sync_headers(cloud_obj) self.upload_count += 1 if self.verbosity > 1: print "Uploaded", object_name def delete_files(self): # remove any objects in the container that don't exist locally for blob in self.service.list_blobs(self.STATIC_CONTAINER): if blob.name not in self.local_object_names: self.delete_count += 1 if self.verbosity > 1: print "Deleted %s" % blob.name if not self.test_run: self.service.delete_blob(self.STATIC_CONTAINER, blob.name)