Example No. 1
import sys

import boto


def main():
    # this should come from Django later
    ec2_access_key = 'svallero'
    ec2_secret_key = '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'
    #ec2_access_key = 'oneadmin'
    #ec2_secret_key = 'd2154097e7420fb39d8b101dd521cc29717772eb'
    ssh_key = 'sara'
    master_image = 'ami-00000984'
    master_flavour = 'm1.small'
    master_userdata = 'echo "pippo" > /root/pippo.txt'
    worker_flavour = 'm1.large'
    worker_userdata = 'echo "pippo" > /root/pippo.txt'
    shared_secret = 'pippo'
    check_queue_every = 15
    min_job_waiting_time = 100
    jobs_per_vm = 6
    check_vms_every = 45
    kill_idle_after = 3600
    min_num_workers = 2
    max_num_workers = 10
    vm_deploy_time = 350
    ###########################################################################

    try:
        import logging
        logging.getLogger('boto').setLevel(logging.DEBUG)
        logging.getLogger('urllib3').setLevel(logging.DEBUG)
        boto.set_file_logger('boto', 'boto.log')
        conn = boto.connect_ec2_endpoint(
            "https://one-master.to.infn.it/ec2api",
            aws_access_key_id=str(ec2_access_key),
            aws_secret_access_key=str(ec2_secret_key),
            validate_certs=True,
            #https_connection_factory=factory,
            #is_secure=True,
            debug=10)
        #    conn.run_instances(master_image,instance_type=master_flavour,key_name=ssh_key)
        print(conn.get_params())
        #conn.run_instances(master_image,instance_type=master_flavour)
        conn.get_all_reservations()
        #print ("Error: ",conn.ResponseError)
        #reservations = conn.get_all_reservations(dry_run = True)
        #reservations = conn.get_only_instances()


        #    inst=reservations[0].instances
        #return inst[-1]
        #print inst
        #print (boto.exception.EC2ResponseError)
        #return 1
    except:
        print("Ciccia!")
        errore = sys.exc_info()[0]
        print("Error: %s" % errore)
        raise
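
Each example on this page uses boto.set_file_logger(name, filepath, level) to send boto's internal log messages to a file instead of the console. The sketch below shows that pattern in its minimal, self-contained form; the logger name, log file path, and the assumption that credentials come from the environment or a boto config file are illustrative, not taken from any of the projects listed here.

import logging

import boto

# Route boto's own log output to example.log (assumed file name).
boto.set_file_logger('example', 'example.log', logging.DEBUG)

# Any subsequent boto call is logged to that file; credentials are assumed
# to come from the environment or a boto config file.
s3 = boto.connect_s3()
for bucket in s3.get_all_buckets():
    print(bucket.name)
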
Example No. 2
 def __init__(self, download_path, bucket_name, num_processes=2,
              log_file=None, log_level=logging.INFO):
     self.download_path = download_path
     self.bucket_name = bucket_name
     self.num_processes = num_processes
     if log_file:
         boto.set_file_logger('boto-downloader', log_file, log_level)
     else:
         boto.set_stream_logger('boto-downloader', log_level)
     self.task_queue = multiprocessing.JoinableQueue()
     self.s3 = boto.connect_s3()
     self.bucket = self.s3.lookup(self.bucket_name)
     self.n_tasks = 0
Example No. 3
 def __init__(self, index_path, bucket_name, num_processes=2,
              log_file=None, log_level=logging.INFO):
     self.index_path = index_path
     self.bucket_name = bucket_name
     self.num_processes = num_processes
     if log_file:
         boto.set_file_logger('pynas-uploader', log_file, log_level)
     else:
         boto.set_stream_logger('pynas-uploader', log_level)
     self.task_queue = multiprocessing.JoinableQueue()
     self.status_queue = multiprocessing.Queue()
     self.s3 = boto.connect_s3()
     self.bucket = self.s3.lookup(self.bucket_name)
     self.index = pynas.index.Index(index_path)
     self.n_tasks = 0
Example No. 4
 def __init__(self):
     ScriptBase.__init__(self)
     self.wdir = boto.config.get('Pyami', 'working_dir')
     self.log_file = '%s.log' % self.instance_id
     self.log_path = os.path.join(self.wdir, self.log_file)
     boto.set_file_logger(self.name, self.log_path)
     self.src_name = boto.config.get(self.name, 'src_bucket')
     self.dst_name = boto.config.get(self.name, 'dst_bucket')
     self.replace = boto.config.getbool(self.name, 'replace_dst', True)
     s3 = boto.connect_s3()
     self.src = s3.lookup(self.src_name)
     if not self.src:
         boto.log.error('Source bucket does not exist: %s' % self.src_name)
     dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
     if dest_access_key:
         dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
         s3 = boto.connect_s3(dest_access_key, dest_secret_key)
     self.dst = s3.lookup(self.dst_name)
     if not self.dst:
         self.dst = s3.create_bucket(self.dst_name)
Example No. 5
 def __init__(self):
     super(CopyBot, self).__init__()
     self.wdir = boto.config.get('Pyami', 'working_dir')
     self.log_file = '%s.log' % self.instance_id
     self.log_path = os.path.join(self.wdir, self.log_file)
     boto.set_file_logger(self.name, self.log_path)
     self.src_name = boto.config.get(self.name, 'src_bucket')
     self.dst_name = boto.config.get(self.name, 'dst_bucket')
     self.replace = boto.config.getbool(self.name, 'replace_dst', True)
     s3 = boto.connect_s3()
     self.src = s3.lookup(self.src_name)
     if not self.src:
         boto.log.error('Source bucket does not exist: %s' % self.src_name)
     dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
     if dest_access_key:
         dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
         s3 = boto.connect_s3(dest_access_key, dest_secret_key)
     self.dst = s3.lookup(self.dst_name)
     if not self.dst:
         self.dst = s3.create_bucket(self.dst_name)
Example No. 6
File: sonofmmm.py Project: 10sr/hue
 def __init__(self, config_file=None):
     super(SonOfMMM, self).__init__(config_file)
     self.log_file = '%s.log' % self.instance_id
     self.log_path = os.path.join(self.working_dir, self.log_file)
     boto.set_file_logger(self.name, self.log_path)
     if self.sd.has_option('ffmpeg_args'):
         self.command = '/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
     else:
         self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
     self.output_mimetype = self.sd.get('output_mimetype')
     if self.sd.has_option('output_ext'):
         self.output_ext = self.sd.get('output_ext')
     else:
         self.output_ext = mimetypes.guess_extension(self.output_mimetype)
     self.output_bucket = self.sd.get_obj('output_bucket')
     self.input_bucket = self.sd.get_obj('input_bucket')
     # check to see if there are any messages in the queue
     # if not, create messages for all files in input_bucket
     m = self.input_queue.read(1)
     if not m:
         self.queue_files()
Example No. 7
 def __init__(self, config_file=None):
     Service.__init__(self, config_file)
     self.log_file = "%s.log" % self.instance_id
     self.log_path = os.path.join(self.working_dir, self.log_file)
     boto.set_file_logger(self.name, self.log_path)
     if self.sd.has_option("ffmpeg_args"):
         self.command = "/usr/local/bin/ffmpeg " + self.sd.get("ffmpeg_args")
     else:
         self.command = "/usr/local/bin/ffmpeg -y -i %s %s"
     self.output_mimetype = self.sd.get("output_mimetype")
     if self.sd.has_option("output_ext"):
         self.output_ext = self.sd.get("output_ext")
     else:
         self.output_ext = mimetypes.guess_extension(self.output_mimetype)
     self.output_bucket = self.sd.get_obj("output_bucket")
     self.input_bucket = self.sd.get_obj("input_bucket")
     # check to see if there are any messages in the queue
     # if not, create messages for all files in input_bucket
     m = self.input_queue.read(1)
     if not m:
         self.queue_files()
Example No. 8
 def __init__(self, config_file=None):
     super(SonOfMMM, self).__init__(config_file)
     self.log_file = '%s.log' % self.instance_id
     self.log_path = os.path.join(self.working_dir, self.log_file)
     boto.set_file_logger(self.name, self.log_path)
     if self.sd.has_option('ffmpeg_args'):
         self.command = '/usr/local/bin/ffmpeg ' + self.sd.get(
             'ffmpeg_args')
     else:
         self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
     self.output_mimetype = self.sd.get('output_mimetype')
     if self.sd.has_option('output_ext'):
         self.output_ext = self.sd.get('output_ext')
     else:
         self.output_ext = mimetypes.guess_extension(self.output_mimetype)
     self.output_bucket = self.sd.get_obj('output_bucket')
     self.input_bucket = self.sd.get_obj('input_bucket')
     # check to see if there are any messages in the queue
     # if not, create messages for all files in input_bucket
     m = self.input_queue.read(1)
     if not m:
         self.queue_files()
Example No. 9
    def run_scripts(self):
        scripts = config.get('Pyami', 'scripts')
        if scripts:
            for script in scripts.split(','):
                script = script.strip(" ")
                try:
                    pos = script.rfind('.')
                    if pos > 0:
                        mod_name = script[0:pos]
                        cls_name = script[pos+1:]
                        cls = find_class(mod_name, cls_name)
                        boto.log.info('Running Script: %s' % script)
                        s = cls()
                        s.main()
                    else:
                        boto.log.warning('Trouble parsing script: %s' % script)
                except Exception as e:
                    boto.log.exception('Problem Running Script: %s. Startup process halting.' % script)
                    raise e

    def main(self):
        self.run_scripts()
        self.notify('Startup Completed for %s' % config.get('Instance', 'instance-id'))

if __name__ == "__main__":
    if not config.has_section('loggers'):
        boto.set_file_logger('startup', '/var/log/boto.log')
    sys.path.append(config.get('Pyami', 'working_dir'))
    su = Startup()
    su.main()
Example No. 10
    def load_packages(self):
        package_str = boto.config.get('Pyami', 'packages')
        if package_str:
            packages = package_str.split(',')
            for package in packages:
                package = package.strip()
                if package.startswith('s3:'):
                    package = self.fetch_s3_file(package)
                if package:
                    # if the "package" is really a .py file, it doesn't have to
                    # be installed, just being in the working dir is enough
                    if not package.endswith('.py'):
                        self.run('easy_install -Z %s' % package,
                                 exit_on_error=False)

    def main(self):
        self.create_working_dir()
        self.load_boto()
        self.load_packages()
        self.notify('Bootstrap Completed for %s' %
                    boto.config.get_instance('instance-id'))


if __name__ == "__main__":
    # because bootstrap starts before any logging configuration can be loaded from
    # the boto config files, we will manually enable logging to /var/log/boto.log
    boto.set_file_logger('bootstrap', '/var/log/boto.log')
    bs = Bootstrap()
    bs.main()
Example No. 11
                script = script.strip(" ")
                try:
                    pos = script.rfind('.')
                    if pos > 0:
                        mod_name = script[0:pos]
                        cls_name = script[pos + 1:]
                        cls = find_class(mod_name, cls_name)
                        boto.log.info('Running Script: %s' % script)
                        s = cls()
                        s.main()
                    else:
                        boto.log.warning('Trouble parsing script: %s' % script)
                except Exception as e:
                    boto.log.exception(
                        'Problem Running Script: %s. Startup process halting.'
                        % script)
                    raise e

    def main(self):
        self.run_scripts()
        self.notify('Startup Completed for %s' %
                    config.get('Instance', 'instance-id'))


if __name__ == "__main__":
    if not config.has_section('loggers'):
        boto.set_file_logger('startup', '/var/log/boto.log')
    sys.path.append(config.get('Pyami', 'working_dir'))
    su = Startup()
    su.main()
Example No. 12
            boto.log.exception('Problem Retrieving file: %s' % s3_file)
            path = None
        return path

    def load_packages(self):
        package_str = boto.config.get('Pyami', 'packages')
        if package_str:
            packages = package_str.split(',')
            for package in packages:
                package = package.strip()
                if package.startswith('s3:'):
                    package = self.fetch_s3_file(package)
                if package:
                    # if the "package" is really a .py file, it doesn't have to
                    # be installed, just being in the working dir is enough
                    if not package.endswith('.py'):
                        self.run('easy_install -Z %s' % package, exit_on_error=False)

    def main(self):
        self.create_working_dir()
        self.load_boto()
        self.load_packages()
        self.notify('Bootstrap Completed for %s' % boto.config.get_instance('instance-id'))

if __name__ == "__main__":
    # because bootstrap starts before any logging configuration can be loaded from
    # the boto config files, we will manually enable logging to /var/log/boto.log
    boto.set_file_logger('bootstrap', '/var/log/boto.log')
    bs = Bootstrap()
    bs.main()
Example No. 13
# Set environment variables to secrets:
# AWS_ACCESS_KEY_ID
# AWS_SECRET_ACCESS_KEY
from os import environ
import logging
import boto

BUCKET_NAME = 'wpmedia.vivelohoy.com'

if 'AWS_ACCESS_KEY_ID' not in environ or 'AWS_SECRET_ACCESS_KEY' not in environ:
	print 'Error: Environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY not set.'
	exit(1)

boto.set_file_logger('boto-copier', 's3_copy.log', logging.INFO)

s3 = boto.connect_s3()
bucket = s3.get_bucket(BUCKET_NAME)

for src_key in bucket.list():
	if 'wp-content/' not in src_key.name:
		new_key = 'wp-content/' + src_key.name
		src_key.copy(BUCKET_NAME, new_key)
		print '{src} => {dest}'.format(src=src_key.name, dest=new_key)
Example No. 14
        else:
            print value
            sys.exit(1)

    return excepthook

# It would be nice to get rid of all of the print statements in favor
# of log messages; for now I'm just doing this to log object creation
objLog = '/tmp/activity.log'
log = logging.getLogger('objLog')
activity_handler = logging.FileHandler(objLog)
activity_handler.setFormatter(logging.Formatter('%(message)s'))
log.addHandler(activity_handler)
log.setLevel(logging.INFO)

boto.set_file_logger('boto', '/tmp/boto.log', level=logging.DEBUG)

arange = lambda x, y: [ chr(z) for z in range(ord(x), ord(y)) ]
rand_from_list = lambda x: x[random.randint(0, len(x)-1)]
random.seed()

euca_version = open('/etc/eucalyptus/eucalyptus-version', 'r').read().replace('eee-', '').strip()
ec2timeout = "60"
mode = "system"
host = "localhost"
objTypes = ['keypair', 'object', 'bucket', 'volume', 'snapshot', 'rule', 'group']
userlist = {}
accounts = {}
debug = 1
local_ips = set()
activity_log_version = 0