Example #1
0
 def get_database(self, job):
     """Return a database backend matching the job's resource type.

     Connections are cached in ``self.database_connections`` keyed by
     resource, so repeated calls for the same resource reuse one handle.

     Args:
         job: object with a ``resource`` attribute identifying the cloud
             backend (EC2 or Flex).

     Returns:
         A DynamoDB or FlexDB handle for the job's resource.

     Raises:
         Exception: if ``job.resource`` is not a known cloud resource.
     """
     logging.debug("get_database() job.resource = {0}".format(job.resource))
     # Reuse a cached handle when one exists for this resource.
     if job.resource in self.database_connections:
         logging.debug("get_database() returning cached connection to {0}".format(job.resource))
         return self.database_connections[job.resource]

     # No cached handle: build the backend appropriate for the resource.
     if job.resource == self.EC2_CLOUD_RESOURCE:
         params = self.get_credentials()
         db = DynamoDB(access_key=params['EC2_ACCESS_KEY'],
                       secret_key=params['EC2_SECRET_KEY'])
     elif job.resource == self.FLEX_CLOUD_RESOURCE:
         params = self.get_credentials()
         db = FlexDB(password=params['flex_db_password'],
                     ip=params['queue_head_ip'])
     else:
         raise Exception("Unknown job.resource = '{0}'".format(job.resource))

     # Cache and return the new connection (one code path for both backends).
     self.database_connections[job.resource] = db
     logging.debug("get_database() returning new connection to {0}".format(job.resource))
     return db
Example #2
0
 def __create_dynamodb_stochss_table(self, ec2_access_key, ec2_secret_key):
     """Create the StochSS job-status table in DynamoDB.

     Args:
         ec2_access_key: AWS access key id used to authenticate.
         ec2_secret_key: AWS secret key used to authenticate.

     Returns:
         bool: True if the table was created, False otherwise.
     """
     database = DynamoDB(ec2_access_key, ec2_secret_key)
     created = database.createtable(JobDatabaseConfig.TABLE_NAME)
     if created:
         logging.debug("creating table {0}".format(JobDatabaseConfig.TABLE_NAME))
     else:
         logging.error("FAILED on creating table {0}".format(JobDatabaseConfig.TABLE_NAME))
     # Surface the outcome instead of silently discarding it; existing
     # callers that ignored the previous implicit None are unaffected.
     return created
Example #3
0
    def __init__(self, cli_jobs_config):
        """Initialize job-runner state from a CLI jobs config dict.

        Validates the configured output store and job-status DB store,
        fetches AWS credentials when either store is Amazon-backed,
        creates a uniquely-named S3 output bucket (up to 5 attempts),
        and opens the DynamoDB job-status database.

        Args:
            cli_jobs_config: dict with keys "machines", "jobs",
                "output_filename", "output_store", "job_status_db_store".

        Raises:
            UnsupportedError: if a configured store type is not supported.
            NotImplementedError: if a store type other than amazon_s3 /
                amazon_dynamodb is configured.
        """
        self.machines = cli_jobs_config["machines"]
        self.jobs = cli_jobs_config["jobs"]
        self.output_filename = cli_jobs_config["output_filename"]

        output_store = cli_jobs_config["output_store"]
        job_status_db_store = cli_jobs_config["job_status_db_store"]

        if output_store["type"] not in self.SUPPORTED_OUTPUT_STORES:
            raise UnsupportedError("Output store {0} not supported !".format(
                output_store["type"]))

        if job_status_db_store["type"] not in self.SUPPORTED_JOB_STATUS_DB_STORES:
            raise UnsupportedError(
                "Job Status DB store {0} not supported !".format(
                    job_status_db_store["type"]))

        # AWS credentials are needed whenever either store is Amazon-backed.
        if re.match('^amazon.*', output_store["type"]) or \
                re.match('^amazon.*', job_status_db_store["type"]):
            self.aws_credentials = get_aws_credentials()

        self.output_store_info = output_store

        if self.output_store_info["type"] == "amazon_s3":
            # S3 bucket names must be globally unique, so retry with a
            # fresh UUID suffix until creation succeeds (max 5 attempts).
            trial = 0
            s3helper = S3Helper()
            while trial < 5:
                s3_uuid = uuid.uuid4()
                self.output_store_info['bucket_name'] = "{0}-{1}".format(
                    self.output_store_info['bucket_name_prefix'], s3_uuid)
                if s3helper.make_s3_bucket(
                        self.output_store_info['bucket_name']):
                    logging.info('bucket name = {0}'.format(
                        self.output_store_info['bucket_name']))
                    break
                else:
                    self.output_store_info['bucket_name'] = None
                trial += 1

            if self.output_store_info['bucket_name'] is None:
                logging.error("Could not create S3 bucket!")
                # Exit non-zero so calling scripts see this as a failure;
                # the original exited with status 0, masking the error.
                sys.exit(1)
        else:
            raise NotImplementedError("Only Amazon S3 is supported!")

        self.job_status_db_store_info = job_status_db_store

        if self.job_status_db_store_info["type"] == "amazon_dynamodb":
            self.database = DynamoDB(
                secret_key=self.aws_credentials["AWS_SECRET_ACCESS_KEY"],
                access_key=self.aws_credentials["AWS_ACCESS_KEY_ID"])
        else:
            raise NotImplementedError("Only Amazon Dynamo DB is supported!")
Example #4
0
    def submit_cloud_task(self, params, agent_type=None, cost_replay=False, instance_type=None):
        """Submit a task to the active cloud backend (EC2 or Flex).

        Resolves the agent type, builds the matching database and storage
        agents, picks (or mints) a task id, and delegates execution to
        ``helper.execute_cloud_task``.

        Args:
            params: dict of task parameters; mutated in place with
                'resource' and 'bucketname' entries.
            agent_type: optional explicit agent type; defaults to the
                currently active one.
            cost_replay: when True, reuse params['cost_analysis_uuid']
                as the task id.
            instance_type: optional cloud instance type override.

        Returns:
            Whatever ``helper.execute_cloud_task`` returns.

        Raises:
            Exception: if no cloud resource is available, the agent type
                is unsupported, or EC2 credentials are missing/empty.
        """
        logging.debug('submit_cloud_task() params =\n{}\n\n'.format(pprint.pformat(params)))

        # Resolve the agent type: explicit argument, then the cached
        # active type, then probe running compute nodes for one.
        if agent_type is None:
            if self.active_agent_type is None:
                # Side effect: refreshes self.active_agent_type.
                self.isOneOrMoreComputeNodesRunning()
            if self.active_agent_type is not None:
                agent_type = self.active_agent_type
            else:
                raise Exception("No Cloud resources found")

        if agent_type not in JobConfig.SUPPORTED_AGENT_TYPES:
            raise Exception('Unsupported agent type {0}'.format(agent_type))

        credentials = self.get_credentials()

        if agent_type == AgentTypes.EC2:
            params['resource'] = self.EC2_CLOUD_RESOURCE
            params['bucketname'] = self.user_data.S3_bucket_name
            if 'EC2_ACCESS_KEY' not in credentials or credentials['EC2_ACCESS_KEY'] == '':
                raise Exception('EC2 Access Key is not valid!')
            if 'EC2_SECRET_KEY' not in credentials or credentials['EC2_SECRET_KEY'] == '':
                raise Exception('EC2 Secret Key is not valid!')
            ec2_access_key = credentials['EC2_ACCESS_KEY']
            ec2_secret_key = credentials['EC2_SECRET_KEY']
            logging.debug('ec2_access_key = {0}, ec2_secret_key = {1}'.format(ec2_access_key, ec2_secret_key))
            database = DynamoDB(ec2_access_key, ec2_secret_key)
            storage_agent = S3StorageAgent(bucket_name=self.user_data.S3_bucket_name,
                                           ec2_secret_key=ec2_secret_key,
                                           ec2_access_key=ec2_access_key)

        elif agent_type == AgentTypes.FLEX:
            params['resource'] = self.FLEX_CLOUD_RESOURCE
            params['bucketname'] = ''
            database = FlexDB(ip=credentials['queue_head_ip'],
                              password=credentials['flex_db_password'])
            flex_queue_head_machine = self.user_data.get_flex_queue_head_machine()
            # The key file lives under the queue head's remote home
            # directory, not at the local path stored in user_data.
            storage_agent = FlexStorageAgent(
                queue_head_ip=flex_queue_head_machine['ip'],
                queue_head_username=flex_queue_head_machine['username'],
                queue_head_keyfile=os.path.join(
                    '/home',
                    flex_queue_head_machine['username'],
                    FlexConfig.QUEUE_HEAD_KEY_DIR,
                    os.path.basename(flex_queue_head_machine['keyfile'])))
            ec2_access_key = None
            ec2_secret_key = None

        else:
            # Guard against SUPPORTED_AGENT_TYPES growing without this
            # dispatch being extended; previously such a value fell
            # through and raised NameError on 'database' below.
            raise Exception('Unsupported agent type {0}'.format(agent_type))

        # Reuse a caller-supplied task id (reruns / cost replay);
        # otherwise mint a fresh one.
        if 'rerun_uuid' in params and params['rerun_uuid'] is not None:
            task_id = params['rerun_uuid']
        elif cost_replay:
            task_id = params['cost_analysis_uuid']
        else:
            task_id = str(uuid.uuid4())

        logging.debug('submit_cloud_task: task_id = {}'.format(task_id))

        result = helper.execute_cloud_task(params=params, agent_type=agent_type,
                                           ec2_access_key=ec2_access_key,
                                           ec2_secret_key=ec2_secret_key,
                                           task_id=task_id, instance_type=instance_type,
                                           cost_replay=cost_replay,
                                           database=database,
                                           storage_agent=storage_agent)

        return result