Example 1
    def run(self):
        self.running = True
        pid = self.get_pid()
        proc = None
        # check pid file and lookup process
        if pid and self.process_exist(pid):
            servo.log.debug('Existing SWF worker process is found (%d)' % pid)
            self.kill_pid(pid)
        try:
            # if no process, start a new process
            # prepare arguments to the process
            swf_url = config.get_swf_service_url()
            if not swf_url:
                raise Exception('Simple workflow service url is not found')
            swf_url = 'http://%s:%d/' % (swf_url, config.get_webservice_port())
            instance_id = config.get_servo_id()
            if not instance_id:
                raise Exception('Instance ID is not found')

            cmdline = 'load-balancer-servo-workflow --logdir %s --loglevel %s -e %s -d %s -l %s' % (
                LOG_DIR, LOG_LEVEL, swf_url, SWF_DOMAIN, instance_id)
            servo.log.debug('Running SWF worker: %s' % cmdline)
            proc = self.execute_with_popen(cmdline)
            pid = proc.pid
            self.write_pid(pid)
        except Exception as err:
            servo.log.error('Failed to run SWF worker: %s' % err)
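The excerpt above delegates process creation to a helper, execute_with_popen, that is not shown. A minimal standalone sketch of such a helper, assuming it simply splits the command line and forks a child process with the standard subprocess module (the function name mirrors the method used above; the real implementation may differ):

import shlex
import subprocess

def execute_with_popen(cmdline):
    # Split the command line into an argument list and start the SWF
    # worker as a child process; the caller records proc.pid afterwards.
    return subprocess.Popen(shlex.split(cmdline))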
Example 2
    def do_emit(self, tmpfile_path=None):
        if not tmpfile_path:
            return
        aws_access_key_id = config.get_access_key_id()
        aws_secret_access_key = config.get_secret_access_key()
        security_token = config.get_security_token()
        conn = boto.connect_s3(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            security_token=security_token,
            is_secure=False,
            port=config.get_webservice_port(),
            path='/services/objectstorage',
            host=config.get_objectstorage_service_host(),
            calling_format='boto.s3.connection.OrdinaryCallingFormat')
        if not conn:
            raise Exception('Could not connect to object storage (S3) service')

        key_name = self.generate_log_file_name()
        bucket = conn.get_bucket(self.bucket_name, validate=False)
        k = Key(bucket)
        k.key = key_name
        k.set_contents_from_filename(tmpfile_path,
                                     policy='bucket-owner-full-control')
        servo.log.debug(
            'Access logs were emitted successfully: s3://%s/%s' %
            (urllib2.quote(self.bucket_name), urllib2.quote(key_name)))
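do_emit expects a path to a file that already contains the batched access-log lines. A hypothetical caller might stage the lines in a temporary file and pass its path along, roughly as below (the staging logic and function name are assumptions, not part of the project code):

import os
import tempfile

def emit_access_logs(emitter, log_lines):
    # Write the pending access-log lines to a temporary file, upload it
    # through do_emit, then remove the staging file.
    fd, tmpfile_path = tempfile.mkstemp(prefix='accesslog-')
    try:
        with os.fdopen(fd, 'w') as f:
            f.write('\n'.join(log_lines))
        emitter.do_emit(tmpfile_path=tmpfile_path)
    finally:
        os.unlink(tmpfile_path)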
Example 3
    def run(self):
        self.running = True
        pid = self.get_pid()
        proc = None
        # check pid file and lookup process
        if pid and self.process_exist(pid):
            servo.log.debug('Existing SWF worker process is found (%d)' % pid)
            self.kill_pid(pid)
        try:
            # if no process, start a new Java process
            # prepare arguments to the process
            swf_url = config.get_swf_service_url()
            if not swf_url:
                raise Exception('Simple workflow service url is not found')
            swf_url = 'http://%s:%d/' % (swf_url, config.get_webservice_port())
            instance_id = config.get_servo_id()
            if not instance_id:
                raise Exception('Instance ID is not found')
            worker_jar = self.lookup_worker_jar()
            if not worker_jar:
                raise Exception('No worker jar is found')

            cmdline = 'java -cp .:%s/* %s --logdir %s --logappender cloud-debug-file --loglevel %s -d %s -e %s --jar %s --classes %s -l %s' % (
                DIR_LIBRARIES, CLIENT_CLASS, LOG_DIR, LOG_LEVEL, SWF_DOMAIN,
                swf_url, worker_jar, WORKER_CLASSES, instance_id)
            servo.log.debug('Running SWF worker: %s' % cmdline)
            proc = self.execute_with_popen(cmdline)
            pid = proc.pid
            self.write_pid(pid)
        except Exception as err:
            servo.log.error('Failed to run SWF worker: %s' % err)
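lookup_worker_jar is another helper that the excerpt does not show. A minimal sketch, assuming the worker jar is located by globbing a known library directory (the directory path and glob pattern are illustrative assumptions only):

import glob
import os

WORKER_JAR_DIR = '/usr/share/load-balancer-servo'  # assumed location

def lookup_worker_jar():
    # Return the first workflow-worker jar found, or None if none exists
    jars = sorted(glob.glob(os.path.join(WORKER_JAR_DIR, '*workflow*.jar')))
    return jars[0] if jars else None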
Example 4
    def run(self):
        self.running = True
        pid = self.get_pid()
        proc = None
        # check pid file and lookup process
        if pid and self.process_exist(pid):
            servo.log.debug('Existing SWF worker process is found (%d)' % pid)
            self.kill_pid(pid)
        try:
            # if no process, start a new Java process
            # prepare arguments to the process
            swf_url = config.get_swf_service_url()
            if not swf_url:
                raise Exception('Simple workflow service url is not found')
            swf_url = 'http://%s:%d/' % (swf_url, config.get_webservice_port())
            instance_id = config.get_servo_id()
            if not instance_id:
                raise Exception('Instance ID is not found')
            worker_jar = self.lookup_worker_jar()
            if not worker_jar:
                raise Exception('No worker jar is found')

            cmdline = 'java -cp .:%s/* %s --logdir %s --logappender cloud-debug-file --loglevel %s -d %s -e %s --jar %s --classes %s -l %s' % (
                DIR_LIBRARIES, CLIENT_CLASS, LOG_DIR, LOG_LEVEL, SWF_DOMAIN,
                swf_url, worker_jar, WORKER_CLASSES, instance_id)
            servo.log.debug('Running SWF worker: %s' % cmdline)
            proc = self.execute_with_popen(cmdline)
            pid = proc.pid
            self.write_pid(pid)
        except Exception as err:
            servo.log.error('Failed to run SWF worker: %s' % err)
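The pid-file helpers used in these run() excerpts (get_pid, write_pid, process_exist, kill_pid) are also not shown. A standalone sketch of how they could work, assuming a single pid file and POSIX signals; the file location is an illustrative assumption:

import os
import signal

PID_FILE = '/var/run/load-balancer-servo/swf-worker.pid'  # assumed path

def get_pid():
    # Return the recorded worker pid, or None if the pid file is absent or invalid
    try:
        with open(PID_FILE) as f:
            return int(f.read().strip())
    except (IOError, ValueError):
        return None

def write_pid(pid):
    # Record the pid of the newly started worker process
    with open(PID_FILE, 'w') as f:
        f.write(str(pid))

def process_exist(pid):
    # Signal 0 performs no action; it only checks that the process exists
    try:
        os.kill(pid, 0)
        return True
    except OSError:
        return False

def kill_pid(pid):
    # Terminate a stale worker before a replacement is started
    os.kill(pid, signal.SIGTERM)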
Example 5
def connect_euare(host_name=None,
                  port=config.get_webservice_port(),
                  path="services/Euare",
                  aws_access_key_id=None,
                  aws_secret_access_key=None,
                  security_token=None,
                  **kwargs):
    return EucaEuareConnection(host=config.get_euare_service_url(),
                               port=port,
                               path=path,
                               aws_access_key_id=aws_access_key_id,
                               aws_secret_access_key=aws_secret_access_key,
                               security_token=security_token,
                               **kwargs)
Example 6
def connect_elb(host_name=None,
                port=config.get_webservice_port(),
                cluster=None,
                path="services/LoadBalancing",
                aws_access_key_id=None,
                aws_secret_access_key=None,
                security_token=None,
                **kwargs):
    region = RegionInfo(name=cluster, endpoint=config.get_elb_service_url())

    return EucaELBConnection(region=region,
                             port=port,
                             path=path,
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key,
                             security_token=security_token,
                             **kwargs)
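A hypothetical call site for connect_elb, assuming the caller reads its credentials from the same config module used in the other examples and that connect_elb is importable from the servo package; the cluster name is purely illustrative:

conn = connect_elb(cluster='cluster-1',  # illustrative cluster name
                   aws_access_key_id=config.get_access_key_id(),
                   aws_secret_access_key=config.get_secret_access_key(),
                   security_token=config.get_security_token())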
Example 7
def connect_euare(
    host_name=None,
    port=config.get_webservice_port(),
    path="services/Euare",
    aws_access_key_id=None,
    aws_secret_access_key=None,
    security_token=None,
    **kwargs
):
    return EucaEuareConnection(
        host=config.get_euare_service_url(),
        port=port,
        path=path,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        security_token=security_token,
        **kwargs
    )
Example 8
    def do_emit(self, tmpfile_path=None):
        if not tmpfile_path:
            return
        aws_access_key_id = config.get_access_key_id()
        aws_secret_access_key = config.get_secret_access_key()
        security_token = config.get_security_token()
        conn = boto.connect_s3(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            security_token=security_token,
            is_secure=False,
            port=config.get_webservice_port(),
            path='/services/objectstorage',
            host=config.get_objectstorage_service_host(),
            calling_format='boto.s3.connection.OrdinaryCallingFormat')
        if not conn:
            raise Exception('Could not connect to object storage (S3) service')

        key_name = self.generate_log_file_name()
        bucket = conn.get_bucket(self.bucket_name, validate=False)
        k = Key(bucket)
        k.key = key_name
        k.set_contents_from_filename(tmpfile_path,
                                     policy='bucket-owner-full-control')
        servo.log.debug(
            'Access logs were emitted successfully: s3://%s/%s' %
            (urllib2.quote(self.bucket_name), urllib2.quote(key_name)))