Example #1
def build_executor_stack(
    processor,
    cluster_executor,
    taskproc_config,
    cluster,
    region,
):
    """ Executor stack consists of:
    1. Cluster Executor (e.g. MesosExecutor)
    2. LoggingExecutor
    3. StatefulExecutor
    """
    # logging executor
    task_logging_executor = processor.executor_from_config(
        provider='logging',
        provider_config={
            'downstream_executor': cluster_executor,
        },
    )
    # stateful executor
    StatefulExecutor = processor.executor_cls(provider='stateful')
    stateful_executor = StatefulExecutor(
        downstream_executor=task_logging_executor,
        persister=DynamoDBPersister(
            table_name=f"taskproc_events_{cluster}",
            session=create_boto_session(taskproc_config, region),
            endpoint_url=taskproc_config.get('dynamodb_endpoint'),
        ),
    )
    return stateful_executor
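The listing above only assembles the stack. A minimal usage sketch (not from the original project) of driving the returned executor, reusing the Sync runner and TASK_CONFIG_INTERFACE pattern from Example #3 below; processor, mesos_executor and taskproc_config are assumed to already exist, and the cluster/region literals are placeholders:

executor = build_executor_stack(
    processor=processor,
    cluster_executor=mesos_executor,
    taskproc_config=taskproc_config,
    cluster='testcluster',   # placeholder
    region='us-west-1',      # placeholder
)
TaskConfig = mesos_executor.TASK_CONFIG_INTERFACE
runner = Sync(executor=executor)
runner.run(TaskConfig(image='ubuntu:14.04', cmd='/bin/true'))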
Example #2
@pytest.fixture
def persister(mocker):
    # Fake boto session: client() and resource() calls return mocks, so
    # DynamoDBPersister never talks to a real DynamoDB endpoint.
    mock_session = mocker.Mock()
    mock_session.client.return_value = []

    mock_resource = mocker.Mock()
    mock_resource.Table.return_value = mocker.Mock()
    mock_session.resource.return_value = mock_resource
    persister = DynamoDBPersister(table_name='foo', session=mock_session)
    return persister
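A hedged sketch of a test that consumes the fixture (illustrative, not the project's actual test); it only checks that the fixture hands back a DynamoDBPersister wired to the mocked session:

def test_persister_fixture_builds_a_persister(persister):
    # No real DynamoDB endpoint is contacted: the session inside is a mock.
    assert isinstance(persister, DynamoDBPersister)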
Example #3
# The task_processing import paths below are assumed from that project's
# layout; adjust them if your installation differs.
import os

from boto3 import session
from botocore.exceptions import ClientError

from task_processing.plugins.persistence.dynamodb_persistence import (
    DynamoDBPersister,
)
from task_processing.runners.sync import Sync
from task_processing.task_processor import TaskProcessor


def main():
    mesos_address = os.getenv('MESOS', 'mesosmaster:5050')
    with open('./examples/cluster/secret') as f:
        secret = f.read().strip()

    processor = TaskProcessor()
    for p in ['mesos', 'stateful']:
        processor.load_plugin(provider_module='task_processing.plugins.' + p)
    mesos_executor = processor.executor_from_config(
        provider='mesos_task',
        provider_config={
            'secret': secret,
            'mesos_address': mesos_address,
            'role': 'taskproc',
        },
    )

    s = session.Session(region_name='foo',
                        aws_access_key_id='foo',
                        aws_secret_access_key='bar')
    dynamo_address = os.getenv('DYNAMO', 'http://dynamodb:5050')
    client = s.client(
        service_name='dynamodb',
        endpoint_url=dynamo_address,
    )
    # create_table() is a helper defined elsewhere in the example (a hedged
    # sketch follows this listing); a ClientError here typically means the
    # table already exists.
    try:
        create_table(client)
    except ClientError:
        pass

    executor = processor.executor_from_config(
        provider='stateful',
        provider_config={
            'downstream_executor': mesos_executor,
            'persister': DynamoDBPersister(
                table_name='events',
                endpoint_url=dynamo_address,
                session=s,
            ),
        },
    )
    runner = Sync(executor=executor)
    tasks = set()
    TaskConfig = mesos_executor.TASK_CONFIG_INTERFACE
    for _ in range(1, 2):  # launches a single task; widen the range for more
        task_config = TaskConfig(image='ubuntu:14.04', cmd='/bin/sleep 2')
        tasks.add(task_config.task_id)
        runner.run(task_config)
        print(executor.status(task_config.task_id))
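Example #3 calls a create_table() helper that is not shown. A minimal sketch under an assumed schema (a single string hash key named task_id; the real example may define a different one):

def create_table(client):
    # Assumed schema: one string hash key called 'task_id'.
    return client.create_table(
        TableName='events',
        KeySchema=[{'AttributeName': 'task_id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[
            {'AttributeName': 'task_id', 'AttributeType': 'S'},
        ],
        ProvisionedThroughput={
            'ReadCapacityUnits': 5,
            'WriteCapacityUnits': 5,
        },
    )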
Example #4
def build_executor_stack(
    # TODO: rename to registry?
    processor,
    service,
    instance,
    cluster,
    role,
    pool,
    # TODO: move run_id into task identifier?
    run_id,
    system_paasta_config,
    framework_staging_timeout,
):

    cluster_fqdn = system_paasta_config.get_cluster_fqdn_format().format(
        cluster=cluster)
    mesos_address = '{}:{}'.format(
        mesos_tools.find_mesos_leader(cluster_fqdn),
        mesos_tools.MESOS_MASTER_PORT,
    )

    # TODO: implement DryRunExecutor?
    taskproc_config = system_paasta_config.get_taskproc()

    MesosExecutor = processor.executor_cls('mesos')
    mesos_executor = MesosExecutor(
        role=role,
        pool=pool,
        principal=taskproc_config.get('principal'),
        secret=taskproc_config.get('secret'),
        mesos_address=mesos_address,
        framework_name="paasta-remote {} {} {}".format(
            compose_job_id(service, instance),
            datetime.utcnow().strftime('%Y%m%d%H%M%S%f'),
            run_id,
        ),
        framework_staging_timeout=framework_staging_timeout,
        initial_decline_delay=0.5,
    )

    task_logging_executor = processor.executor_from_config(
        provider='logging',
        provider_config={
            'downstream_executor': mesos_executor,
        },
    )

    credentials_file = taskproc_config.get('boto_credential_file')
    if credentials_file:
        with open(credentials_file) as f:
            credentials = json.loads(f.read())
    else:
        raise ValueError("AWS credentials are required (boto_credential_file is not set)")

    region = taskproc_config.get('aws_region')

    endpoint = taskproc_config.get('dynamodb_endpoint')
    session = Session(
        region_name=region,
        aws_access_key_id=credentials['accessKeyId'],
        aws_secret_access_key=credentials['secretAccessKey'],
    )

    StatefulExecutor = processor.executor_cls(provider='stateful')
    stateful_executor = StatefulExecutor(
        downstream_executor=task_logging_executor,
        persister=DynamoDBPersister(
            table_name="taskproc_events_%s" % cluster,
            session=session,
            endpoint_url=endpoint,
        ),
    )

    return stateful_executor
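The boto_credential_file read above is expected to contain JSON with accessKeyId and secretAccessKey keys (inferred from the dictionary lookups in the code). A hedged illustration of writing such a file for local testing; the path and values are placeholders:

import json

with open('/tmp/boto_credentials.json', 'w') as f:
    json.dump(
        {'accessKeyId': 'AKIAEXAMPLE', 'secretAccessKey': 'example-secret'},
        f,
    )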