Code example #1
0
def main(args):
    """Run a cluster simulation and optionally compare it against saved results."""
    args.start_time = parse_time_string(args.start_time)
    args.end_time = parse_time_string(args.end_time)

    staticconf.DictConfiguration({
        'join_delay_mean_seconds': args.join_delay_params[0],
        'join_delay_stdev_seconds': args.join_delay_params[1],
        'cpus_per_weight': args.cpus_per_weight,
        'ebs_volume_size': args.ebs_volume_size,
    })

    # Up to two simulation objects may be compared.  With two saved objects there is
    # nothing to simulate here; with --compare plus only one saved object we run a
    # fresh simulation now and compare it against the saved one.
    sims = []
    if args.compare:
        if len(args.compare) > 2:
            raise argparse.ArgumentError(
                None,
                f'Cannot compare more than two simulations: {args.compare}')
        sims = [read_object_from_compressed_json(f) for f in args.compare]

    if len(sims) < 2:
        metrics_client = _load_metrics(args.metrics_data_files, args.pool)
        sims.insert(0, _run_simulation(args, metrics_client))

    if len(sims) == 2:
        final_simulator = getattr(operator, args.comparison_operator)(*sims)
    else:
        final_simulator = sims[0]

    if args.simulation_result_file:
        write_object_to_compressed_json(
            final_simulator, args.simulation_result_file)

    if hasattr(args, 'reports'):
        if 'all' in args.reports:
            args.reports = REPORT_TYPES.keys()

        for report in args.reports:
            make_report(report, final_simulator, args.start_time,
                        args.end_time, args.output_prefix)
Code example #2
0
def disable(args: argparse.Namespace) -> None:
    """Pause the autoscaler for a cluster/pool/scheduler, optionally until a deadline."""
    ensure_account_id(args.cluster)

    entity = f'{args.cluster}.{args.pool}.{args.scheduler}'
    state = {
        'state': {'S': AUTOSCALER_PAUSED},
        'entity': {'S': entity},
        'timestamp': {'N': str(int(time.time()))},
    }

    # An optional --until deadline becomes an expiration timestamp on the entry.
    if args.until:
        state['expiration_timestamp'] = {
            'N': str(parse_time_string(args.until).timestamp)
        }

    dynamodb.put_item(
        TableName=staticconf.read('aws.state_table',
                                  default=CLUSTERMAN_STATE_TABLE),
        Item=state,
    )

    time.sleep(1)  # Give DynamoDB some time to settle

    now = parse_time_string('now').to('local')

    # Verify the write actually took effect before reporting success.
    if autoscaling_is_paused(args.cluster, args.pool, args.scheduler, now):
        message = f'The autoscaler for {entity} was paused at {now}'
        if args.until:
            until_str = str(parse_time_string(args.until).to('local'))
            message += f' until {until_str}'
        print(message)
    else:
        print('Something went wrong!  The autoscaler is NOT paused')
Code example #3
0
def main(args):
    """Download metrics from SignalFX and save them in a format that the Clusterman simulator can use."""
    kwargs = _parse_extra_options(args.option)
    start_time = parse_time_string(args.start_time)
    end_time = parse_time_string(args.end_time)
    args.dest_metric_names = args.dest_metric_names or args.src_metric_names

    if len(args.src_metric_names) != len(args.dest_metric_names):
        raise ValueError(
            'Different number of source and destination metrics\n'
            f'src = {args.src_metric_names}, dest = {args.dest_metric_names}')

    # Ask the user for the clusterman metric type of each downloaded metric.
    metric_options = list(METRIC_TYPES)
    metric_types = {
        src: ask_for_choice(f'What metric type is {src}?', metric_options)
        for src in args.src_metric_names
    }

    # metric_type -> (dest metric name -> downloaded timeseries).
    # NOTE: the original code also assigned `values = {}` before this, which was
    # dead code — it was immediately overwritten by the defaultdict below.
    values = defaultdict(dict)
    api_token = args.api_token
    filters = [s.split(':') for s in args.filter]
    for src, dest in zip(args.src_metric_names, args.dest_metric_names):
        print(f'Querying SignalFX for {src}')
        metric_type = metric_types[src]
        values[metric_type][dest] = basic_sfx_query(
            api_token,
            src,
            start_time,
            end_time,
            filters=filters,
            aggregation=Aggregation('sum', by=['AZ', 'inst_type']),
            extrapolation='last_value',
            max_extrapolations=3,
            **kwargs,
        )

    write_object_to_compressed_json(dict(values), args.dest_file)
Code example #4
0
def load_experimental_design(inputfile):
    """Generate metric timeseries data from an experimental design .yaml file.

    The format of this file should be:
    metric_type:
      metric_name:
        start_time: XXXX
        end_time: YYYY
        dict_keys: <AWS market specification> (optional)
        frequency: <frequency specification>
        values: <values specification>

    Metric values are generated between XXXX and YYYY, with interarrival times
    between events drawn from the frequency specification and the metric values
    drawn from the values specification.  When frequency is specified as
    historical, data in the database is used to generate the timeseries
    (aws_region should be provided as a value parameter).

    :returns: a dictionary of metric_type -> (metric_name -> timeseries data)
    """
    with open(inputfile) as f:
        design = yaml.safe_load(f.read())

    metrics = {}
    for metric_type, metric_design in design.items():
        per_type = defaultdict(list)
        for metric_name, config in metric_design.items():
            window_start = parse_time_string(config['start_time'])
            window_end = parse_time_string(config['end_time'])

            if config['frequency'] == 'historical':
                per_type[metric_name] = get_historical_data(
                    metric_name, metric_type, config, window_start, window_end)
            else:
                per_type[metric_name] = get_random_data(
                    config, window_start, window_end)
        metrics[metric_type] = per_type

    return metrics
Code example #5
0
File: toggle.py  Project: openarun/clusterman
def enable(args: argparse.Namespace) -> None:
    """Unpause the autoscaler by deleting its pause entry from the state table."""
    entity = f'{args.cluster}.{args.pool}.{args.scheduler}'
    dynamodb.delete_item(
        TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
        Key={
            'state': {'S': AUTOSCALER_PAUSED},
            'entity': {'S': entity},
        }
    )
    time.sleep(1)  # Give DynamoDB some time to settle

    # Verify the delete actually took effect before reporting success.
    now = parse_time_string('now').to('local')
    if autoscaling_is_paused(args.cluster, args.pool, args.scheduler, now):
        print('Something went wrong!  The autoscaler is paused')
    else:
        print(f'The autoscaler for {entity} was enabled at {now}')
Code example #6
0
File: toggle.py  Project: up1/clusterman
def disable(args: argparse.Namespace) -> None:
    """Pause the autoscaler by writing a pause entry to the state table."""
    item = {
        'state': {'S': AUTOSCALER_PAUSED},
        'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
    }

    # An optional --until deadline becomes an expiration timestamp on the entry.
    if args.until:
        item['expiration_timestamp'] = {
            'N': str(parse_time_string(args.until).timestamp)
        }

    dynamodb.put_item(
        TableName=staticconf.read('aws.state_table',
                                  default=CLUSTERMAN_STATE_TABLE),
        Item=item,
    )
Code example #7
0
def test_parse_time_string_non_arrow():
    parsed = parse_time_string('one hour ago', tz='US/Eastern')

    # This is inherently a little flaky, so allow a one-second tolerance.
    expected = arrow.now().replace(tzinfo='US/Eastern').shift(hours=-1).timestamp
    assert abs(expected - parsed.timestamp) <= 1
Code example #8
0
def test_parse_time_string_with_tz():
    # The '+04:00' offset embedded in the string wins over the tz argument.
    parsed = parse_time_string('2017-08-01T00:00:00+04:00', tz='US/Eastern')
    assert parsed.timestamp == 1501531200
Code example #9
0
def test_parse_time_string_without_tz():
    # With no offset in the string, the tz argument supplies the timezone.
    parsed = parse_time_string('2017-08-01 00:00', tz='US/Eastern')
    assert parsed.timestamp == 1501560000