Example #1
def main():
    key = Cache.generate_key(str(Config()))
    if Cache.check(key):
        data = Cache.get(key)
        points = data['points']
        network = data['network']
    else:
        # get points from trajectories
        preprocessor = Preprocessor(Config.DATASET_ROOT_DIR,
                                    Config.DATASET_SCALE)
        points = preprocessor.get_points()

        # use coherence expanded algorithm to form clusters
        clusters = Cluster(points).coherence_expanding()
        network = TransferNetwork(points, clusters)

        # derive transfer probability
        tp = TransferProbability(network)
        tp.derive()

        # save points and network to cache
        Cache.save(key, {"points": points, "network": network})

    # show the distribution of transfer probability
    figure = Figure(width=8)
    figure.transfer_probability(network, 8).show()

    # search the most popular route
    mpr = MostPopularRoute(network)
    route = mpr.search(0, 4)
    print(route)
    figure = Figure()
    figure.most_popular_route(points, network, route).show()
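
For reference, a minimal in-memory sketch of the class-level Cache interface this script depends on (generate_key, check, get, save). The method names come from the example itself; the implementation below is hypothetical, and a real cache would presumably persist between runs:

import hashlib

class Cache:
    # Hypothetical in-memory stand-in for the cache used above.
    _store = {}

    @staticmethod
    def generate_key(text):
        # Derive a stable key from the stringified Config.
        return hashlib.md5(text.encode('utf-8')).hexdigest()

    @classmethod
    def check(cls, key):
        # True when something has been saved under this key.
        return key in cls._store

    @classmethod
    def get(cls, key):
        return cls._store[key]

    @classmethod
    def save(cls, key, data):
        cls._store[key] = data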
Example #2
def test_cache():
    c = Cache()

    # c.table and c.graph should be dicts
    assert isinstance(c.table, dict)
    assert isinstance(c.graph, dict)

    c.put('table', 'a', 'foo')
    c.put('table', 'b', {'b-a': 'foo'})
    # should be able to put stuff
    assert c.table == {'a': 'foo', 'b': {'b-a': 'foo'}}

    # manually set cache the bad way
    c.table = {'test_get': 'success'}
    get_data = c.get('table', 'test_get')
    # should be able to fetch stuff
    assert get_data == c.table['test_get']

    c.table = {'Nerd': 'present!'}
    ferris = c.check('table', 'Bueller')
    perfect_attendance = c.check('table', 'Nerd')
    # check should return true if the key is present else false
    assert ferris is False
    assert perfect_attendance is True
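
The test pins down the expected behaviour: named sections ('table', 'graph') exposed as dict attributes, put/get scoped to a section, and check returning a plain boolean. A minimal sketch that would pass it (hypothetical; one-attribute-per-section is an assumption):

class Cache:
    def __init__(self):
        # Each cache section is a plain dict attribute.
        self.table = {}
        self.graph = {}

    def put(self, section, key, value):
        getattr(self, section)[key] = value

    def get(self, section, key):
        return getattr(self, section).get(key)

    def check(self, section, key):
        # True if the key is present in the section, else False.
        return key in getattr(self, section)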
Example #3
def main():
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    _create_inputs_folder()
    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET)
    deployment_map = DeploymentMap(parameter_store, s3, ADF_PIPELINE_PREFIX)
    sts = STS()
    partition = get_partition(DEPLOYMENT_ACCOUNT_REGION)
    cross_account_access_role = parameter_store.fetch_parameter(
        'cross_account_access_role')
    role = sts.assume_cross_account_role(
        f'arn:{partition}:iam::{MASTER_ACCOUNT_ID}:role/'
        f'{cross_account_access_role}-readonly', 'pipeline')
    organizations = Organizations(role)
    ensure_event_bus_status(ORGANIZATION_ID)
    try:
        auto_create_repositories = parameter_store.fetch_parameter(
            'auto_create_repositories')
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'
    threads = []
    _cache = Cache()
    for p in deployment_map.map_contents.get('pipelines', []):
        _source_account_id = (
            p.get('default_providers', {})
            .get('source', {})
            .get('properties', {})
            .get('account_id')
        )
        if (_source_account_id
                and int(_source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
                and not _cache.check(_source_account_id)):
            rule = Rule(_source_account_id)
            rule.create_update()
            _cache.add(_source_account_id, True)
        thread = PropagatingThread(target=worker_thread,
                                   args=(p, organizations,
                                         auto_create_repositories,
                                         deployment_map, parameter_store))
        thread.start()
        threads.append(thread)

    for thread in threads:
        thread.join()
class Resolver:
    def __init__(self, parameter_store, stage_parameters,
                 comparison_parameters):
        self.parameter_store = parameter_store
        self.stage_parameters = stage_parameters
        self.comparison_parameters = comparison_parameters
        self.sts = STS()
        self.cache = Cache()

    @staticmethod
    def _is_optional(value):
        # Import strings ending in '?' mark the referenced value as optional.
        return value.endswith('?')

    def fetch_stack_output(self, value, key, optional=False):  # pylint: disable=too-many-statements
        try:
            [_, account_id, region, stack_name,
             output_key] = str(value).split(':')
        except ValueError as error:
            raise ValueError(
                "{0} is not a valid import string. Syntax should be "
                "import:account_id:region:stack_name:output_key"
                .format(str(value))) from error
        if Resolver._is_optional(output_key):
            LOGGER.info("Parameter %s is considered optional", output_key)
            optional = True
        output_key = output_key[:-1] if optional else output_key
        try:
            role = self.sts.assume_cross_account_role(
                'arn:aws:iam::{0}:role/{1}'.format(
                    account_id, 'adf-readonly-automation-role'), 'importer')
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=os.environ["AWS_REGION"],
                role=role,
                stack_name=stack_name,
                account_id=account_id)
            stack_output = self.cache.check(
                value) or cloudformation.get_stack_output(output_key)
            if stack_output:
                LOGGER.info("Stack output value is %s", stack_output)
                self.cache.add(value, stack_output)
        except ClientError:
            if not optional:
                raise
            stack_output = ""
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if optional:
                self.stage_parameters[parent_key][key] = stack_output
            else:
                if not stack_output:
                    raise Exception(
                        "No Stack Output found on {account_id} in {region} "
                        "with stack name {stack} and output key "
                        "{output_key}".format(
                            account_id=account_id,
                            region=region,
                            stack=stack_name,
                            output_key=output_key,
                        ))
                self.stage_parameters[parent_key][key] = stack_output
        except IndexError as error:
            if stack_output:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = stack_output
            else:
                raise Exception(
                    "Could not determine the structure of the file in order "
                    "to import from CloudFormation") from error
        return True

    def upload(self, value, key, file_name):
        if not any(item in value
                   for item in ['path', 'virtual-hosted', 's3-key-only']):
            raise Exception(
                'When uploading to S3 you need to specify a path style '
                'for the response: path, virtual-hosted, or s3-key-only. '
                'Read more: https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html'
            ) from None
        if str(value).count(':') > 2:
            [_, region, style, value] = value.split(':')
        else:
            [_, style, value] = value.split(':')
            region = DEFAULT_REGION
        bucket_name = self.parameter_store.fetch_parameter(
            '/cross_region/s3_regional_bucket/{0}'.format(region))
        client = S3(region, bucket_name)
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
        except IndexError:
            if self.stage_parameters.get(key):
                self.stage_parameters[key] = client.put_object(
                    "adf-upload/{0}/{1}".format(value, file_name),
                    "{0}".format(value),
                    style,
                    True  # pre-check
                )
            return True
        self.stage_parameters[parent_key][key] = client.put_object(
            "adf-upload/{0}/{1}".format(value, file_name),
            "{0}".format(value),
            style,
            True  # pre-check
        )
        return True

    @staticmethod
    def determine_parent_key(d, target_key, parent_key=None):
        # Walk the nested dict recursively, yielding the parent key of each
        # occurrence of target_key.
        for key, value in d.items():
            if key == target_key:
                yield parent_key
            if isinstance(value, dict):
                for result in Resolver.determine_parent_key(
                        value, target_key, key):
                    yield result

    def fetch_parameter_store_value(self, value, key, optional=False):  # pylint: disable=too-many-statements
        if self._is_optional(value):
            LOGGER.info("Parameter %s is considered optional", value)
            optional = True
        if str(value).count(':') > 1:
            [_, region, value] = value.split(':')
        else:
            [_, value] = value.split(':')
            region = DEFAULT_REGION
        value = value[:-1] if optional else value
        client = ParameterStore(region, boto3)
        try:
            parameter = self.cache.check('{0}/{1}'.format(
                region, value)) or client.fetch_parameter(value)
        except ParameterNotFoundError:
            if optional:
                LOGGER.info("Parameter %s not found, returning empty string",
                            value)
                parameter = ""
            else:
                raise
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if parameter:
                self.cache.add('{0}/{1}'.format(region, value), parameter)
                self.stage_parameters[parent_key][key] = parameter
        except IndexError as error:
            if parameter:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = parameter
            else:
                LOGGER.error(
                    "Parameter was not found, unable to fetch it from parameter store"
                )
                raise Exception(
                    "Parameter was not found, unable to fetch it from parameter store"
                ) from error
        return True

    def update(self, key):
        for k in self.comparison_parameters:
            if (not self.stage_parameters.get(k)
                    and not self.stage_parameters.get(k, {}).get(key)):
                self.stage_parameters[k] = self.comparison_parameters[k]
            if (key not in self.stage_parameters[k]
                    and self.comparison_parameters.get(k, {}).get(key)):
                self.stage_parameters[k][key] = (
                    self.comparison_parameters[k][key])
class Resolver:
    def __init__(self, parameter_store, stage_parameters,
                 comparison_parameters):
        self.parameter_store = parameter_store
        self.stage_parameters = stage_parameters
        self.comparison_parameters = comparison_parameters
        self.sts = STS()
        self.cache = Cache()

    @staticmethod
    def _is_optional(value):
        # Import strings ending in '?' mark the referenced value as optional.
        return value.endswith('?')

    def fetch_stack_output(self, value, key, optional=False):  # pylint: disable=too-many-statements
        partition = get_partition(DEFAULT_REGION)
        try:
            [_, account_id, region, stack_name,
             output_key] = str(value).split(':')
        except ValueError as error:
            raise ValueError(
                f"{value} is not a valid import string. Syntax should be "
                "import:account_id:region:stack_name:output_key") from error
        if Resolver._is_optional(output_key):
            LOGGER.info("Parameter %s is considered optional", output_key)
            optional = True
        output_key = output_key[:-1] if optional else output_key
        try:
            role = self.sts.assume_cross_account_role(
                f'arn:{partition}:iam::{account_id}:role/adf-readonly-automation-role',
                'importer')
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=os.environ["AWS_REGION"],
                role=role,
                stack_name=stack_name,
                account_id=account_id)
            stack_output = self.cache.check(
                value) or cloudformation.get_stack_output(output_key)
            if stack_output:
                LOGGER.info("Stack output value is %s", stack_output)
                self.cache.add(value, stack_output)
        except ClientError:
            if not optional:
                raise
            stack_output = ""
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if optional:
                self.stage_parameters[parent_key][key] = stack_output
            else:
                if not stack_output:
                    raise Exception(
                        f"No Stack Output found on {account_id} in {region} "
                        f"with stack name {stack_name} and "
                        f"output key {output_key}")
                self.stage_parameters[parent_key][key] = stack_output
        except IndexError as error:
            if stack_output:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = stack_output
            else:
                raise Exception(
                    "Could not determine the structure of the file in order "
                    "to import from CloudFormation") from error
        return True

    def upload(self, value, key, file_name):
        if not any(item in value for item in S3.supported_path_styles()):
            raise Exception(
                'When uploading to S3 you need to specify a path style '
                'for the returned value to use. '
                f'Supported path styles include: {S3.supported_path_styles()}'
            ) from None
        if str(value).count(':') > 2:
            [_, region, style, value] = value.split(':')
        else:
            [_, style, value] = value.split(':')
            region = DEFAULT_REGION
        bucket_name = self.parameter_store.fetch_parameter(
            f'/cross_region/s3_regional_bucket/{region}')
        client = S3(region, bucket_name)
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
        except IndexError:
            if self.stage_parameters.get(key):
                self.stage_parameters[key] = client.put_object(
                    f"adf-upload/{value}/{file_name}".format(value, file_name),
                    str(value),
                    style,
                    True  # pre-check
                )
            return True
        self.stage_parameters[parent_key][key] = client.put_object(
            f"adf-upload/{value}/{file_name}",
            str(value),
            style,
            True  # pre-check
        )
        return True

    @staticmethod
    def determine_parent_key(d, target_key, parent_key=None):
        # Walk the nested dict recursively, yielding the parent key of each
        # occurrence of target_key.
        for key, value in d.items():
            if key == target_key:
                yield parent_key
            if isinstance(value, dict):
                for result in Resolver.determine_parent_key(
                        value, target_key, key):
                    yield result

    def fetch_parameter_store_value(self, value, key, optional=False):  # pylint: disable=too-many-statements
        if self._is_optional(value):
            LOGGER.info("Parameter %s is considered optional", value)
            optional = True
        if str(value).count(':') > 1:
            [_, region, value] = value.split(':')
        else:
            [_, value] = value.split(':')
            region = DEFAULT_REGION
        value = value[:-1] if optional else value
        client = ParameterStore(region, boto3)
        try:
            parameter = self.cache.check(
                f'{region}/{value}') or client.fetch_parameter(value)
        except ParameterNotFoundError:
            if optional:
                LOGGER.info("Parameter %s not found, returning empty string",
                            value)
                parameter = ""
            else:
                raise
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if parameter:
                self.cache.add(f'{region}/{value}', parameter)
                self.stage_parameters[parent_key][key] = parameter
        except IndexError as error:
            if parameter:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = parameter
            else:
                LOGGER.error(
                    "Parameter was not found, unable to fetch it from parameter store"
                )
                raise Exception(
                    "Parameter was not found, unable to fetch it from parameter store"
                ) from error
        return True

    def update(self, key):
        for k in self.comparison_parameters:
            if (not self.stage_parameters.get(k)
                    and not self.stage_parameters.get(k, {}).get(key)):
                self.stage_parameters[k] = self.comparison_parameters[k]
            if (key not in self.stage_parameters[k]
                    and self.comparison_parameters.get(k, {}).get(key)):
                self.stage_parameters[k][key] = (
                    self.comparison_parameters[k][key])
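
Both Resolver variants (and main above) rely on a third Cache shape: instance-level check/add, where check returns the cached value itself so an expensive lookup can be short-circuited with `or`. A minimal sketch consistent with that usage (hypothetical implementation):

class Cache:
    def __init__(self):
        self._stash = {}

    def check(self, key):
        # Returns the cached value, or None (falsy) when absent; this is
        # what lets expressions like
        # `self.cache.check(value) or cloudformation.get_stack_output(...)`
        # fall through to the real lookup on a cache miss.
        return self._stash.get(key)

    def add(self, key, value):
        self._stash[key] = value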