Example #1
    def log_message(self,
                    level='INFO',
                    message=None,
                    data=None,
                    duration='',
                    from_method=None,
                    from_class=None):
        if type(data) is str:
            data = {
                'str': data
            }  # wrap in a dict so that Elastic doesn't map this field as a plain string

        log_data = {
            "duration": duration,
            "from_class": from_class,
            "from_method": from_method,
            "level": level,
            "message": message,
            "data": data,
            self.time_field: datetime.utcnow()
        }  # this is a Python datetime object (well supported by Elastic, but it doesn't serialise to JSON cleanly)
        if self.enabled:
            return self.elastic().add(data=log_data,
                                      refresh=self.refresh_index)
        # if elastic server is not available, log messages to console
        pprint(log_data)
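
A hypothetical call site for the method above (illustrative only; it assumes `logger` is an instance of the class that defines log_message):

    logger.log_message(level       = 'INFO',
                       message     = 'user created',
                       data        = {'user_id': 42},        # passing a dict keeps the Elastic mapping as an object
                       duration    = '120ms',
                       from_method = 'create_user',
                       from_class  = 'User_Service')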
Example #2
    def test_repository_create(self):
        pprint(self.result_create)

        assert self.ecr.repository_create(name=self.repository_name) == {
            'message': f'repository {self.repository_name} already existed',
            'status': 'warning'
        }
Example #3
    def test_query(self):
        query = self.py_query.query('head')
        assert type(query) == Py_Query

        assert query.outer_html() == '<head id="abc" answer="42">inside <b>head</b></head>'

        children = query.children()
        assert children[0].html() == '<b>head</b>'

        elements = query.elements()
        assert type(elements[0]) == Py_Query
        pprint(type(elements))
        assert type(elements).mro() == [list, object]

        element = elements[0]
        assert type(element).mro()  == [Py_Query, object]
        assert element.text()       == 'inside head'
        assert element.attributes() == {'id': 'abc', 'answer': '42'}

        assert type(elements[0]       ) == Py_Query
        assert type(element.items()[0]) is Py_Query       # confirm that .items() returns a Py_Query object

        # a couple more pyquery tests
        assert query.set_pyquery_from_html('<form><input name="order" value="spam"></form>').serialize_array() == [{'name': 'order', 'value': 'spam'}]
Example #4
    def test__enter__leave__(self):
        image_name = 'hello-world'

        with Temp_ECS_Fargate_Task(image_name=image_name,
                                   delete_on_exit=True) as fargate_task:
            pprint(fargate_task.create())
            assert 'Hello from Docker!\n' in fargate_task.logs_wait_for_data()
Example #5
    def test_setup_and_run_containers_in_ec2(self):
        container_instance = self.ecs.container_instances().pop()
        subnet_id = container_instance.get('attributes').get('ecs.subnet-id')
        security_group_id = self.ec2.security_group_default().get('GroupId')

        task_family = 'hello-world-in-ec2'
        image_name = 'hello-world'
        skip_if_exists = True
        task_definition_config = self.ecs.task_definition_setup(
            task_family=task_family,
            image_name=image_name,
            requires='EC2',
            network_mode='none')
        task_definition = self.ecs.task_definition_create(
            task_definition_config=task_definition_config,
            skip_if_exists=skip_if_exists)
        task_definition_arn = task_definition.get('taskDefinitionArn')

        # task_config = { "launch_type"           : "EC2"               ,
        #                 "security_group_id"     : security_group_id   ,
        #                 "subnet_id"             : subnet_id           ,
        #                 "task_definition_arn"   : task_definition_arn }
        #task = self.ecs.task_create(**task_config)
        pprint(task_definition_arn)
        task = self.ecs.task_create_ec2(
            task_definition_arn=task_definition_arn)
        task_arn = task.get('taskArn')
        self.ecs.wait_for_task_stopped(task_arn=task_arn)
        pprint(
            self.ecs.logs(task_arn=task_arn,
                          task_definition_arn=task_definition_arn,
                          image_name=image_name))
Example #6
    def test_metric_image(self):
        image_in_tmp = "/tmp/cloud_watch_metric.png"
        instance_id = "i-0a2a089f3dd878051"  #"i-0f9b8338610fc96e7"
        options = {
            "id": "m1",
            "stat": "Average",
            "label": "Median value",
            "visible": True,
            "color": "#0000FF",
            "yAxis": "left",
            "period": 1800
        }
        kwargs = {
            "namespace": "AWS/EC2",
            "metric_name": "CPUUtilization",
            "dimensions": {
                "InstanceId": instance_id
            },
            "path_image_file": image_in_tmp,
            "options": options,
            "title": "Metric Title"
        }

        result = self.cloud_watch.metric_image(**kwargs)
        pprint(result)
Example #7
 def tearDown(self):
     if hasattr(self, 'result'):
         if self.result is not None:
             pprint(self.result)
     if hasattr(self, 'png_data'):
         if hasattr(self, 'png_file') is False:           # default the output path when the test didn't set one
             self.png_file = '/tmp/unit-test.png'
         self.save_png(self.png_data, self.png_file)
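
A minimal sketch of a test that would exercise both branches of this tearDown (the attribute names are the ones checked above; the test body itself is hypothetical):

    def test_render_chart(self):                          # hypothetical test method on the same class
        self.result   = {'status': 'ok'}                  # printed by tearDown via pprint
        self.png_data = b'\x89PNG...'                     # saved by tearDown via save_png
        self.png_file = '/tmp/render-chart.png'           # optional: tearDown falls back to /tmp/unit-test.png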
Example #8
 def test_upload_dependency(self):
     from osbot_aws.Dependencies import upload_dependency
     from osbot_aws.Dependencies import pip_install_dependency
     dependencies = ['websocket-client', 'syncer']
     for dependency in dependencies:
         pip_install_dependency(dependency)
         result = upload_dependency(dependency)
         pprint(result)
Example #9
 def test_invoke_directly(self):
     assert run({}) == 'in lambda shell'
     auth_key = self.lambda_shell.get_lambda_shell_auth()
     pprint(auth_key)
     self.result = run(
         {'lambda_shell': {
             'method_name': 'ping',
             'auth_key': auth_key
         }})
Example #10
 def test_start_container_in_ec2__in_default_cluster(self):
     ecs = self.ecs
     task_family = 'testing-ec2'
     task_definition = ecs.task_definition(task_family=task_family)
     task_definition_arn = task_definition.get('taskDefinitionArn')
     task = ecs.task_create_ec2(task_definition_arn=task_definition_arn)
     task_arn = task.get('taskArn')
     ecs.wait_for_task_stopped(task_arn=task_arn)
     pprint(task_arn)
Example #11
    def test_simple_execution(self, region, account_id):
        event_data = {'answer': 42}
        queue_name = random_string(prefix="osbot_unit_test")
        with Temp_SQS_Queue(queue_name=queue_name) as queue:
            assert queue_name == queue.queue_name
            assert queue.exists() is True
            assert queue.queue_url == f'https://{region}.queue.amazonaws.com/{account_id}/{queue.queue_name}'

            assert queue.push(event_data).pop() == event_data
            pprint(queue.info())
Example #12
 async def open(self, path='', wait_until=None):
     url = urljoin(self.target_server, path)
     page = await self.page()
     try:
         await self.api_browser.open(url=url,
                                     page=page,
                                     wait_until=wait_until)
     except Exception as error:
         pprint(f'Error in Web_Base - open: {error}')
     return url
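
Because open is a coroutine it has to be driven from an event loop; a minimal sketch, assuming `web` is an instance of the class that defines open (Web_Base, per the error message above):

    import asyncio

    async def load_home_page(web):
        return await web.open(path='/')                   # returns the resolved url even if navigation failed

    # asyncio.get_event_loop().run_until_complete(load_home_page(web))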
Example #13
 def test_update_status(self):
     temp_data_file = temp_file()
     with patch.object(Hash_Json, 'get_file_path', return_value=temp_data_file):
         self.hash_json.add_file(self.test_file_hash, self.test_file_name)
         assert self.hash_json.data()[self.test_file_hash]['file_status'] == 'Initial'
         self.hash_json.update_status(self.test_file_hash, 'BBBB')
         self.hash_json.save()
         assert self.hash_json.data()[self.test_file_hash]['file_status'] == 'BBBB'
         assert json_load_file(temp_data_file)[self.test_file_hash]['file_status'] == 'BBBB'
     pprint(self.hash_json.load())
Example #14
    def test_connect_to_ec2_instance(self):

        path_to_key = '/var/folders/_j/frqs70d93l328f307rw2jx5h0000gn/T/tmp8_tl8q5l/osbot-test_ec2_with_ssh_supporteyvjpbqb.pem'
        username    = '******'
        server_ip   = "3.251.63.86"
        ssh_config = {"server": server_ip,
                      "ssh_key": path_to_key,
                      "user": username}
        result = Ssh(ssh_config=ssh_config).ls('/')
        pprint(result)
Example #15
 def test_commands_list(self):
     instance_id = 'i-06298c4377973f455'                                 # todo: refactor into ssm method
     command_id  = '18f8de5e-116d-4245-89d0-f01f07310ef1'
     #result = self._.commands_list()
     #pprint(result)
     output = self._.client.get_command_invocation(
         CommandId=command_id,
         InstanceId=instance_id,
         )
     pprint(output)
Example #16
    def test_user_privs(self):
        user_name = 'AlekhAnejaUpwork'
        access_key_id = 'AKIA3NUU5XSYZRQYXMP2'
        iam_user = IAM(user_name=user_name)

        assert iam_user.user_exists()
        assert list_set(iam_user.user_access_keys(index_by='AccessKeyId')) == [
            access_key_id
        ]

        pprint(iam_user.user_polices())
Example #17
 async def wait_for_navigation(self,
                               page=None,
                               timeout_in_ms=10000,
                               show_error=False):
     try:
         if page is None:
             page = await self.page()
         await page.waitForNavigation({'timeout': timeout_in_ms})
         return True
     except Exception as error:
         if show_error:
             pprint(error)
         return False
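
A hedged sketch of pairing this helper with an action that triggers navigation (pyppeteer-style page API; `browser`, `page` and the selector are assumptions):

    import asyncio

    async def click_and_wait(browser, page):
        navigated, _ = await asyncio.gather(
            browser.wait_for_navigation(page=page, timeout_in_ms=5000),
            page.click('a#next'))                          # both coroutines run concurrently, so the wait is armed alongside the click
        return navigated                                   # True on navigation, False on timeout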
Example #18
 def test_create(self, account_id, region):
     with Temp_SQS_Queue() as sqs_queue:
         assert self.event_rule.targets() == []
         rule_name = self.event_rule.name()
         queue_name = sqs_queue.name()
         target_id = self.event_rule.add_target_sqs_queue(sqs_queue)
         assert self.event_rule.targets() == [{
             'Arn': f'arn:aws:sqs:{region}:{account_id}:{queue_name}',
             'Id': f'{queue_name}',
             'SqsParameters': {
                 'MessageGroupId': rule_name
             }
         }]
         pprint(self.event_rule.delete_target(target_id=target_id))
Example #19
 def test_invoke(self):
     #self.test_update_lambda_function()
     pprint(self.aws_lambda.invoke({'method_name': 'ping'          }).get('return_value'))
     pprint(self.aws_lambda.invoke({'method_name': 'pwd'           }).get('return_value'))
     pprint(self.aws_lambda.invoke({'method_name': 'disk_space'    }).get('return_value'))
     pprint(self.aws_lambda.invoke({'method_name'  : 'python_exec',
                                    'method_kwargs': {'code':"result=40+2"}}))
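
The event payload shape is visible in the calls above; a minimal sketch of the two variants, grounded only in what the test passes in (the handler side is not shown here):

    simple_call = {'method_name': 'ping'}                                   # no arguments
    exec_call   = {'method_name'  : 'python_exec',
                   'method_kwargs': {'code': 'result=40+2'}}                # kwargs forwarded to the named method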
Example #20
    def test_create_info_exists_delete(self):
        result = self.dashboard.create()
        pprint(result)
        return                                      # the assertions below are currently skipped by this early return

        assert result.get('attributes').get('title') == self.pattern_name
        assert self.index_pattern.exists() is True
        assert list_set(self.index_pattern.info()) == [
            'fields', 'id', 'namespaces', 'references', 'score', 'title',
            'type', 'updated_at'
        ]
        assert Index_Pattern(kibana=self.kibana,
                             pattern_name=random_text()).info() == {}
        assert self.index_pattern.delete() is True
Example #21
    def test_tags_for_resource(self):
        resource_arn = None
        result = self.cloud_watch.tags_for_resource(resource_arn)
        pprint(result)


# 'query'  : 'SOURCE '
#            "'/aws/lambda/f2f_aws_lambdas_lambdas_deploy_fast_api__prod_v2' | "
#            'fields @timestamp, @message\n'
#            '| sort @timestamp desc\n'
#            '| limit 20',
# 'region' : 'eu-west-1',
# 'stacked': False,
# 'title'  : 'Log group: '
#            '/aws/lambda/f2f_aws_lambdas_lambdas_deploy_fast_api__prod_v2',
# 'view'   : 'table'
Example #22
 def test_get_file_analysis(self):
     with patch.object(Metadata, 'get_from_file', return_value=self.meta_data):
         response = self.analysis_json.get_file_analysis(self.test_file_hash, self.report_data)
         pprint(response)
         assert "file_name"             in response
         assert "original_hash"         in response
         assert response["original_hash"]    == self.test_file_hash
         assert "rebuild_hash"          in response
         assert "file_type"             in response
         assert "file_size"             in response
         assert "remediated_item_count" in response
         assert "remediate_items_list"  in response
         assert "sanitised_item_count"  in response
         assert "sanitised_items_list"  in response
         assert "issue_item_count"      in response
         assert "issue_item_list"       in response
Example #23
    def test_policy_add_sqs_permissions_to_lambda_role(self):
        policy_name = self.iam_utils.arn_aws_policy_service_sqs_lambda.split(
            '/').pop(-1)
        with Temp_Lambda() as temp_lambda:
            lambda_name = temp_lambda.lambda_name
            iam_role_name = self.iam_utils.policy_add_sqs_permissions_to_lambda_role(
                lambda_name)
            iam_role = IAM_Role(role_name=iam_role_name)
            pprint(iam_role.info())
            assert policy_name in iam_role.policies_statements()
            assert iam_role.exists() is True

            self.iam_utils.policy_remove_sqs_permissions_to_lambda_role(
                lambda_name)

            assert policy_name not in iam_role.policies_statements()
Example #24
    def test_process_files(self):
        path_data = self.pre_processor.storage.hd2_data()
        path_status = self.pre_processor.storage.hd2_status()

        assert len(files_list(path_data)) == 0
        assert len(
            files_list(path_status)) == 1  # should have the status.json file

        self.pre_processor.process_files()

        pprint('**********: DATA')
        pprint(files_list(path_data))
        pprint('**********: STATUS')
        pprint(files_list(path_status))
        pprint('------------------------')
        assert len(files_list(path_data)) > 0
        assert len(files_list(path_status)) == 1  # should have: status.json
Example #25
 def test_import_dashboard(self):
     # dashboard_file = 'processed-files-v8.ndjson'
     # dashboard_file = 'KD1.ndjson'
     # url_dashboards = 'https://raw.githubusercontent.com/filetrust/cdr-plugin-folder-to-folder-test-data/main/kibana-dashboards/'
     # url_dashboard = url_dashboards + dashboard_file
     # dashboard_data = GET(url_dashboard)
     #
     # import_file = file_create(extension=dashboard_file, contents=dashboard_data)
     # dashboard = Dashboard(kibana=self.kibana)
     # #import_file = '/var/folders/_j/frqs70d93l328f307rw2jx5h0000gn/T/tmp6gz6sc76.tmp'
     # #import_data = file_contents(export_file)
     # pprint(self.dashboard.import_dashboard(import_file= import_file))
     dashboard = Dashboard(kibana=self.kibana)
     pprint(
         dashboard.import_dashboard_from_github(
             dashboard_file_name='processed-files-v8.ndjson'))
     pprint(
         dashboard.import_dashboard_from_github(
             dashboard_file_name='KD1.ndjson'))
Example #26
    def test_run_containers_in_ec2(self):
        print()
        image_name = 'hello-world'
        task_definition_arn = 'arn:aws:ecs:eu-west-1:785217600689:task-definition/hello-world-in-ec2:3'
        for j in range(0, 2):
            for i in range(0, 12):
                result = self.ecs.task_create_ec2(
                    task_definition_arn=task_definition_arn)
                failures = len(result.get('failures'))
                ok = len(result.get('tasks'))
                print(f'************** {j} {i} ok: {ok} failures: {failures}')

        return                                      # the code below does not run ('task' is never assigned in the loop above)
        task_arn = task.get('taskArn')
        #self.ecs.wait_for_task_stopped(task_arn=task_arn)
        logs = self.ecs.logs_wait_for_data(
            task_arn=task_arn,
            task_definition_arn=task_definition_arn,
            image_name=image_name)
        pprint(logs)
Example #27
    def start(self):
        while self.enabled():
            log_data = self.next_value()
            kwargs = {
                "level": log_data.get("level"),
                "message": log_data.get("message"),
                "data": log_data.get("data"),
                "duration": log_data.get("duration"),
                "from_method": log_data.get("from_method"),
                "from_class": log_data.get("from_class")
            }

            if print_log_messages:                   # flag defined outside this snippet
                pprint(kwargs)
            self.log_message(**kwargs)

            # todo refactor into method focused on internal logging messages
            if  kwargs.get('level'  ) == 'DEBUG'        and  \
                kwargs.get('message') == 'stop_logging' and  \
                kwargs.get('data'   ) == {'when' : 'now'}:
                return
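
The loop above only exits when a specific control entry comes through the queue; a sketch of that entry, taken directly from the checks in the code (how it is pushed onto the queue is outside this snippet):

    stop_message = {'level'  : 'DEBUG',
                    'message': 'stop_logging',
                    'data'   : {'when': 'now'}}       # once next_value() returns this, start() returns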
Example #28
    def test_role_create_assume_role(self):
        sts = STS()
        current_user_arn = sts.caller_identity_arn()
        original_policy  = {'Statement': [ { 'Action'   : 'sts:AssumeRole',
                                             'Effect'   : 'Allow',
                                             'Principal': { 'Service': 'codebuild.amazonaws.com'}}]}

        new_policy       = {'Statement': [{'Action'   : 'sts:AssumeRole',
                                           'Effect'   : 'Allow',
                                           'Principal': {'AWS': current_user_arn } }]}

        test_role        = IAM(role_name="temp_role_to_test_assume_role")

        test_role.role_create(original_policy)
        role_arn = test_role.role_arn()
        current_assume_policy = test_role.role_assume_policy()
        test_role.role_assume_policy_update(new_policy)

        for i in range(0,15):
            with Catch(log_exception=False):
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)
                sts.assume_role(role_arn=role_arn)

                pprint('got credentials')
                break
            print(f'after {i} seconds')
            wait(1)

        assert sts.assume_role(role_arn=role_arn).get('Credentials') is not None
        test_role.role_assume_policy_update(current_assume_policy)
        assert test_role.role_assume_policy() == current_assume_policy
        test_role.role_delete()
Example #29
    async def screenshot(self,
                         url=None,
                         page=None,
                         full_page=True,
                         file_screenshot=None,
                         clip=None,
                         viewport=None,
                         js_code=None,
                         delay=None):
        try:
            if url:
                await self.open(url, page=page)

            await self.js_execute(js_code)

            if delay:
                await asyncio.sleep(delay)

            if file_screenshot is None:
                file_screenshot = Files.temp_file('.png')

            page = await self.page()
            if viewport:
                await self.viewport(viewport)
            if clip:
                full_page = False
            await page.screenshot({
                'path': file_screenshot,
                'fullPage': full_page,
                'clip': clip
            })

            return file_screenshot
        except Exception as error:
            pprint(f"Error in API_Browser.screenshot: {error}")  # todo: add support for logging
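
A hedged usage sketch (the clip dict follows pyppeteer's screenshot options; `browser` is assumed to be an API_Browser instance):

    async def capture(browser):
        png_full = await browser.screenshot(url='https://example.com')       # full page, auto-named temp file
        png_clip = await browser.screenshot(url='https://example.com',       # passing clip turns full_page off inside screenshot()
                                            clip={'x': 0, 'y': 0, 'width': 800, 'height': 600})
        return png_full, png_clip                                            # both are paths to .png files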
Example #30
 def test_alarms_for_metric(self):
     metric_name = None
     namespace = None
     result = self.cloud_watch.alarms_for_metric(metric_name=metric_name,
                                                 namespace=namespace)
     pprint(result)