def validate_zoom_level(uri, max_zoom_level):
    """ check if current geopackage provide only zoom that restricted by provided zoom level value"""
    s_code, downloaded_data = send_download_request(uri)
    if config.ResponseCode.Ok.value != s_code:
        raise ConnectionError('Failed to download data')
    file_name = os.path.basename(uri)
    download_local_url = common.combine_url(
        config.TMP_DIR, "_".join(["zoom", str(max_zoom_level)]))
    full_location = common.combine_url(download_local_url, file_name)
    if not os.path.exists(download_local_url):
        os.makedirs(download_local_url)
    with open(full_location, "wb") as f:
        f.write(downloaded_data)
    zoom_levels = set(gpv.validate_zoom_levels(full_location, max_zoom_level))
    return max(zoom_levels) <= max_zoom_level
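# Usage sketch for validate_zoom_level: the directory and file names in the
# URI below are hypothetical test fixtures, not values from this project.
def _example_validate_zoom_level():
    gpkg_uri = common.combine_url(config.DOWNLOAD_STORAGE_URL,
                                  config.DOWNLOAD_API, 'test_dir',
                                  'test.gpkg')  # hypothetical names
    assert validate_zoom_level(gpkg_uri, max_zoom_level=14)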
def create_testing_status(directory_name, fileName):
    """ mock helper function that create status on storage"""
    current_time = datetime.utcnow()
    utc_curr = current_time.strftime('%Y-%m-%d %H:%M:%SZ')
    utc_expired = (current_time +
                   timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%SZ')

    body = {
        'taskId': common.generate_uuid(),
        'userId': 'deletion_test',
        'fileName': fileName.split('.')[0],
        'directoryName': directory_name,
        'fileURI': common.combine_url(config.DOWNLOAD_STORAGE_URL,
                                      config.DOWNLOAD_API, directory_name,
                                      fileName),
        'progress': 100,
        'sourceLayer': config.SOURCE_LAYER,
        'status': config.EXPORT_STATUS_COMPLITED,
        'geometry': {
            'type': 'Polygon',
            'coordinates': [[[34.8119380171075, 31.9547503375918],
                             [34.822372617076, 31.9547503375918],
                             [34.822372617076, 31.9642696217735],
                             [34.8119380171075, 31.9642696217735],
                             [34.8119380171075, 31.9547503375918]]]
        },
        'estimatedFileSize': 1500,
        'realFileSize': 1500,
        'creationTime': utc_curr,
        'updatedTime': utc_curr,
        'expirationTime': utc_expired,
    }

    resp = br.send_post_request(
        common.combine_url(config.EXPORT_STORAGE_URL, config.STATUSES_API),
        body)
    if resp.status_code == config.ResponseCode.Ok.value:
        _logger.info('Created new task with uuid: %s', body['taskId'])
        _logger.debug('Task was registered with body: %s', body)
        return resp, body['taskId']
    else:
        _logger.error(
            'Error while trying to create a new task - status: %d | error: %s',
            resp.status_code, resp.content)
        return resp, "None"
def get_tileset_from_s3(identifier, job_id):
    """
    This method downloads the relevant tileset.json for the given model identifier directly from S3 (the object key is derived from the job id)
    """
    try:
        s3_client = s3.S3Client(config.S3_END_POINT, config.S3_ACCESS_KEY, config.S3_SECRET_KEY)
        bucket_exists = s3_client.is_bucket_exists(config.S3_BUCKET_NAME)
        source = common.combine_url(job_id, 'tileset.json')

        destination_dir = os.path.join(config.TMP_DIR, job_id)
        if not os.path.exists(destination_dir):
            os.makedirs(destination_dir)
        dest = os.path.join(destination_dir, 'tileset.json')
        if bucket_exists[0]:
            try:
                s3_client.download_from_s3(config.S3_BUCKET_NAME, source, dest)
                return dest
            except Exception as e:
                _logger.error(f'Failed downloading {identifier}/tileset.json from S3 with error: {str(e)}')
                raise Exception(f'Failed downloading {identifier}/tileset.json from S3 with error: {str(e)}')

        else:
            _logger.error(f'Failed connecting to bucket with error: {bucket_exists[2]}')
            raise Exception(f'Failed connecting to bucket with error: {bucket_exists[2]}')

    except Exception as e:
        _logger.error(f'Failed connecting to S3 with error: {str(e)}')
        raise Exception(f'Failed connecting to S3 with error: {str(e)}')
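# Usage sketch: the identifier and job id below are hypothetical; in practice
# they would come from an ingestion job. A valid 3D Tiles tileset.json always
# contains a 'root' node.
def _example_get_tileset():
    tileset_path = get_tileset_from_s3('model-123', 'job-456')  # hypothetical ids
    with open(tileset_path) as f:
        tileset = json.load(f)
    assert 'root' in tileset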
def is_geopackage_exist(file_url, request=None):
    """
    Validate that a specific geopackage exists on S3 or on the file system
    """
    if config.S3_EXPORT_STORAGE_MODE:
        _logger.info('Test running on s3 mode')
        try:
            s3_conn = s3.S3Client(config.S3_END_POINT, config.S3_ACCESS_KEY,
                                  config.S3_SECRET_KEY)
        except Exception as e:
            _logger.error('An error occurred while connecting to S3')
            raise e
        if isinstance(request, str):
            request = json.loads(request)
        object_key = ".".join([request['fileName'], config.PACKAGE_EXT])
        object_key = "/".join([request['directoryName'], object_key])
        res = s3_conn.is_file_exist(config.S3_BUCKET_NAME, object_key)
        # todo - update after download link will be
        pkg_url = file_url.split('?')[0] if '?' in file_url else file_url
        return res, pkg_url

    else:
        _logger.info('Test running on file-system mode')
        pkg_url = common.combine_url(config.PACKAGE_OUTPUT_DIR,
                                     *(file_url.split('/')[-2:]))
        _logger.info(pkg_url)
        res = os.path.exists(pkg_url)
        return res, pkg_url
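# Usage sketch for is_geopackage_exist: the request dict mirrors the relevant
# fields of a status record; the URL and names are hypothetical.
def _example_is_geopackage_exist():
    request = {'fileName': 'deletion_test', 'directoryName': 'deletion_dir'}
    file_url = common.combine_url(config.DOWNLOAD_STORAGE_URL,
                                  config.DOWNLOAD_API, 'deletion_dir',
                                  'deletion_test.gpkg')
    exists, pkg_url = is_geopackage_exist(file_url, request)
    assert exists, f'package not found at {pkg_url}'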
def load_gpkg_from_storage(file_name, directory_name):
    """
    - This function load to memory geopackage by provided name
    - Its support FS and OS by running configuration
    """
    if config.S3_EXPORT_STORAGE_MODE:
        s3_conn = s3.S3Client(config.S3_END_POINT, config.S3_ACCESS_KEY,
                              config.S3_SECRET_KEY)
        object_key = "/".join(
            [directory_name, ".".join([file_name, config.PACKAGE_EXT])])

        destination_dir = os.path.join(config.S3_DOWNLOAD_DIRECTORY,
                                       object_key.split('.')[0])
        if not os.path.exists(destination_dir):
            os.makedirs(destination_dir)

        s3_conn.download_from_s3(
            config.S3_BUCKET_NAME, object_key,
            os.path.join(destination_dir,
                         destination_dir.split('/')[-1]))
        uri = os.path.join(destination_dir, destination_dir.split('/')[-1])

    else:
        uri = common.combine_url(
            config.PACKAGE_OUTPUT_DIR, config.EXPORT_DOWNLOAD_DIR_NAME,
            ".".join([config.EXPORT_DOWNLOAD_FILE_NAME, config.PACKAGE_EXT]))

    pkg = common.load_file_as_bytearray(uri)
    return pkg
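# Usage sketch: a GeoPackage is an SQLite 3 database, so the first 16 bytes of
# a valid file are the SQLite magic header. The names below are hypothetical.
def _example_load_gpkg():
    pkg = load_gpkg_from_storage('deletion_test', 'deletion_dir')
    assert bytes(pkg[:16]) == b'SQLite format 3\x00'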
def get_task_status(uuid):
    """
    This method returns the task status for the given uuid (task id)
    """
    res = su.get_uuid_status(
        common.combine_url(config.EXPORT_STORAGE_URL, config.STATUSES_API),
        uuid)
    return res
def get_all_statuses(url):
    """
    This method returns all statuses stored in the db
    :param url: api's url
    :return: list[dict]
    """
    full_url = common.combine_url(url, config.STATUSES_API)
    resp = conn.send_get_request(full_url)
    return resp
def delete_by_uuid(url, uuid):
    """
    This deletes from the common storage db the statuses of the given uuids
    :param url: api's url
    :param uuid: list of strings representing the uuids of the tasks to delete
    :return: status request response type
    """
    full_url = common.combine_url(url, config.STATUSES_API, config.DELETE_API)
    resp = conn.send_post_request(full_url, uuid)
    return resp
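# Usage sketch: fetch all statuses and delete the ones created by the mock
# helper above. Assumes the GET response body is a JSON list of status
# records shaped like the one built in create_testing_status.
def _example_cleanup_test_statuses():
    resp = get_all_statuses(config.EXPORT_STORAGE_URL)
    statuses = json.loads(resp.content)
    test_ids = [s['taskId'] for s in statuses if s.get('userId') == 'deletion_test']
    if test_ids:
        delete_by_uuid(config.EXPORT_STORAGE_URL, test_ids)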
    def post_model_ingestion_job(self, request):
        """This method starts and triggers a new ingestion process for a 3D model"""
        if not isinstance(request, dict):
            _log.error(
                f'Request should be provided in valid json format:\n{request} => {type(request)}'
            )
            raise TypeError('Request should be provided in valid json format')
        full_model_ingestion_url = common.combine_url(
            self._ingestion_stack_url, config.INGESTION_3RD_MODEL)
        resp = br.send_post_request(full_model_ingestion_url, body=request)
        return resp
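# Usage sketch: `client` is an instance of the ingestion client class this
# method belongs to; the payload field names are hypothetical, since the real
# schema is owned by the ingestion service.
def _example_post_ingestion(client):
    request = {'modelPath': '/tmp/models/sample', 'metadata': {}}  # hypothetical fields
    resp = client.post_model_ingestion_job(request)
    return resp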
    def get_single_job_status(self, job_id):
        """This method returns the status of a specific ingestion job"""
        if not isinstance(job_id, str):
            _log.error(
                f'job id should be provided as a string expressed as uuid:\n{job_id} => {type(job_id)}'
            )
            raise TypeError('job id should be provided as a uuid string')
        job_model_ingestion_url = common.combine_url(
            self._ingestion_job_service_url, config.INGESTION_3RD_JOB_STATUS,
            job_id)

        try:
            resp = br.send_get_request(job_model_ingestion_url)
            return resp
        except Exception as e:
            _log.error(
                f'Error getting a response from the ingestion job progress service: {str(e)}'
            )
            raise e
    def get_single_3rd_metadata(self, identifier):
        """This method return specific exists metadata from catalog db"""
        if not isinstance(identifier, str):
            _log.error(
                f'should be provided identifier expressed as string:\n{identifier} => {type(identifier)}'
            )
            raise TypeError(
                'identifier should be provided as valid json format')
        model_metadata_on_catalog_url = common.combine_url(
            self._ingestion_catalog_url, config.INGESTION_CATALOG_MODEL_DATA,
            identifier)

        try:
            resp = br.send_get_request(model_metadata_on_catalog_url)
            return resp
        except Exception as e:
            _log.error(
                f'Error getting a response from the catalog db service: {str(e)}'
            )
            raise e
def test_environment_validation():
    """This test validate basic pre-running validation of exporter environment micro-services"""
    services_list = []

    # fields to test:
    exporter_ui = config_dev.EXPORT_UI_URL
    trigger_api = common.combine_url(config_dev.EXPORT_TRIGGER_URL,
                                     config.GET_EXPORT_STATUSES_API)
    map_proxy = config_dev.MAP_PROXY_URL
    s3_end_point = config_dev.S3_END_POINT
    s3_access_key = config_dev.S3_ACCESS_KEY
    s3_secret_key = config_dev.S3_SECRET_KEY
    s3_bucket_name = config_dev.S3_BUCKET_NAME
    postgress_vm = config_dev.POSTGRESS_VM
    kafka_vm = config_dev.KAFKA_VM

    _log.info(
        'Pre-run test series that validates the environment:\n'
        'Will check:\n'
        f'    1) Exporter ui - {exporter_ui}\n'
        f'    2) Trigger API - {trigger_api}\n'
        f'    3) Map_proxy - {map_proxy}\n'
        f'    4) S3 client - End point: {s3_end_point}, Bucket: {s3_bucket_name}\n'
        f'    5) Postgres - VM alive: address: {postgress_vm}\n'
        f'    6) ELK - VM - kafka service: address {kafka_vm}\n')
    ui_ok = common.check_url_exists(exporter_ui, config_dev.HTTP_REQ_TIMEOUT)
    ui_ok['name'] = 'Exporter UI route'
    services_list.append(ui_ok)
    trigger_ok = common.check_url_exists(trigger_api,
                                         config_dev.HTTP_REQ_TIMEOUT)
    trigger_ok['name'] = 'Trigger route'
    services_list.append(trigger_ok)
    map_proxy_ok = common.check_url_exists(map_proxy,
                                           config_dev.HTTP_REQ_TIMEOUT)
    map_proxy_ok['name'] = 'Map-Proxy'
    services_list.append(map_proxy_ok)
    s3_ok = s3storage.check_s3_valid(s3_end_point, s3_access_key,
                                     s3_secret_key, s3_bucket_name)
    s3_ok['name'] = 'S3 - minio'
    services_list.append(s3_ok)
    postgress_vm_ok = common.ping_to_ip(postgress_vm)

    if postgress_vm_ok:
        ssh_conn = bash_utils.ssh_to_machine(postgress_vm,
                                             config_dev.AZURE_USER_NAME,
                                             config_dev.AZURE_PASSWORD)
        if ssh_conn:
            postgress_vm_ok = environment_validators.is_postgress_alive(
                ssh_conn)
            if postgress_vm_ok:
                postgress_vm_ok = {
                    'url_valid': True,
                    'status_code': None,
                    'content': None,
                    'error_msg': None
                }
            else:
                postgress_vm_ok = {
                    'url_valid': False,
                    'status_code': None,
                    'content': None,
                    'error_msg': 'Postgres service not running|activated'
                }
        else:
            postgress_vm_ok = {
                'url_valid': False,
                'status_code': None,
                'content': None,
                'error_msg': 'Postgres vm not reachable'
            }
    else:
        postgress_vm_ok = {
            'url_valid': False,
            'status_code': None,
            'content': None,
            'error_msg': 'Postgres vm not reachable'
        }
    postgress_vm_ok['name'] = 'Postgres DB'
    services_list.append(postgress_vm_ok)

    kafka_vm_ok = common.ping_to_ip(kafka_vm)

    if kafka_vm_ok:
        kafka_vm_ok = bash_utils.listen_to_port(kafka_vm,
                                                config_dev.KAFKA_PORT)
        if kafka_vm_ok:
            kafka_vm_ok = {
                'url_valid': True,
                'status_code': None,
                'content': None,
                'error_msg': None
            }

        else:
            kafka_vm_ok = {
                'url_valid': False,
                'status_code': None,
                'content': None,
                'error_msg': f"can't reach kafka on port {config_dev.KAFKA_PORT}, the service is probably not running"
            }
    else:
        kafka_vm_ok = {
            'url_valid': False,
            'status_code': None,
            'content': None,
            'error_msg': 'kafka vm not reachable'
        }
    kafka_vm_ok['name'] = 'KAFKA SERVICE'
    services_list.append(kafka_vm_ok)

    final_res = _analyze_system_results(services_list)
    passed = "--> " + "\n--> ".join(
        final_res['pass']) if final_res['pass'] else "\n--> ".join(
            final_res['pass'])
    failed = "--> " + "\n--> ".join(
        final_res['failure']) if final_res['failure'] else "\n--> ".join(
            final_res['failure'])
    _log.info(
        '\n-------------------------------------------------------------------------------------------'
        '\nEnvironment status:\n'
        f'\npassed:[{len(final_res["pass"])}]\n'
        f'{passed}\n'
        f'\n-------------------------------------------------------------------------------------------\n'
        f'failed:[{len(final_res["failure"])}]\n'
        f'{failed}'
        f'\n-------------------------------------------------------------------------------------------\n'
    )
    assert final_res['state'], "Environment prerequisites are not met"
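# `_analyze_system_results` is used above but not shown here; a minimal sketch
# consistent with that usage (input: list of service-check dicts with 'name',
# 'url_valid' and 'error_msg' keys; output: dict with 'pass', 'failure' and an
# overall 'state' flag) might look like this:
def _analyze_system_results(services_list):
    results = {'pass': [], 'failure': [], 'state': True}
    for service in services_list:
        if service['url_valid']:
            results['pass'].append(service['name'])
        else:
            results['failure'].append(f"{service['name']}: {service['error_msg']}")
            results['state'] = False
    return results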
def get_uuid_status(url, uuid):
    """This method return current state of export task by uuid created"""
    full_url = common.combine_url(url, uuid)
    resp = conn.send_get_request(full_url)
    return resp
    def __init__(self):
        self._base_url = config.EXPORT_TRIGGER_URL
        self._export_url = common.combine_url(self._base_url,
                                              config.EXPORT_GEOPACKAGE_API)
        self._get_status_url = common.combine_url(
            self._base_url, config.GET_EXPORT_STATUSES_API)