Example #1
def _fetch_quotes():
    '''fetch quotes from reddit'''
    error = False
    logging.debug('fetching data from reddit')
    raw_quotes = [constants.error_text]
    try:
        r = requests.get(constants.quotes_url, headers=constants.headers)
    except requests.RequestException as e:
        logging.error(
            f'failed to fetch quotes from {constants.quotes_url}, {e}')
        # bail out here: `r` is unbound below if the request itself failed
        return ([constants.error_text], True)
    if r.status_code == 200:
        try:
            json_data = dictor(r.json(), constants.quote_data_addr)
            raw_quotes = [
                dictor(q, constants.quote_title_addr) for q in json_data
            ]
        except json.JSONDecodeError as e:
            logging.error(f'bad json data: {e}')
            raw_quotes = [constants.error_text]
            error = True
    else:
        logging.warning(
            f'error accessing {constants.quotes_url}: code {r.status_code}')
        raw_quotes = [constants.error_text]
        error = True

    if len(raw_quotes) < 1:
        raw_quotes = [constants.error_text]
        error = True

    return (raw_quotes, error)
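
All of these examples lean on the same dictor contract: a dotted path into nested dicts and lists that returns None (or a supplied default) instead of raising KeyError/IndexError. A minimal sketch with hypothetical payload data:

from dictor import dictor

payload = {'data': {'children': [{'data': {'title': 'first quote'}}]}}
dictor(payload, 'data.children.0.data.title')    # 'first quote'
dictor(payload, 'data.children.9.data.title')    # None, no IndexError
dictor(payload, 'data.kind', default='Listing')  # falls back to the default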
Example #2
 def pokemon_weak(self):
     url_weak = dictor(self.pokemon_data, 'types.0.type.url')
     url_weak = requests.get(url_weak)
     json_weak = url_weak.json()
     return dictor(json_weak, 'damage_relations.double_damage_from.0.name'), \
            dictor(json_weak, 'damage_relations.double_damage_from.1.name'), \
            dictor(json_weak, 'damage_relations.double_damage_from.2.name')
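
Because dictor swallows out-of-range indices, a type with fewer than three double-damage relations simply yields None entries here rather than raising. A sketch with hypothetical response data:

from dictor import dictor

json_weak = {'damage_relations': {'double_damage_from': [{'name': 'ground'}]}}
dictor(json_weak, 'damage_relations.double_damage_from.0.name')  # 'ground'
dictor(json_weak, 'damage_relations.double_damage_from.1.name')  # None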
Example #3
    def update_data(file_path_to_write: str, file_path_to_read: str,
                    file_ending: str) -> None:
        """
        Collects special chosen fields from the file_path_to_read and writes them into the file_path_to_write.
        :param file_path_to_write: The output file path to add the special fields to.
        :param file_path_to_read: The input file path to read the special fields from.
        :param file_ending: The file's extension ('yml' or 'json').
        :return: None
        """

        pack_obj_data, _ = get_dict_from_file(file_path_to_read)
        fields: list = DELETED_YML_FIELDS_BY_DEMISTO if file_ending == 'yml' else DELETED_JSON_FIELDS_BY_DEMISTO
        # Creates a nested-complex dict of all fields to be deleted by Demisto.
        # We need the dict to be nested, to easily merge it later to the file data.
        preserved_data: dict = unflatten(
            {
                field: dictor(pack_obj_data, field)
                for field in fields if dictor(pack_obj_data, field)
            },
            splitter='dot')

        if file_ending == 'yml':
            with open(file_path_to_write, 'r') as yf:
                file_yaml_object = yaml.load(yf)
            if pack_obj_data:
                merge(file_yaml_object, preserved_data)
            with open(file_path_to_write, 'w') as yf:
                yaml.dump(file_yaml_object, yf)

        elif file_ending == 'json':
            file_data: dict = get_json(file_path_to_write)
            if pack_obj_data:
                merge(file_data, preserved_data)
            with open(file_path_to_write, 'w') as jf:
                json.dump(obj=file_data, fp=jf, indent=4)
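
The preserved_data construction above pairs dictor lookups with unflatten, which (assuming it is the flatten-dict package's unflatten) rebuilds nesting from dotted keys. A minimal sketch with hypothetical pack data:

from dictor import dictor
from flatten_dict import unflatten

pack_obj_data = {'script': {'dockerimage': 'demisto/python3'}, 'fromversion': '5.0.0'}
fields = ['script.dockerimage', 'fromversion', 'missing.field']

# paths that resolve to nothing are filtered out, then the nesting is rebuilt
preserved = unflatten(
    {f: dictor(pack_obj_data, f) for f in fields if dictor(pack_obj_data, f)},
    splitter='dot')
# {'script': {'dockerimage': 'demisto/python3'}, 'fromversion': '5.0.0'}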
Example #4
def test_non_existent_value():
    ''' test a non existent key search '''
    result = dictor(BASIC, 'non.existent.value')
    eq_(None, result)

    result = dictor({'lastname': 'Doe'}, 'foo.lastname')
    eq_(None, result)
Example #5
def information_extraction(data, page_id, final_no_top_results):

    response = data.json()

    final_information_dict = {}
    final_information_dict['URL'] = data.url

    final_information_dict["Title"] = dictor(response,
                                             f"query.pages.{page_id}0.title")
    extraction = dictor(response, f"query.pages.{page_id}0.extract")

    component = ""
    component_list = []

    for each in extraction:
        if each != '"' and each != ' ' and each.isalnum():
            component += each
        if each == '"' or each == ' ':
            component_list.append(component)
            component = ""

    # filter into a fresh list; calling remove() while iterating skips elements
    component_list = [each for each in component_list if each != '']

    c = collections.Counter(component_list)

    final_key = f"Top {final_no_top_results} Words"

    final_information_dict[final_key] = {}

    # most_common() yields (word, count) pairs; keying by word keeps
    # equally frequent words from overwriting each other
    for word, count in c.most_common(final_no_top_results):
        final_information_dict[final_key][word] = count

    yaml_final_view(final_information_dict)
Example #6
def test_parsing_large_JSON():
    ''' test parsing large JSON file '''
    result = dictor(LARGE, '0.tags.3')
    eq_('sunt', result)

    result = dictor(LARGE, '1.friends.2.name')
    eq_('Tanisha Saunders', result)
Example #7
    def handle(self, *args, **options):
        datasets = self.fetch_all_datasets()

        total_count = len(datasets)
        created_count = 0

        # datasets = filter(lambda ds: 'covid' in dictor(ds, "dataset.dataset_id"), datasets)
        keywords_datasets = dict()
        for dataset in datasets:
            links = map_links(dataset['links'])
            metas = dictor(dataset, 'dataset.metas')

            dataset_id = dictor(dataset, 'dataset.dataset_id')
            theme = dictor(metas, 'default.theme.0')
            title = dictor(metas, 'default.title')
            description = dictor(metas, 'default.description') or ''
            modified = parse_datetime(dictor(metas, 'default.modified'))
            features = dictor(dataset, 'dataset.features')
            exports = fetch_exports(links['exports'])
            popularity_score = dictor(metas, 'explore.popularity_score')
            nb_downloads = dictor(metas, 'explore.download_count')

            obj, created = ProxyDataset.objects.update_or_create(
                id=dataset_id,
                defaults={
                    'theme': Theme.objects.get(name=theme) if theme else None,
                    'title': title,
                    'description': description,
                    'modified': modified,
                    'has_map': 'geo' in features,
                    'has_analysis': 'analyze' in features,
                    'has_calendar': 'calendar' in features,
                    'has_custom': 'custom_view' in features,
                    'exports': exports,
                    'popularity_score': popularity_score,
                    'nb_downloads_api': nb_downloads,
                    'nb_downloads_local': 0,
                }
            )

            if created:
                created_count += 1
                self.stdout.write(f'{dataset_id!r} proxy dataset created.')
            else:
                self.stdout.write(f'{dataset_id!r} proxy dataset updated.')

            keywords = dictor(metas, 'default.keyword')
            keywords_datasets = generate_keywords(obj, keywords, keywords_datasets)

        self.stdout.write(self.style.SUCCESS(f'Done: {total_count} datasets, {created_count} added.'))

        for keyword, datasets_occurence in keywords_datasets.items():
            keyword_obj, created = Keyword.objects.get_or_create(word=keyword)
            for dataset, occurence in datasets_occurence:
                dsship, created = Datasetship.objects.get_or_create(keyword=keyword_obj, dataset=dataset)
                if created or dsship.occurence != occurence:
                    dsship.occurence = occurence
                    dsship.save()
                verb = "Add" if created else "Update"
                self.stdout.write(f'{verb} keyword {keyword!r} for {dataset!r}.')
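
The dictor calls in this command all rely on the same fallback behavior: a path that does not resolve returns None, which is why the description lookup is guarded with or ''. A sketch with hypothetical metas data:

from dictor import dictor

metas = {'default': {'title': 'Air quality', 'theme': ['Environment']}}
dictor(metas, 'default.theme.0')           # 'Environment'
dictor(metas, 'default.description')       # None -> the or '' above supplies ''
dictor(metas, 'explore.popularity_score')  # None for a missing branch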
Example #8
    def trace(self,
              data: ARRAY_LIKE,
              data_err: Optional[ARRAY_LIKE] = None,
              x: Optional[ARRAY_LIKE] = None,
              text: Optional[ARRAY_LIKE] = None,
              mode: Optional[str] = None,
              name: Optional[str] = None,
              hover_data: Optional[ARRAY_LIKE] = None,
              hover_template: Optional[str] = None,
              color: Optional[str] = None,
              trace_kwargs: Optional[dict] = None) -> go.Scatter:
        """Just generates a trace for a figure

        Args: hover_data: Shape should be (N-datas per pt, data.shape)  Note: plotly does this the other way around (
            which is wrong)

        """
        data, data_err, x = [
            np.asanyarray(arr) if arr is not None else None
            for arr in [data, data_err, x]
        ]
        if data.ndim != 1:
            logger.warning(f'Invalid data shape: {data.shape}')
            raise ValueError(
                f'data.shape: {data.shape}. Invalid shape, should be 1D for a 1D trace'
            )

        if trace_kwargs is None:
            trace_kwargs = {}
        trace_kwargs['marker'] = dictor.dictor(trace_kwargs,
                                               'marker',
                                               default=dict(
                                                   size=5,
                                                   line=dict(width=1),
                                                   symbol='cross-thin'))
        trace_kwargs['line'] = dictor.dictor(trace_kwargs,
                                             'line',
                                             default=dict(width=2))
        if color is not None:
            trace_kwargs['marker'].update(color=color)
            trace_kwargs['marker']['line'].update(color=color)
            trace_kwargs['line'].update(color=color)

        x = self._get_x(x)
        mode = self._get_mode(mode)

        data, x = self._resample_data(
            data,
            x)  # Makes sure not plotting more than self.MAX_POINTS in any dim
        if hover_data:  # Also needs same dimensions in x
            hover_data = np.asanyarray(hover_data)
            if (s := hover_data.shape[1:]) == data.shape:
                hover_data = np.moveaxis(
                    hover_data, 0, -1)  # This is how plotly likes the shape
            elif (s := hover_data.shape[:-1]) == data.shape:
                pass
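
The two dictor.dictor calls above act like a nested-path setdefault: an existing trace_kwargs entry wins, otherwise the default is used. A minimal sketch:

from dictor import dictor

trace_kwargs = {'marker': {'size': 10}}
dictor(trace_kwargs, 'marker', default={'size': 5})  # {'size': 10}, caller's value kept
dictor({}, 'marker', default={'size': 5})            # {'size': 5}, default applied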
Example #9
def awg_from_json(awg_json):
    """Converts from standardized exp json to my dictionary of values (to be put into AWG NamedTuple)

    Args:
        awg_json (dict): The AWG json from exp sweep_logs in standard form

    Returns:
        dict: AWG data in a dict with my keys (in AWG NamedTuple)

    """

    AWG_KEYS = ['AWG_used', 'AW_Waves', 'AW_Dacs', 'waveLen', 'numADCs', 'samplingFreq', 'measureFreq', 'numWaves', 'numCycles',
                'numSteps']
    if awg_json is not None:
        # Check keys make sense
        for k in awg_json:
            check_key(k, AWG_KEYS)

        d = {}
        d['awg_used'] = bool(dictor(awg_json, 'AWG_used', default=1))  # 2021-12-17 -- Added to ScanController
        waves = dictor(awg_json, 'AW_Waves', '')
        dacs = dictor(awg_json, 'AW_Dacs', '')
        d['outputs'] = {int(k): [int(val) for val in list(v.strip())] for k, v in zip(waves.split(','), dacs.split(','))}  # e.g. {0: [1,2], 1: [3]}
        d['wave_len'] = dictor(awg_json, 'waveLen')
        d['num_adcs'] = dictor(awg_json, 'numADCs')
        d['samplingFreq'] = dictor(awg_json, 'samplingFreq')
        d['measureFreq'] = dictor(awg_json, 'measureFreq')
        d['num_cycles'] = dictor(awg_json, 'numCycles')
        d['num_steps'] = dictor(awg_json, 'numSteps')
        return d
    else:
        return None
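
The outputs comprehension is the densest line here: it pairs comma-separated wave numbers with strings of DAC digits. A sketch with hypothetical values:

waves, dacs = '0,1', '12, 3'
outputs = {int(k): [int(val) for val in list(v.strip())]
           for k, v in zip(waves.split(','), dacs.split(','))}
# outputs == {0: [1, 2], 1: [3]}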
Example #10
def get_coord(*args, **kwargs):
    '''USER FACING HELPER FUNCTION:
    lookup and print the latitude, longitude of a place given as a string:
    
    usage: --run_plugin_func met_no.get_coord "Horsetooth Reservoir, Fort Collins CO, USA"
    
    Args:
        place(`str`): "City, Province, Country"
    
    Returns:
        `tuple`: lat, lon
        
    Example:
        get_coord("Denver, Colorado, USA")
        get_coord("Bamako, Mali")
        %U'''
    if args:
        place = args[0]
    elif 'place' in kwargs:
        place = kwargs['place']
    else:
        place = None

    lat, lon = None, None
    if not place:
        print('lookup the lat/lon of city, town or geographic area')
        print('usage: met_no.get_coord "City, Area, Country"')
        print('met_no.get_coord "Golden Colorado, USA"')
        return (lat, lon)
    osm_endpoint = constants.osm_endpoint
    osm_query = constants.osm_query
    place_quote = quote(place)
    url = f'{osm_endpoint}{place_quote}{osm_query}'
    try:
        result = requests.get(url)
    except requests.RequestException as e:
        logging.warning(f'could not process request: {e}')
        # `result` would be unbound past this point, so return early
        return (lat, lon)
    if result.status_code == 200:
        if len(result.json()) > 0:
            lat = dictor(result.json()[0], 'lat')
            lon = dictor(result.json()[0], 'lon')
            display_name = dictor(result.json()[0], 'display_name')
            print(f'{display_name}\n{place}:\nlat: {float(lat):.3f}\nlon: {float(lon):.3f}')
        else:
            print(f'no data was returned for place: {place}')
            print('check the spelling or try a more general query')
    else:
        print(f'No valid data was returned: status_code: {result.status_code}')
    
    return (lat, lon)
Example #11
def capturarTokenProd(application_id, cust_id):

    # data = []

    url = "http://api.internal.ml.com/applications/" + application_id + "/credentials?caller.id=" + cust_id
    response = requests.get(url)
    response.encoding = "Latin-1"
    comments = json.loads(response.content)
    access_token = str(dictor(comments, "0.access_token"))
    test_access_token = str(dictor(comments, "0.test_access_token"))

    # data.append[{'token_prod': access_token,'token_sand': test_access_token}]
    return access_token
Example #12
 def is_valid(self):
     _get_single_museum_data(self.gk_museum_id)
     if not self.single_museum_data:
         return False
     if not (
         dictor(self.single_museum_data, "regDate") is None
         or len(dictor(self.single_museum_data, "identifiers")) == 0
     ):
         for identifier in self.single_museum_data["identifiers"]:
             if identifier["type"] == "INN":
                 self.museum_inn = str(identifier["value"])
                 return True
     return False
Example #13
    def get_paths(self, s_file: str = None):

        import json
        from dictor import dictor

        self.s_file = self.s_flt_test_data
        print(self.s_file)
        faults = []
        try:
#            with open('../data/NZAFD/JSON/NZAFD_Oct_2020_WGS84.json') as json_file:
#                data = json.load(json_file)
#            with open('../data/NZAFD/JSON/NZAFD_WGS84-test.json') as json_file:
#                data = json.load(json_file)
            ''' change parameter to switch between test, full downloaded, and latest data sets
                test: s_flt_test_data
                full: s_flt_full_data
                new: s_flt_new_data '''
            with open(s_file) as json_file:
                data = json.load(json_file)

            fault_path_count = 1
            for each_feature in range(len(data['features'])):
                s_flt_id = dictor(data, 'features.{0}.attributes.FID'.format(each_feature))
                s_flt_name = dictor(data, 'features.{0}.attributes.NAME'.format(each_feature))
                s_flt_uid = str(s_flt_id) + " " + s_flt_name
                if s_flt_uid == " ":
                    s_flt_uid = 'Unnamed fault ' + str(fault_path_count)
                    fault_path_count += 1
                points = []
                path = dictor(data,'features.{}.geometry.paths.0'.format(each_feature))
                for each_coordinate in range(len(path)):
                    points.append([path[each_coordinate][0],path[each_coordinate][1]])
                faults.append([s_flt_uid,points])

        except Exception as err:
            print("Error message:", err)
        return faults
Example #14
    def get_sweeplogs(self) -> dict:
        sweep_logs = super().get_sweeplogs()
        # Then need to change the format of BabyDAC and FastDAC
        bdacs_json = dictor(sweep_logs, 'BabyDAC', None)
        if bdacs_json is not None:
            sweep_logs['BabyDAC'] = convert_babydac_json(bdacs_json)

        fdacs_json = dictor(sweep_logs, 'FastDAC', None)
        if fdacs_json is not None:
            hdf = self.get_exp_dat_hdf()
            num_adc = get_num_adc_from_hdf(hdf)
            hdf.close()
            sweep_logs['FastDAC'] = convert_fastdac_json(fdacs_json, num_adc)
        return sweep_logs
Example #15
def get_token():
    """
    :return: ims token for authorization
    """
    ims_token = dictor(cfg, PLATFORM + ".ims_token", checknone=True)
    api_key = dictor(cfg, ENTERPRISE + ".api_key", checknone=True)
    org_id = dictor(cfg, ENTERPRISE + ".org_id", checknone=True)

    if ims_token == "<ims_token>":
        # Server parameters
        ims_host = dictor(cfg, SERVER + ".ims_host", checknone=True)
        ims_endpoint_jwt = dictor(cfg,
                                  SERVER + ".ims_endpoint_jwt",
                                  checknone=True)

        # Enterprise parameters used to construct JWT
        client_secret = dictor(cfg,
                               ENTERPRISE + ".client_secret",
                               checknone=True)
        tech_acct = dictor(cfg, ENTERPRISE + ".tech_acct", checknone=True)
        priv_key_filename = dictor(cfg,
                                   ENTERPRISE + ".priv_key_filename",
                                   checknone=True)

        # read private key from file
        with open(priv_key_filename, "r") as priv_key_file:
            priv_key = priv_key_file.read()
        ims_token = "Bearer " + get_access_token(ims_host, ims_endpoint_jwt,
                                                 org_id, tech_acct, api_key,
                                                 client_secret, priv_key)
    if not ims_token.startswith("Bearer "):
        ims_token = "Bearer " + ims_token

    return ims_token
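
checknone=True makes dictor raise ValueError when a path resolves to None, so a missing config entry fails fast instead of silently propagating None. A sketch with a hypothetical cfg:

from dictor import dictor

cfg = {'enterprise': {'api_key': 'abc123'}}
dictor(cfg, 'enterprise.api_key', checknone=True)  # 'abc123'
dictor(cfg, 'enterprise.org_id', checknone=True)   # raises ValueError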
Example #16
def tcp(ip, port, feed, group, count, args):
    ''' test TCP connectivity from host '''
    ## check if connection is already established, get ordered dict of all active netstat connections
    od = netstat()

    ## if IP is already connected on this host, skip to next IP
    if port in od:
        if ip in od[port]:
            print(
                f'{count} [tcp] {group} {feed} {GREEN} Already connected to this host: {ip}:{port} {RESET}'
            )
            return

    # check if hostname or IP, if hostname, get actual IP of target
    try:
        socket.inet_aton(ip)
        ip_addr = ip
    except socket.error:
        try:
            ip_addr = socket.gethostbyname(ip)
        except socket.gaierror:
            print(f"[tcp] {RED}IP is invalid: {ip}{RESET}")
            return
    except TypeError:
        print(f"[tcp] {RED}IP is invalid: {ip} type error{RESET}")
        return

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(tcp_timeout)
    result = s.connect_ex((ip_addr, int(port)))
    s.close()
    route = get_route(ip)

    # connection return code
    return_code = {
        '111': ['Refused', f'{RED}'],
        '11': ['Timeout', f'{ORANGE}'],
        '0': ['Received', f'{GREEN}']
    }

    status = dictor(return_code, f'{result}.0')
    color = dictor(return_code, f'{result}.1')

    if args.csv:
        print(f'tcp,{group},{feed},{ip},{port},{status}')
    else:
        print(
            f'{count} [tcp] {group} {feed} {color}{status} {ip}:{port}{RESET} {CYAN}via {route}{RESET}'
        )
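
dictor path segments can index into list values, which is what the return_code lookup exploits: the errno becomes the key and the position selects status or color. A minimal sketch:

from dictor import dictor

return_code = {'111': ['Refused', 'red'], '0': ['Received', 'green']}
dictor(return_code, '111.0')  # 'Refused'
dictor(return_code, '42.0')   # None -> an unmapped errno prints as None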
Example #17
def get_headers():
    """
    :return: headers
    """
    api_key = dictor(cfg, ENTERPRISE + ".api_key", checknone=True)
    org_id = dictor(cfg, ENTERPRISE + ".org_id", checknone=True)
    headers = {}
    ims_token = get_token()
    if ims_token is not None:
        headers = {
            "Authorization": ims_token,
            "x-api-key": api_key,
            "x-gw-ims-org-id": org_id
        }
    return headers
Example #18
 def get_cob_iob(self, params={}):
     """Fetch device status json object
     Args:
       params:
         Mongodb style query params. For example, you can do things like:
             get_profiles({'count':0, 'find[startDate][$gte]': '2017-03-07T01:10:26.000Z'})
     Returns:
       tuple: (COB predictions, IOB predictions, current IOB)
     """
     r = requests.get(self.site_url + '/api/v1/devicestatus',
                      headers=self.request_headers(),
                      params=params or {'count': 0})
     data = r.json()  # parse the response once instead of three times
     cob = dictor(data, "0.openaps.suggested.predBGs.COB")
     iobpred = dictor(data, "0.openaps.suggested.predBGs.IOB")
     iob = dictor(data, "0.openaps.suggested.IOB")
     return cob, iobpred, iob
Example #19
def signin(request: HttpRequest):
    next_url = request.GET.get("next") or get_default_next_url()

    try:
        if "next=" in unquote(next_url):
            parsed_next_url = urlparse.parse_qs(
                urlparse.urlparse(unquote(next_url)).query)
            next_url = dictor(parsed_next_url, "next.0")
    except Exception:
        next_url = request.GET.get("next") or get_default_next_url()

    # Only permit signin requests where the next_url is a safe URL
    allowed_hosts = set(settings.SAML2_AUTH.get("ALLOWED_REDIRECT_HOSTS", []))
    if parse_version(get_version()) >= parse_version("2.0"):
        url_ok = is_safe_url(next_url, allowed_hosts)
    else:
        url_ok = is_safe_url(next_url)

    if not url_ok:
        return HttpResponseRedirect(
            get_reverse([denied, "denied", "django_saml2_auth:denied"]))

    request.session["login_next_url"] = next_url

    saml_client = get_saml_client(get_assertion_url(request), acs)
    _, info = saml_client.prepare_for_authenticate(relay_state=next_url)

    redirect_url = None

    if "Location" in info["headers"]:
        redirect_url = info["headers"]["Location"]

    return HttpResponseRedirect(redirect_url)
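
parse_qs returns every query value as a list, which is why the dictor path ends in '.0'. A sketch with a hypothetical URL:

from urllib.parse import parse_qs, urlparse
from dictor import dictor

qs = parse_qs(urlparse('https://app.example.com/acs?next=/dashboard').query)
dictor(qs, 'next.0')  # '/dashboard'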
Example #20
def flatten_to_string(input_obj):
    """
    Flatten a document post Mongo enrichment to produce a nice simple string that can be used
    to extract entities.

    :param input_obj:
    :return:
    """
    source_keys = [
        "mongo.covering_dates",
        "mongo.creators.corporate_body_name",
        "mongo.note",
        "mono.physical_description_form",
        "mongo.scope_and_content.description",
        "mongo.related_material.description",
        "mongo.separated_material.description",
        "mongo.title",
        "title",
        "description",
        "mongo.former_reference_dept",
        "mongo.administrative_background",
        "mongo.arrangement",
        "mongo.custodial_history",
    ]
    flat = " ".join(
        [x for x in [dictor(input_obj, k) for k in source_keys] if x])
    return flat
Example #21
    def resume(self, endpoint):
        data = fetch_dict(endpoint)

        if data and dictor(data, '_checkpoint.records'):
            records = data['_checkpoint']['records']

            for record in records:
                self._registry_route_records.append(to_route_record(record))
Example #22
def get_headers():
    """
    :return: headers
    """
    api_key = dictor(cfg, ENTERPRISE + ".api_key", checknone=True)
    org_id = dictor(cfg, ENTERPRISE + ".org_id", checknone=True)
    sandbox_name = dictor(cfg, TITLES + '.sandbox_name', default="prod")
    headers = {}
    ims_token = get_token()
    if ims_token is not None:
        headers = {
            "Authorization": ims_token,
            "x-api-key": api_key,
            "x-gw-ims-org-id": org_id,
            'x-sandbox-name': sandbox_name
        }
    return headers
Example #23
 def _set_fastdac(self, fastdac_dict):
     fds = _dac_logs_to_dict(fastdac_dict)
     # self.set_group_attr('FastDACs', fds)
     additional_attrs = {}
     for name, k in zip(['sampling_freq', 'measure_freq', 'visa_address'], ['SamplingFreq', 'MeasureFreq', 'visa_address']):
         additional_attrs[name] = dictor(fastdac_dict, k, None)
     fastdac = FastDac(**additional_attrs, dacs=fds)
     fastdac.save_to_hdf(self.hdf.group, name='FastDACs')
Example #24
 def set_simple_attrs(group, json):
     """Sets top level attrs in Dat HDF from sweeplogs"""
     group.attrs['comments'] = dictor(json, 'comment', '')
     group.attrs['filenum'] = dictor(json, 'filenum', 0)
     group.attrs['x_label'] = dictor(json, 'axis_labels.x', 'None')
     group.attrs['y_label'] = dictor(json, 'axis_labels.y', 'None')
     group.attrs['current_config'] = dictor(json, 'current_config', None)
     group.attrs['time_completed'] = dictor(json, 'time_completed', None)
     group.attrs['time_elapsed'] = dictor(json, 'time_elapsed', None)
     group.attrs['part_of'] = get_part(dictor(json, 'comment', ''))
Example #25
 def save_to_file(clicks, fig, save_name):
     if clicks and fig:
         if not save_name:
             save_name = dictor(fig, 'layout.title.text',
                                NameResetter().get_resetting_fig_name())
         SharedFigs().add_fig(save_name, fig)
         return True
     else:
         raise PreventUpdate
Example #26
def capturarApp(cust_id):

    url = "http://api.internal.ml.com/applications/search?owner_id=" + cust_id
    response = requests.get(url)
    response.encoding = "Latin-1"
    comments = json.loads(response.content)
    # print (comments)
    application_id = str(dictor(comments, "0.id"))
    return application_id
Example #27
 def test_soundex_representation_failure(self):
     tester = app.test_client(self)
     response = tester.get("/name/" + fail_test_name_1 + "/" +
                           fail_test_name_2)
     data = json.loads(response.get_data(as_text=True))
     soundex_name_1 = dictor(data, "soundex_representaion.name_1")
     soundex_name_2 = dictor(data, "soundex_representaion.name_2")
     assertion_soundex_represent = 1
     if soundex_name_1 != soundex_name_2:
         assertion_soundex_represent = 0
     print(
         "[Success]Names don't match, Soundex representation is '%s'[%s] and '%s'[%s]"
         % (
             soundex_name_1,
             fail_test_name_1,
             soundex_name_2,
             fail_test_name_2,
         ))
     self.assertEqual(assertion_soundex_represent, 0)
Example #28
def set_slider_vals(fig: dict):
    if fig:
        # y = dat.Data.get_data('y')
        d = dictor(fig, 'data', [])
        if d:
            d = d[0]
            y = dictor(d, 'y', None)
            # 'z' only exists for 2D data or something that can be sliced
            z = dictor(d, 'z', None)
            if y is not None and z is not None:
                start, stop = y[0], y[-1]
                step = abs((stop - start) / len(y))
                marks = {
                    float(v): f'{v:.3g}'
                    for v in np.linspace(start, stop, 10)
                }
                return [start, stop, step, marks]
    return [0, 1, 0.1, {0: '0', 0.5: '0.5', 1: '1'}]
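
The marks comprehension maps ten evenly spaced slider positions to short labels; a minimal sketch:

import numpy as np

marks = {float(v): f'{v:.3g}' for v in np.linspace(0.0, 1.0, 10)}
# {0.0: '0', 0.111: '0.111', ..., 0.889: '0.889', 1.0: '1'}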
Example #29
def check_logs(host):
    ''' parses System Event log and checks for Warnings or Errors, emails if found '''
    ip = (dictor(config, 'hosts.{}'.format(host), checknone=True))
    logdump = get_logs(ip)

    event_dict = {}
    
    if dictor(logdump, 'Members'):
        for event in dictor(logdump, 'Members'):

            # get Warnings or Errors only
            if not event['Severity'].upper() == 'OK':
                # actual timestamp of event in remote timezone
                idrac_raw = arrow.get(event['Created'])
                idrac_event = idrac_raw.to(local_tz).format('YYYY-MM-DD HH:mm:ss')
                idrac_event = arrow.get(idrac_event, 'YYYY-MM-DD HH:mm:ss')

                # actual current time right now
                now = arrow.now(dictor(config, 'local_tz')).format('YYYY-MM-DD HH:mm:ss')
                now = arrow.get(now, 'YYYY-MM-DD HH:mm:ss')

                # check if errors/warnings are old or brand new
                diff = now - idrac_event  # now minus event, so a recent event gives days == 0
                days = diff.days
                hours, remainder = divmod(diff.seconds, 3600)  # difference in hours

                # latest events that happened today
                if abs(days) == 0:

                    # log the errors
                    log.warning('ALERT: iDrac Error or Warning detected..')
                    log.warning('iDrac instance: %s' % host)
                    log.warning('timestamp (local timezone): %s' % idrac_raw)
                    log.warning('severity: %s' % event['Severity'])
                    log.warning('message: %s' % event['Message'])
                    event_dict[event['Created']] = {"severity": event['Severity'], "message": event['Message']}

    # if non-"OK" events detected, send email alert
    if event_dict:
        html = render_template(os.getcwd()+'/notification.j2', vars=event_dict)
        subject = f'iDRAC Alert: {host}'
        send_email(to_addr=notif_emails, from_addr=from_addr, smtp_host=smtp_host, 
            subject=subject, body=html)
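
The arrow round trip above normalizes the iDRAC timestamp into the local timezone before comparing. A sketch with a hypothetical timezone:

import arrow

raw = arrow.get('2021-06-01T14:00:00Z')
local = raw.to('America/Denver').format('YYYY-MM-DD HH:mm:ss')
# '2021-06-01 08:00:00' (UTC-6 during daylight saving time)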
Example #30
def get_metadata(user_id: Optional[str] = None) -> Mapping[str, Any]:
    """Returns metadata information, either by running the GET_METADATA_AUTO_CONF_URLS hook function
    if available, or by checking and returning a local file path or the METADATA_AUTO_CONF_URL. URLs
    are always validated and invalid URLs will be either filtered or raise a SAMLAuthError
    exception.

    Args:
        user_id (str, optional): If passed, it will be further processed by the
            GET_METADATA_AUTO_CONF_URLS trigger, which will return the metadata URL corresponding to
            the given user identifier, either email or username. Defaults to None.

    Raises:
        SAMLAuthError: No metadata URL associated with the given user identifier.
        SAMLAuthError: Invalid metadata URL.

    Returns:
        Mapping[str, Any]: Returns a SAML metadata object as dictionary
    """
    get_metadata_trigger = dictor(settings.SAML2_AUTH,
                                  "TRIGGER.GET_METADATA_AUTO_CONF_URLS")
    if get_metadata_trigger:
        metadata_urls = run_hook(get_metadata_trigger, user_id)
        if metadata_urls:
            # Filter invalid metadata URLs
            filtered_metadata_urls = list(
                filter(lambda md: validate_metadata_url(md["url"]),
                       metadata_urls))
            return {"remote": filtered_metadata_urls}
        else:
            raise SAMLAuthError(
                "No metadata URL associated with the given user identifier.",
                extra={
                    "exc_type": ValueError,
                    "error_code": NO_METADATA_URL_ASSOCIATED,
                    "reason": "There was an error processing your request.",
                    "status_code": 500
                })

    metadata_local_file_path = settings.SAML2_AUTH.get(
        "METADATA_LOCAL_FILE_PATH")
    if metadata_local_file_path:
        return {"local": [metadata_local_file_path]}
    else:
        single_metadata_url = settings.SAML2_AUTH.get("METADATA_AUTO_CONF_URL")
        if validate_metadata_url(single_metadata_url):
            return {"remote": [{"url": single_metadata_url}]}
        else:
            raise SAMLAuthError(
                "Invalid metadata URL.",
                extra={
                    "exc_type": ValueError,
                    "error_code": INVALID_METADATA_URL,
                    "reason": "There was an error processing your request.",
                    "status_code": 500
                })