Example #1
    def put_blob(
        self,
        image_reference: str,
        digest: str,
        octets_count: int,
        data: requests.models.Response,
        max_chunk=1024 * 1024 * 1,  # 1 MiB
        mimetype: str = 'application/data',
    ):
        head_res = self.head_blob(
            image_reference=image_reference,
            digest=digest,
        )
        if head_res.ok:
            logger.info(f'skipping blob upload {digest=} - already exists')
            return

        data_is_requests_resp = isinstance(data, requests.models.Response)
        data_is_generator = isinstance(data, typing.Generator)
        data_is_filelike = hasattr(data, 'read')
        data_is_bytes = isinstance(data, bytes)

        if octets_count < max_chunk or data_is_filelike or data_is_requests_resp or data_is_bytes:
            if data_is_requests_resp:
                data = data.content
            elif data_is_generator:
                # at least GCR does not like chunked-uploads; if small enough, workaround this
                # and create one (not-that-big) bytes-obj
                _data = bytes()
                for chunk in data:
                    _data += chunk
                data = _data
            elif data_is_filelike:
                pass  # if filelike, http.client will handle streaming for us

            return self._put_blob_single_post(
                image_reference=image_reference,
                digest=digest,
                octets_count=octets_count,
                data=data,
            )
        else:
            if data_is_requests_resp:
                with data:
                    return self._put_blob_chunked(
                        image_reference=image_reference,
                        digest=digest,
                        octets_count=octets_count,
                        data_iterator=data.iter_content(chunk_size=max_chunk),
                        chunk_size=max_chunk,
                    )
            elif data_is_generator:
                return self._put_blob_chunked(
                    image_reference=image_reference,
                    digest=digest,
                    octets_count=octets_count,
                    data_iterator=data,
                    chunk_size=max_chunk,
                )
            else:
                raise NotImplementedError
Example #2
def get_isoline_generator(r: requests.models.Response) -> typing.Iterator[str]:
    if r.status_code != 200:
        print("Something went wrong with the request")
        print(json.dumps(r.json(), indent=1))
        return
        return
    data = r.json()
    isoline = data["response"]["isoline"][0]["component"][0]["shape"]
    for el in isoline:
        yield el.replace(",", "")
Example #3
 def _create_schema(self, response: requests.models.Response, schema):
     attribute_map = schema.attribute_map
     attribute_map_values = attribute_map.values()
     attribute_map_reversed = {v: k for k, v in attribute_map.items()}
     if not isinstance(response, dict):
         response = response.json()
     attributes = dict()
     for key, value in response.items():
         if key in attribute_map_values:
             attributes[attribute_map_reversed[key]] = response.get(key)
     return schema(**attributes)
Example #4
def raise_for_status(r: requests.models.Response) -> None:
    try:
        r.raise_for_status()
    except requests.HTTPError as e:
        try:
            payload = r.json()
        except ValueError:  # response body is not JSON
            pass
        else:
            raise requests.HTTPError(payload) from e
        raise e
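A minimal usage sketch for the helper above; the endpoint is a placeholder and only illustrates that the re-raised HTTPError carries the JSON body when the server returned one:

import requests

resp = requests.get('https://httpbin.org/status/404')  # placeholder test endpoint
try:
    raise_for_status(resp)
except requests.HTTPError as err:
    print(err)  # JSON body if the server sent one, otherwise the original error text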
Example #5
def isValidLink(link: requests.models.Response) -> bool:
    """
    Returns True if the response corresponds to a valid webpage, False otherwise.
    """

    try:
        link.raise_for_status()
    except requests.HTTPError:
        if DEBUGGING:
            print("Error in %s." % (link))
        return False
    return True
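A quick sketch of how the check might be called, assuming the module-level DEBUGGING flag it references is defined; the URLs are placeholders:

import requests

DEBUGGING = True  # assumption: flag used by isValidLink, defined elsewhere in the original module
print(isValidLink(requests.get('https://example.com')))          # True for a 200 response
print(isValidLink(requests.get('https://example.com/missing')))  # False for a 404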
Example #6
    def put_blob(
            self,
            image_reference: str,
            digest: str,
            octets_count: int,
            data: requests.models.Response,
            max_chunk=1024 * 1024 * 1,  # 1 MiB
    ):
        data_is_requests_response = isinstance(data, requests.models.Response)
        data_is_generator = isinstance(data, typing.Generator)
        data_is_filelike = hasattr(data, 'read')

        if octets_count < max_chunk or data_is_filelike:
            if data_is_requests_response:
                data = data.content
            elif data_is_generator:
                # at least GCR does not like chunked-uploads; if small enough, workaround this
                # and create one (not-that-big) bytes-obj
                _data = bytes()
                for chunk in data:
                    _data += chunk
                data = _data
            elif data_is_filelike:
                pass  # if filelike, http.client will handle streaming for us

            return self._put_blob_single_post(
                image_reference=image_reference,
                digest=digest,
                octets_count=octets_count,
                data=data,
            )
        else:
            if data_is_requests_response:
                with data:
                    return self._put_blob_chunked(
                        image_reference=image_reference,
                        digest=digest,
                        octets_count=octets_count,
                        data_iterator=data.iter_content(chunk_size=max_chunk),
                        chunk_size=max_chunk,
                    )
            elif data_is_generator:
                return self._put_blob_chunked(
                    image_reference=image_reference,
                    digest=digest,
                    octets_count=octets_count,
                    data_iterator=data,
                    chunk_size=max_chunk,
                )
            else:
                raise NotImplementedError
Example #7
def raise_for_status(r: requests.models.Response) -> None:
    """Attempts to include relevant JSON response info in exception raised by
    "unsuccessful" request.
    """
    try:
        r.raise_for_status()
    except requests.HTTPError as e:
        try:
            payload = r.json()
        except ValueError:  # response body is not JSON
            pass
        else:
            raise requests.HTTPError(payload) from e
        raise e
Example #8
    def exception_handler(response: requests.models.Response):
        """
        Handle error in the response and display error message based on status code.

        :type response: ``requests.models.Response``
        :param response: response from API.

        :raises: raise DemistoException based on status code of response.
        """
        err_msg = ""
        if response.status_code in HTTP_ERROR:
            err_msg = HTTP_ERROR[response.status_code]
        elif response.status_code > 500:
            err_msg = HTTP_ERROR[500]
        elif response.status_code not in HTTP_ERROR:
            try:
                # Try to parse json error response
                error_entry = response.json()
                demisto.error(f"{LOGGING_INTEGRATION_NAME} {error_entry}")
                errors = error_entry.get('data', {}).get('errors', [])
                if errors:
                    err_msg = get_error_message(errors)
                elif response.status_code == 400:
                    err_msg = MESSAGES['BAD_REQUEST']
                else:
                    err_msg = error_entry.get('message', '')
            except ValueError:
                err_msg = '{}'.format(response.text)

        raise DemistoException(err_msg)
Example #9
    def save_file(self, filename: str, data: requests.models.Response):
        """Saves a file to the current directory."""
        def filter_filename(filename: str):
            """Filters a filename non alphabetic and non delimiters charaters."""
            valid_chars = "-_.() "
            return "".join(c for c in filename
                           if c.isalnum() or c in valid_chars)

        filename = filter_filename(filename)
        try:
            with open(filename, "wb") as f:
                for chunk in data.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
            self.logger.info(f'Saved file as "{filename}".')
        except OSError as exc:
            if (platform.system() == "Linux" and exc.errno
                    == 36) or (platform.system() == "Darwin"
                               and exc.errno == 63):  # filename too long
                (_, extension) = os.path.splitext(filename)  # this can fail
                # 'extension' already contains the leading '.', hence
                # there is no need for a '.' in between "{}{}"
                random_filename = f"{random_string(15)}{extension}"
                self.save_file(random_filename, data)
            else:
                raise  # re-raise if .errno is different than 36 or 63
        except Exception:
            raise
Example #10
def __judge_code(res: requests.models.Response) -> requests.models.Response:
    try:
        content = res.content.decode("utf-8")
        if re.match(r".+?\.(?:css|js)(\?.+)?$",
                    res.url) is None and content.find("<html") == -1:
            _logger.i(content, is_show=False)
    except UnicodeDecodeError:
        # _logger.i(res.content, is_show=False)
        pass
    if res.status_code != 200:
        if res.url != "https://app.vocabgo.com/student/":
            raise NetworkError(res.status_code, res.url,
                               res.content.decode("utf-8"))
    else:
        try:
            json_data = res.json()
        except JSONDecodeError:
            pass
        else:
            if res.url.find("gateway.vocabgo.com") != -1:
                if json_data and json_data["code"] in (0, 10002, 21006):
                    _logger.e(
                        f"{json_data['code']}, {res.url}, {json_data['msg']}",
                        is_show=False)
                if json_data and json_data["code"] == 10017:
                    raise UpperLimitError(res.status_code, res.url,
                                          json_data["msg"])
    return res
Example #11
def read_traffic(traffic: requests.models.Response = None,
                 repo: str = 'repo_name') -> list:
    """
    Create a list of JSON objects with traffic info
    :param:
       traffic:requests.models.Response - Object with traffic info
       repo:str - repository name
    :sample:
    {
         "timestamp" : "2018-06-08T00:00:00Z"
         "key"       : "ff7a5466-7c0a-11e8-ab26-0800275d93ce"
         "asset"     : "github/repo_name/traffic"
         "readings"  : {"traffic" : 18}
    }
    """
    traffic = traffic.json()
    data = []
    for key in traffic['views']:
        timestamp = datetime.datetime.strptime(key['timestamp'],
                                               '%Y-%m-%dT%H:%M:%SZ')
        data.append({
            'timestamp': str(timestamp),
            'key': str(uuid.uuid4()),
            'asset': 'github/%s/traffic' % repo,
            'readings': {
                'count': key['uniques']
            }
        })
    return data
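A hedged usage sketch: the GitHub traffic endpoint GET /repos/{owner}/{repo}/traffic/views returns the 'views' payload this parser walks, but it needs a token with push access; owner, repository, and token below are placeholders:

import requests

resp = requests.get(
    'https://api.github.com/repos/owner/repo_name/traffic/views',  # placeholder owner/repo
    headers={'Authorization': 'token <personal-access-token>'},    # placeholder token
)
rows = read_traffic(resp, repo='repo_name')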
Example #12
    def _deserialize_response(self,
                              response: requests.models.Response,
                              response_cls: Any,
                              request: str = None) -> Tuple[Any, str]:
        """Try to create the appropriate pydantic model from the response.

        Parameters:
            response: The response to try to deserialize.
            response_cls: The class to use for deserialization.
            request: Optional string that will be displayed as the attempted
                request in the validator output.

        Returns:
            The deserialized object (or `None` if unsuccessful) and a
                human-readable summary

        """
        if not response:
            raise ResponseError("Request failed")
        try:
            json_response = response.json()
        except json.JSONDecodeError:
            raise ResponseError(
                f"Unable to decode response as JSON. Response: {response}")

        self._log.debug(
            f"Deserializing {json.dumps(json_response, indent=2)} as model {response_cls}"
        )

        return (
            response_cls(**json_response),
            "deserialized correctly as object of type {}".format(response_cls),
        )
Example #13
    def extract_and_assign_field_names(self,
                                       response: requests.models.Response):
        """
        Extract values from response json and assign to attributes
        :param response: response from request
        :return:
        """
        try:

            # Need the response json
            fields_list = response.json().get("fields", None)
        except Exception as e:
            print(
                f"Unanticipated Exception while extracting field names from response. {e}. {self.url_agol_item_id}"
            )
        else:

            # Protect against None and return
            if fields_list is None:
                return

            # for field in the fields list get the name or substitute a string indicating an error/issue.
            accumulated_field_names_list = []
            for field in fields_list:
                accumulated_field_names_list.append(
                    field.get("name", "ERROR_DoIT"))

            # make a comma separated string of list items and assign
            self.column_names_string = ", ".join(accumulated_field_names_list)
Example #14
def read_clones(clones: requests.models.Response = None,
                repo: str = 'repo_name') -> list:
    """
    Create a list of JSON objects with daily clones, and write to file
    :param:
       clones:requests.models.Response - Object with clones info
       repo:str - repository name
    :sample:
    {
         "timestamp" : "2018-06-08"
         "key"       : "ff7a5466-7c0a-11e8-ab26-0800275d93ce"
         "asset"     : "github/repo_name/clones"
         "readings" : {"clones" : 5}
    }
    """
    clones = clones.json()
    data = []
    for key in clones['clones']:
        data.append({
            'timestamp': str(key['timestamp']),
            'key': str(uuid.uuid4()),
            'asset': 'github/%s/clones' % repo,
            'readings': {
                'count': key['uniques']
            }
        })
    return data
Example #15
def download(filename: str, request: requests.models.Response):
    """
    Takes a filename and a GET or POST request, then downloads the file
    while outputting a download status bar.
    Preconditions: filename must be valid
    """
    assert type(filename) == str, 'Error: filename must be a string'
    assert type(request) == requests.models.Response, 'Error: request must be a class<requests.models.Response>'

    with open(filename, 'wb') as f:
        start = time.perf_counter()
        total_length = request.headers.get('content-length')
        if total_length is None:  # no content-length header, so write the whole body at once
            f.write(request.content)
        else:
            total_length = int(total_length)
            dl = 0
            for chunk in request.iter_content(chunk_size=1024*1024):
                dl += len(chunk)
                if chunk:
                    f.write(chunk)
                    f.flush()
                    done = int(50 * dl / int(total_length))
                    stars = '=' * done
                    spaces = ' ' * (50-done)
                    bps = dl//(time.perf_counter() - start)
                    percent = int((100*dl)/total_length)
                    print(f"\r[{stars}{spaces}] {bps} bps, {percent}%    ", end='\r', flush=True)
Example #16
def parse_tsp_csv(response: requests.models.Response) -> OrderedDict:
    """ Parses a Thrift Savings Plan output CSV file.

    Function takes in a requests response and returns an
    OrderedDict with newest closing cost at front of OrderedDict.
    """

    data = OrderedDict()

    text = response.iter_lines(decode_unicode=True)

    reader = csv.DictReader(text, dialect='tsp')

    for row in reader:
        # Date from TSP looks like "July 30. 2020"
        # There is indeed a period after the day of month.
        date = datetime.datetime.strptime(row['Date'], "%b %d. %Y")
        date = date.replace(hour=16, tzinfo=TIMEZONE)
        names = [
            'L Income', 'L 2025', 'L 2030', 'L 2035', 'L 2040', 'L 2045',
            'L 2050', 'L 2055', 'L 2060', 'L 2065', 'G Fund', 'F Fund',
            'C Fund', 'S Fund', 'I Fund'
        ]
        data[date] = [
            Decimal(row[name]) if row[name] else Decimal()
            for name in map(str.strip, names)
        ]

    return OrderedDict(sorted(data.items(), key=lambda t: t[0], reverse=True))
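The parser above relies on a csv dialect registered under the name 'tsp' and a TIMEZONE constant, both defined elsewhere in that project; minimal stand-ins for running it in isolation might look like this (the dialect options and timezone are assumptions, not the originals):

import csv
import datetime

# Assumption: the TSP CSV is comma-separated with padded fields; the real 'tsp' dialect may differ.
csv.register_dialect('tsp', delimiter=',', skipinitialspace=True)
# Assumption: closing prices are stamped in US Eastern time; the original constant is not shown.
TIMEZONE = datetime.timezone(datetime.timedelta(hours=-5))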
Example #17
def save_file(filename: str, data: requests.models.Response):
    """Saves a file to the current directory."""
    filename = filter_filename(filename)
    file_size = int(data.headers.get('content-length', 0))
    try:
        with open(filename, 'wb') as f:
            with tqdm(total=file_size, unit='B',
                  unit_scale=True, unit_divisor=1024,
                  desc=filename, initial=0,
                  ascii=True, miniters=1) as pbar:
                pbar.format_meter(n=file_size, total=file_size,
                                  elapsed=pbar.format_dict['elapsed'],
                                  initial=0, ascii=True, unit_divisor=1024,
                                  unit_scale=True, unit='B', miniters=1)

                for chunk in data.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))
                        
        print(f"Saved file as '{filename}'")
    except OSError as exc:
        if exc.errno == 36:  # filename too long
            (_, extension) = os.path.splitext(filename)  # this can fail
            # 'extension' already contains the leading '.', hence
            # there is no need for a '.' in between "{}{}"
            random_filename = f"{random_string(15)}{extension}"
            save_file(random_filename, data)
        else:
            raise  # re-raise if .errno is different than 36
    except Exception:
        raise
Example #18
    def exception_handler(response: requests.models.Response):
        """
        Handle error in the response and display error message based on status code.

        :type response: ``requests.models.Response``
        :param response: response from API.

        :raises: raise DemistoException based on status code of response.
        """

        err_msg = ""
        if response.status_code in HTTP_ERROR:
            if response.status_code in [401, 403]:
                demisto.error(f"{LOGGING_INTEGRATION_NAME} {response.json()}")
            err_msg = HTTP_ERROR[response.status_code]
        elif response.status_code > 500:
            err_msg = HTTP_ERROR[500]
        elif response.status_code not in HTTP_ERROR:
            err_msg = 'Error in API call [{}] - {}' \
                .format(response.status_code, response.reason)
            headers = response.headers
            if 'application/json' in headers.get('Content-Type', ''):
                error_entry = response.json()
                if error_entry.get('message'):
                    err_msg = '{}'.format(error_entry.get('message'))

        raise DemistoException(err_msg)
Example #19
def _check_error(r: requests.models.Response,
                 details: str = "") -> Union[str, dict, list]:
    error, code, _ = _raise_for_status(r)
    if not error:
        try:
            data = r.json()
            return data
        except:  # NOQA
            return r.text
    if code == 412:
        raise NessiePreconidtionFailedException(
            "Unable to complete transaction, please retry " + details, error,
            r)
    if code == 401:
        raise NessieUnauthorizedException(
            "Unauthorized on api endpoint " + details, error, r)
    if code == 403:
        raise NessiePermissionException(
            "Not permissioned to view entity at " + details, error, r)
    if code == 404:
        raise NessieNotFoundException("No entity exists at " + details, error,
                                      r)
    if code == 409:
        raise NessieConflictException("Entity already exists at " + details,
                                      error, r)
    if code == 500:
        raise NessieServerException("Server error at " + details, error, r)
    raise NessieException("unknown error", error, r)
Example #20
    def __init__(self, response: requests.models.Response,
                 attempt: int) -> None:
        """Create the ApifyApiError instance.

        Args:
            response: The response to the failed API call
            attempt: Which attempt was the request that failed
        """
        self.message: Optional[str] = None
        self.type: Optional[str] = None

        self.message = f'Unexpected error: {response.text}'
        try:
            response_data = response.json()
            if 'error' in response_data:
                self.message = response_data['error']['message']
                self.type = response_data['error']['type']
        except ValueError:
            pass

        super().__init__(self.message)

        self.name = 'ApifyApiError'
        self.status_code = response.status_code
        self.attempt = attempt
        self.http_method = response.request.method
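A small construction sketch, assuming ApifyApiError subclasses Exception as the super().__init__ call suggests; the request below is a placeholder for whatever failed API call produced the response:

import requests

resp = requests.get('https://api.apify.com/v2/nonexistent-endpoint')  # placeholder failing call
if not resp.ok:
    err = ApifyApiError(resp, attempt=1)
    print(err.status_code, err.type, err.message)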
Example #21
def save_page_to_file(response: requests.models.Response, filename: str):
    """Save content from response to given file."""
    with open(filename, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
        print("Image saved to file:", filename, "\n")
Example #22
 def _download_video(self, fp: str, resp: requests.models.Response) -> None:
     """Write the media to file at given fp from the response"""
     with open(fp, "wb") as outfile:
         for chunk in resp.iter_content(chunk_size=1024):
             if chunk:
                 outfile.write(chunk)
                 outfile.flush()
Example #23
def read_referrers(
    referrers: requests.models.Response = None,
    repo: str = 'repo_name',
    timestamp: str = datetime.datetime.now()) -> list:
    """
    Create a list of JSON objects with daily referrers
    :param:
       referrers:requests.models.Response - Object with referral info
       repo:str - repository name
       timestamp:str - date when data is generated
    :sample:
    {
    }
    """

    referrers = referrers.json()
    data = []
    # {'count': 79, 'uniques': 19, 'referrer': 'Google'}
    for obj in referrers:
        data.append({
            'timestamp': str(timestamp),
            'key': str(uuid.uuid4()),
            'asset': 'github/%s/referrerals/%s' % (repo, obj['referrer']),
            'readings': {
                'count': obj['uniques']
            }
        })
    return data
Example #24
def process_by_result_map(response: requests.models.Response,
                          result_map: Dict[StatusCodeRange, StateInfo]):
    def get_details_from_json(json_response, what):
        for key, value in json_response.items():
            if key == what:
                return value
            if isinstance(value, dict):
                result = get_details_from_json(value, what)
                if result:
                    return result

    status_code = response.status_code
    summary = f"{status_code}: {http_responses[status_code]}"
    details = ""

    for status_code_range, state_info in result_map.items():
        if status_code in status_code_range:
            if state_info.type == 'json':
                details = response.json()
                details = get_details_from_json(details, state_info.title)
            elif state_info.type == 'str':
                details = response.text

            sys.stderr.write(f"{state_info.title}: {details}\n{summary}\n")
            sys.exit(state_info.state)

    sys.stderr.write(f"Details for Status Code are not defined\n{summary}\n")
    sys.exit(3)
Example #25
def clean_a_bit(data: r.models.Response) -> pd.DataFrame:
    data = data.json()
    namedData = dict()
    cleanedData = dict()
    for i in data:
        name = i['extensions']['operationName']
        namedData[name] = i['data']
    now = datetime.now(tz=pytz.timezone('Europe/Paris'))

    if 'PlayerTrackingContextQuery' in namedData.keys():
        cleanedData['streamer_id'] = namedData['PlayerTrackingContextQuery'][
            'user']['id']
        cleanedData['streamer_login'] = namedData[
            'PlayerTrackingContextQuery']['user']['login']
        cleanedData['game_id'] = namedData['PlayerTrackingContextQuery'][
            'user']['broadcastSettings']['game']['id']
        cleanedData['name'] = namedData['PlayerTrackingContextQuery']['user'][
            'broadcastSettings']['game']['name']
        cleanedData['type'] = namedData['PlayerTrackingContextQuery']['user'][
            'broadcastSettings']['game']['__typename']
    if 'VideoPreviewOverlay' in namedData.keys():
        cleanedData['PreviewImage'] = namedData['VideoPreviewOverlay']['user'][
            'stream']['previewImageURL']
    if 'StreamMetadata' in namedData.keys():
        cleanedData['lastBroadcast_title'] = namedData['StreamMetadata'][
            'user']['lastBroadcast']['title']
        cleanedData['current_created_at'] = namedData['StreamMetadata'][
            'user']['stream']['createdAt']
    if 'UseViewCount' in namedData.keys():
        cleanedData['current_viewers'] = namedData['UseViewCount']['user'][
            'stream']['viewersCount']
    series = pd.Series(cleanedData)
    series['time'] = now
    return series.to_frame().T.set_index('time')
Example #26
 def save_response_content(response: requests.models.Response,
                           destination: str,
                           chunk_size: int = 32768) -> None:
     with open(destination, "wb") as f:
         for chunk in response.iter_content(chunk_size):
             if chunk:  # filter out keep-alive new chunks
                 f.write(chunk)
Example #27
    def decode_response(self,
                        response: requests.models.Response) -> ADOResponse:
        """Decode the response from ADO, checking for errors.

        :param response: The response to check and parse

        :returns: The JSON data from the ADO response

        :raises ADOHTTPException: Raised if the request returned a non-200 status code
        :raises ADOException: Raise if the response was not JSON
        """

        self.log.debug("Fetching response from ADO")

        if response.status_code < 200 or response.status_code >= 300:
            raise ADOHTTPException(
                f"ADO returned a non-200 status code, configuration={self}",
                response,
            )

        try:
            content: ADOResponse = response.json()
        except ValueError as ex:  # response body is not JSON
            raise ADOException("The response did not contain JSON") from ex

        return content
Example #28
def error_log(response: requests.models.Response, api_name: str) -> bool:
    error_log_path = parent_path + '/log/error_log.log'
    if not response.json()['ok']:
        with open(error_log_path, 'a') as error_log_file:
            error_log_message = f'[error][{api_name}][{datetime.datetime.now()}] call \'{api_name}\' api error. \n[error message]{response.json()["error"]}\n'
            error_log_file.write(error_log_message)
            return True
    return False
Example #29
def write_json_file(filename: str, r: requests.models.Response,
                    logger: logging.Logger) -> None:
    try:
        with open(filename, "w", encoding="utf-8") as output_file:
            json.dump(r.json(), output_file)
    except Exception as error:
        logger.critical(f"Writing output file failed: {error}. Nothing to do.")
        sys.exit(1)
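A usage sketch with a placeholder endpoint and a standard-library logger:

import logging
import requests

logger = logging.getLogger(__name__)
resp = requests.get('https://httpbin.org/json')  # placeholder endpoint that returns JSON
write_json_file('output.json', resp, logger)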
Example #30
 def _check_api_response(r: requests.models.Response) -> None:
     if not r.status_code == requests.codes.ok:
         try:
             msg = r.json()
             msg = f"{msg['name']}: {msg['description']}"
         except ValueError:
             msg = r.text
         raise SzurubooruHTTPError(msg)
Example #31
def save_file(filename: str, data: requests.models.Response):
    """Saves a file to the current directory."""
    filename = filter_filename(filename)
    try:
        with open(filename, 'wb') as f:
            for chunk in data.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
        print(f"Saved file as '{filename}'")
    except OSError as exc:
        if exc.errno == 36:  # filename too long
            (_, extension) = os.path.splitext(filename)  # this can fail
            # 'extension' already contains the leading '.', hence
            # there is no need for a '.' in between "{}{}"
            random_filename = f"{random_string(15)}{extension}"
            save_file(random_filename, data)
        else:
            raise  # re-raise if .errno is different than 36
    except Exception:
        raise
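The function above leans on filter_filename and random_string helpers defined elsewhere in that project; minimal stand-ins for running it on its own could look like the following (assumptions loosely mirroring the filter in Example #9, not the original implementations):

import random
import string


def filter_filename(filename: str) -> str:
    # assumption: keep alphanumerics plus a few safe delimiter characters
    valid_chars = "-_.() "
    return "".join(c for c in filename if c.isalnum() or c in valid_chars)


def random_string(length: int) -> str:
    # assumption: simple random lowercase ASCII string
    return "".join(random.choices(string.ascii_lowercase, k=length))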