Beispiel #1
0
def setup_hook_httpntlmauth(request):
    """Replace an ``httpntlmauth`` entry in *request* with a ready NTLM auth object.

    When the mapping has no ``httpntlmauth`` key this is a no-op.  Otherwise
    the credential dict (``username``/``password`` keys) is popped and an
    ``HttpNtlmAuth`` instance is stored under ``request["auth"]``.
    """
    if "httpntlmauth" not in request:
        return
    # Imported lazily so requests_ntlm is only required when NTLM is configured.
    from requests_ntlm import HttpNtlmAuth
    account = request.pop("httpntlmauth")
    request["auth"] = HttpNtlmAuth(account["username"], account["password"])
Beispiel #2
0
    def urlProcessor(self, url):
        """Probe *url*, classify the HTTP response and print a colour-coded result line.

        A dummy sibling URL is requested first to measure the size of a
        "Friendly 404" page, so that 200-responses that are really custom
        not-found pages can be told apart from genuine hits.  Found URLs are
        appended to the module-global ``foundURLs`` list and ``counter`` is
        incremented per processed URL.

        NOTE(review): this code is Python 2 only (``except X, e`` syntax
        below); it relies on module-level globals not visible here
        (``authed``, ``cookie``, ``username``, ``password``,
        ``downloadFiles``, colour constants) — confirm against the full file.
        The outer ``try:`` has no matching ``except`` in this excerpt, so the
        block appears truncated by whatever copied it.
        """
        global foundURLs
        global counter
        # Marker strings that identify SharePoint-style error pages.
        ERROR1 = 'An error occurred'
        ERROR2 = 'Correlation ID'
        try:
            #resp = None
            #Do a request with a bullshit url
            # Build a sibling URL that almost certainly does not exist.
            urlList = url.split('/')
            if len(urlList) > 3:
                urlList.pop()
            fakeUrl = '/'.join(urlList).strip(
                '\n') + '/baaaaaaaa_said_the_sheepman.dll'
            fakeResp = None

            try:
                #Manage Friendly 404s
                #Checks whether dummy URL and actual URL produce same size response
                #Also uses a size error bound used in determining distance between dummy URL and actual URL and error message recognition
                errorBound = 100
                fakeRespSize = 0
                respSize = 0

                if authed:
                    if cookie is not None:
                        fakeResp = requests.get(fakeUrl, cookies=cookie)
                    else:
                        fakeResp = requests.get(fakeUrl,
                                                auth=HttpNtlmAuth(
                                                    username, password))
                else:
                    fakeResp = requests.get(fakeUrl)

                fakeRespSize = len(fakeResp.text)

            # NOTE(review): Python 2-only syntax; Python 3 requires
            # ``except requests.HTTPError as e:`` (and ``e`` is unused).
            except requests.HTTPError, e:
                #If it's catching these then Friendly 404s are not being used and it's just f*****g out
                pass

            #Do request with legit url
            if authed:
                if cookie is not None:
                    self.resp = requests.get(url, cookies=cookie)
                else:
                    self.resp = requests.get(url,
                                             auth=HttpNtlmAuth(
                                                 username, password))
            else:
                self.resp = requests.get(url)

            respSize = len(self.resp.text)

            #Determine response type and check whether it's a Friendly 404:
            #a 200 whose size is (near) the dummy page's, or that contains an
            #error marker string, is treated as a custom not-found page.
            if (self.resp.status_code == 200) and (fakeResp is not None) and (
                    fakeRespSize == respSize or
                (abs(respSize - fakeRespSize) < errorBound)
                    or ERROR1 in self.resp.text or ERROR2 in self.resp.text):
                #This is a Friendly 404s
                out = "[-] [%s][%s][%sb] - %s" % (counter, 'Friendly 404',
                                                  respSize, url.strip())
                self.printer(out, RED)
                counter = counter + 1
            else:
                #These are URLs that are found
                if self.resp.status_code == 200:
                    out = "[+] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, GREEN)
                    foundURLs.append(url)
                    if downloadFiles:
                        self.fileDownloader(url)
                if self.resp.status_code == 400:
                    out = "[-] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, RED)
                if self.resp.status_code == 404:
                    out = "[-] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, RED)
                if self.resp.status_code == 401 or self.resp.status_code == 403:
                    out = "[-] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, BLUE)
                if self.resp.status_code == 302:
                    out = "[-] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, YELLOW)
                if self.resp.status_code == 500:
                    out = "[-] [%s][%s][%sb] - %s" % (
                        counter, self.resp.status_code, respSize, url.strip())
                    self.printer(out, PURPLE)
                # Count the URL as processed regardless of which status matched.
                counter = counter + 1
def html_main(ad, dest_path):
    """Scrape Q&A pairs from a SharePoint site and export them as a TSV file.

    ``ad`` is the site domain (e.g. ``"http://host"``); ``dest_path`` is the
    output directory for the generated TSV file.
    """
    # Heading tags that delimit Q&A sections, plus paragraph text.
    heading_tags = ['h1', 'h2', 'h3', 'p']
    questions = []
    answers = []

    # Headings containing these keywords are excluded from extraction.
    exclude_words = ['further', 'useful link', 'intro']

    # Page links to scrape, filled in by dr.get_links below.
    page_links = []

    # SharePoint REST endpoint for the "Pages" library of the site.
    sharepoint_listname = "Pages"
    url = ad + "/d3process/cs/cas/_api/web/getfolderbyserverrelativeurl('/d3process/cs/cas/" + sharepoint_listname + "')"

    # NOTE(review): hard-coded NTLM credentials -- move to configuration.
    auth = HttpNtlmAuth("\\[email protected]", "HOWareyou123")
    headers = {
        "Accept": "application/json; odata=verbose",
        "Content-Type": "application/json; odata=verbose",
        "odata": "verbose",
        "X-RequestForceAuthentication": "true"
    }

    # Collect the page links from the SharePoint API response.
    response = dr.request_connect(url, auth, headers)
    dr.get_links(response, page_links, auth, ad)

    for page in page_links:
        # URL-encode spaces before fetching the page.
        page = page.strip().replace(' ', '%20')
        page_resp = requests.get(page, auth=auth)
        soup = bs(page_resp.text, "lxml")

        page_title = find_title(soup)

        # Narrow the document down to the main rich-text content area.
        content_div = soup.find(
            'div', {
                "id":
                "ctl00_PlaceHolderMain_PageContent__ControlWrapper_RichHtmlField"
            })

        # Re-apply inline style information before extraction.
        style_repro(content_div)

        # Extract Q&A pairs via the heading structure; the return value is the
        # accumulated whole-page answer text (starts out empty).
        page_answer = getQA(content_div, heading_tags, page_title, questions,
                            answers, exclude_words, "", page)

        # Keep a whole-page entry only when it is long enough to be useful.
        if len(page_answer) > 120:
            page_answer = add_pagelink(page_answer, page)
            add_to_list("", page_answer, questions, answers, page_title)

    # Write the collected Q&A pairs to a TSV file, then post-process it into a
    # timestamped companion file.
    qa_file = output_df(questions, answers, dest_path)
    post_path = dest_path + "\\" + strftime("%Y%m%d%H%M%S",
                                            time.localtime()) + "HTML.tsv"
    post_proce(qa_file, post_path)
Beispiel #4
0
    def _check(self, instance):
        """Run one HTTP availability check for *instance* and return service-check tuples.

        Performs the request (with optional NTLM/basic auth, weak ciphers,
        proxies, SSL-validation toggles), then checks response time, status
        code, content matching and certificate expiry, emitting gauges along
        the way.  Returns the accumulated list of
        ``(check_name, status, message)`` tuples.

        NOTE(review): this code targets Python 2 -- it uses ``basestring``,
        ``unicode`` and relies on ``filter()`` returning a list (on Python 3
        ``not filter(...)`` would always be False because a filter object is
        truthy).
        """
        # Unpack the (large) configuration tuple produced by _load_conf.
        addr, ntlm_domain, username, password, client_cert, client_key, method, data, http_response_status_code, \
            timeout, include_content, headers, response_time, content_match, reverse_content_match, tags, \
            disable_ssl_validation, ssl_expire, instance_ca_certs, weakcipher, check_hostname, ignore_ssl_warning, \
            skip_proxy, allow_redirects = self._load_conf(instance)

        start = time.time()

        def send_status_up(logMsg):
            # Record an UP service check.
            self.log.debug(logMsg)
            service_checks.append((
                self.SC_STATUS, Status.UP, "UP"
            ))

        def send_status_down(loginfo, message):
            # Record a DOWN service check, optionally embedding response
            # content.  NOTE(review): ``content`` is a closure over a variable
            # assigned only on the content-matching path below -- confirm this
            # helper is never called before that assignment.
            self.log.info(loginfo)
            if include_content:
                message += '\nContent: {}'.format(content[:CONTENT_LENGTH])
            service_checks.append((
                self.SC_STATUS,
                Status.DOWN,
                message
            ))

        service_checks = []
        try:
            parsed_uri = urlparse(addr)
            self.log.debug("Connecting to {}".format(addr))

            # Decide whether urllib3's InsecureRequestWarning should be
            # suppressed for this request.
            suppress_warning = False
            if disable_ssl_validation and parsed_uri.scheme == "https":
                explicit_validation = 'disable_ssl_validation' in instance
                if ignore_ssl_warning:
                    if explicit_validation:
                        suppress_warning = True
                else:
                    # Log if we're skipping SSL validation for HTTPS URLs
                    if explicit_validation:
                        self.log.debug("Skipping SSL certificate validation for {} based on configuration".format(addr))

                    # Emit a warning if disable_ssl_validation is not explicitly set and we're not ignoring warnings
                    else:
                        self.warning("Parameter disable_ssl_validation for {} is not explicitly set, "
                                     "defaults to true".format(addr))

            instance_proxy = self.get_instance_proxy(instance, addr)
            self.log.debug("Proxies used for {} - {}".format(addr, instance_proxy))

            # Basic auth takes precedence; NTLM is used only when a domain is
            # configured without a plain username.
            auth = None
            if password is not None:
                if username is not None:
                    auth = (username, password)
                elif ntlm_domain is not None:
                    auth = HttpNtlmAuth(ntlm_domain, password)

            sess = requests.Session()
            sess.trust_env = False
            if weakcipher:
                base_addr = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
                sess.mount(base_addr, WeakCiphersAdapter())
                self.log.debug("Weak Ciphers will be used for {}. Supported Cipherlist: {}".format(
                               base_addr, WeakCiphersHTTPSConnection.SUPPORTED_CIPHERS))

            with warnings.catch_warnings():
                # Suppress warnings from urllib3 only if disable_ssl_validation is explicitly set to True
                #  and ignore_ssl_warning is True
                if suppress_warning:
                    warnings.simplefilter('ignore', InsecureRequestWarning)

                # json= is used for dict payloads, data= for string payloads;
                # both only for methods listed in DATA_METHODS.
                r = sess.request(method.upper(), addr, auth=auth, timeout=timeout, headers=headers,
                                 proxies=instance_proxy, allow_redirects=allow_redirects,
                                 verify=False if disable_ssl_validation else instance_ca_certs,
                                 json=data if method.upper() in DATA_METHODS and isinstance(data, dict) else None,
                                 data=data if method.upper() in DATA_METHODS and isinstance(data, basestring) else None,
                                 cert=(client_cert, client_key) if client_cert and client_key else None)

        except (socket.timeout, requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            # Connection-level failures are reported as DOWN with the elapsed time.
            length = int((time.time() - start) * 1000)
            self.log.info("{} is DOWN, error: {}. Connection failed after {} ms".format(addr, str(e), length))
            service_checks.append((
                self.SC_STATUS,
                Status.DOWN,
                "{}. Connection failed after {} ms".format(str(e), length)
            ))

        except socket.error as e:
            length = int((time.time() - start) * 1000)
            self.log.info("{} is DOWN, error: {}. Connection failed after {} ms".format(addr, repr(e), length))
            service_checks.append((
                self.SC_STATUS,
                Status.DOWN,
                "Socket error: {}. Connection failed after {} ms".format(repr(e), length)
            ))

        except Exception as e:
            # Anything unexpected is logged and re-raised to the caller.
            length = int((time.time() - start) * 1000)
            self.log.error("Unhandled exception {}. Connection failed after {} ms".format(str(e), length))
            raise

        # Store tags in a temporary list so that we don't modify the global tags data structure
        tags_list = list(tags)
        # Only add the URL tag if it's not already present
        # (Python 2 filter() returns a list, so the truthiness test works here.)
        if not filter(re.compile('^url:').match, tags_list):
            tags_list.append('url:{}'.format(addr))

        # Only report this metric if the site is not down
        if response_time and not service_checks:
            # Stop the timer as early as possible
            running_time = time.time() - start
            self.gauge('network.http.response_time', running_time, tags=tags_list)

        # Check HTTP response status code
        # (service_checks non-empty short-circuits this, so `r` is only read
        # when the request above actually succeeded.)
        if not (service_checks or re.match(http_response_status_code, str(r.status_code))):
            if http_response_status_code == DEFAULT_EXPECTED_CODE:
                expected_code = "1xx or 2xx or 3xx"
            else:
                expected_code = http_response_status_code

            message = "Incorrect HTTP return code for url {}. Expected {}, got {}.".format(
                      addr, expected_code, str(r.status_code))

            if include_content:
                message += '\nContent: {}'.format(r.content[:CONTENT_LENGTH])

            self.log.info(message)

            service_checks.append((self.SC_STATUS, Status.DOWN, message))

        if not service_checks:
            # Host is UP
            # Check content matching is set
            if content_match:
                # r.text is the response content decoded by `requests`, of type `unicode`
                content = r.text if type(content_match) is unicode else r.content
                if re.search(content_match, content, re.UNICODE):
                    if reverse_content_match:
                        send_status_down('{} is found in return content with the reverse_content_match option'
                                         .format(content_match),
                                         'Content "{}" found in response with the reverse_content_match'
                                         .format(content_match))
                    else:
                        send_status_up("{} is found in return content".format(content_match))

                else:
                    if reverse_content_match:
                        send_status_up("{} is not found in return content with the reverse_content_match option"
                                       .format(content_match))
                    else:
                        send_status_down("{} is not found in return content".format(content_match),
                                         'Content "{}" not found in response.'.format(content_match))

            else:
                send_status_up("{} is UP".format(addr))

        # Report status metrics as well
        # NOTE(review): compares the status value against the string "UP" --
        # assumes Status.UP == "UP"; confirm against the Status definition.
        if service_checks:
            can_status = 1 if service_checks[0][1] == "UP" else 0
            self.gauge('network.http.can_connect', can_status, tags=tags_list)

            # cant_connect is useful for top lists
            cant_status = 0 if service_checks[0][1] == "UP" else 1
            self.gauge('network.http.cant_connect', cant_status, tags=tags_list)

        if ssl_expire and parsed_uri.scheme == "https":
            # Certificate-expiry sub-check gets its own (re-derived) tag list.
            status, days_left, seconds_left, msg = self.check_cert_expiration(instance, timeout, instance_ca_certs,
                                                                              check_hostname, client_cert, client_key)
            tags_list = list(tags)
            tags_list.append('url:{}'.format(addr))
            self.gauge('http.ssl.days_left', days_left, tags=tags_list)
            self.gauge('http.ssl.seconds_left', seconds_left, tags=tags_list)

            service_checks.append((self.SC_SSL_CERT, status, msg))

        return service_checks
Beispiel #5
0
    def makeNtlmAuth(self, username, password):
        """Build an NTLM auth handler for DOMAIN\\username and store it on ``self.auth``."""
        logger.debug('[+] Make HttpNtlmAuth %s' % username)
        self.auth = HttpNtlmAuth(
            '%s\\%s' % (self.domain, username), password, self.session)
Beispiel #6
0
    db = sqlite.connect("test.db")
    cur = db.cursor()

    # urllib3.disable_warnings()
    # http = urllib3.PoolManager()
    # urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    # urllib3.disable_warnings(urllib3.exceptions.HTTPError)
    # ua   = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
    # cert = False
    # http.verify = cert
    # http.headers.update({"User-Agent": ua})

    user = "******"
    pw = "But#51m5"
    http = requests.Session()
    http.auth = HttpNtlmAuth(user, pw)

    done = False
    while not done:
        bov = PSBuildsOfVersion(version.regime()).getAllBuilds()
        #print("{}: {}".format(version.full(), bov))

        for b in bov:
            ver_id = insertVersion(db, (b.major() + b.minor(), b.buildn()))

            cs = b.getChangesets()
            for c in cs:
                # set name
                date = c.date.strftime("%d/%m/%y")
                time = c.date.strftime("%H:%M:%S")
                name = "{}_{}".format(c.date.strftime("%y_%m_%d_%H_%M_%S"),
Beispiel #7
0
import pyodbc as odbc
import requests as R
from requests_ntlm import HttpNtlmAuth
import time

# Total item count for progress reporting; set before progress() is used.
T = 0
# Running index counter.
I = 0

# Endpoint for the duplicates-from-trash operation on the MSDO service.
url = 'http://msdo.maxus.ru/api/p_doubles_from_trash.html'

# Shared session: NTLM-authenticated, always sending JSON.
# NOTE(review): credentials are hard-coded here -- move to configuration.
s = R.Session()
s.auth = HttpNtlmAuth('PRIM\\Kalyuzhnyy.Aleksey', '129шывфАа')
s.headers.update({'Content-Type': 'application/json; charset=utf-8'})


def progress(count):
    """Print how far through the module-global total ``T`` we are, e.g. ``42.5%``."""
    done_ratio = 100.0 * count / float(T)
    percents = float("{0:.1f}".format(done_ratio))
    print(str(percents) + '%')


def sendRequest(data=None):
    """POST to the module-level ``url`` with *data* as the session's query params.

    NOTE(review): assigning ``s.params`` mutates the shared session, so the
    parameters persist for subsequent requests -- confirm this is intended.
    Errors are printed rather than raised.
    """
    s.params = data
    try:
        response = s.post(url)
        print(response.content.decode())
    except Exception as exc:
        print(str(exc))


def extractChilds(id):
    Q = '''
    ca_cn = '''+CA COMMON NAME HERE+'''
    chal_url = f'http://{ca_ip}/certsrv/mscep_admin/mscep.dll'
    cert_url = f'http://{ca_ip}/certsrv/mscep/mscep.dll'
    cert_country = '''+2-DIG COUNTRY FOR CERT HERE+'''
    cert_state = '''+STATE FOR CERT HERE+'''
    cert_city = '''+CITY FOR CERT HERE+'''
    cert_org = '''+ORGANIZATION FOR CERT HERE+'''
    cert_email = '''+EMAIL FOR CERT HERE+'''
if True:  # helper: does the string contain at least one digit?

    def hasNumbers(x):
        """Return True if *x* contains any decimal digit character."""
        for ch in x:
            if ch.isdigit():
                return True
        return False


if True:  # get challenge password
    req = requests.get(chal_url, auth=HttpNtlmAuth(ca_username, ca_password))
    html = req.content
    data = BeautifulSoup(html, 'html.parser')
    readable_data = data.prettify()
    readable_lines = readable_data.splitlines()
    readable_lines = [i.strip(' ') for i in readable_lines]
    values = []

    for i in readable_lines:
        if hasNumbers(i) is True:
            values.append(i)

    for i in values:
        if '=' in i:
            values.pop(values.index(i))
        elif 'password' in i:
    def create_ntlm_session(self,
                            alias,
                            url,
                            auth,
                            headers=None,
                            cookies=None,
                            timeout=None,
                            proxies=None,
                            verify=False,
                            debug=0,
                            max_retries=3,
                            backoff_factor=0.10,
                            disable_warnings=0,
                            retry_status_list=None,
                            retry_method_list=DEFAULT_RETRY_METHOD_LIST):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers (defaults to an empty dict)

        ``cookies`` Dictionary of cookies (defaults to an empty dict)

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` Number of maximum retries each connection should attempt.
                        By default it will retry 3 times in case of connection errors only.
                        A 0 value will disable any kind of retries regardless of other retry settings.
                        In case the number of retries is reached a retry exception is raised.

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases

        ``backoff_factor`` Introduces a delay time between retries that is longer after each retry.
                           eg. if backoff_factor is set to 0.1
                           the sleep between attemps will be: 0.0, 0.2, 0.4
                           More info here: https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html

        ``retry_method_list`` List of uppercased HTTP method verbs where retries are allowed.
                              By default retries are allowed only on HTTP requests methods that are considered to be
                              idempotent (multiple requests with the same parameters end with the same state).
                              eg. set to ['POST', 'GET'] to retry only those kind of requests.

        ``retry_status_list`` List of integer HTTP status codes that, if returned, a retry is attempted.
                              eg. set to [502, 503] to retry requests if those status are returned.
                              Note that max_retries must be greater than 0.
        """
        # Fix for the shared-mutable-default pitfall: `{}`/`[]` defaults are
        # created once at definition time and shared across calls, so use None
        # sentinels and build fresh containers per call instead.
        if headers is None:
            headers = {}
        if cookies is None:
            cookies = {}
        if retry_status_list is None:
            retry_status_list = []

        if not HttpNtlmAuth:
            raise AssertionError('Requests NTLM module not loaded')
        elif len(auth) != 3:
            raise AssertionError('Incorrect number of authentication arguments'
                                 ' - expected 3, got {}'.format(len(auth)))
        else:
            # NTLM expects the user as "DOMAIN\\username".
            ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                     auth[2])
            logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                        proxies=%s, verify=%s, debug=%s ' %
                        (alias, url, headers, cookies, ntlm_auth, timeout,
                         proxies, verify, debug))

            return self._create_session(alias=alias,
                                        url=url,
                                        headers=headers,
                                        cookies=cookies,
                                        auth=ntlm_auth,
                                        timeout=timeout,
                                        max_retries=max_retries,
                                        backoff_factor=backoff_factor,
                                        proxies=proxies,
                                        verify=verify,
                                        debug=debug,
                                        disable_warnings=disable_warnings,
                                        retry_status_list=retry_status_list,
                                        retry_method_list=retry_method_list)
Beispiel #10
0
 def create_windows_authentication_param(self):
     """Return an ``HttpNtlmAuth`` built from stored credentials, or None when no username is set."""
     if not self._username:
         return None
     # Username (and password) present -> use NTLM authentication.
     return HttpNtlmAuth(self._username, self._password)
Beispiel #11
0
def index():
    """Build a SAS format catalog from the Candid controlled-terms API.

    On a GET request: fetches controlled terms from the service, converts
    them into ``Fmt`` format definitions according to the category mapping
    below, and writes them out as a SAS format file on the network share.

    Returns ``"1"`` on success, ``"0"`` on any failure (all exceptions are
    swallowed), and ``None`` for non-GET methods.
    """
    if request.method == "GET":

        try:
            # Category code -> (source field, target field, description).
            fmt_dict = {
                "C2C": ('codedValue', 'codedValue', "Default for zero-fill"),
                "T2N": ('crfDecode', 'crfCodedValue', "CRF text to CRF code"),
                "N2P": ('crfCodedValue', 'codedValue', "CRF code to CDISC preferred term"),
                "P2N": ('codedValue', 'crfCodedValue', "CDISC preferred term to CRF code"),
                "T2P": ('crfDecode', 'codedValue', "CRF text to CDISC preferred term"),
                "P2T": ('codedValue', 'crfDecode', "CDISC preferred term to CRF text"),
                "P2S": ('codedValue', 'stdCrfCodedValue', "CDISC preferred term to TLF code"),
                "P2C": ('codedValue', 'stdCrfDecode', "CDISC preferred term to TLF text"),
                "S2C": ('stdCrfCodedValue', 'stdCrfDecode', "TLF code to TLF text"),
                "C2S": ('stdCrfDecode', 'stdCrfCodedValue', "TLF text to TLF code")}

            # NOTE(review): hard-coded internal URL and credentials -- move to
            # configuration/secrets storage.  (The doubled backslash below
            # replaces a previously invalid '\s' escape; the runtime string is
            # unchanged.)
            r = requests.get(
                "http://sgcandidapp1/Candid/api/v1/1/analysis/1000/controlledterms",
                auth=HttpNtlmAuth("seagen.com\\sas_test", "Welcome1"))
            data = json.loads(r.text)

            # UNC path of the generated SAS format file.
            sasfile = os.path.normpath(
                "\\\\sgsasfsv1\\biometrics\\projects_dev\\junk\\formats.sas")

            format_catalog = []
            for i in data:
                # Create a new format definition from the API record.
                fmt = Fmt(i['sasFormatName'], i['description'], i['isExtensible'])

                ct = i.get('controlledTermItems')

                if len(ct) > 0:
                    # Fields: crfDecode, crfCodedValue, codedValue,
                    # stdCrfCodedValue, stdCrfDecode.
                    for k, v in fmt_dict.items():
                        for j in ct:
                            # Keep only pairs where both source and target are non-empty.
                            if j[v[0]] and j[v[1]]:
                                fmt.category = k
                                fmt.category_desc = v[2]
                                fmt.add_members(j[v[0]], j[v[1]])

                    if len(fmt.dictvals) > 0:
                        format_catalog.append(fmt)

                        is_num = all(isinstance(item[0], int) for item in fmt.dictvals)
                        is_inf = all(isinstance(item[1], int) for item in fmt.dictvals)

                        # Identify invalue/value of char or num type.
                        fmt.fmt_value = Format_val.V if is_inf else Format_val.I
                        fmt.fmt_type = Format_type.N if is_num else Format_type.C

            Fmt.write_catalog(format_catalog, sasfile)

            return ("1")

        except Exception:
            # Any failure is reported to the caller as "0".
            # NOTE(review): consider logging the exception instead of
            # swallowing it silently.
            return ("0")
# NTLM credentials (redacted).  The username may also be given as '.\user'.
user = '******'  # or '.\user'
password = '******'

# Route all traffic through a local intercepting proxy on port 8080.
http_proxy = 'http://127.0.0.1:8080'
proxy_dict = {'http': http_proxy, 'https': http_proxy}

# Retry policy for connections through the proxy.
retries = Retry(
    total=10,
    read=5,
    connect=6,
    #these options don't seem to help
    #backoff_factor=1,
    #method_whitelist=(['HEAD', 'TRACE', 'GET', 'POST', 'CONNECT', 'OPTIONS', 'DELETE']),
    #status_forcelist=[500, 502, 503, 504, 403, 407]
)

# Session with TLS verification disabled -- presumably because the
# intercepting proxy re-signs certificates (TODO confirm) -- and the
# retry-aware adapter mounted for both schemes.
session = requests.Session()
session.verify = False
session.mount('http://', HTTPAdapter(max_retries=retries))
session.mount('https://', HTTPAdapter(max_retries=retries))

session.proxies = proxy_dict
session.auth = HttpNtlmAuth(user, password)

# This works: plain HTTP GET through the proxy.
r = session.get('http://neverssl.com/')
print(r.text)

# HTTPS (which needs a CONNECT tunnel through the proxy) fails in this setup.
r2 = session.get('https://www.google.com')
print(r2.text)
def get_json_response(test_case_id, test_case_rev):
    """Fetch the JSON document for a test case revision over NTLM-authenticated HTTP.

    ``test_case_id``/``test_case_rev`` identify the test case; the URL is
    built by the module-level ``get_json_URL`` helper.  Returns the response
    body decoded as UTF-8 text.
    """
    json_URL = get_json_URL(test_case_id, test_case_rev)
    # json_URL is already a string -- the previous "'%s' % json_URL" detour
    # was a no-op, so pass it directly.
    r = requests.get(json_URL,
                     auth=HttpNtlmAuth('halamerica\\' + LOGIN, PASSWORD))
    json_response = r.content.decode('utf-8')
    return json_response