Example #1
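A crawler constructor that merges the search keyword and area into a base POST payload, then loads per-user cookies from disk, calling get_cookies() first if no cookie file exists yet.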
def __init__(self, hunter):
    # post_data is assumed to be a module-level dict of base POST parameters.
    post_data.update({
        'fulltext': hunter['keyword'],
        'exparea': hunter['area']
    })
    self.post_data = post_data
    self.case_id = hunter['case_id']
    self.username = hunter['username']
    filename = generate_filename_by_username(self.username)
    # Fetch fresh cookies first if no cookie file exists for this user yet.
    if not os.path.exists(filename):
        get_cookies()
    self.cookie = load_cookies(filename)
Example #2
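A smoke test that fetches cookies and asserts that the /status endpoint returns HTTP 200 with a JSON body of {"status": "ok"}.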
def test_status():
    cookies = get_cookies()
    print(cookies)
    response = requests.get("{}/{}".format(BASE_URI, "status"), cookies=cookies)
    assert response.status_code == 200
    body = response.json()
    assert body.get("status") == "ok"
Example #3
File: ta.py Project: 3omer/ta
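A Starlette-style async handler that accepts either JSON or form input, validates it, and returns an encrypted PIN block; on the form path it also reads a session id from the request cookies before rendering the success template.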
async def submit(request):
    if is_json_client(request):
        try:
            b = await request.json()
        except Exception:
            return JSONResponse({"message": "Empty or malformed JSON"}, 400)
        data, errors = RequestFields.validate_or_error(b)

        if errors:
            return JSONResponse(dict(errors), 400)

        pin_calculation = PinBlock(b.get("pin"), b.get("pan"), b.get("twk"),
                                   b.get("tmk"))
        pin = pin_calculation.encrypted_pin_block()
        return JSONResponse({"pin_block": pin}, 200)

    form = await request.form()
    data, errors = RequestFields.validate_or_error(form)
    if errors:
        form = forms.Form(RequestFields, values=data, errors=errors)
        context = {"request": request, "form": form}
        return templates.TemplateResponse("index.html", context)

    pin_calculation = PinBlock(form.get("pin"), form.get("pan"),
                               form.get("twk"), form.get("tmk"))
    pin = pin_calculation.encrypted_pin_block()

    session_id = get_cookies(request)  # session id from cookies; not used further here

    context = {"request": request, "form": form, "pin": pin}
    return templates.TemplateResponse("success.html", context)
Example #4
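An integration test that shells out to curl with a session cookie from get_cookies() to POST a VCF file, asserting an HTTP 200 response.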
def test_post_vcfs_success():
    def convert_cookies(cookies):
        return ["{}={}".format(key, value) for key, value in cookies.items()]

    url = "{}/{}".format(BASE_URI, "vcfs")
    cookies = convert_cookies(get_cookies())
    # Note: with -F, curl normally sets the multipart Content-Type (with boundary) itself.
    headers = 'Content-Type: multipart/form-data'
    file = "{}/{}".format(RESOURCE_FOLDER, "test_vcf_file.vcf")
    command = ("curl -w \"%{{http_code}}\" -X POST -b {} '{}?db_name={}' "
               "-H '{}' -F vcf_file=@{}").format(cookies[0], url, DB_NAME, headers, file)
    out, err = run_command(command)
    assert int(out) == 200, {"OUT: ": out, "ERROR: ": err, "COMMAND: ": command}
Example #5
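A logout handler: when a refresh-token cookie is present it posts to the OIDC end_session_endpoint, then clears the auth and refresh cookies on a redirect to /.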
def handle_logout(request):
    logger.info("Starting handle_logout")
    cookies = get_cookies(request["headers"])
    # check for refresh token, which we need to call logout
    if CONFIG["REFRESH_COOKIE"] in cookies:
        logger.info("Posting to logout URL")
        # wkc_data is assumed to hold the OIDC well-known (discovery) metadata.
        resp = post_to_url(url=wkc_data["end_session_endpoint"],
                           client_id=CONFIG["CLIENT_ID"],
                           client_secret=CONFIG["CLIENT_SECRET"],
                           refresh_token=cookies[CONFIG["REFRESH_COOKIE"]])
        logger.info("Back from post to logout URL, resp={r}".format(r=resp))
    # unset cookies
    r = redirect("/")
    cookies[CONFIG["AUTH_COOKIE"]] = ""
    cookies[CONFIG["REFRESH_COOKIE"]] = ""
    r = set_cookies(response=r, cookies=cookies)
    logger.info("Returning response to client")
    return r
Example #6
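Returns the expiry timestamp of the 'SUB' cookie from the list returned by get_cookies().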
def get_sub_expiry() -> int:
    def is_sub_cookie(cookie):
        return cookie['name'] == 'SUB'
    sub_cookie = list(filter(is_sub_cookie, get_cookies()))[0]
    return sub_cookie['expiry']
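A minimal usage sketch, assuming the 'expiry' value is a Unix timestamp as in Selenium-style cookie dicts (the conversion below is illustrative, not from the source):

import datetime

# Hypothetical usage: render the SUB cookie's expiry as a UTC datetime.
expires_at = datetime.datetime.fromtimestamp(get_sub_expiry(), tz=datetime.timezone.utc)
print(expires_at.isoformat())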
Example #7
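Serializes every cookie from get_cookies() into a single 'name=value; name=value' string suitable for a Cookie header.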
def get_cookie() -> str:
    def build_pair(cookie):
        return '{}={}'.format(cookie['name'], cookie['value'])

    cookie_pairs = [build_pair(cookie) for cookie in get_cookies()]
    return "; ".join(cookie_pairs)
Example #8
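A MaxPreps rankings scraper: it walks paginated ranking tables (overall, or per division and state class), parses each row and its profile page, and appends the results to a CSV file.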
def scrape(division=False, csv_file=None, newcol=False):
    csv_fname = generate_filename('MAXPREPS') + '.csv'
    cookies = get_cookies()
    s = requests.Session()

    done_text = 'We do not have rankings for this selection, please try another search'

    if not division:
        pagination = True
        cur_page = 1
        while pagination:
            soup = request(s, RANKINGS_URL.format(cur_page), cookies)
            if soup:
                if done_text in str(soup):
                    break
                else:
                    print(
                        '-----------------------Page {}-----------------------'
                        .format(cur_page))
                    cur_page = cur_page + 1

                    try:
                        trs = soup.select('table tr')[1:]
                    except Exception:
                        trs = []

                    for tr in trs:
                        rp = RowParser(tr)
                        name = rp.get_name()
                        rating = rp.get_rating()
                        strength = rp.get_strength()
                        p_url = rp.get_profle_url()
                        s_num = rp.get_search_num()

                        prof_soup = request(s, p_url, cookies)
                        if prof_soup:
                            pp = ProfileParser(s, cookies, prof_soup, s_num,
                                               name, rating, strength, p_url)
                            data = pp.get_data()
                            write_csv(csv_fname, data)
                            print('[+] Done >> {}'.format(name))
    else:
        # this branch only appends state class/rank columns to an existing CSV
        if newcol:
            with open(csv_file, 'r') as csvfile:
                reader = csv.reader(csvfile)
                for row in reader:
                    name = row[5]
                    url = row[4]
                    soup = request(s, url, cookies)
                    if soup:
                        fp = FootballProfile(name, soup)
                        data = [[
                            row[0], row[1], row[2], row[3], row[4], row[5],
                            row[6], row[7], row[8], row[9], row[10], row[11],
                            row[12], row[13], row[14], row[15], row[16],
                            row[17], row[18],
                            fp.state_class_division_name(),
                            fp.state_division_rank()
                        ]]
                        write_csv('__' + csv_file, data)
                        print('[+] Done >> {}'.format(name))
        else:
            for div in DIVISIONS:
                print(
                    '-----------------------DIVISION: {}-----------------------'
                    .format(div))
                state_soup = request(s, RANKINGS_BY_DIVISION.format(1, div),
                                     cookies)
                if state_soup:
                    state_class = get_state_class(state_soup)
                    if state_class:
                        for sc in state_class:
                            clas_url = sc[1]
                            clas_name = sc[0]
                            print(
                                '-----------------------STATE CLASS: {}-----------------------'
                                .format(clas_name))

                            pagination = True
                            cur_page = 1
                            while pagination:
                                # '/rankings/football-fall-16/{}/division/fl/asZCeSbCLkCIkW5UXU_cOw/division-8a.htm'
                                url = re.sub(r'/[0-9]{1,3}/', '/{}/', clas_url)
                                soup = request(s, url.format(cur_page),
                                               cookies)
                                if soup:
                                    if done_text in str(soup):
                                        pagination = False
                                    else:
                                        print(
                                            '-----------------------Page {}-----------------------'
                                            .format(cur_page))
                                        cur_page = cur_page + 1

                                        try:
                                            trs = soup.select('table tr')[1:]
                                        except Exception:
                                            trs = []

                                        for tr in trs:
                                            rp = RowParser(tr)
                                            name = rp.get_name()
                                            rating = rp.get_rating()
                                            strength = rp.get_strength()
                                            p_url = rp.get_profle_url()
                                            s_num = rp.get_search_num()

                                            prof_soup = request(
                                                s, p_url, cookies)
                                            if prof_soup:
                                                pp = ProfileParser(
                                                    s, cookies, prof_soup,
                                                    s_num, name, rating,
                                                    strength, p_url)
                                                data = pp.get_data()
                                                write_csv(csv_fname, data)
                                                print('[+] Done >> {}'.format(
                                                    name))
                    else:
                        pagination = True
                        cur_page = 1
                        while pagination:
                            soup = request(
                                s, RANKINGS_BY_DIVISION.format(cur_page, div),
                                cookies)
                            if soup:
                                if done_text in str(soup):
                                    pagination = False
                                else:
                                    print(
                                        '-----------------------Page {}-----------------------'
                                        .format(cur_page))
                                    cur_page = cur_page + 1

                                    try:
                                        trs = soup.select('table tr')[1:]
                                    except Exception:
                                        trs = []

                                    for tr in trs:
                                        rp = RowParser(tr)
                                        name = rp.get_name()
                                        rating = rp.get_rating()
                                        strength = rp.get_strength()
                                        p_url = rp.get_profle_url()
                                        s_num = rp.get_search_num()

                                        prof_soup = request(s, p_url, cookies)
                                        if prof_soup:
                                            pp = ProfileParser(
                                                s, cookies, prof_soup, s_num,
                                                name, rating, strength, p_url)
                                            data = pp.get_data()
                                            write_csv(csv_fname, data)
                                            print(
                                                '[+] Done >> {}'.format(name))
Example #9
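A Selenium sign-in snippet: it injects saved cookies for the .tsdm.live domain, reloads the sign page, retries via utils.get_cookies() until the session sticks, and then clicks through the sign-in form.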
# NOTE: this enclosing retry loop is an assumption; the snippet breaks out
# once the sign page loads with a valid session.
while True:
    tsdm_cookies = utils.read_cookies()
    browser.get(sign_page)
    browser.delete_all_cookies()
    print("wait login page...")
    time.sleep(10)
    for cookie in tsdm_cookies:
        browser.add_cookie({
            "domain": ".tsdm.live",
            "name": cookie,
            "value": tsdm_cookies[cookie],
            "path": '/',
            "expires": None
        })
    browser.get(sign_page)
    if browser.current_url == sign_page:
        break
    else:
        utils.get_cookies()
browser.maximize_window()
browser.find_element_by_xpath('/html/body/div[6]/div[2]/div/div[1]/div[1]/form/table[1]/tbody/tr/td/ul/li[1]/center/a') \
    .click()
pages = browser.window_handles
browser.switch_to.window(pages[0])
browser.find_element_by_xpath('/html/body/div[6]/div[2]/div/div[1]/div[1]/form/table[2]/tbody/tr[2]/td[1]/input') \
    .send_keys(content)
browser.find_element_by_xpath('/html/body/div[6]/div[2]/div/div[1]/div[1]/form/table[1]/tbody/tr/td/div/a[1]') \
    .click()
print('no signature waiting...')
time.sleep(4)
browser.quit()
Example #10
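Downloads an image gallery: if the site serves an offensive-content warning page, it retries with cookies from utils.get_cookies(site), then walks every gallery page, saves each image, and reports failures and elapsed time.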
def download_gallery(site):
    start = time.time()
    # for offensive warning
    need_cookies = False
    cookies = None
    html = utils.get_html(site)
    if not html:
        print('Failed to retrieve gallery page, process will be aborted!')
        return
    if utils.is_warning_page(html):
        print('Page has offensive content, setting cookies to get around it')
        need_cookies = True
        cookies = utils.get_cookies(site)
        html = utils.get_html_with_cookies(site, cookies)
    metadata = get_gallery_metadata(html)
    urls = get_page_urls(html)
    sections = metadata["Length"].split()
    total_images = int(sections[0]) if sections else 0
    title = metadata["Title"]
    print('Below is the information of the gallery...')
    print_metadata(metadata)
    print('Start downloading...')
    title = title.replace('/', ' of ')
    if not utils.create_dir(title):
        return
    if total_images:
        utils.print_progress(0, total_images)
    else:
        print(
            "Failed to get total number of images, progress bar is disabled!")
    i = 0
    img_fails = []
    gallery_page_fails = []
    img_page_fails = []

    # download images from each gallery page
    for url in urls:
        page_html = utils.get_html_with_cookies(
            url, cookies) if need_cookies else utils.get_html(url)
        if not page_html:
            gallery_page_fails.append(url)
            continue
        image_urls = get_image_urls(page_html)
        for image_url in image_urls:
            image_page_html = utils.get_html(image_url)
            if not image_page_html:
                img_page_fails.append(image_url)
                continue
            image_src = get_image_src(image_page_html)
            parts = image_src.split('.')
            extension = (
                '.' + parts[-1] if parts[-1] else '.jpg') if parts else '.jpg'
            file_name = get_file_name(total_images, i + 1) + extension
            file_path = title + '/' + file_name
            if not os.path.exists(file_path):
                if not utils.get_image(image_src, file_path):
                    img_fails.append(file_name)
            i += 1
            if total_images:
                utils.print_progress(i, total_images)

    # report download results
    succeed = True
    if gallery_page_fails or img_page_fails:
        succeed = False
        print('Failed to load following pages:')
        for url in gallery_page_fails:
            print(url)
        for url in img_page_fails:
            print(url)
    if img_fails:
        succeed = False
        print('Failed to download following %s files...' % len(img_fails))
        for img in img_fails:
            print(img)
    if succeed:
        print('All files are downloaded successfully!')
    end = time.time()
    hours, rem = divmod(end - start, 3600)
    minutes, seconds = divmod(rem, 60)
    print("Total time elapsed {:0>2}m:{:02.0f}s".format(
        int(hours) * 60 + int(minutes), seconds))
Example #11
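A session-checking handler for an OIDC flow: it validates the access-token cookie, attempts a refresh-token exchange when the token has expired, and otherwise redirects to the authorization URL with encoded state.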
def check_session(request):
    logger.info("Starting check_session")
    cookies = get_cookies(request["headers"])
    # update aurl with state data
    source_url = request["uri"]
    if request["querystring"]:
        source_url = source_url + "?{qs}".format(qs=request["querystring"])
    logger.info("Determined source_url is {s}".format(s=source_url))
    source_url_secret = CONFIG["STATE_SECRET"] + source_url
    state = {
        "source_url": source_url,
        "hash": sha256(source_url_secret.encode("utf-8")).hexdigest(),
        "nonce": get_rand_string(10)
    }
    state = base64.b64encode(json.dumps(state).encode("utf-8")).decode("utf-8")
    logger.info("Encoded state: {state}".format(state=state))
    aurl_with_state = "{aurl}&state={state}".format(aurl=aurl, state=state)
    # check for auth token
    if CONFIG["AUTH_COOKIE"] in cookies:
        logger.info("Got access token")
        # try to validate the token
        try:
            decoded = validate_jwt(api=CONFIG["VAL_API_URL"],
                                   token=cookies[CONFIG["AUTH_COOKIE"]],
                                   key_set=keys,
                                   aud=CONFIG["CLIENT_ID"])
            logger.info("Access token is valid")
            return request
        except ExpiredSignatureError:
            # token is not valid
            logger.info(
                "Access token has expired, so going to attempt to refresh")
            if CONFIG["REFRESH_COOKIE"] in cookies:
                # we have a refresh token
                logger.info("Refreshing token")
                resp = post_to_url(
                    url=wkc_data["token_endpoint"],
                    grant_type="refresh_token",
                    client_id=CONFIG["CLIENT_ID"],
                    client_secret=CONFIG["CLIENT_SECRET"],
                    refresh_token=cookies[CONFIG["REFRESH_COOKIE"]])
                logger.info("Called to refresh token")
                resp = json.loads(resp)
                if "error" in resp:
                    logger.info(
                        "There was an error refreshing the token, so need to log in again"
                    )
                    return redirect(aurl)
                else:
                    access_token = resp["access_token"]
                    logger.info("Got new access token, returning to client")
                    r = redirect("/")
                    cookies[CONFIG["AUTH_COOKIE"]] = access_token
                    r = set_cookies(response=r,
                                    cookies=cookies,
                                    max_age=CONFIG.get("MAX_AGE", "10"))
                    return r
            else:
                # return a 302 redirect as we don't have a refresh token
                logger.info(
                    "No refresh token present, so need to log in again")
                return redirect(aurl_with_state)
    else:
        logger.info("No access token present, so need to log in")
        return redirect(aurl_with_state)
Example #12
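A Flask-style view that fetches the cookies for a host and renders them with a template.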
def analyse_cookies(host):
    cookies = utils.get_cookies(host)
    return render_template('analyse_cookies.html', cookies=cookies, host=host)
Example #13
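A test helper that POSTs JSON to the getAnnotatedVariants endpoint with cookies from get_cookies() attached.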
def _request_getAnnotatedVariants(data):
    url = "{}/getAnnotatedVariants".format(BASE_URI)
    return requests.post(url=url, cookies=get_cookies(), json=data, headers={"Content-type": "application/json"})
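An illustrative call, assuming the endpoint accepts a JSON body (the payload shape below is a guess, not from the source):

# Hypothetical payload; field names are illustrative only.
response = _request_getAnnotatedVariants({"variants": []})
assert response.status_code == 200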