Example #1
def get_downloaded_source(url, https=None, root=DOWNLOADS_PATH):
    if not isdir(root):
        makedirs(root)
    if https:
        path = url[8:]
    else:
        path = url[7:]
    s = split_posix(path)
    if len(s) > 1:
        p = join(root, *s[:-1])
        if not isdir(p):
            makedirs(p)
        p = join(p, s[-1])
    else:
        p = join(root, s[0])
    if isfile(p):
        return [p]
    print "=> Downloading:", url
    r = get_url(url)
    if r.status_code != 200:
        exit("Couldn't download %s (Got %d)" % (url, r.status_code))
    print "=> Saving to:", p
    f = open(p, 'wb')
    f.write(r.content)
    f.close()
    return [p]
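The snippet relies on module-level helpers that are not shown. A minimal sketch of the assumed wiring, where split_posix and DOWNLOADS_PATH are guesses from context and get_url is requests.get:

from os import makedirs
from os.path import isdir, isfile, join
from requests import get as get_url

DOWNLOADS_PATH = "downloads"  # local cache directory (assumption)

def split_posix(path):
    # Split a POSIX-style URL path into its non-empty components.
    return [part for part in path.split("/") if part]

# Example: caches the file under downloads/example.com/robots.txt
# files = get_downloaded_source("https://example.com/robots.txt", https=True)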
Example #2
def check_update(dlg: QProgressDialog) -> str:
    """Check for update."""
    ver_list = [int(v) for v in __version__.split('.') if v.isdigit()]
    logger.info(f"Getting update for \"{__version__}\":")
    m = len(ver_list)
    for i in range(m):
        if i == 0:
            text = "major"
        elif i == 1:
            text = "minor"
        else:
            text = "micro"
        dlg.setLabelText(f"Checking for {text}...")
        QCoreApplication.processEvents()
        if dlg.wasCanceled():
            return ""
        next_ver = ver_list[:m]
        next_ver[i] += 1
        for j in range(i + 1, m):
            next_ver[j] = 0
        if i == 0:
            next_ver[1] = 1
        elif i == 1:
            if next_ver[1] > 12:
                dlg.setValue(i + 1)
                continue
        url = (f"https://github.com/KmolYuan/Pyslvs-UI/releases/tag/"
               f"v{next_ver[0]}.{next_ver[1]:02}.{next_ver[2]}")
        request = get_url(url)
        dlg.setValue(i + 1)
        if request.status_code == 200:
            dlg.setValue(m)
            return url
    return ""
Example #3
def get_downloaded_source(url, https=None, root=DOWNLOADS_PATH):
    if not isdir(root):
        makedirs(root)
    if https:
        path = url[8:]
    else:
        path = url[7:]
    s = split_posix(path)
    if len(s) > 1:
        p = join(root, *s[:-1])
        if not isdir(p):
            makedirs(p)
        p = join(p, s[-1])
    else:
        p = join(root, s[0])
    if isfile(p):
        return [p]
    log.info("Downloading: %s" % url)
    r = get_url(url)
    if r.status_code != 200:
        exit("Couldn't download %s (Got %d)" % (url, r.status_code))
    log.info("Saving to: %s" % p)
    f = open(p, 'wb')
    f.write(r.content)
    f.close()
    return [p]
Example #4
def retourner_bug():
    """Report a bug via Telegram."""
    logging.info("Page for {}".format(APP_BUG))
    if request.method:
        logging.info("Request received:\nForm: {}\nArgs: {}".format(
            str(request.form), str(request.args)))

    if request.method == WEB_POST and request.form[WEB_BOUTON] == WEB_ENVOYER:
        ligneCsv = {
            CSV_NOM: request.form[CSV_NOM],
            CSV_PRENOM: request.form[CSV_PRENOM],
            CSV_DESCRIPTION: request.form[WEB_TEXTE]
        }
        ajouter_bug(ligneCsv)
        message = "Bug rapporté par {} {}\n{}".format(
            ligneCsv[CSV_PRENOM], ligneCsv[CSV_NOM], ligneCsv[CSV_DESCRIPTION])
        TELEGRAM_API_MESSAGE_PAYLOAD["text"] = message
        r = get_url(TELEGRAM_API_URL, params=TELEGRAM_API_MESSAGE_PAYLOAD)
        if r.status_code == 200:
            flash("Telegram envoyé avec succès!")
        else:
            flash("Telegram non envoyé!")
        return redirect(url_for("retourner_accueil"))

    return render_template("{}.html".format(APP_BUG),
                           active=APP_BUG,
                           **APP_PATHS)
Example #5
def ipinfo(ip):
    # Get info from the ipinfo.io API
    url = 'http://ipinfo.io/' + ip + '/json'
    response = get_url(url)
    try:
        data = response.json()
        return data
    except ValueError:
        print('Check your input')
        return None
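Assuming get_url is requests.get, a quick example call:

from requests import get as get_url  # assumed alias used by the snippet

data = ipinfo("8.8.8.8")  # look up a public IP address
if data:
    print(data.get("city"), data.get("org"))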
Example #6
def is_last_version_app():
    from requests import get as get_url
    from . import __version__ as current_version

    response = get_url('https://pypi.org/pypi/Redmine-CLI-Tool/json')
    if response.status_code != 200:
        return True
    data = response.json()
    pypi_version = tuple(map(int, str(data['info']['version']).split('.')))
    # __version__ is a dotted string, so normalize both sides to int tuples
    local_version = tuple(map(int, current_version.split('.')))
    return local_version >= pypi_version
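Tuple comparison is what makes this check work element by element; comparing the raw version strings would order releases incorrectly:

assert (0, 9, 1) < (0, 10, 0)   # tuples compare numerically: 9 < 10
assert "0.9.1" > "0.10.0"       # strings compare character by character: "9" > "1"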
Example #7
def parse() -> Optional[Dict]:

    r = get_url(ROOT_URL)
    if r.status_code != 200:
        print('Cannot load main page')
        return None

    info = parse_main_page(r.content)
    info = convert(info)
    return info
Example #8
def get_metadata(path, root=ROOT_URL):
    """Get metadata relative to metadata/computeMetadata/v1"""
    HEADERS = {"Metadata-Flavor": "Google"}
    url = f"{root}/{path}"
    try:
        resp = get_url(url, headers=HEADERS)
        resp.raise_for_status()
        return resp.text
    except RequestException:
        log.error(f"Error while getting metadata from {url}")
        return None
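A sketch of the assumed module-level setup for get_metadata; the metadata server address is the standard GCE one, and the logger name is a guess:

import logging
from requests import get as get_url
from requests.exceptions import RequestException

ROOT_URL = "http://metadata.google.internal/computeMetadata/v1"
log = logging.getLogger(__name__)

# Only resolves on a GCE instance:
# hostname = get_metadata("instance/hostname")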
Example #9
def parse_main_page(html: str):
    data = list()
    tree = BeautifulSoup(html, "lxml")
    day_tabs = tree.find_all("a", class_="day-tab")
    for day_tab in day_tabs:
        link = ROOT_URL + day_tab.attrs["href"]
        print(link)
        date, _, _ = parse_url(link)
        r = get_url(link)
        if r.status_code != 200:
            print('Cannot load page')
            # @TODO: add proper logging
            continue

        page_data = parse_one_day_page(r.content)
        page_data = add_date_to_pagedata(page_data, date)
        data.append(page_data)

    return data
Example #10
def get_link(self, url, scrap_func):
    if isinstance(url, str):
        request = get_url(url)
        content = request.content
        self.__urls.extend(scrap_func(content))
Example #11
@classmethod
def fetch(cls, tx_id, testnet=False, fresh=False):
    if fresh or (tx_id not in cls.cache):
        url = f"{cls.get_url(testnet)}/tx/{tx_id}.hex"
        response = requests.get(url)
Example #12
def download_file_to_path(url, path):
    request = get_url(url, stream=True)
    with open(path, mode='wb') as f:
        for part in request.iter_content(chunk_size=1024):
            if part:
                f.write(part)
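Example use, assuming get_url is requests.get; stream=True with 1 KiB chunks keeps large downloads out of memory:

from requests import get as get_url  # assumed alias

download_file_to_path("https://example.com/archive.zip", "/tmp/archive.zip")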
Example #13
def is_url(url):
    assert url.scheme == 'http', 'Error: Invalid url...'
    assert url.netloc, 'Error: Invalid url...'
    test = get_url(urlunsplit(url), proxies=PROX)
    assert test.status_code == 200, 'Error: Invalid url...'
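A sketch of how is_url might be called, assuming PROX is an optional proxies dict and get_url is requests.get; the function expects an already-split URL, not a plain string:

from urllib.parse import urlsplit

PROX = None  # or {"http": "http://proxy.example:3128"} (assumption)
parts = urlsplit("http://example.com/index.html")
is_url(parts)  # raises AssertionError if the URL is malformed or unreachable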