Example #1
0
def update_wallpaper():
    """Select a wallpaper appropriate for the current time of day and apply it.

    Reads config (wallpaper paths, dawn/dusk windows) and sun data (sunrise/
    sunset times), picks dawn/dusk/day/night, then applies the image to both
    the GNOME desktop background and screensaver via ``gsettings``.

    Returns:
        A human-readable status string describing success or the failure mode.
    """
    # Grab the configuration
    try:
        config = load_config()
    except AssertionError as e:
        return f"Unable to load configuration file. {str(e)}"
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate instead of being reported as a config failure.
        return "Unable to load configuration file. Unknown error."

    # Grab the latest data
    try:
        data = load_data()
    except AssertionError as e:
        return f"Unable to load data file. {str(e)}"
    except Exception:
        return "Unable to load data file. Unknown error."

    # Get the current time (UTC), create deltas, and parse sunrise/sunset times
    now = dt.datetime.now(dt.timezone.utc)
    dawn_delta = dt.timedelta(minutes=config["dawn_window"])
    dusk_delta = dt.timedelta(minutes=config["dusk_window"])
    sunrise = dup.parse(data["sunrise"])
    sunset = dup.parse(data["sunset"])

    # Pick the correct wallpaper; the dawn/dusk windows take priority over
    # the plain day/night split.
    wallpaper = ""
    if duu.within_delta(now, sunrise, dawn_delta):
        wallpaper = config["dawn"]
    elif duu.within_delta(now, sunset, dusk_delta):
        wallpaper = config["dusk"]
    elif sunrise < now < sunset:
        wallpaper = config["day"]
    elif now < sunrise or sunset < now:
        wallpaper = config["night"]

    # Make sure a wallpaper is picked (now == sunrise/sunset exactly can fall
    # through all branches when the windows are zero-width).
    if wallpaper == "":
        return "Could not change wallpaper. Logical error in time calculations."

    # Change the wallpaper. List-form argv without shell=True avoids shell
    # injection via a wallpaper path containing metacharacters.
    uri = f"file://{wallpaper}"
    subprocess.call(["gsettings", "set", "org.gnome.desktop.background",
                     "picture-uri", uri])
    subprocess.call(["gsettings", "set", "org.gnome.desktop.screensaver",
                     "picture-uri", uri])
    return f"Wallpaper and screensaver changed to {wallpaper}."
Example #2
0
def crawl_ip3366(url):
    """Scrape proxy listings from ip3366 result pages and queue them for checking.

    Walks the paginated listing at *url* (``&page=N`` appended per page) until a
    page yields no table rows or an entry's last-check time is more than one day
    old, appending a ``proxies_checker`` coroutine per proxy to the module-level
    ``tasks`` list.

    Args:
        url: Base listing URL; a ``&page=%s`` query suffix is appended.
    """
    base_url = f"{url}&page=%s"
    index = 1
    time_now = datetime.now()
    continue_flag = True
    while continue_flag:
        url = base_url % index
        resp = session.request('get', url)
        # The site serves GB2312-encoded HTML.
        sel = Selector(text=resp.data.decode('gb2312'))
        lst = sel.xpath(
            '//table[@class="table table-bordered table-striped"]/tbody/tr')
        if not lst:
            # Empty page: stop after this (empty) iteration.
            continue_flag = False
        for item in lst:
            ip = item.xpath('./td[1]/text()').extract_first()
            port = item.xpath('./td[2]/text()').extract_first()
            _anonymous = item.xpath('./td[3]/text()').extract_first()
            if _anonymous == '高匿代理IP':
                anonymous = '高匿'
            elif _anonymous == '普通代理IP':
                anonymous = '普匿'
            else:
                anonymous = '透明'
            # Renamed from `type` to avoid shadowing the builtin.
            proxy_type = item.xpath('./td[4]/text()').extract_first()
            address = item.xpath('./td[5]/text()').extract_first()
            speed = item.xpath('./td[6]/text()').extract_first()
            _last_check_time = item.xpath('./td[7]/text()').extract_first()
            last_check_time = du_parse(_last_check_time)
            # Stale entry (>1 day old): stop paginating, but still finish
            # processing the rows already scraped from this page.
            if not within_delta(time_now, last_check_time, timedelta(days=1)):
                continue_flag = False
            print(
                f"""ip: {ip}, port: {port}, address: {address}, type: {proxy_type}, speed: {speed}, last_check_time: {last_check_time}"""
            )

            proxies = {
                'http': f'http://{ip}:{port}',
                'https': f'http://{ip}:{port}'
            }
            proxy_info = {
                'ip': ip,
                'port': port,
                'address': address,
                'type': proxy_type,
                'anonymous': anonymous,
                'speed': speed,
                'connect_time': None,
                'living_time': None,
                'last_check_time': last_check_time,
                'source': 'ip3366'
            }
            tasks.append(proxies_checker(proxies, proxy_info))

        index += 1
Example #3
0
def crawl_89ip():
    """Scrape free proxies from www.89ip.cn and queue them for checking.

    Pages through the listing until a page has no rows or an entry's
    last-check time is over a day old; each proxy becomes a
    ``proxies_checker`` coroutine appended to the module-level ``tasks``
    list, which is then awaited before committing the DB connection.
    """
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36'
    }
    base_url = 'http://www.89ip.cn/index_%s.html'
    page_no = 1
    started_at = datetime.now()
    keep_going = True
    while keep_going:
        page_url = base_url % page_no
        resp = session.request('get', page_url, headers=headers)
        sel = Selector(text=resp.data.decode('utf8'))
        rows = sel.xpath('//table[@class="layui-table"]/tbody/tr')
        if not rows:
            # No rows on this page: this pass through the loop is the last.
            keep_going = False
        for row in rows:
            ip = row.xpath('normalize-space(./td[1])').extract_first()
            port = row.xpath('normalize-space(./td[2])').extract_first()
            address = row.xpath('normalize-space(./td[3])').extract_first()
            raw_checked = row.xpath(
                'normalize-space(./td[5])').extract_first()
            checked_at = du_parse(raw_checked)
            # Entry older than a day: stop paginating after this page,
            # but keep processing the rows already fetched.
            if not within_delta(started_at, checked_at, timedelta(days=1)):
                keep_going = False
            print(
                f"""ip: {ip}, port: {port}, address: {address}, last_check_time: {checked_at}"""
            )

            endpoint = f'http://{ip}:{port}'
            proxies = {'http': endpoint, 'https': endpoint}
            proxy_info = {
                'ip': ip,
                'port': port,
                'address': address,
                'type': 'HTTP',
                'anonymous': '未知',
                'speed': None,
                'connect_time': None,
                'living_time': None,
                'last_check_time': checked_at,
                'source': '89ip'
            }
            tasks.append(proxies_checker(proxies, proxy_info))

        page_no += 1

    loop.run_until_complete(asyncio.wait(tasks))
    conn.commit()
Example #4
0
def qy():
    """Scrape free proxies from qydaili.com and queue them for checking.

    Pages through the China-proxy listing until a page has no rows or an
    entry's last-check time is over a day old; each proxy becomes a
    ``proxies_checker`` coroutine appended to the module-level ``tasks``
    list, which is then awaited before committing the DB connection.
    """
    base_url = 'http://www.qydaili.com/free/?action=china&page=%s'
    index = 1
    time_now = datetime.now()
    continue_flag = True
    while continue_flag:
        url = base_url % index
        resp = session.request('get', url)
        sel = Selector(text=resp.data.decode('utf8'))
        lst = sel.xpath('//div[@class="container"]/table/tbody/tr')
        if not lst:
            # Empty page: stop after this (empty) iteration.
            continue_flag = False
        for item in lst:
            ip = item.xpath('./td[1]/text()').extract_first()
            port = item.xpath('./td[2]/text()').extract_first()
            _anonymous = item.xpath('./td[3]/text()').extract_first()
            anonymous = _anonymous if _anonymous != '匿名' else '普匿'
            # Renamed from `type` to avoid shadowing the builtin.
            proxy_type = item.xpath('./td[4]/text()').extract_first()
            address = item.xpath('./td[5]/text()').extract_first()
            speed = item.xpath('./td[6]/text()').extract_first()
            _last_check_time = item.xpath('./td[7]/text()').extract_first()
            last_check_time = du_parse(_last_check_time)
            # Stale entry (>1 day old): stop paginating, but still finish
            # processing the rows already scraped from this page.
            if not within_delta(time_now, last_check_time, timedelta(days=1)):
                continue_flag = False
            print(
                f"""ip: {ip}, port: {port}, address: {address}, type: {proxy_type}, speed: {speed}, last_check_time: {last_check_time}"""
            )

            proxies = {
                'http': f'http://{ip}:{port}',
                'https': f'http://{ip}:{port}'
            }
            proxy_info = {
                'ip': ip,
                'port': port,
                'address': address,
                'type': proxy_type,
                'anonymous': anonymous,
                'speed': speed,
                'connect_time': None,
                'living_time': None,
                'last_check_time': last_check_time,
                'source': 'qiyun'
            }
            tasks.append(proxies_checker(proxies, proxy_info))
        index += 1

    loop.run_until_complete(asyncio.wait(tasks))
    conn.commit()
Example #5
0
def get_recent_entries():
    """Yield display rows for entries whose end time falls within the last ~4 days.

    Each yielded row is: id, start date, third tag, duration string,
    start-end time range, logged marker, and a (possibly truncated)
    annotation. Reads the module-level ``data`` sequence.
    """
    window = timedelta(hours=24 * 4)
    for entry in data:
        now_local = dutil.datetime.now(tz=tz.tzlocal())
        if not dutil.within_delta(now_local, entry["end"], window):
            continue
        interval = entry["interval"]
        duration = f"{interval.hours}h {interval.minutes}m"
        marker = "✓" if "logged" in entry["tags"] else "✘"
        span = "{}-{}".format(entry["start"].strftime(TIME_FORMAT),
                              entry["end"].strftime(TIME_FORMAT))
        annotation = entry.get("annotation", "")
        if len(annotation) >= 20:
            annotation = annotation[:20] + "..."
        yield [
            entry["id"],
            entry["start"].strftime(DATE_FORMAT),
            entry["tags"][2],
            duration,
            span,
            marker,
            annotation,
        ]
Example #6
0
    def testWithinDeltaWithNegativeDelta(self):
        """A negative delta is treated like its absolute value."""
        earlier = datetime(2015, 12, 31)
        later = datetime(2016, 1, 1)
        self.assertTrue(within_delta(earlier, later, timedelta(days=-1)))
Example #7
0
    def testWithinDelta(self):
        """Datetimes 6µs apart are within one second but not one microsecond."""
        base = datetime(2016, 1, 1, 12, 14, 1, 9)
        shifted = base.replace(microsecond=15)
        self.assertTrue(within_delta(base, shifted, timedelta(seconds=1)))
        self.assertFalse(within_delta(base, shifted, timedelta(microseconds=1)))
Example #8
0
    def testWithinDeltaWithNegativeDelta(self):
        """within_delta must accept a negative delta as if it were positive."""
        day_before = datetime(2015, 12, 31)
        day_after = datetime(2016, 1, 1)
        self.assertTrue(within_delta(day_before, day_after, timedelta(days=-1)))
Example #9
0
    def testWithinDelta(self):
        """Check both sides of the boundary: inside 1s, outside 1µs."""
        reference = datetime(2016, 1, 1, 12, 14, 1, 9)
        nearby = reference.replace(microsecond=15)
        self.assertTrue(
            within_delta(reference, nearby, timedelta(seconds=1)))
        self.assertFalse(
            within_delta(reference, nearby, timedelta(microseconds=1)))
Example #10
0
def test_utils_within_delta_with_negative_delta():
    """A negative delta behaves like its absolute value."""
    earlier = datetime(2015, 12, 31)
    later = datetime(2016, 1, 1)
    assert within_delta(earlier, later, timedelta(days=-1))
Example #11
0
def test_utils_within_delta():
    """Datetimes 6µs apart are within one second but not one microsecond."""
    base = datetime(2016, 1, 1, 12, 14, 1, 9)
    shifted = base.replace(microsecond=15)
    assert within_delta(base, shifted, timedelta(seconds=1))
    assert not within_delta(base, shifted, timedelta(microseconds=1))