def scrape(arr):
    disp_filter = get_dispensary_filter(arr)
    headset_scraper = HeadsetDispensaryScraper(
        HttpClient(), HeadsetCategoryExtractor(HttpClient()))
    result = run(disp_filter.get_state_names(), headset_scraper.produce,
                 headset_scraper.consume)

    return json.dumps(result)
Example #2
def scrape(arr):
    disp_filter = get_dispensary_filter(arr)
    leafly_scraper = LeaflyDispensaryScraper(
        disp_filter, HttpClient(), LeaflyDetailsExtractor(HttpClient()))
    result = run(disp_filter.get_state_names(), leafly_scraper.produce,
                 leafly_scraper.consume)

    return json.dumps(result)
Example #3
    def run(self):
        """Download the chapter text via the submitted requests."""
        _urls = []
        for v in filter(lambda x: not self.check[x['Subnum'] - 1], self.index):
            _urls.append(self.home + v['Href'])

        try:
            print('run:')
            HClient = HttpClient(_urls)
            for x in HClient.page_generater():
                if x == 0:
                    break
                elif x is not None:
                    _text = get_text(x)
                    self.Text[_text['No']] = _text
                    self.check[int(_text['No']) - 1] = True
        except Exception:
            print('failed to download the text.')
            return -1
        HClient.close()
        if all(self.check):
            return 1
        else:
            return 0
Example #4
    def run(self):
        while True:
            # Each frame carries three length-prefixed fields: ID, Tag, Data.
            IDlength = self.client.recv(1)
            if not IDlength:
                break
            length1 = int(IDlength[0])
            ID = self.client.recv(length1)
            Taglength = self.client.recv(1)
            if not Taglength:
                break
            length2 = int(Taglength[0])
            Tagdata = self.client.recv(length2)
            Datalength = self.client.recv(1)
            if not Datalength:
                break
            length3 = int(Datalength[0])
            Data = self.client.recv(length3)

            IDstring = b64encode(ID).decode()
            print(IDstring + '\n')
            Tagstring = str(Tagdata[0])
            print(Tagstring + '\n')
            Datastring = b64encode(Data).decode()
            print(Datastring + '\n')
            config = ConfigHelper('config.yaml')
            client = HttpClient(*config.api)
            public_id = IDstring
            tag = int(Tagstring)
            data = Datastring
            client.main_entry(public_id, tag, data, *config.reference_llh)
        ip = self.client.getpeername()
        print('start data transfer:\n', ip)
        n = bytes([0x34, 0x12])  # two-byte marker (0x1234 little-endian)
        self.client.send(n)  # connected socket: send() takes no address
        print("close:", self.client.getpeername())
Example #5
    def test_mainfunctionality(self):
        param = PARAM
        test_db = NoSQL(
            param["database"]["engine"], {
                "host": param["database"]["host"],
                "port": param["database"]["port"],
                "db": param["database"]["db"]["urlcache"]
            })
        url = "http://www.seas.upenn.edu/~yunkai/"
        # clear cache and other initialization
        del_job = Job(url, {})
        test_db.delete(del_job.identifier)
        # test a non-cached url
        job_parameters = {}
        test_job = Job(url, job_parameters)
        h = HttpClient()
        header, _ = h.request(url, method="HEAD")
        header["last-modified"] = "Tue, 19 Apr 2015 02:33:38 GMT"
        cached, result = UrlChecker(test_job, param, header)
        self.assertFalse(cached)
        self.assertEqual(result["url"], url)
        # test a cached url
        cached, result = UrlChecker(test_job, param, header)
        self.assertTrue(cached)
        self.assertEqual(result["url"], url)
        header["last-modified"] = "Tue, 21 Apr 2015 02:33:38 GMT"
        cached, result = UrlChecker(test_job, param, header)
        self.assertFalse(cached)
        # test a different url with the same identifier (this is rare)
        test_job.url = ("https://alliance.seas.upenn.edu/~cis520/wiki/"
                        "index.php?n=Lectures.Lectures")
        cached, result = UrlChecker(test_job, param, header)
        self.assertFalse(cached)
        self.assertEqual(result["url"], test_job.url)
Example #6
def scrape(arr):
    disp_filter = get_dispensary_filter(arr)
    potguide_scraper = PotGuideDispensaryScraper(HttpClient(),
                                                 PotGuideDispInfoExtractor(),
                                                 disp_filter)
    result = run(disp_filter.get_state_names(), potguide_scraper.produce,
                 potguide_scraper.consume)

    return json.dumps(result)
Example #7
def scrape(arr):
    disp_filter = get_dispensary_filter(arr)
    wm_scraper = WeedMapsDispensaryScraper(disp_filter, HttpClient(),
                                           WeedMapsDespensaryExtractor())
    result = run(disp_filter.get_state_names(), wm_scraper.produce,
                 wm_scraper.consume)

    return json.dumps(result)
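# The run() helper shared by the scrape() variants above is not shown in
# these examples. A minimal sketch of the producer/consumer contract they
# appear to rely on (signature and behavior are assumptions, not the real
# implementation):
def run(state_names, produce, consume):
    results = []
    for state in state_names:
        for item in produce(state):        # producer yields work items per state
            results.append(consume(item))  # consumer turns each item into a result
    return results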
Example #8
    def __init__(self, client, email, password):
        self._session = client
        self._client = HttpClient(client)
        self._email = email
        self._password = password
        self._finish = False
        self._commenttime = '1970'
        self._imgurl = asyncio.Queue()
Example #9
    def check_index(self):
        """Check the table of contents and update self.check."""
        _urls = (self.home + "/novelview/infotop/ncode/" + self.ncode,
                 self.home + '/' + self.ncode)
        try:
            print('check index:')
            HClient = HttpClient(_urls)
            _pages = HClient.pages
        except Exception:
            print('Error: failed to get the page.')
            print('please try again.')
            return -1
        if None in _pages:
            print('failed to get the page.')
            print('please try again.')
            return -1

        _info = get_info(_pages[0])
        _index = get_index(_pages[1])

        mysql = My_sqlconnecter()
        _info_sql = mysql.read_info(self.ncode)
        _index_sql = mysql.read_index(self.ncode)
        mysql.disconnect()

        # Check the database: is this a new book?
        if (_info_sql is None) or (_index_sql is None):
            print("New Book:", _info[0]['ncode'], '\n\t', _info[0]['Title'])
            self.info = _info
            self.index = _index
            self.check = [False] * len(_index)
            return 2
        # Compare the update time: has the download already completed?
        if (_info_sql.UpDate == _info[0]['update']) and (len(_index)
                                                         == len(_index_sql)):
            if all(map(lambda x: x.Check, _index_sql)):
                print('Book:', self.ncode, ' has been downloaded completely.')
                self.info = _info_sql
                self.index = _index_sql
                self.check = list(map(lambda x: x.Check, _index_sql))
                return 0
        # Update the check flags entry by entry.
        self.check = []
        for i, v in enumerate(_index):
            if i < len(_index_sql):
                if len(v) == 5:
                    self.check.append(_index_sql[i].Check)
                elif (len(v) == 6) and (v['Spandate'] == _index_sql[i].Spandate):
                    self.check.append(_index_sql[i].Check)
                else:
                    self.check.append(False)
            else:
                self.check.append(False)

        self.info = _info
        self.index = _index
        return 1
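# A hypothetical caller, based on the return codes above (2 = new book,
# 1 = index updated, 0 = already fully downloaded, -1 = fetch failure);
# `downloader` stands in for an instance of this class:
status = downloader.check_index()
if status in (1, 2):
    downloader.run()  # download the chapters still marked unchecked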
Example #10
    def setUp(self):
        """Test fixtures."""

        self._http_client = HttpClient()

        self._httpcore_mock = mock()
        self._response_mock = mock()

        when(self._http_client).get_http_core().thenReturn(self._httpcore_mock)
Example #11
    def __init__(self, apikey, name, commands, workers=5):
        self.apikey = apikey
        self.name = name
        self.commands = commands
        self.lastUpdate = 0
        self.updateTimeout = 30

        self.workerPool = concurrent.futures.ThreadPoolExecutor(
            max_workers=workers)
        self.workerSemaphore = Semaphore(workers)

        self.httpClient = HttpClient()
        self.httpClient.userAgent = 'Telegram Bot (@%s)' % (name)
Example #12
def update():
    # Start logging
    init_logging()
    # Create dbmanager and httpclient
    db_client = DBManager(db_file, db_logger_name)
    http_client = HttpClient(host, url_paths, http_logger_name)
    
    # Create db if not existing
    if db_client.is_empty():
        db_client.create_db()

    #print(db_client.fetch_products_from_db())
    
    for product_group, url_path in url_paths.items():
        html_file = http_client.fetch_html_file(host, url_path)
        json_file = http_client.parse_html_file(html_file)
        
        db_client.add_products(product_group, json_file)
Example #13
    def setUp(self):
        logging.config.fileConfig(os.path.join(os.getcwd(), "logging.conf"))
        self.file_path = os.path.abspath(os.path.dirname(__file__))
        my_headers = [('User-Agent', 'Mozilla/4.0'), ('X-From', 'UA')]
        my_user_pass = ('kiril', 'supersecret')

        self.client = HttpClient(
            connect_timeout=5,  # socket timeout on connect
            transfer_timeout=3,  # socket timeout on send/recv
            max_redirects=10,
            set_referer=True,
            keep_alive=3,  # Keep-alive socket up to N requests
            headers=my_headers,  # send custom headers
            http_version="1.1",  # use custom http/version
            auth=my_user_pass,  # http auth
            retry=5,
            retry_delay=5)  # wait between retries

        #
        # Enter the path to the cookies file in the settings file.
        #
        dictionary = self.client.configure_from_file(
            os.path.join(self.file_path, "http_client_setting.ini"))
        self.client.logger = logging.getLogger("httpclient_test")
        os.chdir("../")
        self.children = multiprocessing.Value('i', 0)

        self.p = multiprocessing.Process(target=self.process,
                                         args=(self.children, ),
                                         daemon=False)
        self.p.start()
        self.pid = self.p.pid
        print("slave >> " + str(self.pid))
        print("head  >> " + str(os.getpid()))
        print("child >> " + str(self.children.value))
        self.config = configparser.ConfigParser()
        self.config.read(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))
        print(os.path.join(self.file_path, "..", "setting", "setting.ini"))
        self.ip = self.config['ip_port_setting']["ip"]
        self.port = self.config['ip_port_setting']["port"]
        self.domen = self.ip + ":" + self.port
        self.data_base = DataBese(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))
Example #14
    def test_normal_functionality(self):
        url = "http://www.seas.upenn.edu/~yunkai/"
        new_job = Job(url, {})
        h = HttpClient()
        resp, content = h.request(url)
        param = PARAM
        SaveAndStatistics(
            new_job,
            content,
            param,
            response_header=resp,
            url_cache={"last-modified": "Wed, 22 Apr 2015 20:13:17 GMT"})
        db = NoSQL(
            param["database"]["engine"], {
                "host": param["database"]["host"],
                "port": param["database"]["port"],
                "db": param["database"]["db"]["content"]
            })
        self.assertEqual(db.dictget(new_job.identifier, "content"), content)
        self.assertEqual(db.dictget(new_job.identifier, "url"), new_job.url)
Example #15
    def __init__(self, dir):
        dir = dir if path.isabs(dir) else path.join(os.getcwd(), dir)
        self.path = PathInfo(dir)
        self.url = UrlInfo()
        self.str = StringInfo()
        self.config = parse_config(self.path.config_file)
        self.client = HttpClient(self)
        self.incomplete_cache = (datetime.now().timestamp()
                                 < self.config.counter.end.timestamp())

        comment_dirname = '%s_%s_%s' % (
            self.config.counter.start.strftime('%y%m%d-%H%M%S'),
            self.config.counter.end.strftime('%y%m%d-%H%M%S'),
            self.config.counter.encoding,
        )
        self.path.comment_dir = path.join(self.path.temp_dir, self.str.version,
                                          comment_dirname)

        if not path.isdir(self.path.comment_dir):
            os.makedirs(self.path.comment_dir)
        if not path.isdir(self.path.log_dir):
            os.makedirs(self.path.log_dir)
        basicConfig(
            level=self.config.logging.level,
            format=self.config.logging.format,
            handlers=[
                FileHandler(
                    path.join(
                        self.path.log_dir,
                        datetime.now(
                            tz=TZ).strftime('nicocc-%y%m%d-%H%M%S.log'),
                    ),
                    encoding='utf-8',
                )
            ],
        )
Example #16
# -*- coding: utf-8 -*-
from httpclient import HttpClient

client = HttpClient()

resp = client.get('http://127.0.0.1/?c=4', query={'a': 1, 'b': 2})
print(resp.status_code)
print(resp.text)

resp = client.get('http://127.0.0.1/?c=4', query={'a': 1, 'b': 2})
print(resp.status_code)
print(resp.text)
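# The calls above pass both an embedded query string (c=4) and a query
# dict. How this HttpClient merges the two is not shown; here is a
# standard-library sketch of the usual merging behavior (an assumption,
# not this client's documented semantics):
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

def merge_query(url, query):
    parts = urlparse(url)
    params = dict(parse_qsl(parts.query))  # parameters already in the URL
    params.update(query)                   # the explicit query dict wins
    return urlunparse(parts._replace(query=urlencode(params)))

print(merge_query('http://127.0.0.1/?c=4', {'a': 1, 'b': 2}))
# -> http://127.0.0.1/?c=4&a=1&b=2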
Example #17
    def __init__(self, dispensary_filter, http_client):
        self._http_client = http_client
        self._weedmaps_disp_extractor = WeedMapsDespensaryExtractor(
            dispensary_filter, WeedMapsDetailsExtractor(HttpClient()))

        self._url = "https://api-g.weedmaps.com/wm/v2/location?include%5B%5D=regions.listings&region_slug={0}&page_size=150&page={1}"
Example #18
# Callback method for reading and sending the measured values. Exceptions
# should be caught here, since an uncaught exception would end the whole
# process.
def read_temp_sensor():
    logger.debug("Send TEMP.")
    values = {'temperature': 23.1, 'timestamp': time.time()}
    try:
        client.send_data(values)
    except Exception as e:
        logger.error("Failed to send temperature: %s", e)


# Load the configuration settings from config.json. Exceptions should be
# caught here as well, since an uncaught exception would end the whole
# process.
config = {}
with open('config.json') as json_file:
    config = json.load(json_file)

client = HttpClient(config['Secret'], logger)
cron = Cronjob(logger)

# Register the individual methods with the Cronjob class.
cron.append_work(id="HEART", action=send_heartbeat, interval=10)
cron.append_work(id="TEMP", action=read_temp_sensor, interval=2)

try:
    cron.start()
except KeyboardInterrupt:
    print("Cancelled")
except Exception:
    logging.exception('Program aborted')
Example #19
pinBtn = machine.Pin(39, machine.Pin.IN)

pinTrig.value(1)

wlan = network.WLAN(network.STA_IF)
wlan.active(True)
if not wlan.isconnected():
    print('Connecting to network...')
    wlan.connect('***', '**')
    while not wlan.isconnected():
        time.sleep_ms(50)
print('Connected to network (', wlan.ifconfig(), ')')

print('Starting QR Code reading')
h = HttpClient()
while True:
    time.sleep_ms(200)

    if pinBtn.value() == 0:
        print('Button Pressed')
        pinTrig.value(0)
    else:
        pinTrig.value(1)

    if uart_qr.any() > 0:
        query_url = uart_qr.readline().decode('ascii')
        query_url = query_url.replace('\r', '').replace('\n', '')
        url = 'http://192.168.1.250:6005/office' + query_url
        resp = h.get(url)
Example #20
    def __init__(self, client):
        self._client = HttpClient(client)
        self._albumq = asyncio.Queue()
        self._photoq = asyncio.Queue()
        self._count = 0
        self._flag = False
Example #21
def start():
    use_user_data_flag = True
    try:
        user_data_url = 'http://169.254.169.254/latest/user-data'
        response = HttpClient(method='GET', url=user_data_url).process
        status_code = response.status_code
        if status_code != 200:
            use_user_data_flag = False
        else:
            conf = response.json()
    except Exception:
        use_user_data_flag = False

    if not use_user_data_flag:
        files = glob.glob("/root/conf*.json")
        global i
        while True:
            try:
                conf = json.load(open(files[0], 'r'))
            except (IOError, ValueError):
                time.sleep(5)
                continue
            else:
                break
    if 'udp_worm' in conf:
        conf1 = conf['udp_worm']
        local_network = conf1['local_network']
        node_number = conf1['node_number']
        KVM_gateway_address = conf1['KVM_gateway_address']
        scan_ip_from = conf1['scan_ip_from']
        scan_ip_to = conf1['scan_ip_to']
        scan_interval = conf1['scan_interval']
        scan_port = conf1['scan_port']
        worm_behavior = conf1['worm_behavior']
        if i == 0:
            os.system('route del -net default')
            os.system('route add -net default gw %s' % KVM_gateway_address)
        if worm_behavior:
            cmd = 'nohup ./waf --run "scratch/udpcli-socket --scan_ip_from=%s --scan_ip_to=%s --scan_port=%s --scan_interval=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s" > /root/infection_data.out &' % (
                scan_ip_from, scan_ip_to, scan_port, scan_interval,
                local_network, node_number, KVM_gateway_address)
        else:
            cmd = 'nohup ./waf --run "scratch/udpdst-socket --scan_ip_from=%s --scan_ip_to=%s --scan_port=%s --scan_interval=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s" > /root/infection_data.out &' % (
                scan_ip_from, scan_ip_to, scan_port, scan_interval,
                local_network, node_number, KVM_gateway_address)

    elif 'tcp_background' in conf:
        conf1 = conf['tcp_background']
        local_network = conf1['local_network']
        node_number = conf1['node_number']
        KVM_gateway_address = conf1['KVM_gateway_address']
        service_net_from = conf1['service_net_from']
        service_net_to = conf1['service_net_to']
        service_port = conf1['service_port']
        probability_client = conf1['probability_client']
        cmd = 'nohup ./waf --run "scratch/tcp --service_net_from=%s --service_net_to=%s --service_port=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s --probability_client=%s" > /root/tcp.out &' % (
            service_net_from, service_net_to, service_port, local_network,
            node_number, KVM_gateway_address, probability_client)
    else:
        print('Please set right mode!')
        exit(1)
    os.chdir('/root/ns-allinone-3.21/ns-3.21/')
    os.system(cmd)
Example #22
    def test_get_http_core(self):
        """Test of the 'get_http_core' method."""

        http_client = HttpClient()

        self.assertIsInstance(http_client.get_http_core(), HttpCore)
Example #23
#!/usr/bin/python

from httpclient import HttpClient

country = 'DE'
currency = 'EUR'
language = 'de'

http = HttpClient()
http.GET('http://pricelist.skype.com/destinations/1.0/'+country+'/'+currency+'/'+language+'/xml/')

print(str(http.Page))
Example #24
    def __init__(self, url):
        self.__httpClient = HttpClient(url)