Example #1
def fetch(opath=None, verbose=False):
    """
     retrieves oui.txt from IEEE and writes to data file
     :param opath: fullpath of oui.txt
     :param verbose: write updates to stdout
    """
    # determine if data path is legit
    if opath is None: opath = OUIPATH
    if not os.path.isdir(os.path.dirname(opath)):
        print("Path to data is incorrect {0}".format(opath))
        sys.exit(1)

    # fetch oui file from ieee
    fout = None

    # set up url request
    req = url_request(OUIURL)
    req.add_header('User-Agent',
                   "PyRIC +https://github.com/wraith-wireless/PyRIC/")
    try:
        # retrieve the oui file and parse out generated date
        if verbose: print('Fetching ', OUIURL)
        res = url_open(req)
        if verbose: print("Parsing OUI file")

        if verbose: print("Opening data file {0} for writing".format(opath))
        fout = open(opath, 'w')
        gen = datetime.datetime.utcnow().isoformat()  # use current time as the first line
        fout.write(gen + '\n')

        # pull out ouis
        t = time.time()
        cnt = 0
        for l in res:
            if isinstance(l, bytes):
                l = l.decode('utf-8', errors='replace')  # urlopen yields bytes on Python 3
            if '(hex)' in l:
                # extract oui and manufacturer
                oui, manuf = l.split('(hex)')
                oui = oui.strip().replace('-', ':')
                manuf = manuf.strip()
                if manuf.startswith("IEEE REGISTRATION AUTHORITY"):
                    manuf = "IEEE REGISTRATION AUTHORITY"

                # write to file & update count
                fout.write('{0}\t{1}\n'.format(oui, manuf))
                cnt += 1
                if verbose: print("{0}:\t{1}\t{2}".format(cnt, oui, manuf))
        print("Wrote {0} OUIs in {1:.3} secs".format(cnt, time.time() - t))
    except url_error as e:
        print("Error fetching oui file: {0}".format(e))
    except IOError as e:
        print("Error opening output file {0}".format(e))
    except Exception as e:
        print("Error parsing oui file: {0}".format(e))
    finally:
        if fout: fout.close()
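
These examples use url_request, url_open, and url_error (plus constants such as OUIURL and OUIPATH) that are defined elsewhere in their projects rather than in the standard library. A minimal sketch of the compatibility aliasing they appear to assume, written against urllib on Python 3 with a urllib2 fallback, is:

# Hypothetical compatibility shim; the real projects define their own equivalents.
try:
    # Python 3: the names live in urllib.request / urllib.error
    from urllib.request import Request as url_request, urlopen as url_open
    from urllib.error import URLError as url_error
except ImportError:
    # Python 2 fallback
    from urllib2 import Request as url_request, urlopen as url_open, URLError as url_error
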
Example #2
def fetch(url, hash):
    filename = os.path.basename(url)
    # download the file only if it is not already cached locally
    if not os.path.exists(filename):
        req = url_request(url)
        res = url_open(req)
        dat = res.read()
        with open(filename, 'wb+') as f:
            f.write(dat)
    # re-read as bytes so the digest is computed over the raw file contents
    with open(filename, 'rb') as f:
        dat = f.read()
    h = hash_algorithm(dat)
    assert h.hexdigest() == hash
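
Here hash_algorithm is assumed to be an alias for a hashlib constructor (for example hashlib.sha256); it is not defined in the snippet itself. A hypothetical invocation, with placeholder URL and digest values, might look like:

import hashlib

hash_algorithm = hashlib.sha256  # assumption: the constructor the snippet aliases

# Placeholder values: download once, then verify the file against a known digest.
fetch('https://example.com/archive.tar.gz', '<expected sha256 hex digest>')
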
Example #3
    def distance_geographic_between_store_and_user(self, start_id_store_id_with_first_category,
                                                   end_id_store_id_with_first_category):
        data_query = self.data_query
        user_id = self.user_id
        first_category = self.first_category

        get_user_latitude = data_query.user_latitude_Coordinates(user_id=user_id)
        get_user_latitude_str = str(get_user_latitude[0])

        get_user_longtitude = data_query.user_longtitude_Coordinates(user_id=user_id)
        get_user_longtitude_str = str(get_user_longtitude[0])

        # this query extracts the coordinates of stores that carry this first category type
        get_store_coordinates = data_query.get_store_coordinates_for_this_first_category(first_category)
        # get_store_coordinates = shopping.objects.filter(
        #     first_category=num).values_list('gid', 'long', 'lat').order_by('gid')

        headers = {
            'Accept': 'application/json; charset=utf-8'
        }

        # build the locations parameter: user first, then one lon,lat pair per store
        # ('%7C' is the URL-encoded '|' separator between points)
        str_url = ''
        user_coordinates = get_user_longtitude_str + ',' + get_user_latitude_str
        for i in range(start_id_store_id_with_first_category, end_id_store_id_with_first_category):
            long = str(get_store_coordinates[i][1])
            comma = ','
            lat = str(get_store_coordinates[i][2])
            append_sign_each_point = '%7C'
            str_url += long + comma + lat + append_sign_each_point
        request_distance = url_request(
            'https://api.openrouteservice.org/matrix?api_key=5b3ce3597851110001cf624855704328a35746098c6f6f287a22cd66'
            '&profile=driving-car&locations=' + user_coordinates + '%7C' + str_url + '&metrics=distance',
            headers=headers)
        response_body = json.loads(urlopen(request_distance).read())
        # the first row of the matrix holds distances from the user; drop the user-to-user entry
        distance_result_with_0 = list(response_body['distances'][0])
        distance_result_remove_dist_between_user_and_user = distance_result_with_0[1:]
        return distance_result_remove_dist_between_user_and_user
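
A hypothetical call, assuming `recommender` is an instance of the surrounding class with data_query, user_id, and first_category already populated elsewhere in the project; the 0..10 range asks for driving distances from the user to the first ten matching stores:

# Hypothetical usage of the method above; `recommender` and the index range are placeholders.
distances = recommender.distance_geographic_between_store_and_user(0, 10)
for store_index, dist in enumerate(distances):
    # the code above reads response_body['distances'], which openrouteservice
    # reports in metres by default (units are an assumption here)
    print("store #{0}: {1} m away".format(store_index, dist))
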
Example #4
    def downloader(self):
        '''the download thread'''
        while self.tiles_pending() > 0:
            time.sleep(self.tile_delay)

            keys = sorted(self._download_pending.keys())

            # work out which one to download next, choosing by request_time
            tile_info = self._download_pending[keys[0]]
            for key in keys:
                if self._download_pending[key].request_time > tile_info.request_time:
                    tile_info = self._download_pending[key]

            url = tile_info.url(self.service)
            path = self.tile_to_path(tile_info)
            key = tile_info.key()

            try:
                if self.debug:
                    print("Downloading %s [%u left]" % (url, len(keys)))
                req = url_request(url)
                if url.find('google') != -1:
                    req.add_header('Referer', 'https://maps.google.com/')
                resp = url_open(req)
                headers = resp.info()
            except url_error as e:
                #print('Error loading %s' % url)
                if key not in self._tile_cache:
                    self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                if self.debug:
                    print("Failed %s: %s" % (url, str(e)))
                continue
            if 'content-type' not in headers or headers['content-type'].find('image') == -1:
                if key not in self._tile_cache:
                    self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                if self.debug:
                    print("non-image response %s" % url)
                continue
            else:
                img = resp.read()

            # see if it's a blank/unavailable tile
            md5 = hashlib.md5(img).hexdigest()
            if md5 in BLANK_TILES:
                if self.debug:
                    print("blank tile %s" % url)
                # mark the tile unavailable so it is not re-requested
                if key not in self._tile_cache:
                    self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                continue

            mp_util.mkdir_p(os.path.dirname(path))
            h = open(path + '.tmp', 'wb')
            h.write(img)
            h.close()
            try:
                os.unlink(path)
            except Exception:
                pass
            os.rename(path + '.tmp', path)
            self._download_pending.pop(key)
        self._download_thread = None
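
The BLANK_TILES check compares the MD5 of the downloaded bytes against a set of digests for known "no imagery" tiles; that set is defined elsewhere in the project. A minimal sketch of how such a set could be built from sample tiles on disk (the file names are placeholders):

import hashlib

def md5_of_file(path):
    """Hex MD5 of a file's raw bytes, matching the downloader's hashlib.md5(img).hexdigest() check."""
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()

# Placeholder file names: hash a few known blank / 'no imagery' tiles once at startup.
BLANK_TILES = set(md5_of_file(p) for p in ['blank_sea.jpg', 'no_imagery.png'])
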