def continuity(url):
    from util import md5
    format = '%25s: %s'

    # first fetch the file with the normal http handler
    opener = urllib_request.build_opener()
    urllib_request.install_opener(opener)
    fo = urllib_request.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5.new(foo)
    print(format % ('normal urllib', m.hexdigest()))

    # now install the keepalive handler and try again
    opener = urllib_request.build_opener(HTTPHandler())
    urllib_request.install_opener(opener)

    fo = urllib_request.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5.new(foo)
    print(format % ('keepalive read', m.hexdigest()))

    fo = urllib_request.urlopen(url)
    # accumulate bytes, not str: readline() returns bytes on Python 3
    foo = b''
    while True:
        f = fo.readline()
        if f:
            foo = foo + f
        else:
            break
    fo.close()
    m = md5.new(foo)
    print(format % ('keepalive readline', m.hexdigest()))

def onSettingsChanged(self):
    try:
        init()
        urllib_request.urlopen("%s/reload" % ELEMENTUMD_HOST)
        urllib_request.urlopen("%s/cmd/clear_page_cache" % ELEMENTUMD_HOST)
    except:
        pass

def onNotification(self, sender, method, data):
    try:
        urllib_request.urlopen(
            "%s/notification?sender=%s&method=%s&data=%s" %
            (ELEMENTUMD_HOST, sender, method, base64.b64encode(data)))
    except:
        pass

def onSettingsChanged(self):
    log.debug('{0}.onSettingsChanged()'.format(self))
    try:
        init()
        urllib_request.urlopen("%s/reload" % ELEMENTUMD_HOST)
        urllib_request.urlopen("%s/cmd/clear_page_cache" % ELEMENTUMD_HOST)
    except:
        pass

def test_404_unescaped(self):
    # autoescape can be over-ridden
    try:
        urlopen(server.base_url + '/error/404-template-<script>')
    except HTTPError as err:
        eq_(err.code, NOT_FOUND)
        text = err.read().decode('utf-8')
        ok_(' "/error/404-template-<script>' in text)
        ok_('\n' not in text)  # since whitespace=oneline
    else:
        ok_(False, '/error/404-template-<script> should raise a 404')

def register(search, search_movie, search_episode, search_season=None):
    try:
        payload = json.loads(base64.b64decode(sys.argv[1]))
    except:
        notify(getElementumLocalizedString(30102), time=1000)
        return

    method = {
        "search": search,
        "search_movie": search_movie,
        "search_season": search_season,
        "search_episode": search_episode,
    }.get(payload["method"]) or (lambda *a, **kw: [])

    results = ()
    try:
        try:
            objects = method(payload["search_object"])
            if objects is not None:
                results = tuple(objects)
        except Exception as e:
            import traceback
            # map() is lazy on Python 3 and would log nothing; iterate explicitly
            for line in traceback.format_exc().split("\n"):
                log.error(line)
            notify(py2_encode(
                "%s: %s" % (getElementumLocalizedString(30224), repr(e)), 'utf-8'))
            try:
                urllib_request.urlopen("%s/provider/%s/failure" % (ELEMENTUMD_HOST, ADDON_ID))
            except:
                pass
    finally:
        try:
            req_data = json.dumps(results)
            if not PY2 and isinstance(req_data, str):
                req_data = req_data.encode()
            req = urllib_request.Request(payload["callback_url"], data=req_data)
            with closing(urllib_request.urlopen(req)) as response:
                log.debug("callback returned: %d" % response.getcode())
        except Exception as e:
            import traceback
            for line in traceback.format_exc().split("\n"):
                log.error(line)
            notify(py2_encode(
                "%s: %s" % (getElementumLocalizedString(30224), repr(e)), 'utf-8'))
            try:
                urllib_request.urlopen("%s/provider/%s/failure" % (ELEMENTUMD_HOST, ADDON_ID))
            except:
                pass

def create_driver():
    global driver
    driver = None
    try:
        urllib_request.urlopen("http://127.0.0.1:9222/json")
    except urllib_error.URLError:
        print("Unable to start WebDriver, Chrome is not responding.")
        return
    chrome_options = Options()
    chrome_options.experimental_options["debuggerAddress"] = "127.0.0.1:9222"
    driver = webdriver.Chrome(local.CHROME_DRIVER_PATH,
                              chrome_options=chrome_options)

def _fetch(self, url, form_data={}, headers={}, compression=True, jdata=False):
    """
    Perform an HTTP GET or POST request.

    Args:
        url (str): The URL to GET or POST.

        form_data (dict): A dictionary of form data to POST. If empty, the
        request will be a GET; if it contains form data, it will be a POST.

    Kwargs:
        headers (dict): A dictionary describing any headers you would like
        to add to the request. (eg. ``{'X-Test': 'testing'}``)

        compression (bool): If ``True`` (default), try to use gzip
        compression.

        jdata (bool): If ``True``, POST ``form_data`` as a JSON body with a
        ``Content-Type: application/json`` header.

    Returns:
        An :class:`HttpResponse` object containing headers and other
        meta-information about the page and the page content.
    """
    req = urllib_request.Request(url)
    if form_data:
        if jdata:
            form_data = json.dumps(form_data)
        elif not isinstance(form_data, six.string_types):
            form_data = urllib_parse.urlencode(form_data, True)
        form_data = form_data.encode('utf-8') if six.PY3 else form_data
        req = urllib_request.Request(url, form_data)
    req.add_header('User-Agent', self._user_agent)
    for key in headers:
        req.add_header(key, headers[key])
    if compression:
        req.add_header('Accept-Encoding', 'gzip')
    if jdata:
        req.add_header('Content-Type', 'application/json')
    host = req.host if six.PY3 else req.get_host()
    req.add_unredirected_header('Host', host)
    try:
        response = urllib_request.urlopen(req, timeout=15)
    except urllib_error.HTTPError as e:
        if e.code == 403:
            # retry once with a relaxed TLS level on HTTP 403
            self._update_opener(drop_tls_level=True)
            response = urllib_request.urlopen(req, timeout=15)
        else:
            raise
    return HttpResponse(response)

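# A minimal usage sketch for _fetch above, assuming a hypothetical Net class
# that provides the self._user_agent and self._update_opener members the
# method relies on:
#
#   net = Net()
#   page = net._fetch('https://example.com/')                    # GET
#   page = net._fetch('https://example.com/login',
#                     form_data={'user': 'me'})                  # form POST
#   page = net._fetch('https://example.com/api',
#                     form_data={'q': 1}, jdata=True)            # JSON POST
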
def test_404_escaped(self):
    # Check that templates are HTML escaped by default
    try:
        # Requests converts <script> into %3Cscript%3E before sending URL.
        # So use urlopen instead of requests.get
        urlopen(server.base_url + '/error/404-escaped-<script>')
    except HTTPError as err:
        eq_(err.code, NOT_FOUND)
        text = err.read().decode('utf-8')
        ok_(' "/error/404-escaped-<script>"' in text)
        ok_('\n' in text)  # error-404.json template has newlines
    else:
        ok_(False, '/error/404-escaped-<script> should raise a 404')

def onNotification(self, sender, method, data):
    log.debug('{0}.onNotification({1}, {2}, {3})'.format(
        self, sender, method, py2_decode(data)))
    if PY2:
        data_base64 = base64.b64encode(data)
    else:
        data_base64 = base64.b64encode(data.encode("utf-8"))
    try:
        urllib_request.urlopen(
            "%s/notification?sender=%s&method=%s&data=%s" %
            (ELEMENTUMD_HOST, sender, method, data_base64))
    except:
        pass

def add_uri(self, uri, **kwargs):
    """
    .. WARNING::
        Deprecated, please use add_torrent.
    """
    if uri is None:
        raise ValueError('add_uri requires a URI.')
    # there has been some problem with T's built in torrent fetcher,
    # use a python one instead
    parsed_uri = urlparse(uri)
    torrent_data = None
    if parsed_uri.scheme in ['ftp', 'ftps', 'http', 'https']:
        torrent_file = urlopen(uri)
        torrent_data = torrent_file.read()
        torrent_data = base64.b64encode(torrent_data).decode('utf-8')
    if parsed_uri.scheme in ['file']:
        filepath = uri
        # uri decoded different on linux / windows ?
        if len(parsed_uri.path) > 0:
            filepath = parsed_uri.path
        elif len(parsed_uri.netloc) > 0:
            filepath = parsed_uri.netloc
        torrent_file = open(filepath, 'rb')
        torrent_data = torrent_file.read()
        torrent_data = base64.b64encode(torrent_data).decode('utf-8')
    warnings.warn('add_uri has been deprecated, please use add_torrent instead.',
                  DeprecationWarning)
    if torrent_data:
        return self.add(torrent_data, **kwargs)
    else:
        return self.add(None, filename=uri, **kwargs)

def check_client_registration(self):
    if self.client_registered and self.discovery_complete:
        if not self.server_list:
            LOG.debug('Server list is empty. Unable to check')
            return False
        try:
            media_server = self.server_list[0]['server']
            media_port = self.server_list[0]['port']
            LOG.debug('Checking server [%s] on port [%s]' % (media_server, media_port))
            file_handle = urlopen('http://%s:%s/clients' % (media_server, media_port))
            client_result = file_handle.read()
            if self.client_id in client_result:
                LOG.debug('Client registration successful')
                LOG.debug('Client data is: %s' % client_result)
                return True
            LOG.debug('Client registration not found')
            LOG.debug('Client data is: %s' % client_result)
        except:  # pylint: disable=bare-except
            LOG.debug('Unable to check status')
    return False

def get_media_url(self, host, media_id):
    web_url = self.get_url(host, media_id)
    logger.log_debug('HugeFiles: get_link: %s' % (web_url))
    html = self.net.http_GET(web_url).content

    r = re.findall('File Not Found', html)
    if r:
        raise ResolverError('File Not Found or removed')

    # Grab data values
    data = helpers.get_hidden(html)
    data.update(captcha_lib.do_captcha(html))
    logger.log_debug('HugeFiles - Requesting POST URL: %s with data: %s' % (web_url, data))
    html = self.net.http_POST(web_url, data).content

    # Re-grab data values
    data = helpers.get_hidden(html)
    data['referer'] = web_url
    headers = {'User-Agent': common.EDGE_USER_AGENT}
    logger.log_debug('HugeFiles - Requesting POST URL: %s with data: %s' % (web_url, data))

    request = urllib_request.Request(web_url, data=urllib_parse.urlencode(data),
                                     headers=headers)
    try:
        stream_url = urllib_request.urlopen(request).geturl()
    except:
        return
    logger.log_debug('Hugefiles stream Found: %s' % stream_url)
    return stream_url

def request(url, params={}, headers={}, data=None, method=None):
    if params:
        url = "".join([url, "?", urllib_parse.urlencode(params)])
    req = urllib_request.Request(url)
    if method:
        req.get_method = lambda: method
    req.add_header("User-Agent", USER_AGENT)
    req.add_header("Accept-Encoding", "gzip")
    for k, v in headers.items():
        req.add_header(k, v)
    if data:
        req.data = data
    try:
        with closing(urllib_request.urlopen(req)) as response:
            data = response.read()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                data = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(data)
            response.data = data
            response.json = lambda: parse_json(data)
            response.xml = lambda: parse_xml(data)
            return response
    except Exception as e:
        import traceback
        # map() is lazy on Python 3 and would log nothing; iterate explicitly
        for line in traceback.format_exc().split("\n"):
            log.error(line)
        notify("%s: %s" % (getLocalizedString(30224), repr(e).encode('utf-8')))
        return None, None

def download_file(self, episode):
    """ Download and write the file to disc """
    # open the url
    try:
        handle = urlopen(episode.url)
    except URLError as err:
        self.send_error(episode, episode.url, err)
        return
    # check http status for success (2xx)
    http_status = handle.getcode()
    if (200 > http_status) or (299 < http_status):
        self.send_error(episode, episode.url, "HTTP STATUS: %s" % http_status)
        return
    meta = handle.info()
    episode.size = int(meta.getheaders("Content-Length")[0])
    total = 0
    try:
        with open(episode.file_name, "wb") as podcast_file:
            while True:
                chunk = handle.read(1024)
                if not chunk:
                    break
                total = total + len(chunk)
                podcast_file.write(chunk)
                self.send_status(total, episode)
    except IOError as err:
        self.send_error(episode, episode.file_name, err)

def stock_price(code):
    url = 'https://financialmodelingprep.com/api/company/price/' + code
    req = Request(url=url)
    with closing(urlopen(req)) as f:
        body = f.read().decode('utf-8')
        body = body.replace('<pre>', '').replace('</pre>', '')
        data = json.loads(body)
    return data

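# Hypothetical call of stock_price above; the <pre> stripping suggests the
# endpoint returns JSON wrapped in an HTML pre tag, so the parsed dict is
# whatever that API serves for the ticker:
#
#   quote = stock_price('AAPL')
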
def openURL(url):
    xbmc.log("Opening %s" % url)
    req = urllib_request.Request(url)
    req.add_header('Referer', 'https://www.empflix.com/')
    response = urllib_request.urlopen(req)
    link = response.read().decode('utf-8')
    response.close()
    return link

def _json(url):
    with closing(urllib_request.urlopen(url)) as response:
        if response.code == 300:
            raise PlayerException(response.info().getheader('Location'))
        elif response.code == 301:
            raise RedirectException(response.info().getheader('Location'))
        elif 302 <= response.code <= 307:
            pause_current_playing_file_avoiding_doubling_request()
            _infoLabels = InfoLabels(getInfoLabels())
            if 'mediatype' not in _infoLabels or not _infoLabels['mediatype']:
                _infoLabels['mediatype'] = 'episode'
                _infoLabels['dbtype'] = 'episode'
            if PLATFORM['kodi'] >= 19:
                item = xbmcgui.ListItem(
                    path=response.geturl(),
                    label=_infoLabels["label"],
                    label2=_infoLabels["label2"]
                )
            else:
                item = xbmcgui.ListItem(
                    path=response.geturl(),
                    label=_infoLabels["label"],
                    label2=_infoLabels["label2"],
                    thumbnailImage=_infoLabels["thumbnail"]
                )
            item.setArt({
                "thumb": _infoLabels["artthumb"],
                "poster": _infoLabels["artposter"],
                "tvshowposter": _infoLabels["arttvshowposter"],
                "banner": _infoLabels["artbanner"],
                "fanart": _infoLabels["artfanart"],
                "clearart": _infoLabels["artclearart"],
                "clearlogo": _infoLabels["artclearlogo"],
                "landscape": _infoLabels["artlandscape"],
                "icon": _infoLabels["articon"]
            })
            if 'castmembers' in _infoLabels:
                if PLATFORM['kodi'] >= 17:
                    item.setCast(_infoLabels['castmembers'])
                del _infoLabels['castmembers']
            _infoLabels = normalize_labels(_infoLabels)
            item.setInfo(type='Video', infoLabels=_infoLabels)
            xbmcplugin.setResolvedUrl(HANDLE, True, item)
            return
        payload = to_unicode(response.read())
        try:
            if payload:
                return json.loads(payload)
        except:
            raise Exception(payload)

def read(url):
    _log("read " + url)
    f = urllib_request.urlopen(url)
    data = f.read()
    f.close()
    if not isinstance(data, str):
        data = data.decode("utf-8", "strict")
    return data

def onNotification(self, sender, method, data):
    if method and "elementum_restart" in method:
        self.restart()
        self.reboot(True)
        return
    try:
        if PY2:
            data_base64 = base64.b64encode(data)
        else:
            data_base64 = base64.b64encode(data.encode("utf-8"))
        urllib_request.urlopen(
            "%s/notification?sender=%s&method=%s&data=%s" %
            (ELEMENTUMD_HOST, sender, method, data_base64))
    except:
        pass

def get_redirect_url(url, headers={}):
    class NoRedirection(urllib_request.HTTPErrorProcessor):
        # pass 3xx responses through instead of dispatching them to the
        # redirect handler (note: as written this only intercepts plain-HTTP
        # responses; HTTPS responses still go through the inherited handler)
        def http_response(self, request, response):
            return response

    opener = urllib_request.build_opener(NoRedirection, urllib_request.HTTPHandler)
    urllib_request.install_opener(opener)
    request = urllib_request.Request(url, headers=headers)
    response = urllib_request.urlopen(request)
    return response.geturl()

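# Usage sketch for get_redirect_url above. When a 3xx response is passed
# through un-followed, the target address is in its Location header;
# geturl() only reflects the final URL when a redirect was actually followed:
#
#   final_url = get_redirect_url('http://example.com/short-link',
#                                headers={'User-Agent': 'Mozilla/5.0'})
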
def _get_file(self, data):
    if self._is_uri(data):
        if data.startswith('file://'):
            return open(data[7:], 'rb')
        elif data.startswith('http://'):
            return urllib2.urlopen(data)
    elif all(hasattr(data, a) for a in ('read', 'seek', 'close')):
        return data
    else:
        return BytesIO(bytes(data, encoding='utf-8'))

def validate(ticket):
    """
    Will attempt to validate the ticket. If validation fails, then False
    is returned. If validation is successful, then True is returned
    and the validated username is saved in the session under the
    key `CAS_USERNAME_SESSION_KEY`.
    """
    cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
    current_app.logger.debug("validating token {0}".format(ticket))
    cas_validate_url = create_cas_validate_url(
        current_app.config['CAS_VALIDATE_SERVER'],
        current_app.config['CAS_VALIDATE_ROUTE'],
        url_for('cas.login', _external=True),
        ticket)
    current_app.logger.debug("Making GET request to {0}".format(cas_validate_url))
    try:
        response = urlopen(cas_validate_url).read()
        ticketid = _parse_tag(response, "cas:user")
        strs = [s.strip() for s in ticketid.split('|') if s.strip()]
        username, is_valid = None, False
        if len(strs) == 1:
            username = strs[0]
            is_valid = True
        user_info = json.loads(_parse_tag(response, "cas:other"))
        current_app.logger.info(user_info)
    except ValueError:
        current_app.logger.error("CAS returned unexpected result")
        is_valid = False
        return is_valid
    if is_valid:
        current_app.logger.debug("valid")
        session[cas_username_session_key] = username
        user = UserCache.get(username)
        session["acl"] = dict(
            uid=user_info.get("uuid"),
            avatar=user.avatar if user else user_info.get("avatar"),
            userId=user_info.get("id"),
            userName=user_info.get("name"),
            nickName=user_info.get("nickname"),
            parentRoles=user_info.get("parents"),
            childRoles=user_info.get("children"),
            roleName=user_info.get("role"))
        session["uid"] = user_info.get("uuid")
        current_app.logger.debug(session)
        current_app.logger.debug(request.url)
    else:
        current_app.logger.debug("invalid")
    return is_valid

def AddonFailure(self, addonId):
    if ADDON.getSetting("provider_disable_failing") == u"false":
        return 0
    if addonId in self._failures:
        self._failures[addonId] += 1
    else:
        self._failures[addonId] = 1
    log.warning("Recorded failure %d for %s" % (self._failures[addonId], addonId))
    if self._failures[addonId] > int(ADDON.getSetting("provider_allowed_failures")):
        try:
            time.sleep(10)
            notify(getLocalizedString(30111))
            urllib_request.urlopen("%s/provider/%s/disable" % (da_inc_HOST, addonId))
        except:
            notify(getLocalizedString(30112))
        return 0
    return self._failures[addonId]

def urlopen(request):
    if not urlopen._opener_installed:
        handler = []
        proxy_http = ucr_get('proxy/http')
        if proxy_http:
            handler.append(urllib_request.ProxyHandler({'http': proxy_http, 'https': proxy_http}))
        handler.append(HTTPSHandler())
        opener = urllib_request.build_opener(*handler)
        urllib_request.install_opener(opener)
        urlopen._opener_installed = True
    return urllib_request.urlopen(request, timeout=60)

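# The memo flag above lives on the function object itself, so it must be
# initialised once after the definition or the first call raises
# AttributeError; presumably the surrounding module does something like:
#
#   urlopen._opener_installed = False
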
def create_driver():
    global driver, browser
    driver = None
    browser = local.DEFAULT_BROWSER
    if browser == "chrome":
        try:
            urllib_request.urlopen("http://127.0.0.1:9222/json")
        except urllib_error.URLError:
            print("Unable to start WebDriver, Chrome is not responding.")
            return
        chrome_options = webdriver.chrome.options.Options()
        chrome_options.experimental_options["debuggerAddress"] = "127.0.0.1:9222"
        driver = webdriver.Chrome(local.CHROME_DRIVER_PATH,
                                  chrome_options=chrome_options)
    elif browser == "firefox":
        driver = MarionetteWrapper(marionette_driver.marionette.Marionette())
    else:
        print("Unknown browser: " + browser)
        browser = None

def get_media_url(self, host, media_id):
    web_url = self.get_url(host, media_id)
    headers = {'User-Agent': common.RAND_UA}
    request = urllib_request.Request(web_url, headers=headers)
    response = urllib_request.urlopen(request, context=self.context)
    html = response.read()
    source = re.search(r'''file:\s*["']([^"']+)''', html)
    if source:
        headers.update({'Referer': web_url})
        return source.group(1) + helpers.append_headers(headers)
    raise ResolverError('File not found')

def getResponse(url, headers, size):
    try:
        if size > 0:
            size = int(size)
            headers['Range'] = 'bytes=%d-' % size
        req = urllib_request.Request(url, headers=headers)
        resp = urllib_request.urlopen(req, timeout=30)
        return resp
    except:
        return None

def fetch_public_key(repo):
    """Download RSA public key Travis will use for this repo.

    Travis API docs: http://docs.travis-ci.com/api/#repository-keys
    """
    keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
    data = json.loads(urlopen(keyurl).read().decode())
    if 'key' not in data:
        errmsg = "Could not find public key for repo: {}.\n".format(repo)
        errmsg += "Have you already added your GitHub repo to Travis?"
        raise ValueError(errmsg)
    return data['key']

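# Hypothetical usage of fetch_public_key above; the slug is the usual
# "owner/name" GitHub form that the Travis API expects:
#
#   pem = fetch_public_key('octocat/hello-world')
#   # pem is a PEM-encoded RSA public key string, typically fed to an
#   # encryption helper when generating secure environment variables
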
def stop(self):
    if hasattr(self, '_process'):
        try:
            with closing(urlopen(self.shutdownURL)) as c:
                c.read()
            rtncode = self._process.poll()
            if rtncode is None:
                self._process.terminate()
        except Exception:
            self.log.info("Failed to shutdown server at %s" % self.shutdownURL)
            traceback.print_exc()
            self._process.kill()

def search(url):
    from six.moves import urllib_request
    itemlist = list()
    ids = list()
    # Examples of valid urls:
    # https://fastestvpn.com/blog/acestream-channels/
    # http://acetv.org/js/data.json
    try:
        data = six.ensure_str(urllib_request.urlopen(url).read())
        data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
        if data:
            try:
                for n, it in enumerate(eval(re.findall(r'(\[.*?])', data)[0])):
                    label = it.get("name", it.get("title", it.get("label")))
                    id = it.get("id", it.get("url"))
                    id = re.findall(r'([0-9a-f]{40})', id, re.I)[0]
                    icon = it.get("icon", it.get("image", it.get("thumb")))
                    new_item = Item(label=label if label else translate(30030) % (n, id),
                                    action='play', id=id)
                    if icon:
                        new_item.icon = icon
                    itemlist.append(new_item)
            except:
                itemlist = []
                for patron in [r"acestream://([0-9a-f]{40})",
                               '(?:"|>)([0-9a-f]{40})(?:"|<)']:
                    n = 1
                    logger(re.findall(patron, data, re.I))
                    for id in re.findall(patron, data, re.I):
                        if id not in ids:
                            ids.append(id)
                            itemlist.append(Item(label=translate(30030) % (n, id),
                                                 action='play', id=id))
                            n += 1
                    if itemlist:
                        break
    except:
        pass
    if itemlist:
        return itemlist
    else:
        xbmcgui.Dialog().ok(HEADING, translate(30031) % url)

def _get_url(self, url):
    """Returns the response content from the given url"""
    return urlopen(url).read()

def add_torrent(self, torrent, timeout=None, **kwargs):
    """
    Add torrent to transfers list. Takes a uri to a torrent or base64 encoded
    torrent data in ``torrent``. Additional arguments are:

    ===================== ===== =========== =============================================================
    Argument              RPC   Replaced by Description
    ===================== ===== =========== =============================================================
    ``bandwidthPriority`` 8     -           Priority for this transfer.
    ``cookies``           13    -           One or more HTTP cookie(s).
    ``download_dir``      1     -           The directory where the downloaded contents will be saved in.
    ``files_unwanted``    1     -           A list of file id's that shouldn't be downloaded.
    ``files_wanted``      1     -           A list of file id's that should be downloaded.
    ``paused``            1     -           If True, does not start the transfer when added.
    ``peer_limit``        1     -           Maximum number of peers allowed.
    ``priority_high``     1     -           A list of file id's that should have high priority.
    ``priority_low``      1     -           A list of file id's that should have low priority.
    ``priority_normal``   1     -           A list of file id's that should have normal priority.
    ===================== ===== =========== =============================================================

    Returns a Torrent object with the fields.
    """
    if torrent is None:
        raise ValueError('add_torrent requires data or a URI.')
    torrent_data = None
    parsed_uri = urlparse(torrent)
    if parsed_uri.scheme in ['ftp', 'ftps', 'http', 'https']:
        # there has been some problem with T's built in torrent fetcher,
        # use a python one instead
        torrent_file = urlopen(torrent)
        torrent_data = torrent_file.read()
        torrent_data = base64.b64encode(torrent_data).decode('utf-8')
    if parsed_uri.scheme in ['file']:
        filepath = torrent
        # uri decoded different on linux / windows ?
        if len(parsed_uri.path) > 0:
            filepath = parsed_uri.path
        elif len(parsed_uri.netloc) > 0:
            filepath = parsed_uri.netloc
        torrent_file = open(filepath, 'rb')
        torrent_data = torrent_file.read()
        torrent_data = base64.b64encode(torrent_data).decode('utf-8')
    if not torrent_data:
        if torrent.endswith('.torrent') or torrent.startswith('magnet:'):
            torrent_data = None
        else:
            might_be_base64 = False
            try:
                # check if this is base64 data
                if PY3:
                    base64.b64decode(torrent.encode('utf-8'))
                else:
                    base64.b64decode(torrent)
                might_be_base64 = True
            except Exception:
                pass
            if might_be_base64:
                torrent_data = torrent
    args = {'metainfo': torrent_data} if torrent_data else {'filename': torrent}
    for key, value in iteritems(kwargs):
        argument = make_rpc_name(key)
        (arg, val) = argument_value_convert('torrent-add', argument, value, self.rpc_version)
        args[arg] = val
    return list(self._request('torrent-add', args, timeout=timeout).values())[0]

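# A sketch of calling add_torrent above, assuming `client` is an instance of
# the surrounding RPC client class and the kwargs map to the torrent-add
# arguments listed in the docstring table:
#
#   torrent = client.add_torrent('magnet:?xt=urn:btih:...',
#                                download_dir='/srv/downloads', paused=True)
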
def geturl(url, headers_cb):
    req = Request(url, data=None, headers=headers_cb(url))
    return urlopen(req).read()

def _load_network_resource(url):
    with contextlib.closing(urllib2.urlopen(url, timeout=60.0)) as f:
        # We rely fully on the MIME type reported by the remote server,
        # because according to the spec it MUST support the RAML MIME type.
        mime_type = f.headers.get('Content-Type')
        return f.read(), mime_type

def json_load(ae, others):
    """httprequest other players. Sends own data and gets back array of all
    other players within sight. This function runs in a background thread
    """
    #TODO pass nearest, nearest.hp and own hp merge in some way
    tm_now = time.time()
    jstring = json.dumps([ae.refid, ae.last_time, ae.x, ae.y, ae.z,
                          ae.h_speed, ae.v_speed, ae.pitch, ae.direction,
                          ae.roll, ae.pitchrate, ae.yaw, ae.rollrate,
                          ae.power_setting, ae.damage],
                         separators=(',', ':'))
    if ae.nearest:
        n_id = ae.nearest.refid
        n_damage = ae.nearest.other_damage
        ae.nearest.other_damage = 0.0
    else:
        n_id = ""
        n_damage = 0.0
    params = urllib_parse.urlencode({"id": ae.refid, "tm": tm_now, "x": ae.x,
                                     "z": ae.z, "json": jstring,
                                     "nearest": n_id, "damage": n_damage})
    others["start"] = tm_now  # used for polling frequency
    urlstring = "http://www.eldwick.org.uk/sharecalc/rpi_json.php?{0}".format(params)
    try:
        r = urllib_request.urlopen(urlstring)
        if r.getcode() == 200:  # good response
            jstring = r.read().decode("utf-8")
            if len(jstring) > 50:  # error messages are shorter than this
                olist = json.loads(jstring)
                # smooth time offset value
                ae.del_time = ae.del_time * 0.9 + olist[0] * 0.1 if ae.del_time else olist[0]
                # own damage is cumulative and not reset on server until dead!
                ae.damage = olist[1]
                #if ae.damage > 2.0 * DAMAGE_FACTOR: #explode return to GO etc
                #print(ae.damage)
                olist = olist[2:]
                """ synchronisation system: sends time.time() which is used to
                calculate an offset on the server and which is inserted as the
                second term in the json string. When the list of other players
                comes back from the server it is preceded by the same offset
                time inserted in this json. This is used to adjust the
                last_time for all the other avatars.
                """
                nearest = None
                ae.rtime = 60
                for o in olist:
                    if not (o[0] in others):
                        others[o[0]] = Aeroplane("models/biplane.obj", 0.1, o[0])
                    oa = others[o[0]]  # oa is other aeroplane, ae is this one!
                    oa.refid = o[0]
                    # exponential smooth time offset values
                    oa.del_time = oa.del_time * 0.9 + o[1] * 0.1 if oa.del_time else o[1]
                    oa.last_time = o[2] + oa.del_time - ae.del_time  # o[1] inserted by server code
                    dt = tm_now - oa.last_time
                    if oa.x == 0.0:
                        oa.x, oa.y, oa.z = o[3], o[4], o[5]
                    nx = o[3] + o[6] * math.sin(math.radians(o[9])) * dt
                    ny = o[4] + o[7] * dt
                    nz = o[5] + o[6] * math.cos(math.radians(o[9])) * dt
                    distance = math.hypot(nx - ae.x, nz - ae.z)
                    if not nearest or distance < nearest:
                        nearest = distance
                        ae.nearest = oa
                    oa.x_perr, oa.y_perr, oa.z_perr = oa.x - nx, oa.y - ny, oa.z - nz
                    oa.x_ierr += oa.x_perr
                    oa.y_ierr += oa.y_perr
                    oa.z_ierr += oa.z_perr
                    oa.d_err = ((oa.direction - (o[9] + o[12] * dt) + 180) % 360 - 180) / 2
                    oa.h_speed = o[6]
                    oa.v_speed = o[7]
                    oa.pitch = o[8]
                    oa.roll = o[10]
                    oa.pitchrate = o[11]
                    oa.yaw = o[12]
                    oa.rollrate = o[13]
                    oa.power_setting = o[14]
                    oa.damage = o[15]
                if nearest:
                    ae.rtime = NR_TM + (max(min(nearest, FA_DIST), NR_DIST) - NR_DIST) / \
                               (FA_DIST - NR_DIST) * (FA_TM - NR_TM)
                #TODO tidy up inactive others; flag not to draw, delete if inactive for long enough
                return True
            else:
                print(jstring)
                return False
        else:
            print(r.getcode())
            return False
    except Exception as e:
        print("exception:", e)

def fetch(self):
    # get time since last download
    try:
        time_delta = time.time() - os.stat('/tmp/re_bplan.json').st_ctime
    except OSError:
        time_delta = 86400
    # download data if older than 10 minutes
    if time_delta > 600:
        try:
            os.remove('/tmp/re_bplan.json')
        except OSError:
            pass
        # call ogr2ogr to fetch the bplan geojson from the FIZ-Broker
        cmd = 'ogr2ogr -s_srs EPSG:25833 -t_srs WGS84 -f geoJSON /tmp/re_bplan.json WFS:"http://fbinter.stadt-berlin.de/fb/wfs/geometry/senstadt/re_bplan?TYPENAMES=GML2" re_bplan'
        subprocess.call(cmd, shell=True)
    # open geojson
    geojson = json.load(open('/tmp/re_bplan.json', 'r'))
    n = 0
    for feature in geojson["features"]:
        # prepare values dictionary
        place_values = {}
        # get identifier
        try:
            place_values['identifier'] = feature['properties']['spatial_alias'].replace(' ', '')
        except AttributeError:
            continue
        # switch lat and lon in (multi) polygon and get center
        latMin, latMax, lonMin, lonMax = 90, -90, 180, -180
        if feature['geometry']['type'] == 'Polygon':
            for path in feature['geometry']['coordinates']:
                for point in path:
                    point[0], point[1] = point[1], point[0]
                    latMin = min(latMin, point[0])
                    latMax = max(latMax, point[0])
                    lonMin = min(lonMin, point[1])
                    lonMax = max(lonMax, point[1])
            place_values['polygon'] = json.dumps([feature['geometry']['coordinates']])
        else:
            for polygon in feature['geometry']['coordinates']:
                for path in polygon:
                    for point in path:
                        point[0], point[1] = point[1], point[0]
                        latMin = min(latMin, point[0])
                        latMax = max(latMax, point[0])
                        lonMin = min(lonMin, point[1])
                        lonMax = max(lonMax, point[1])
            place_values['polygon'] = json.dumps(feature['geometry']['coordinates'])
        # get lat and lon
        place_values['lat'] = str((latMax + latMin) * 0.5)
        place_values['lon'] = str((lonMax + lonMin) * 0.5)
        # get area description
        if feature['properties']['BEREICH']:
            place_values['description'] = feature['properties']['BEREICH']
        else:
            place_values['description'] = ''
        # see if is marked active
        if feature['properties']['FESTSG']:
            if feature['properties']['FESTSG'].lower() == 'ja':
                place_values['active'] = False
            else:
                place_values['active'] = True
        else:
            place_values['active'] = False
        # update the place or create a new one
        place, created = Place.objects.update_or_create(
            identifier=place_values['identifier'], defaults=place_values)
        if created:
            n += 1
            try:
                district = District.objects.get(name=feature['properties']['BEZIRK'])
                place.entities.add(district)
                place.save()
            except District.DoesNotExist:
                pass
            # get address from open street map
            url = "http://open.mapquestapi.com/nominatim/v1/reverse.php?format=json&lat=%s&lon=%s" % (place.lat, place.lon)
            response = urlopen(url).read()
            data = json.loads(response)
            if 'road' in data['address']:
                place.address = data['address']['road']
            else:
                place.address = ''
            place.save()
            print(place, 'created')
            time.sleep(1)
    print(n, 'places created')