def get_page(self):
    """Download a test PNG (optionally through the proxy toggle in the UI),
    save it as img.png and report completion on the label widget.
    """
    # The proxy checkbox in the kv file decides whether to route the request.
    use_proxy = self.ids.use_proxy.active
    kwargs = {'proxy_host': '147.215.1.189', 'proxy_port': 3128} if use_proxy else {}
    request = UrlRequest('http://httpbin.org/image/png', **kwargs)
    request.wait()  # block until the download finishes
    #print(type(req.result))
    with open('img.png', 'wb') as image_file:
        image_file.write(request.result)
    self.ids.label_id.text = 'Image downloaded'
def get_exchange_stats(self):
    """Fetch exchange stats from the configured server, then recompute the
    primary exchange's total ask and bid liquidity.

    The server response is handled by ``self.save_exchange_stats`` (which
    populates ``self.stats``); on a non-200 status nothing is recomputed.
    """
    # get the exchange status
    self.PlungeApp.logger.debug("getting exchange stats")
    url = "http://%s:%s/exchanges" % (self.PlungeApp.config.get('server', 'host'),
                                      self.PlungeApp.config.get('server', 'port'))
    req = UrlRequest(url, self.save_exchange_stats, self.exchange_stats_error,
                     self.exchange_stats_error)
    req.wait()
    if req.resp_status != 200:
        self.PlungeApp.logger.debug("not saving exchange stats")
        return

    def _unique_amount_total(side):
        # Sum each distinct order amount (rounded to 4 dp) exactly once,
        # preserving first-seen order so float summation order is stable.
        # (The original duplicated this loop verbatim for 'ask' and 'bid'.)
        seen = []
        samples = self.stats[self.primary_exchange][self.primary_currency][side]['orders']
        for sample in samples:
            for order in sample:
                amount = round(order['amount'], 4)
                if amount not in seen:
                    seen.append(amount)
        return sum(seen)

    # calculate the exchange totals
    self.primary_exchange_total_ask = _unique_amount_total('ask')
    self.primary_exchange_total_bid = _unique_amount_total('bid')
    self.PlungeApp.logger.debug("getting exchange stats finished")
def buttonClicked(self,btn): global phoneNumber #print(self.size,int(self.size[1] / 40)) temp = self.phoneNumber.text print(temp) otpScreen = makeScreen(OTPPage , "otpScreen" ) screens.add_widget(otpScreen) if(temp =="1234567890" ): screens.current = "otpScreen" print("test-value") return if( len(temp) == 10):#username not phone number params = urllib.urlencode({ "phone_number" : unicode(self.phoneNumber.text) } ) headers = {'Content-type':'application/x-www-form-urlencoded'} req = UrlRequest('https://secure-garden-80717.herokuapp.com/signup' , method = "POST" , req_headers = headers, req_body = params) #req = UrlRequest("https://inputtools.google.com/request" , on_success = success , on_error = error , on_failure = fail , req_body = data) #print( type( req ) ) #print(params ) req.wait() print(req.result) phoneNumber = self.phoneNumber.text print(phoneNumber) screens.current = "otpScreen" # flag=True else : self.popup = Popup(title='', content=Label(text='Incomplete Phone Number. \n Please Check Again'), size_hint=(None, None), size=(400, 400),auto_dismiss=True) #self.add_widget(self.popup) self.popup.open() return 0
def search_event(event, return_=False):
    """Returns the result of a search request

    An event is unique by its name, location and dates.

    When ``return_`` is True the call blocks and returns the raw search
    result; otherwise the request runs asynchronously and
    ``update_or_create_event`` is invoked with the response.
    Python 2 code (``dict.iteritems``).
    """
    url = _HOSTNAME + _INDEX + 'event/_search'
    method = "GET"
    d = models.to_dict(event)
    del d['id']  # 'id' is storage-local and must not constrain the match
    # One match_phrase clause per remaining field; all must match (bool/must).
    data = {"query": {"bool": {"must": [{"match_phrase": {k: v}} for k, v in d.iteritems()]}}}
    # Async mode installs a success callback; sync mode polls via wait().
    success = None if return_ else \
        lambda req, res: update_or_create_event(event, res)
    req = UrlRequest(url=url, req_body=json.dumps(data), method=method, timeout=1, on_error=print_error, on_success=success)
    if return_:
        req.wait()  # We need the result before continuing
        return req.result
def post_on_twitter(self, text):
    """Post the game results to Twitter.

    :param text: text of the post
    :return: void
    """
    endpoint = 'http://url.md/tw/?content=%s&pass=00113' % quote(text)
    request = UrlRequest(endpoint)
    request.wait()
def userPressed(self, instance):
    """Create a new account on the server from the login form fields and
    block until the reply arrives (handled by self.user_posted).
    """
    payload = urllib.urlencode({
        'userName': self.userName.text,
        'passHash': self.password.text,
        'creating': "true",
    })
    form_headers = {'Content-type': 'application/x-www-form-urlencoded',
                    'Accept': 'text/plain'}
    request = UrlRequest(serverURL + '/createUser',
                         on_success=self.user_posted,
                         req_body=payload,
                         req_headers=form_headers)
    request.wait()
def dllevel(self, instance):
    # Download one level from the server; `instance.info` carries the
    # level metadata from the list widget that was clicked.
    # Python 2 code (print statements, urllib.urlencode).
    print instance.info
    print "making request"
    params = urllib.urlencode({'fullname': instance.info['filename']})
    # All outcome callbacks are wired; the level name is stashed on the
    # request object so self.got_level can identify it later.
    req = UrlRequest(serverURL+'/downloadLevel', on_success=self.got_level, timeout=1000, req_body=params, on_error=self.on_error, on_failure=self.on_failure, on_redirect=self.on_redirect)
    req.levelname = instance.info['name']
    print "made request"
    req.wait()  # block until one of the callbacks has fired
    print "wait over"
class DataGetter(object):
    """Tiny helper that fires a request at a local PHP page.

    NOTE(review): ``getInfo`` passes ``self.print_r`` as the success
    callback, but no ``print_r`` method is defined on this class — confirm
    against the rest of the project.
    """

    def __init__(self):
        pass

    def getInfo(self):
        """Start the request and surface any exception from starting it.

        Fixes in this revision: the original ended with a dangling empty
        ``else:`` (a syntax error) and used the Python-2-only
        ``except Exception, e`` form.
        """
        self.req = UrlRequest('http://localhost/index.php', self.print_r)
        try:
            # UrlRequest is a Thread; run() executes it synchronously here.
            self.req.run()
        except Exception as e:
            print(e)
def make_request(self, text_input):
    """Fetch the open-issue list for the 'owner/repo' named in text_input
    from the GitHub API; parsing happens in the callback methods.
    """
    request = UrlRequest(
        'https://api.github.com/repos/' + text_input + '/issues',
        on_success=self.parse_request,
        on_failure=self.parse_failure,
        on_error=self.parse_error,
        req_headers={'User-Agent': 'timokramer/repodigger'},
        debug=True,
    )
    request.wait()
    if request.is_finished:
        print("Request Finished")
def apiCheck(keyID, vCode):
    """Validate an EVE API key and return the pilot rows it grants.

    Performs a blocking request to APIKeyInfo; the nested callbacks mutate
    the closed-over ``pilots`` list, which is returned after ``wait()``.
    """
    pilots = []
    skills = {}  # NOTE(review): shadowed by the local assignment in api_process
    baseUrl = config.serverConn.svrAddress + 'account/APIKeyInfo.xml.aspx?keyID=%s&vCode=%s'
    apiURL = baseUrl % (keyID, vCode)
    print(apiURL)  # Console debug

    def api_process(self, result):
        # Success callback: parse the key info and one row per character.
        XMLData = parseString(result)
        key = XMLData.getElementsByTagName('key')
        keyInfo = {'accessMask': key[0].getAttribute('accessMask'), 'type': key[0].getAttribute('type'), 'expires': key[0].getAttribute('expires')}
        dataNodes = XMLData.getElementsByTagName('row')
        for row in dataNodes:
            # TODO: Need to find out a way to not call this if the key has no access to this data. AccessMask?
            skills = skillCheck(keyID, vCode, row.getAttribute('characterID'))
            # Returned columns: characterID,characterName,corporationID,corporationName,allianceID,allianceName,factionID,factionName
            pilots.append([keyID, vCode, row.getAttribute('characterID'), row.getAttribute('characterName'), row.getAttribute('corporationID'), row.getAttribute('corporationName'), keyInfo['type'], keyInfo['expires'], skills])
        print('Pilots at end of api_process: ' + str(pilots))
        return pilots  # return value of a callback is ignored; closure mutation does the work

    def api_fail(self, result):
        # Failure callback: the server answered with an API error document.
        XMLData = parseString(result)
        error = XMLData.getElementsByTagName('error')
        errorInfo = {'number': error[0].getAttribute('code'), 'text': error[0].firstChild.nodeValue}
        status = '%s Returned Error:\n%s\n%s' % (config.serverConn.svrName, errorInfo['number'], errorInfo['text'])
        onError(status)

    def api_error(self, error):
        # Transport-level error callback (connection problems etc.).
        status = 'Error Connecting to %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
        onError(status)

    req = UrlRequest(apiURL, on_success=api_process, on_error=api_error, on_failure=api_fail, req_headers=config.headers)
    req.wait()
    return pilots
def request_all_milestones(self):
    """Fetch every milestone (open and closed) for the configured repo."""
    endpoint = 'https://api.github.com/repos/' + self.get_repo_string() + '/milestones?state=all'
    request = UrlRequest(
        endpoint,
        on_success=self.parse_milestones_request,
        on_failure=self.parse_failure,
        on_error=self.parse_error,
        req_headers={'User-Agent': 'timokramer/repodigger'},
        debug=True,
    )
    request.wait()
    if request.is_finished:
        print("Request Finished")
def uploadCrash(crashstr):
    # Upload a crash report string to the server, blocking until sent.
    # Python 2 code (print statements, urllib.urlencode).
    print "uploading crash to ", ui_elements.serverURL
    #req = UrlRequest('/listLevels', on_success=self.got_levels, timeout=1000)
    # Imports are local so this can run from the crash handler even if
    # module import order is disturbed.
    import urllib
    from kivy.network.urlrequest import UrlRequest
    params = urllib.urlencode({'version': __version__, "crashData": crashstr})
    headers = {'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain'}
    # Fire-and-wait: no success callback, we only care that it was sent.
    req = UrlRequest(ui_elements.serverURL+'/uploadCrash', on_success=None, req_body=params, req_headers=headers)
    req.wait()
    print "crash uploaded"
def listLevels(self):
    # Query a page of levels from the server using the current cursor,
    # page size and sort controls.  Python 2 code (print statements).
    headers = {'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain'}
    params = {"cursor": self.cursor, "limit": self.pagesize, "sortKey": self.sortSpinner.text}
    # The reverse toggle only adds the flag when pressed.
    if self.reverseButton.state == 'down':
        params['reverse'] = True
    print "requesting levels", serverURL+'/queryLevels', params
    params = urllib.urlencode(params)
    print self.reverseButton.state
    req = UrlRequest(serverURL+'/queryLevels', on_success=self.got_levels, req_headers=headers, on_error=self.on_error, on_failure=self.on_failure, on_redirect=self.on_redirect, req_body=params)
    print "waiting"
    req.wait()  # block until got_levels (or an error callback) has run
    print "waited"
def request_all_issues(self, rd, text_input):
    """Fetch all issues (any state) for 'owner/repo'; on completion record
    the repo string and notify the caller via rd.request_success().
    """
    request = UrlRequest(
        'https://api.github.com/repos/' + text_input + '/issues?state=all',
        on_success=self.parse_issues_request,
        on_failure=self.parse_failure,
        on_error=self.parse_error,
        req_headers={'User-Agent': 'timokramer/repodigger'},
        debug=True,
    )
    request.wait()
    if request.is_finished:
        print("Request Finished")
        self.set_repo_string(text_input)
        rd.request_success()
def req(*args):
    """Fetch a station board and format up to 7 departures as display text.

    args[1] is the station query appended to the base URL.  Returns a
    newline-separated string of "name, HH:MM, destination" entries.

    Fix: the original initialised the accumulator as a list and then did
    ``accuStr += "\\n" + displayStr``, which extends a list one CHARACTER at
    a time and returned a list of characters.  It now accumulates a string.
    """
    r = UrlRequest(urlStation + args[1])
    r.wait()
    jData = r.result
    stb = jData.get("stationboard")
    parts = []
    for i, entry in enumerate(stb):
        if i >= 7:  # only the first seven departures are displayed
            break
        tDepTime = entry.get("stop").get("departure")
        # departure is an ISO-like timestamp; [11:16] slices out HH:MM
        parts.append("\n" + entry.get("name") + ", " + tDepTime[11:16] + ", " + entry.get("to"))
    return "".join(parts)
def uploadLevel(self): print "uploading level" lname = self.mtref.nameBox.text updata = self.mtref.gameref.serials.exportDict() #req = UrlRequest('/listLevels', on_success=self.got_levels, timeout=1000) import base64 params = urllib.urlencode({ 'author':self.userName.text, 'passHash': self.password.text, 'name':lname,"levelData":json.dumps(updata), "sshot":base64.b64encode(open(lname+".png", 'r').read()) }) headers = {'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain'} req = UrlRequest(serverURL+'/uploadLevel', on_success=self.level_posted, req_body=params, req_headers=headers) req.wait()
def getapidata(self):
    """Fetch '<url>/<view>/?format=json' and return the parsed result.

    Success/failure/error callbacks set self.__success and the message
    attributes; on failure an AppMiscError is raised.

    Fix: if the request failed but neither __errormsg nor __failuremsg was
    set, the original raised UnboundLocalError (title/message never bound);
    a generic fallback is now supplied.
    """
    self.__success = False
    self.__errormsg = ''
    self.__failuremsg = ''
    data = UrlRequest('{0}/{1}/?format=json'.format(self.__url, self.view),
                      on_success=self.__getdatos,
                      on_failure=self.__getfailure,
                      on_error=self.__geterror)
    data.wait()
    if not self.__success:
        if self.__errormsg != '':
            title, message = 'Url Error Message', self.__errormsg
        elif self.__failuremsg != '':
            title, message = 'Url Failure Message', self.__failuremsg
        else:
            # Request failed without populating either message.
            title, message = 'Url Error Message', 'Unknown request failure'
        raise AppMiscError(title, message)
    return self.__tmpresult
def get_brands(self):
    """Fetch the brand list from the API; returns [] when nothing came back."""
    url = self.prepare_url(BRANDS_PATH)
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    if not payload:
        return []
    return [Brand(entry) for entry in payload]
def get_types(self, brandname):
    """Fetch the device types available for *brandname*; [] on empty reply."""
    url = self.prepare_url(TYPES_PATH.format(brandname))
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    if not payload:
        return []
    return [Type(entry) for entry in payload]
def get_codeset(self, setid):
    """Download the complete IR code set *setid*; [] on empty reply."""
    url = self.prepare_url(CODESET_PATH.format(setid), {'output': 'direct'})
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    if not payload:
        return []
    return [IRCode(entry) for entry in payload]
def get_functions(self, setid):
    """List the functions defined by code set *setid*; [] on empty reply."""
    url = self.prepare_url(FUNCTIONS_PATH.format(setid))
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    if not payload:
        return []
    return [Function(entry) for entry in payload]
def get_code(self, setid, function):
    """Fetch one IR code for (setid, function); returns "" when absent."""
    url = self.prepare_url(CODE_PATH.format(setid, function), {'output': 'direct'})
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    return IRCode(payload) if payload else ""
def get_pool_stats(self):
    """Fetch /status from the configured server and, on HTTP 200, refresh
    the buy/sell/total liquidity lists and the pool stats display.
    """
    self.PlungeApp.logger.debug("getting pool stats")
    host = self.PlungeApp.config.get('server', 'host')
    port = self.PlungeApp.config.get('server', 'port')
    request = UrlRequest('http://%s:%s/status' % (host, port),
                         self.save_pool_stats, self.pool_stats_error,
                         self.pool_stats_error)
    request.wait()
    if request.resp_status != 200:
        self.PlungeApp.logger.debug("not saving pool stats")
        return
    # self.pool is populated by the save_pool_stats callback above.
    self.update_lists(self.pool['buy_liquidity'], self.pool_buy_liquidity)
    self.update_lists(self.pool['sell_liquidity'], self.pool_sell_liquidity)
    self.update_lists((self.pool['buy_liquidity'] + self.pool['sell_liquidity']),
                      self.pool_total_liquidity)
    self.update_pool_stats()
    self.PlungeApp.logger.debug("getting pool stats finished.")
def send_settings(self, ip_address, participant_name, game_variant, condition):
    """PUT the experiment settings to the game server and block until done."""
    body = json.dumps({
        'participant_name': str(participant_name),
        'game_variant': game_variant,  # Evil, good, or normal hangman?
        'condition': int(condition),  # Social or neutral robot?
        'participant_number': 47,
    })
    request = UrlRequest('http://' + ip_address + ':1235/settings',
                         req_headers={'Content-type': 'application/json'},
                         req_body=body,
                         method='PUT')
    request.wait()
def getapidatafiltered(self):
    """Fetch '<url>/<view>&format=json' (a pre-filtered view) and return the
    parsed result; raises AppMiscError when the request did not succeed.

    Fix: if the request failed but neither __errormsg nor __failuremsg was
    set, the original raised UnboundLocalError (title/message never bound);
    a generic fallback is now supplied.
    """
    self.__success = False
    self.__errormsg = ""
    self.__failuremsg = ""
    data = UrlRequest(
        "{0}/{1}&format=json".format(self.__url, self.view),
        on_success=self.__getdatos,
        on_failure=self.__getfailure,
        on_error=self.__geterror,
    )
    data.wait()
    if not self.__success:
        if self.__errormsg != "":
            title, message = "Url Error Message", self.__errormsg
        elif self.__failuremsg != "":
            title, message = "Url Failure Message", self.__failuremsg
        else:
            # Request failed without populating either message.
            title, message = "Url Error Message", "Unknown request failure"
        raise AppMiscError(title, message)
    return self.__tmpresult
def get_models(self, brandname, typename):
    """List models for a brand/type pair; both names are URL-quoted first.
    Returns [] when the server sends nothing back.  (Python 2 urllib.quote.)
    """
    safe_brand = urllib.quote(brandname, safe="")
    safe_type = urllib.quote(typename, safe="")
    url = self.prepare_url(MODELS_PATH.format(safe_brand, safe_type))
    self.oReq = UrlRequest(url, req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)
    payload = self.oReq.result
    if not payload:
        return []
    return [Model(entry) for entry in payload]
def cypher(self, query):
    """POST a Cypher statement to the Neo4j transactional commit endpoint
    and wait for post_query/print_error to handle the outcome.
    """
    self.query = query
    payload = {"statements": [{"statement": self.query,
                               "resultDataContents": ["row", "graph"]}]}
    self.params = json.dumps(payload)
    self.req = UrlRequest(self.base_url + '/db/data/transaction/commit',
                          on_success=self.post_query,
                          req_headers=headers,
                          on_error=self.print_error,
                          req_body=self.params,
                          debug=True,
                          method='POST')
    self.req.wait(delay=0.5)
def skillCheck(keyID, vCode, characterID):
    """Fetch a character's skill sheet; returns {typeID: level}.

    Blocking call: the success callback mutates the closed-over ``skills``
    dict, which is returned after ``wait()``.
    """
    skills = {}
    baseUrl = config.serverConn.svrAddress + 'char/CharacterSheet.xml.aspx?keyID=%s&vCode=%s&characterID=%s'
    apiURL = baseUrl % (keyID, vCode, characterID)

    def skill_process(self, result):
        # Success callback: each <row> is one trained skill.
        XMLData = parseString(result)
        skillDataNodes = XMLData.getElementsByTagName('row')
        for skillRow in skillDataNodes:
            skills[skillRow.getAttribute('typeID')] = skillRow.getAttribute('level')
        # print(skills) # Console debug

    def skill_error(self, error):
        # Keys without CharacterSheet access land here.
        status = 'This key may not have access to pilot skills.\nError Using %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
        onError(status)

    req = UrlRequest(apiURL, on_success=skill_process, on_error=skill_error, req_headers=config.headers)
    req.wait()
    return skills
class KivyConnNeo(object):
    """Minimal Neo4j REST client built on Kivy's UrlRequest.

    Query results accumulate in ``self.results`` (row values only).
    Python 2 code (print statements, dict.iteritems).
    """

    def __init__(self, base_url="http://127.0.0.1:7474"):
        self.base_url = base_url  # Neo4j server root
        self.results = []  # flattened row values from all queries so far

    def post_query(self, req, result):
        # Success callback: print server-side errors, collect row data.
        #print json.dumps(result,indent=4, sort_keys=True)
        if result:
            for key, value in result.iteritems():
                if key == 'errors':
                    for s in value:
                        print s['message']
                elif key == 'results':
                    # Each statement result -> data entries -> row values.
                    for s in value:
                        for t in s['data']:
                            for e in t['row']:
                                self.results.append(e)

    def print_error(self, req, error):
        # Transport-level error callback.
        print type(error)
        print error

    def cypher(self, query):
        # POST one Cypher statement to the transactional commit endpoint
        # and block until a callback has run.
        self.query = query
        self.params = json.dumps({"statements": [{"statement": ""+self.query+"", "resultDataContents": ["row", "graph"]}]})
        # NOTE(review): 'headers' is a module-level global, not defined here.
        self.req = UrlRequest(self.base_url + '/db/data/transaction/commit', on_success=self.post_query, req_headers=headers, on_error=self.print_error, req_body=self.params, debug=True, method='POST')
        self.req.wait(delay=0.5)
def login(self):
    # Log in to the iTach/Global Cache cloud with the credentials typed in
    # the selector dialog; on success the account object is stored.
    url = BASE_URL + LOGIN_PATH
    # Persist the typed credentials into app variables before the request.
    SetVar('ITach_Mail', oDBSelector.oTxtMail.text)
    SetVar('ITach_Password', oDBSelector.oTxtPassword.text)
    Var_Save('ITach_Mail', '')
    Var_Save('ITach_Password', '')
    login_account = Account()
    login_account.Email = oDBSelector.oTxtMail.text
    login_account.Password = oDBSelector.oTxtPassword.text
    self.oReq = UrlRequest(url, req_body=login_account.ToData(), req_headers=HEADERS, on_failure=self.OnError, on_error=self.OnError)
    self.NewWait(0.05)  # poll until the request has a response
    aResponse = self.oReq.result
    if aResponse:
        # Server reply wraps the account details under the 'Account' key.
        self.oAccount = Account(aResponse['Account'])
def search_location(self):
    """Search OpenWeatherMap for locations matching the search box text.

    Fix: the original concatenated "data/2.5" + "find?q=..." with no path
    separator, producing ".../2.5find?q=..." — a guaranteed 404.  The
    missing '/' has been added.
    """
    search_template = "http://api.openweathermap.org/data/2.5" + "/find?q={}&type=like"
    search_url = search_template.format(self.search_input.text)
    request = UrlRequest(search_url, self.found_location)
    print(request)
    print('Request sent!')
def search_location(self):
    """Query the OpenWeatherMap sample 'find' endpoint for the typed city;
    self.found_location receives the parsed reply.
    """
    search_template = "https://samples.openweathermap.org/data/2.5/find?q={}&appid=b6907d289e10d714a6e88b30761fae22"
    # search_template = "https://api.openweathermap.org/data/2.5/find?q={}&typle=like&appid=xyz" # Replace 'xyz' with your API Key (APPID)
    UrlRequest(search_template.format(self.search_input.text), self.found_location)
def update_weather(self):
    """Request current weather (metric units) for the stored location;
    self.weather_retrieved handles the response asynchronously.
    """
    weather_domain = "http://api.openweathermap.org/data/2.5/weather?q={},{}&units=metric&APPID=f57ecb9a1d30725e5b652cad7e9c5a1e"
    UrlRequest(weather_domain.format(*self.location), self.weather_retrieved)
def id2location(pilotRowID, ids, pilotRows):
    """Translate EVE location IDs into display names.

    Returns {locationID: name}.  IDs are bucketed into: private (need the
    pilot/corp Locations API), conquerable stations (need the public
    ConquerableStationList API), and everything else (resolved from the
    local static SQLite database).  Unresolvable IDs map to their own
    string form.

    NOTE(review): reconstructed from a whitespace-mangled source — the
    nesting of the trailing fallback branches was inferred; confirm
    against the original file.
    """
    locationNames = {0: 'Unanchored'}
    locationIDs = []  # resolvable via static DB
    privateLocationIDs = []  # only resolvable via authenticated API
    conquerableIDs = []  # resolvable via ConquerableStationList
    numItems = list(range(len(ids)))
    # print(ids)  # Console debug
    for x in numItems:
        if is32(ids[x]) is False:
            # 32 bit value: Only known to Pilot or Corp via API
            privateLocationIDs.append(ids[x])
        elif 66000000 < ids[x] < 66014933:
            # Office in Station needs conversion
            officeID = ids[x] - 6000001
            if officeID not in locationIDs:
                locationIDs.append(officeID)
        elif 66014934 < ids[x] < 67999999:
            # Office in Conquerable Station needs conversion
            officeID = ids[x] - 6000000
            if officeID not in locationIDs:
                locationIDs.append(officeID)
        elif 60014861 < ids[x] < 60014928:
            # Conquerable Station
            if ids[x] not in conquerableIDs:
                conquerableIDs.append(ids[x])
        elif 60000000 < ids[x] < 61000000:
            # Station
            if ids[x] not in locationIDs:
                locationIDs.append(ids[x])
        elif 61000000 <= ids[x] < 66000000:
            # Conquerable Outpost
            if ids[x] not in conquerableIDs:
                conquerableIDs.append(ids[x])
        elif ids[x] < 60000000:
            # locationID < 60000000 then the asset is somewhere in space
            if ids[x] not in locationIDs:
                locationIDs.append(ids[x])
        else:
            # I am currently unsure how to translate this value, most likely an unexpected value.
            if ids[x] not in locationIDs:
                locationNames.update({int(ids[x]): str(ids[x])})
    if locationIDs != []:  # We still have some ids we don't know
        try:
            idList = ("', '".join(map(str, locationIDs[:])))
            con = lite.connect(config.staticDB)
            with con:
                cur = con.cursor()
                statement = "SELECT itemID, itemName FROM invnames WHERE itemID IN ('" + idList + "')"
                cur.execute(statement)
                rows = cur.fetchall()
                # print((len(rows)))  # Console debug
                for row in rows:
                    # print(row)  # Console debug
                    locationNames.update({int(row[0]): str(row[1])})
        except lite.Error as err:
            error = ('SQL Lite Error: ' + str(err.args[0]) + str(err.args[1:]))  # Error String
            # Fall back to showing the raw IDs as their own names.
            ids = idList.split("', '")
            numItems = range(len(ids))
            for y in numItems:
                locationNames.update({int(ids[y]): str(ids[y])})
            onError(error)
        finally:
            if con:
                con.close()
    if privateLocationIDs != []:  # We have some Pilot or Corp locations we don't know
        if pilotRows[pilotRowID].keyType == 'Corporation':
            baseUrl = config.serverConn.svrAddress + 'corp/locations.xml.aspx?keyID=%s&vCode=%s&characterID=%s&IDs=%s'
        else:  # Should be an account key
            baseUrl = config.serverConn.svrAddress + 'char/locations.xml.aspx?keyID=%s&vCode=%s&characterID=%s&IDs=%s'
        # Calculate the number of ids we have left. Server has hard maximum of 250 IDs per query.
        # So we'll need to split this into multiple queries.
        numIDs = len(privateLocationIDs)
        idList = []
        if numIDs > 250:
            startID = 0
            endID = 250
            while startID < numIDs:
                idList.append(','.join(map(str, privateLocationIDs[startID:endID])))
                startID = startID + 250
                if ((numIDs - endID)) > 250:
                    endID = endID + 250
                else:
                    endID = numIDs
        else:
            idList.append(','.join(map(str, privateLocationIDs[0:numIDs])))
        numIdLists = list(range(len(idList)))
        for x in numIdLists:
            # Iterate over all of the id lists generated above.
            # Download the TypeName Data from API server
            apiURL = baseUrl % (pilotRows[pilotRowID].keyID, pilotRows[pilotRowID].vCode, pilotRows[pilotRowID].characterID, idList[x])
            # print(apiURL)  # Console debug

            def typeNames_process(self, result):
                # Success: record each itemID -> itemName pair.
                XMLData = parseString(result)
                dataNodes = XMLData.getElementsByTagName('row')
                for row in dataNodes:
                    locationNames.update({int(row.getAttribute('itemID')): str(row.getAttribute('itemName'))})

            def typeNames_error(self, error):
                # Error: fall back to raw IDs as names and report.
                status = 'Error Connecting to %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
                ids = idList[x].split(',')
                numItems = range(len(ids))
                for y in numItems:
                    locationNames.update({int(ids[y]): str(ids[y])})
                onError(status)
                print(status)

            req = UrlRequest(apiURL, on_success=typeNames_process, on_error=typeNames_error, req_headers=config.headers)
            req.wait()
    if conquerableIDs != []:  # We have some conquerableIDs we don't know
        idList = []
        apiURL = config.serverConn.svrAddress + 'eve/ConquerableStationList.xml.aspx'

        def typeNames_process(self, result):
            # Success: pick only the stations we actually asked about.
            XMLData = parseString(result)
            dataNodes = XMLData.getElementsByTagName('row')
            for row in dataNodes:
                if int(row.getAttribute('stationID')) in idList:
                    locationNames.update({int(row.getAttribute('stationID')): str(row.getAttribute('stationName'))})

        def typeNames_error(self, error):
            # NOTE(review): reuses idList[x] from the previous loop; looks
            # suspect for the conquerable branch — confirm intent.
            status = 'Error Connecting to %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
            ids = idList[x].split(',')
            numItems = range(len(ids))
            for y in numItems:
                locationNames.update({int(ids[y]): str(ids[y])})
            onError(status)
            print(status)

        req = UrlRequest(apiURL, on_success=typeNames_process, on_error=typeNames_error, req_headers=config.headers)
        req.wait()
    return locationNames
class cThread_CheckIP(threading.Thread):
    """Worker thread that probes one IP for an Enigma2 receiver.

    Hits http://<ip>/web/about; when the reply looks like an Enigma2 about
    page, a result dict is appended to the caller's aResults (guarded by a
    class-level lock shared by all probe threads).
    """
    # Shared by every probe thread: serialises access to oCaller.aResults.
    oWaitLock = threading.Lock()

    def __init__(self, uIP, bOnlyOnce, fTimeOut, oCaller):
        threading.Thread.__init__(self)
        self.uIP = uIP  # IP to probe
        self.bOnlyOnce = bOnlyOnce  # stop once any thread found a device
        self.oCaller = oCaller  # owner holding aResults / ShowError
        self.fTimeOut = fTimeOut  # per-request timeout (seconds)
        self.bStopWait = False  # set by callbacks to break the poll loop
        self.oReq = None

    def run(self):
        bReturnNow = False
        if self.bOnlyOnce:
            # If another thread already found a device, skip the probe.
            cThread_CheckIP.oWaitLock.acquire()
            if len(self.oCaller.aResults) > 0:
                bReturnNow = True
            cThread_CheckIP.oWaitLock.release()
        if bReturnNow:
            return
        self.SendCommand()

    def SendCommand(self):
        # Probe the /web/about endpoint and, on a match, record the device.
        self.bStopWait = False
        uUrlFull = "http://" + self.uIP + "/web/about"
        try:
            self.oReq = UrlRequest(uUrlFull, method="GET", timeout=self.fTimeOut, on_error=self.OnError, on_success=self.OnReceive)
            self.NewWait(0.05)
            if self.oReq.resp_status is not None:
                uResult = self.oReq.result
                if "<e2abouts>" in uResult:
                    # Py2 needs the payload coerced to ascii for fromstring.
                    if PY2:
                        oXmlRoot = fromstring(uResult.encode('ascii', 'xmlcharrefreplace'))
                    else:
                        oXmlRoot = fromstring(uResult)
                    oXmlAbout = oXmlRoot.find("e2about")
                    uModel = GetXMLTextValue(oXmlAbout, "e2model", False, "Enigma")
                    uFoundHostName = ""
                    try:
                        uFoundHostName = socket.gethostbyaddr(self.uIP)[0]
                    except Exception as e:
                        # Logger.error("Cant get Hostname:"+oRet.sFoundIP+" "+str(e))
                        pass
                    cThread_CheckIP.oWaitLock.acquire()
                    self.oCaller.aResults.append({"ip": self.uIP, "port": 80, "model": uModel, "ipversion": "IPv4", "hostname": uFoundHostName})
                    try:
                        # Also register an IPv6 entry when the hostname
                        # resolves to one.
                        uIP = ""
                        aIPs = socket.getaddrinfo(uFoundHostName, None)
                        for tIP in aIPs:
                            uIP = "[" + tIP[-1][0] + "]"
                            if ":" in uIP:
                                break
                        if ":" in uIP:
                            self.oCaller.aResults.append({"ip": uIP, "port": 80, "model": uModel, "ipversion": "IPv6", "hostname": uFoundHostName})
                    except Exception as e:
                        pass
                    cThread_CheckIP.oWaitLock.release()
        except Exception as e:
            self.oCaller.ShowError("Error on send:", e)
        return

    def NewWait(self, delay):
        # Poll the request until it has a response, pumping Kivy's result
        # dispatcher ourselves; callbacks set bStopWait to abort early.
        while self.oReq.resp_status is None:
            self.oReq._dispatch_result(delay)
            sleep(delay)
            if self.bStopWait:
                self.bStopWait = False
                break

    def OnError(self, request, error):
        # Any transport error ends the wait loop.
        self.bStopWait = True

    def OnFailure(self, request, result):
        # HTTP-level failure ends the wait loop.
        self.bStopWait = True

    def OnReceive(self, oRequest, oResult):
        # Successful reply ends the wait loop.
        self.bStopWait = True
def patchLeave(self, JSON):
    """Fire an HTTP DELETE for the given record (fire-and-forget)."""
    # How to find which line you are in?
    # requests.delete(url=self.url[:-5] + JSON + ".json")
    target_url = self.url[:-5] + JSON + ".json"
    UrlRequest(target_url, method='DELETE')
def patchDestroy(self, JSON):
    """Fire an HTTP DELETE for the given record (fire-and-forget)."""
    # requests.delete(url=self.url[:-5] + JSON + ".json")
    target_url = self.url[:-5] + JSON + ".json"
    UrlRequest(target_url, method='DELETE')
# NOTE(review): this is the interior of a larger starbase-fetch routine;
# 'x', 'target', 'tempStarbaseRows', 'apiURL' and 'server_error' at the
# outer level are free names bound by the (not shown) enclosing scope.
# Statement grouping below was reconstructed from a whitespace-mangled
# source — confirm against the original file.
def starbases_process(self, result):
    # Success callback for the StarbaseList request: cache the expiry
    # time, then fetch detail for each starbase row.
    XMLData = parseString(result)
    starbaseNodes = XMLData.getElementsByTagName("row")
    cacheuntil = XMLData.getElementsByTagName('cachedUntil')
    cacheExpire = datetime.datetime(*(time.strptime((cacheuntil[0].firstChild.nodeValue), "%Y-%m-%d %H:%M:%S")[0:6]))
    config.starbaseCachedUntil = cacheExpire
    for row in starbaseNodes:
        itemIDs = []
        locationIDs = []
        if int(row.getAttribute('typeID')) not in itemIDs:
            itemIDs.append(int(row.getAttribute('typeID')))
        baseUrl = 'https://api.eveonline.com/corp/StarbaseDetail.xml.aspx?keyID=%s&vCode=%s&itemID=%s'
        apiURL = baseUrl % (config.pilotRows[x].keyID, config.pilotRows[x].vCode, row.getAttribute('itemID'))
        # print(apiURL) # Console debug

        def starbase_detail(self, result):
            # Try to connect to the API server
            # Detail callback: gather fuel levels plus item/location ids,
            # resolve names, and append a Starbase row to the temp list.
            XMLData = parseString(result)
            starbaseDetailNodes = XMLData.getElementsByTagName("row")
            fuel = []
            for entry in starbaseDetailNodes:
                if int(entry.getAttribute('typeID')) not in itemIDs:
                    itemIDs.append(int(entry.getAttribute('typeID')))
                fuel.append(int(entry.getAttribute('typeID')))
                fuel.append(int(entry.getAttribute('quantity')))
            if int(row.getAttribute('locationID')) not in locationIDs:
                locationIDs.append(int(row.getAttribute('locationID')))
            if int(row.getAttribute('moonID')) not in locationIDs:
                locationIDs.append(int(row.getAttribute('moonID')))
            itemNames = id2name('item', itemIDs)
            locationNames = id2location(x, locationIDs, config.pilotRows)
            tempStarbaseRows.append(Starbase(row.getAttribute('itemID'), int(row.getAttribute('typeID')), itemNames[int(row.getAttribute('typeID'))], locationNames[int(row.getAttribute('locationID'))], locationNames[int(row.getAttribute('moonID'))], int(row.getAttribute('state')), row.getAttribute('stateTimestamp'), row.getAttribute('onlineTimestamp'), fuel, row.getAttribute('standingOwnerID')))
            # itemID,typeID,locationID,moonID,state,stateTimestamp,onlineTimestamp,standingOwnerID

        def server_error(self, error):
            # Transport error callback for the detail request.
            status = 'Error Connecting to %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
            onError(status)
            print(status)

        target.state = ('Connecting to ' + config.serverConn.svrName)
        UrlRequest(apiURL, on_success=starbase_detail, on_error=server_error, req_headers=config.headers)


target.state = ('Connecting to ' + config.serverConn.svrName)
UrlRequest(apiURL, on_success=starbases_process, on_error=server_error, req_headers=config.headers)
def getJobs(target):
    """Event handler to fetch job data from server"""
    # NOTE(review): reconstructed from a whitespace-mangled source — the
    # grouping of nested defs/ifs was inferred; confirm against the
    # original file.
    timingMsg = 'Using Local Cache'
    # Inform the user what we are doing.
    target.state = ('Connecting to ' + config.serverConn.svrName)
    if config.serverConn.svrStatus == 'Online':
        # Status has returned a value other than online, so why continue?
        if config.serverTime >= config.jobsCachedUntil:
            # Start the clock.
            t = time.clock()
            tempJobRows = []
            if config.pilotRows != []:  # Make sure we have keys in the config
                # keyID, vCode, characterID, characterName, corporationID, corporationName, keyType, keyExpires, skills, isActive
                numPilotRows = list(range(len(config.pilotRows)))
                for x in numPilotRows:
                    # Iterate over all of the keys and character ids in config
                    # Download the Account Industry Data
                    keyOK = 1  # Set key check to OK test below changes if expired
                    if config.pilotRows[x].keyExpires != 'Never':
                        if config.pilotRows[x].keyExpires < config.serverTime:
                            keyOK = 0
                            error = ('KeyID ' + config.pilotRows[x].keyID + ' has Expired')
                            onError(error)
                    if keyOK == 1:
                        if config.pilotRows[x].keyType == 'Corporation':
                            # baseUrl = 'corp/IndustryJobs.xml.aspx?keyID=%s&vCode=%s&characterID=%s'
                            apiURL = config.serverConn.svrAddress + config.corpIndustry % (config.pilotRows[x].keyID, config.pilotRows[x].vCode, config.pilotRows[x].characterID)
                        else:  # Should be an account key
                            # baseUrl = 'char/IndustryJobs.xml.aspx?keyID=%s&vCode=%s&characterID=%s'
                            apiURL = config.serverConn.svrAddress + config.charIndustry % (config.pilotRows[x].keyID, config.pilotRows[x].vCode, config.pilotRows[x].characterID)
                        # apiURL = config.serverConn.svrAddress + baseUrl % (config.pilotRows[x].keyID, config.pilotRows[x].vCode, config.pilotRows[x].characterID)
                        print(apiURL)  # Console debug

                        def jobs_process(self, result):
                            # Success callback: cache expiry, then build Job
                            # rows for everything not yet delivered.
                            XMLData = parseString(result)
                            dataNodes = XMLData.getElementsByTagName("row")
                            cacheuntil = XMLData.getElementsByTagName('cachedUntil')
                            cacheExpire = datetime.datetime(*(time.strptime((cacheuntil[0].firstChild.nodeValue), "%Y-%m-%d %H:%M:%S")[0:6]))
                            config.jobsCachedUntil = cacheExpire
                            # itemIDs = []  # obsolete
                            # installerIDs = []  # obsolete
                            locationIDs = []
                            for row in dataNodes:
                                if row.getAttribute('status') != '101':  # Ignore Delivered Jobs
                                    # if int(row.getAttribute('installedItemTypeID')) not in itemIDs:
                                    #     itemIDs.append(int(row.getAttribute('installedItemTypeID')))
                                    # if int(row.getAttribute('outputTypeID')) not in itemIDs:
                                    #     itemIDs.append(int(row.getAttribute('outputTypeID')))
                                    # if int(row.getAttribute('installerID')) not in installerIDs:
                                    #     installerIDs.append(int(row.getAttribute('installerID')))
                                    if int(row.getAttribute('outputLocationID')) not in locationIDs:
                                        locationIDs.append(int(row.getAttribute('outputLocationID')))
                                    # if int(row.getAttribute('installedInSolarSystemID')) not in locationIDs:
                                    #     locationIDs.append(int(row.getAttribute('installedInSolarSystemID')))
                            # itemNames = id2name('item', itemIDs)  # Depreciated
                            # pilotNames = id2name('character', installerIDs)  # Depreciated
                            # locationNames = id2location(x, locationIDs, config.pilotRows)
                            for row in dataNodes:
                                if row.getAttribute('status') != '101':  # Ignore Delivered Jobs
                                    tempJobRows.append(Job(
                                        row.getAttribute('jobID'),
                                        row.getAttribute('status'),
                                        int(row.getAttribute('activityID')),  # Leave as int for clauses
                                        # itemNames[int(row.getAttribute('installedItemTypeID'))],
                                        row.getAttribute('blueprintTypeName'),
                                        # int(row.getAttribute('installedItemProductivityLevel')),
                                        # int(row.getAttribute('installedItemMaterialLevel')),
                                        # locationNames[int(row.getAttribute('outputLocationID'))],
                                        int(row.getAttribute('outputLocationID')),
                                        # locationNames[int(row.getAttribute('installedInSolarSystemID'))],
                                        row.getAttribute('solarSystemName'),
                                        # pilotNames[int(row.getAttribute('installerID'))],
                                        row.getAttribute('installerName'),
                                        int(row.getAttribute('runs')),
                                        # row.getAttribute('outputTypeID'),
                                        row.getAttribute('productTypeName'),
                                        # row.getAttribute('installTime'),
                                        row.getAttribute('startDate'),
                                        # row.getAttribute('endProductionTime'),
                                        row.getAttribute('endDate')))
                                    # Add job data to local cache.
                                    config.jobCache.put(
                                        row.getAttribute('jobID'),
                                        jobID=row.getAttribute('jobID'),
                                        status=row.getAttribute('status'),
                                        activityID=int(row.getAttribute('activityID')),  # Leave as int for clauses
                                        blueprintTypeName=row.getAttribute('blueprintTypeName'),
                                        outputLocationID=int(row.getAttribute('outputLocationID')),
                                        solarSystemName=row.getAttribute('solarSystemName'),
                                        installerName=row.getAttribute('installerName'),
                                        runs=int(row.getAttribute('runs')),
                                        productTypeName=row.getAttribute('productTypeName'),
                                        startDate=row.getAttribute('startDate'),
                                        endDate=row.getAttribute('endDate'))
                            config.statusCache.put('jobs', cacheExpires=cacheuntil[0].firstChild.nodeValue)
                            # Old API:
                            # columns="assemblyLineID,containerID,installedItemLocationID,installedItemQuantity,
                            # installedItemLicensedProductionRunsRemaining,outputLocationID,licensedProductionRuns,
                            # installedInSolarSystemID,containerLocationID,materialMultiplier,charMaterialMultiplier,
                            # timeMultiplier,charTimeMultiplier,containerTypeID,installedItemCopy,completed,
                            # completedSuccessfully,installedItemFlag,outputFlag,completedStatus,beginProductionTime,
                            # pauseProductionTime"
                            # New API Output
                            # columns="jobID,installerID,installerName,facilityID,solarSystemID,solarSystemName,
                            # stationID,activityID,blueprintID,blueprintTypeID,blueprintTypeName,blueprintLocationID,
                            # outputLocationID,runs,cost,teamID,licensedRuns,probability,productTypeID,productTypeName,
                            # status,timeInSeconds,startDate,endDate,pauseDate,completedDate,completedCharacterID,successfulRuns"
                            print(tempJobRows)

                        def server_error(self, error):
                            # Transport error callback for the jobs request.
                            status = 'Error Connecting to %s:\n%s\nAt: %s' % (config.serverConn.svrName, str(error), config.serverTime)
                            onError(status)
                            print(status)

                        target.state = ('Connecting to ' + config.serverConn.svrName)
                        UrlRequest(apiURL, on_success=jobs_process, on_error=server_error, req_headers=config.headers)
                if tempJobRows != []:
                    config.jobRows = tempJobRows[:]
                    # self.jobList.SetObjects(config.jobRows)
                timingMsg = '%0.2f ms' % (((time.clock() - t) * 1000))
                target.state = str(timingMsg)
                print(timingMsg + '(Fetch Jobs)')
            else:
                onError('Please open Config to enter a valid API key')
        else:
            # Don't Contact server as cache timer hasn't expired
            # Iterate over the jobs and change their status if they should be ready.
            numItems = list(range(len(config.jobRows)))
            for r in numItems:
                if config.jobRows[r].endProductionTime > config.serverTime:
                    config.jobRows[r].timeRemaining = config.jobRows[r].endProductionTime - config.serverTime
                    config.jobRows[r].state = 'In Progress'
                else:
                    config.jobRows[r].timeRemaining = config.jobRows[r].endProductionTime - config.serverTime
                    config.jobRows[r].state = 'Ready'
            # self.jobList.RefreshObjects(config.jobRows)
            print('Not Contacting Server, Cache Not Expired')
            target.state = timingMsg
    else:
        # Server status is 'Offline' so skip everything send 'Using local cache' to status bar.
        target.state = timingMsg
    return ()
def apiCheck(keyID, vCode):
    """Validate an EVE API key and collect its attached pilots.

    Fetches APIKeyInfo.xml for (keyID, vCode), and for each character row
    returned, fetches that character's skills via skillCheck() and appends a
    pilot record to the returned list.

    Args:
        keyID: the API key id (string or int, formatted into the URL).
        vCode: the verification code for that key.

    Returns:
        list of [keyID, vCode, characterID, characterName, corporationID,
        corporationName, keyType, keyExpires, skills] lists — empty if the
        request failed (api_error / api_fail only report via onError).
    """
    pilots = []
    # NOTE(review): this outer `skills` is shadowed by the local assignment
    # inside api_process below and is never read — confirm it can be removed.
    skills = {}
    baseUrl = config.serverConn.svrAddress + 'account/APIKeyInfo.xml.aspx?keyID=%s&vCode=%s'
    apiURL = baseUrl % (keyID, vCode)
    print(apiURL)  # Console debug

    # Success callback. Kivy's UrlRequest invokes callbacks as
    # callback(request, result), so `self` here actually receives the
    # UrlRequest instance, not an object instance.
    def api_process(self, result):
        XMLData = parseString(result)
        key = XMLData.getElementsByTagName('key')
        keyInfo = {
            'accessMask': key[0].getAttribute('accessMask'),
            'type': key[0].getAttribute('type'),
            'expires': key[0].getAttribute('expires')
        }
        dataNodes = XMLData.getElementsByTagName('row')
        for row in dataNodes:
            # TODO: Need to find out a way to not call this if the key has no access to this data. AccessMask?
            skills = skillCheck(keyID, vCode, row.getAttribute('characterID'))
            # Returned columns: characterID,characterName,corporationID,corporationName,allianceID,allianceName,factionID,factionName
            pilots.append([
                keyID, vCode,
                row.getAttribute('characterID'),
                row.getAttribute('characterName'),
                row.getAttribute('corporationID'),
                row.getAttribute('corporationName'),
                keyInfo['type'], keyInfo['expires'], skills
            ])
        print('Pilots at end of api_process: ' + str(pilots))
        # Return value of a UrlRequest callback is discarded; `pilots` is
        # shared with the enclosing function via the closure instead.
        return pilots

    # Failure callback: server answered but returned an API error document.
    def api_fail(self, result):
        XMLData = parseString(result)
        error = XMLData.getElementsByTagName('error')
        errorInfo = {
            'number': error[0].getAttribute('code'),
            'text': error[0].firstChild.nodeValue
        }
        status = '%s Returned Error:\n%s\n%s' % (
            config.serverConn.svrName, errorInfo['number'], errorInfo['text'])
        onError(status)

    # Error callback: transport-level failure (no response).
    def api_error(self, error):
        status = 'Error Connecting to %s:\n%s\nAt: %s' % (
            config.serverConn.svrName, str(error), config.serverTime)
        onError(status)

    req = UrlRequest(apiURL, on_success=api_process, on_error=api_error,
                     on_failure=api_fail, req_headers=config.headers)
    # Block until the request (and therefore api_process) has completed so
    # `pilots` is populated before we return.
    req.wait()
    return pilots
def search(self, text):
    """Fire an asynchronous search for *text*; got_search handles the reply."""
    # Fill the module-level URL template with the query and API key.
    endpoint = _search % (text, _api)
    # UrlRequest starts fetching on construction; the handle is local only.
    request = UrlRequest(endpoint, self.got_search)
def GetYoutubeRTSP(uID):
    """Fetch GData metadata for a hard-coded YouTube video (legacy helper).

    NOTE(review): the uID argument is ignored — the video id is hard-coded
    below; confirm whether callers expect uID to be used instead.
    """
    feed_base = "http://gdata.youtube.com/feeds/api/videos/"
    hardcoded_video = "_iuukyjCz74"
    return UrlRequest(feed_base + hardcoded_video, on_success=YTOnSuccess)
def search_location(self):
    """Query OpenWeatherMap's city-search endpoint with the typed-in text."""
    query = self.search_input.text
    url = ("http://api.openweathermap.org/data/2.5/find?q={}"
           "&type=like&units=metric&appid=45672fd26353514b90d66ff9f9766757").format(query)
    # Asynchronous fetch; found_location receives the decoded response.
    request = UrlRequest(url, self.found_location)
def get_sort(req, result):
    """UrlRequest success callback: store the sort list, then fetch own sort.

    NOTE(review): `self`, server_url, config_dict and get_my_sort are free
    variables — confirm this callback is defined inside an enclosing method.
    """
    self.sort_list = json.loads(result)
    follow_up = server_url + "/id/?id={}".format(int(config_dict["ID"]))
    UrlRequest(follow_up, get_my_sort)
def run_UrlRequests(self, *args):
    """Fetch current gas prices from the ETH Gas Station API.

    Bug fix: the URL literal was missing the ``f`` prefix, so the text
    ``{defipulse_credentials.defipulseApikey}`` was sent verbatim instead of
    the actual API key. The request now interpolates the key.
    """
    # NOTE(review): assumes the module-level `data` dict has been populated
    # by an earlier request — confirm ordering with the caller.
    trader_time = data['fastestWait']
    self.r = UrlRequest(
        f"https://ethgasstation.info/api/ethgasAPI.json?api-key={defipulse_credentials.defipulseApikey}",
        req_body=trader_time,
        on_success=partial(self.up_label))
def getServerStatus(cacheExpire, serverTime, target):
    """Refresh server status from the API and push it into the status bar.

    Args:
        cacheExpire: datetime after which the cached status is stale.
        serverTime: current server-side datetime.
        target: GUI status-bar object; its server/players/serverTime/
            jobsCachedUntil/state attributes are written by the callback.

    Only contacts the server when serverTime >= cacheExpire; otherwise this
    visible portion does nothing (an else-branch may follow outside view).
    """
    print(target)
    # Only query the server if the cache time has expired.
    if serverTime >= cacheExpire:
        # Download the Server Status Data from API server
        apiURL = config.serverConn.svrAddress + 'server/ServerStatus.xml.aspx/'
        print(apiURL)
        # Start the clock! Used by the callback (via closure) to compute ping.
        # NOTE(review): time.clock() was removed in Python 3.8 — this file
        # appears to target older Pythons; confirm before upgrading.
        t = time.clock()

        # Success callback; Kivy calls it as callback(request, result), so
        # `self` is actually the UrlRequest instance.
        def server_status(self, result):
            XMLData = parseString(result)
            currentTime = XMLData.getElementsByTagName('currentTime')
            # NOTE: rebinds the `result` parameter to the <result> XML node.
            result = XMLData.getElementsByTagName('result')
            serverOpen = result[0].getElementsByTagName('serverOpen')
            onlinePlayers = result[0].getElementsByTagName('onlinePlayers')
            cacheUntil = XMLData.getElementsByTagName('cachedUntil')
            # The current time as reported by the server at time of query.
            serCurrentTime = datetime.datetime(
                *(time.strptime((currentTime[0].firstChild.nodeValue
                                 ), '%Y-%m-%d %H:%M:%S')[0:6]))
            # Use the server reported UTC time to check the clock of our device.
            checkClockDrift(serCurrentTime)
            # This is returned as 'True' for open from the api server.
            # NOTE(review): any non-empty string (including 'False') is truthy
            # here — confirm the API only sends a value when the server is open.
            if (serverOpen[0].firstChild.nodeValue):
                config.serverConn.svrStatus = 'Online'
            else:
                config.serverConn.svrStatus = 'Down'
            config.serverConn.svrPlayers = (
                onlinePlayers[0].firstChild.nodeValue)
            config.serverConn.svrCacheExpire = datetime.datetime(
                *(time.strptime((cacheUntil[0].firstChild.nodeValue
                                 ), '%Y-%m-%d %H:%M:%S')[0:6]))
            # Stop the clock for this update (elapsed ms since `t`).
            config.serverConn.svrPing = '%0.2f ms' % ((
                (time.clock() - t) * 1000))
            # Send the data to the gui elements of status_bar
            target.server = str(
                '%s %s' % (config.serverConn.svrName,
                           config.serverConn.svrStatus))
            target.players = str(config.serverConn.svrPlayers)
            target.serverTime = str(config.serverTime)
            target.jobsCachedUntil = str(config.serverConn.svrCacheExpire)
            target.state = str(config.serverConn.svrPing)
            # Update the statusCache JSON file.
            config.statusCache.put(
                'server',
                name=config.serverConn.svrName,
                status=config.serverConn.svrStatus,
                players=config.serverConn.svrPlayers,
                cacheExpires=(cacheUntil[0].firstChild.nodeValue),
                ping=config.serverConn.svrPing)
            print(config.serverConn.svrPing + '(Server Status)')

        # Transport-level error callback: report via onError and console.
        def server_error(self, error):
            status = 'Error Connecting to %s:\n%s\nAt: %s' % (
                config.serverConn.svrName, str(error), config.serverTime)
            onError(status)
            print(status)

        target.state = ('Connecting to ' + config.serverConn.svrName)
        # Fire-and-forget: no wait() here, the callback updates the GUI.
        UrlRequest(apiURL, on_success=server_status, on_error=server_error,
                   req_headers=config.headers)
class Trivia:
    """Trivia game state machine backed by an Open-Trivia-DB-style API.

    Tracks score and round, fetches question sets either from a local sample
    file or from the web API, and HTML-unescapes the fetched questions.

    Fixes applied:
      * literal comparisons used ``is not`` (identity) instead of ``!=`` —
        implementation-dependent and a SyntaxWarning on modern Pythons;
      * bare ``except:`` narrowed to ``except Exception``.
    """

    def __init__(self, use_sample_data=False):
        self.quiz_data = None      # list of question dicts once fetched
        self.req = None            # in-flight UrlRequest, if any
        self.score = 0
        self.round = 0             # 0-based index of the current question
        self.running = False
        self.use_sample_data = use_sample_data

    def new_game(self, api_url, difficulty, category, amount, q_type,
                 wait=False):
        """Reset score/round and fetch a fresh set of questions."""
        self.score = 0
        self.round = 0
        self.fetch_new(api_url, difficulty, category, amount, q_type,
                       wait=wait)

    def get_current_round(self):
        """Return the 1-based round number for display."""
        return self.round + 1

    def get_total_rounds(self):
        """Number of questions in the current quiz (0 if none loaded)."""
        return len(self.quiz_data) if self.quiz_data else 0

    def get_current_question(self):
        """Return the current question dict, or False when no game is live."""
        return self.quiz_data[self.round] if (
            self.running and self.round < len(self.quiz_data)) else False

    def register_answer(self, result):
        """Record a correct (truthy) / wrong answer and advance the round.

        Ends the game (running=False) after the last question.
        """
        if self.running:
            if result:
                self.score += 1
            print("CURRENT SCORE: {}".format(self.score))
            self.round += 1
            if self.round >= len(self.quiz_data):
                self.running = False

    def check_game(self):
        """True while a game is in progress."""
        return self.running

    def fetch_new(self, api_url, difficulty, category, amount, q_type,
                  wait=False):
        """Fetch quiz questions from sample data or the web API.

        Empty-string difficulty/q_type and category 0 mean "any" and are
        omitted from the query string.
        """
        if self.use_sample_data:
            import json
            with open('./resources/sample_quiz_data.json') as f:
                data = json.load(f)
            self.fetch_success(None, data)
        else:
            base_url = api_url + '?'
            # Value comparisons here (the original used `is not`, which
            # compares object identity and only worked by accident).
            if difficulty != '':
                base_url += 'difficulty=' + str(difficulty) + '&'
            if category != 0:
                base_url += 'category=' + str(category) + '&'
            if q_type != '':
                base_url += 'type=' + str(q_type) + '&'
            base_url += 'amount=' + str(amount)
            self.req = UrlRequest(base_url,
                                  on_success=self.fetch_success,
                                  on_failure=self.fetch_fail,
                                  on_error=self.fetch_error)
            if wait:
                self.req.wait()

    def fetch_success(self, request, result):
        """Store the decoded question list and start the game."""
        self.quiz_data = self.html_decode(result['results'])
        self.running = True
        try:
            print(self.quiz_data)
            print()
        except Exception:
            # Probably a unicode error on limited consoles; non-fatal.
            print("Could not print quiz data.")

    def fetch_fail(self, request, result):
        print("Failure fetching quiz data: {}".format(result))

    def fetch_error(self, request, error):
        print("Error fetching quiz data: {}".format(error))

    def html_decode(self, quiz_obj):
        """
        URL decodes or unencodes the quiz data recursively.
        Credits to https://nvie.com/posts/modifying-deeply-nested-structures/
        """
        if isinstance(quiz_obj, dict):
            return {k: self.html_decode(v) for k, v in quiz_obj.items()}
        elif isinstance(quiz_obj, list):
            return [self.html_decode(elem) for elem in quiz_obj]
        else:
            return unescape(quiz_obj)
def get_game_status(self, dt):
    """Clock callback: poll the game server for the current game status."""
    json_headers = {'Content-type': 'application/json'}
    # Fire-and-forget; update_game_status consumes the response.
    poll = UrlRequest('http://195.169.210.194:1234/1',
                      on_success=self.update_game_status,
                      req_headers=json_headers)
def patchCreate(self, JSON):
    """PATCH a new node keyed by *JSON* (a key name) to self.url.

    Fixes:
      * the payload was built by splicing the key into a JSON string and
        re-parsing it, which broke on keys containing quotes/backslashes —
        build the dict directly instead (same resulting value);
      * the dict was passed as req_body, but Kivy's UrlRequest sends
        req_body as the raw request body string — serialize it first.
    """
    toDatabase = {JSON: {"id": "null"}}
    # requests.patch(url=self.url, json=toDatabase)
    UrlRequest(self.url, req_body=json.dumps(toDatabase), method='PATCH')
def search_location(self):
    """Look up the typed-in city via OpenWeatherMap's find endpoint."""
    print("starting search...")
    template = "http://api.openweathermap.org/data/2.5/find?q={}&type=like&APPID=f57ecb9a1d30725e5b652cad7e9c5a1e"
    url = template.format(self.search_input.text)
    print("search url: {}".format(url))
    # Async fetch; found_location receives the parsed response.
    pending = UrlRequest(url, self.found_location)
def get(self):
    """GET the resource at self.url (with auth key) and print the result.

    Bug fix: the original called json.loads() on the UrlRequest object
    itself, which raises TypeError. Block until the request completes,
    then print request.result (UrlRequest decodes JSON responses itself).
    """
    # request = requests.get(self.url + '?auth=' + self.authKey)
    request = UrlRequest(self.url + '?auth=' + self.authKey, method='GET')
    request.wait()
    print(request.result)
def SendCommand(self):
    """Probe self.uIP for an Enigma2 receiver via its /web/about endpoint.

    On a matching response, parses the model name from the XML, resolves the
    host name, and appends IPv4 (and, when resolvable, IPv6) result records
    to self.oCaller.aResults under cThread_CheckIP.oWaitLock.
    """
    self.bStopWait = False
    uUrlFull = "http://" + self.uIP + "/web/about"
    try:
        self.oReq = UrlRequest(uUrlFull, method="GET",
                               timeout=self.fTimeOut,
                               on_error=self.OnError,
                               on_success=self.OnReceive)
        # Short cooperative wait for the async request to finish.
        self.NewWait(0.05)
        # resp_status is only set once a response has arrived.
        if self.oReq.resp_status is not None:
            uResult = self.oReq.result
            # Only Enigma2 boxes answer with an <e2abouts> document.
            if "<e2abouts>" in uResult:
                if PY2:
                    # Python 2: escape non-ASCII before XML parsing.
                    oXmlRoot = fromstring(
                        uResult.encode('ascii', 'xmlcharrefreplace'))
                else:
                    oXmlRoot = fromstring(uResult)
                oXmlAbout = oXmlRoot.find("e2about")
                uModel = GetXMLTextValue(oXmlAbout, "e2model", False,
                                         "Enigma")
                uFoundHostName = ""
                try:
                    # Reverse-DNS the IP; best effort only.
                    uFoundHostName = socket.gethostbyaddr(self.uIP)[0]
                except Exception as e:
                    # Logger.error("Cant get Hostname:"+oRet.sFoundIP+" "+str(e))
                    pass
                # Guard shared results list across scanner threads.
                # NOTE(review): if an exception fires between acquire() and
                # release() the lock leaks — consider a with-statement.
                cThread_CheckIP.oWaitLock.acquire()
                self.oCaller.aResults.append({
                    "ip": self.uIP,
                    "port": 80,
                    "model": uModel,
                    "ipversion": "IPv4",
                    "hostname": uFoundHostName
                })
                try:
                    # Also record a bracketed IPv6 address when the host
                    # name resolves to one.
                    uIP = ""
                    aIPs = socket.getaddrinfo(uFoundHostName, None)
                    for tIP in aIPs:
                        uIP = "[" + tIP[-1][0] + "]"
                        if ":" in uIP:
                            break
                    if ":" in uIP:
                        self.oCaller.aResults.append({
                            "ip": uIP,
                            "port": 80,
                            "model": uModel,
                            "ipversion": "IPv6",
                            "hostname": uFoundHostName
                        })
                except Exception as e:
                    pass
                cThread_CheckIP.oWaitLock.release()
    except Exception as e:
        self.oCaller.ShowError("Error on send:", e)
    return
def statcor(self):
    """Start fetching Indonesian COVID stats; sucescor fills in the UI."""
    endpoint = 'https://api.kawalcorona.com/indonesia/'
    self.request = UrlRequest(endpoint, on_success=self.sucescor, verify=True)
    # NOTE(review): this local is never used afterwards — confirm whether
    # reading the label here is intentional.
    user = self.help_str.get_screen('stat').ids.positif.text
def id2name(idType, ids):
    """Resolve a list of EVE ids to display names.

    Args:
        idType: 'item' (looked up in the local static sqlite DB) or
            'character' (looked up via a pickle cache plus the API server).
        ids: list of ids to resolve. NOTE: this list is mutated in place
            (resolved / cached ids are removed).

    Returns:
        dict mapping int(id) -> name string; ids that could not be resolved
        map to their own id as a string.
    """
    typeNames = {}
    if idType == 'item':
        # We'll use the local static DB for items as they don't change.
        if ids != []:
            # We have some ids we don't know.
            try:
                # Build "a', 'b', 'c" so the final statement reads
                # IN ('a', 'b', 'c').
                # NOTE(review): SQL built by string concatenation — fine for
                # numeric ids but unsafe if ids could be untrusted strings.
                idList = ("', '".join(map(str, ids[:])))
                con = lite.connect(config.staticDB)
                with con:
                    cur = con.cursor()
                    statement = "SELECT typeID, typeName FROM invtypes WHERE typeID IN ('" + idList + "')"
                    cur.execute(statement)
                    rows = cur.fetchall()
                    # Use the item strings returned to populate the typeNames dictionary.
                    for row in rows:
                        typeNames.update({int(row[0]): str(row[1])})
                        ids.remove(row[0])
                    if ids != []:
                        # We have some ids we don't know: map them to
                        # themselves and report.
                        numItems = range(len(ids))
                        for y in numItems:
                            typeNames.update({int(ids[y]): str(ids[y])})
                        error = ('ids not found in database: ' + str(ids)
                                 )  # Error String
                        onError(error)
            except lite.Error as err:
                # DB failed entirely: fall back to id-as-name for all ids.
                error = ('SQL Lite Error: ' + str(err.args[0]) +
                         str(err.args[1:]))  # Error String
                ids = idList.split("', '")
                numItems = range(len(ids))
                for y in numItems:
                    typeNames.update({int(ids[y]): str(ids[y])})
                onError(error)
            finally:
                if con:
                    con.close()
    elif idType == 'character':
        # TODO: Check if Depreciated.
        # We'll have to talk to the API server for Pilot names as this can't be in the static dump.
        # cacheFile = config.characterCache
        cacheFile = '../character.cache'
        # TODO: Change to JSON if not depreciated. (See above TODO)
        # NOTE(review): this tests for a file literally named 'cacheFile',
        # not the cacheFile variable above — looks like a bug, so the cache
        # branch never runs; confirm and change to os.path.isfile(cacheFile).
        if (os.path.isfile('cacheFile')):
            # NOTE(review): text-mode pickle I/O ('r'/'w' below) only works
            # on Python 2 — Python 3 pickle requires binary mode.
            typeFile = open(cacheFile, 'r')
            typeNames = pickle.load(typeFile)
            typeFile.close()
            numItems = list(range(len(ids)))
            # print(ids)  # Console debug
            # Drop ids already present in the cache (two-pass: mark, then
            # remove, to avoid mutating while indexing).
            for x in numItems:
                if ids[x] in typeNames:
                    ids[x] = 'deleted'
            for y in ids[:]:
                if y == 'deleted':
                    ids.remove(y)
            # print(ids)  # Console debug
        if ids != []:
            # We still have some ids we don't know
            baseUrl = config.serverConn.svrAddress + 'eve/CharacterName.xml.aspx?ids=%s'
            key = 'characterID'
            value = 'name'
            # Calculate the number of ids we have left. Server has hard maximum of 250 IDs per query.
            # So we'll need to split this into multiple queries.
            numIDs = len(ids)
            idList = []
            if numIDs > 250:
                # Chunk the ids into comma-joined batches of <= 250.
                startID = 0
                endID = 250
                while startID < numIDs:
                    idList.append(','.join(map(str, ids[startID:endID])))
                    startID = startID + 250
                    if ((numIDs - endID)) > 250:
                        endID = endID + 250
                    else:
                        endID = numIDs
            else:
                idList.append(','.join(map(str, ids[0:numIDs])))
            numIdLists = list(range(len(idList)))
            for x in numIdLists:
                # Iterate over all of the id lists generated above.
                # Download the CharacterName Data from API server
                apiURL = baseUrl % (idList[x])
                # print(apiURL)  # Console debug

                # Success callback; `self` actually receives the UrlRequest
                # instance. Closes over typeNames/cacheFile/key/value. Safe
                # w.r.t. the loop variable because req.wait() below forces
                # the callback to fire before the next iteration.
                def characterNames_process(self, result):
                    XMLData = parseString(result)
                    dataNodes = XMLData.getElementsByTagName('row')
                    for row in dataNodes:
                        typeNames.update({
                            int(row.getAttribute(key)):
                            str(row.getAttribute(value))
                        })
                    # config.characterCache.put(int(row.getAttribute(key)), name=str(row.getAttribute(value)))
                    # Save the data we have so we don't have to fetch it
                    typeFile = open(cacheFile, 'w')
                    pickle.dump(typeNames, typeFile)
                    typeFile.close()

                # Error callback: map this batch's ids to themselves.
                def characterNames_error(self, error):
                    status = 'Error Connecting to %s:\n%s\nAt: %s' % (
                        config.serverConn.svrName, str(error),
                        config.serverTime)
                    ids = idList[x].split(',')
                    numItems = range(len(ids))
                    for y in numItems:
                        typeNames.update({int(ids[y]): str(ids[y])})
                    onError(status)
                    print(status)

                req = UrlRequest(apiURL,
                                 on_success=characterNames_process,
                                 on_error=characterNames_error,
                                 req_headers=config.headers)
                req.wait()
    return typeNames
from kivy.network.urlrequest import UrlRequest req = UrlRequest("http://*****:*****@gmail.com", "country": "Bangladesh", "phone": "8801727309106",
def search_location(self):
    """Search OpenWeatherMap for locations matching the input field."""
    base = "http://api.openweathermap.org/data/2.5/find?q={}&type=like&APPID={}"
    target_url = base.format(self.search_input.text, APPID)
    # found_location is invoked with the decoded search results.
    request = UrlRequest(target_url, self.found_location)
def geocode_get_lat_lon(self, address):
    """Geocode *address* via the HERE API; results arrive in the
    success/failure/error callbacks."""
    # API key is kept out of the source in a local text file.
    with open('gps_apikey.txt', 'r') as key_file:
        api_key = key_file.read()
    encoded_address = parse.quote(address)
    request_url = ("https://geocoder.ls.hereapi.com/6.2/geocode.json?apiKey="
                   + api_key + "&searchtext=" + encoded_address)
    UrlRequest(request_url,
               on_success=self.success,
               on_failure=self.failure,
               on_error=self.error,
               ca_file=certifi.where())
def search(self, input):
    """Fetch an Instagram profile by name and populate module-level state.

    Downloads account info for *input*, fills the global profile/post
    variables consumed by the result screen, then switches to it. Shows an
    MDDialog on empty input or on any failure.

    NOTE(review): `input` and `type` shadow builtins; the bare except at the
    bottom hides all errors (network, KeyError, etc.) behind one dialog.
    """
    try:
        if input != '':
            # NOTE(review): str.replace returns a new string — this result
            # is discarded, so spaces are NOT actually stripped. Likely bug.
            input.replace(" ", "")
            urli = endpoint.request_account_info(input)
            # Blocking fetch; verify=False skips TLS verification.
            req = UrlRequest(urli, ca_file=certifi.where(), verify=False)
            req.wait()
            #req2=UrlRequest('http://localhost:5000/s/'+input)
            #req2.wait()
            #print(req2.result)
            data = req.result
            # Results are published as module globals for other screens.
            global source
            global profile
            global Followers
            global Followings
            global Posts
            global ID
            global account
            global real
            global Post
            global private
            global bio
            global ver
            account = input
            profile = input + '.jpg'
            # NOTE(review): `source` is immediately overwritten below —
            # confirm this first assignment is needed.
            source = self.profile
            self.profile = input + '.jpg'
            source = data['graphql']['user']['profile_pic_url']
            Followers = str(
                data['graphql']['user']['edge_followed_by']['count'])
            Followings = str(
                data['graphql']['user']['edge_follow']['count'])
            Posts = str(data['graphql']['user']
                        ['edge_owner_to_timeline_media']['count'])
            bio = data['graphql']['user']['biography']
            ID = str(data['graphql']['user']['id'])
            real = str(data['graphql']['user']['full_name'])
            ver = data['graphql']['user']['is_verified']
            private = data['graphql']['user']['is_private']
            # Number of posts available in the timeline preview.
            p = len(data['graphql']['user']['edge_owner_to_timeline_media']
                    ['edges'])
            Post.clear()
            # Build one summary dict per post.
            for i in range(p):
                url = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'thumbnail_resources'][2]['src']
                comments = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'edge_media_to_comment']['count']
                likes = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'edge_liked_by']['count']
                # is_video flag for this post.
                t = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'is_video']
                # NOTE(review): `time` shadows any imported time module here.
                time = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'taken_at_timestamp']
                date = datetime.fromtimestamp(time)
                shortcode = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'shortcode']
                location = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'location']
                if (location != None):
                    location = location['name']
                # Caption edges (may be empty when the post has no caption).
                h = data['graphql']['user'][
                    'edge_owner_to_timeline_media']['edges'][i]['node'][
                    'edge_media_to_caption']['edges']
                hashtags = []
                if (len(h) > 0):
                    txt = data['graphql']['user'][
                        'edge_owner_to_timeline_media']['edges'][i][
                        'node']['edge_media_to_caption']['edges'][0][
                        'node']['text']
                    # Split on '#'; drop the leading non-hashtag segment.
                    hashtags = txt.split('#')
                    if ((len(txt) > 0 and txt[0] != '#') or txt == ''):
                        hashtags.pop(0)
                type = 'Photo'
                count = 0
                if (t):
                    type = 'Video'
                    count = data['graphql']['user'][
                        'edge_owner_to_timeline_media']['edges'][i][
                        'node']['video_view_count']
                Post.append({
                    "url": url,
                    "comments": comments,
                    "likes": likes,
                    "type": type,
                    "date": str(date),
                    "hashtags": hashtags,
                    "shortcode": shortcode,
                    "location": location,
                    "view": count
                })
            self.root.current = 'result'
        elif (input == ''):
            dialog = MDDialog(title="Alert",
                              size_hint=(0.8, 0.3),
                              text_button_ok="Ok",
                              text='insert a profile name')
            dialog.open()
    except:
        # NOTE(review): bare except — any failure (not just "profile not
        # found") lands here; consider narrowing and logging.
        dialog = MDDialog(
            title="Error",
            size_hint=(0.8, 0.3),
            text_button_ok="Ok",
            text="There's no profile corresponding to this name ")
        dialog.open()
def __init__(self, url):
    """Start fetching subtitles from *url*; got_subtitles stores them."""
    self.subtitles = []
    # The request begins on construction; the handle itself is not kept.
    pending = UrlRequest(url, self.got_subtitles)
def on_enter(self, *args):
    """Build the result screen from the module-level profile globals.

    Downloads the profile picture, fills the header labels, then renders
    comment/like bar plots, engagement rate, hashtag chart, most-liked post
    and one widget box per post into the `ppp` container.
    """
    # Blocking download of the profile picture into the local file `profile`.
    req = UrlRequest(source, file_path=profile, ca_file=certifi.where(),
                     verify=False)
    req.wait()
    # Header labels from the globals populated by the search screen.
    self.ids.user.text = account
    self.ids.posts.text = '[b]Posts[/b] \n' + Posts
    self.ids.followers.text = '[b]Followers[/b] \n' + Followers
    self.ids.followings.text = '[b]Followings[/b] \n' + Followings
    self.ids.idp.text = '[b]Profile ID[/b] \n' + ID
    self.ids.real.text = '[b]Name[/b] \n' + real
    self.ids.ver.text = '[b]Verified[/b] \n' + str(ver)
    if (private == True):
        self.ids.type.text = '[b]Account Type[/b] \n Private'
    else:
        self.ids.type.text = '[b]Account Type[/b] \n Public'
    self.ids.imp.pop = profile
    # Rebuild the plot container from scratch.
    self.ids.ppp.clear_widgets()
    # Comments bar plot; statis.comp returns (min, avg, max, norm).
    # (Inferred from usage below — TODO confirm against statis.comp.)
    st = statis.comp('comments', Post)
    n1 = st[1]
    maxim2 = st[2]
    b = Barplot(min_height=st[0], max_height=st[2], avg_height=st[1],
                norm=st[3])
    self.ids.ppp.add_widget(b)
    # Likes bar plot.
    st = statis.comp('likes', Post)
    n2 = st[1]
    maxim1 = st[2]
    b = Barplot(min_height=st[0], max_height=st[2], avg_height=st[1],
                norm=st[3], tit='likes')
    self.ids.ppp.add_widget(b)
    b = BoxLayout(orientation='vertical')
    # Engagement rate: (avg comments + avg likes) / followers, in percent.
    er = ((n1 + n2) / int(Followers)) * 100
    e = Enbox()
    er = round(er, 2)
    e.tt = str(er) + "%"
    ind = statis.mostliked(Post, maxim1)
    # Hashtag labels and their counts.
    (h1, h2) = statis.hash(Post)
    norm = 1
    if (len(h2) > 0):
        norm = max(h2) + 0.5
    h = Hashplot()
    # One count label + one proportional bar per hashtag.
    for i in range(len(h1)):
        h.ids.lab.add_widget(Label(text=str(h2[i]), halign='right'))
        h3 = Hashbar()
        h3.t = h1[i]
        h3.size_hint_x = 0.4 + (0.6 * (h2[i] / norm))
        h.ids.labval.add_widget(h3)
    self.ids.ppp.add_widget(h)
    # Most-liked post box, only when there are posts at all.
    if (len(Post) > 0):
        m = MostBox()
        m.ss = Post[ind]['url']
        m.likes = Post[ind]['likes']
        for i in range(len(Post[ind]['hashtags'])):
            m.ids.list.text = m.ids.list.text + '\n #' + Post[ind][
                'hashtags'][i]
        self.ids.ppp.add_widget(m)
    self.ids.ppp.add_widget(e)
    b.add_widget(
        Label(text='The Statistics are Based on \nthe Posts Below',
              font_size='20sp',
              underline=True,
              valign="bottom",
              halign="center"))
    self.ids.ppp.add_widget(b)
    # One image + caption + download button box per post.
    for i in range(len(Post)):
        box = BoxLayout(orientation='vertical')
        box.add_widget(
            AsyncImage(source=Post[i]['url'],
                       allow_stretch=True,
                       size_hint=(1, 1),
                       pos_hint={"left": 1}))
        if Post[i]['type'] == 'Video':
            box.add_widget(
                Label(text="comments: " + str(Post[i]['comments']) + ' | ' +
                      "likes: " + str(Post[i]['likes']) + "\n" +
                      Post[i]['type'] + " | Views : " +
                      str(Post[i]['view']) + "\n location: " +
                      str(Post[i]['location']) + "\n" + Post[i]['date'],
                      size_hint=(1, .25),
                      halign='center',
                      valign="top"))
        else:
            box.add_widget(
                Label(text="comments: " + str(Post[i]['comments']) + ' | ' +
                      "likes: " + str(Post[i]['likes']) + "\n" +
                      Post[i]['type'] + "\n location: " +
                      str(Post[i]['location']) + "\n" + Post[i]['date'],
                      size_hint=(1, .25),
                      halign='center',
                      valign="top"))
        b = DownloadButton()
        b.link = Post[i]['url']
        b.path = '/sdcard/download/' + str(Post[i]['shortcode']) + '.jpg'
        box.add_widget(b)
        self.ids.ppp.add_widget(box)