def send_wechat(content):
    is_send_successful = False
    try:
        # Send the message.
        corpid = "wxc05a1c692a154ab3"
        corpsecret = "qGpLLVmeAETWy9QRIqgpQ1-yAaLBQyXgwL3-MJJWpMY"
        qs_token = Token(corpid=corpid, corpsecret=corpsecret).get_token()
        url = "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=" + qs_token
        header = {'Content-Type': 'application/x-www-form-urlencoded', 'charset': 'utf-8'}
        payload = {
            "touser": "******",
            "msgtype": "text",
            "agentid": "2",
            "text": {
                "content": content
            },
            "safe": "0"
        }
        json_data = json.dumps(payload, ensure_ascii=False).encode('utf-8')
        request = urllib.request.Request(url, data=json_data, headers=header)
        response = urllib.request.urlopen(request)
        page_html = response.read().decode('utf-8')
        print(page_html)
        response.close()
        is_send_successful = True
    except Exception as e:
        print(e)
    finally:
        return is_send_successful
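# A minimal sketch of the Token helper assumed by send_wechat() above (its
# implementation is not shown in the source). The endpoint follows WeChat
# Work's documented gettoken API, but this class body is an assumption for
# illustration, not the original implementation.
import json
import urllib.request

class Token:
    def __init__(self, corpid, corpsecret):
        self.corpid = corpid
        self.corpsecret = corpsecret

    def get_token(self):
        # Hypothetical: fetch an access token and return the "access_token"
        # field of the JSON response.
        url = ("https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=%s&corpsecret=%s"
               % (self.corpid, self.corpsecret))
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read().decode('utf-8')).get('access_token')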
def link_resolves(url):
    try:
        request = urllib.request.urlopen(url)
        request.close()
        return True
    except:
        return False
def get_html(url):
    tries = 5
    # Build our request
    req = urllib.request.Request(url)
    # Accept gzipped content
    req.add_header('Accept-Encoding', 'gzip')
    # Fake user agent
    req.add_header('User-Agent', USER_AGENT)
    while tries > 0:
        try:
            request = urllib.request.urlopen(req)
            break
        except socket.timeout:
            tries -= 1
        except urllib.error.URLError as error:
            if isinstance(error.reason, socket.timeout):
                tries -= 1
            else:
                print("URL error: " + str(error.reason))
                quit()
    else:
        # All retries timed out; bail out rather than use an unbound `request`.
        print("Request timed out repeatedly: " + url)
        quit()
    if request.info().get('Content-Encoding') == 'gzip':
        buffer = BytesIO(request.read())
        uncompressed_buffer = gzip.GzipFile(fileobj=buffer)
        html = BeautifulSoup(uncompressed_buffer.read(), 'lxml')
    else:
        html = BeautifulSoup(request.read(), "lxml")
    request.close()
    return html
def get(self, data=None):
    logger.debug("GET %s", self.url)
    req = urllib.request.Request(url=self.url, data=data, headers=self.headers)
    try:
        if self.opener:
            request = self.opener.open(req, timeout=self.timeout)
        else:
            request = urllib.request.urlopen(req, timeout=self.timeout)
    except (urllib.error.HTTPError, CertificateError) as error:
        # CertificateError has no `code` attribute, so read it defensively.
        if getattr(error, 'code', None) == 401:
            raise UnauthorizedAccess("Access to %s denied" % self.url)
        else:
            raise HTTPError("Request to %s failed: %s" % (self.url, error))
    except (socket.timeout, urllib.error.URLError) as error:
        raise HTTPError("Unable to connect to server %s: %s" % (self.url, error))
    if request.getcode() > 200:
        logger.debug("Server responded with status code %s", request.getcode())
    try:
        self.total_size = int(request.info().get("Content-Length").strip())
    except AttributeError:
        logger.warning("Failed to read response's content length")
        self.total_size = 0
    self.response_headers = request.getheaders()
    self.status_code = request.getcode()
    if self.status_code > 299:
        logger.warning("Request responded with code %s", self.status_code)
    self.content = b"".join(self._iter_chunks(request))
    self.info = request.info()
    request.close()
    return self
def solution4() -> None:
    with urllib.request.urlopen("https://baidu.com") as request:
        response = request.read().decode('utf-8')
        # The `with` block closes the connection; no explicit close needed.
    regex = r"<img\s+[^>]*?src=['\"]?([^\"'\s]+)['\"]?[^>]*>"
    for item in re.findall(regex, response):
        print(item)
def test_httpretty_should_allow_adding_and_overwritting_by_kwargs_u2(now):
    "HTTPretty should allow adding and overwriting headers by " \
    "keyword args with urllib2"
    body = "this is supposed to be the response, indeed"
    HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
                           body=body,
                           server='Apache',
                           content_length=len(body),
                           content_type='application/json')

    request = urlopen('http://github.com')
    headers = dict(request.headers)
    request.close()

    expect(request.code).to.equal(200)
    expect(headers).to.equal({
        'content-type': 'application/json',
        'connection': 'close',
        'content-length': str(len(body)),
        'status': '200',
        'server': 'Apache',
        'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
    })
def test_httpretty_should_allow_adding_and_overwritting_urllib2(now):
    "HTTPretty should allow adding and overwriting headers with urllib2"
    HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
                           body="this is supposed to be the response",
                           adding_headers={
                               'Server': 'Apache',
                               'Content-Length': '27',
                               'Content-Type': 'application/json',
                           })

    request = urlopen('http://github.com')
    headers = dict(request.headers)
    request.close()

    expect(request.code).to.equal(200)
    expect(headers).to.equal({
        'content-type': 'application/json',
        'connection': 'close',
        'content-length': '27',
        'status': '200',
        'server': 'Apache',
        'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
    })
def get_hostid_with_hostip(self, hostip):
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "hostinterface.get",
        "params": {
            "output": "extend",
            "filter": {
                "ip": hostip
            }
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        # Note: requests raises RequestException subclasses, not urllib's URLError.
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        if hasattr(e, 'reason'):
            print('We failed to reach a server.')
            print('Reason: ', e.reason)
        elif hasattr(e, 'code'):
            print('The server could not fulfill the request.')
            print('Error code: ', e.code)
    except:
        print("except")
    else:
        response = json.loads(request.text)
        request.close()
        if not len(response['result']):
            print("\033[041m hostid \033[0m does not exist")
            return False
        for hostid in response['result']:
            return hostid['hostid']
def get_tx_info(txhash):
    socket.setdefaulttimeout(3)
    print("connect to the web")
    request = urllib.request.urlopen('http://www.qukuai.com/search/zh-CN/BTC/' + txhash)
    html = request.read()
    request.close()
    tree = lxml.html.fromstring(html)
    time = tree.cssselect('span.desc_item')[0].text
    timestamps = date_to_timestamp(time)
    # value = tree.cssselect('span.info_text')[3].text
    # value = value.rstrip(' ').strip('\n').lstrip(' ')
    ul = tree.cssselect('ul')
    tx_inputs = []
    tx_outputs = []
    inp = ul[0].cssselect('span.trade_address')
    inp_v = ul[0].cssselect('span.trdde_num')
    out = ul[1].cssselect('span.trade_address')
    out_v = ul[1].cssselect('span.trdde_num')
    for i in range(len(inp)):
        tx_inputs.append([inp[i].text, float(inp_v[i].text)])
    for i in range(len(out)):
        tx_outputs.append([out[i].text, float(out_v[i].text)])
    inp_out_val_time = mimo_bitcoin(tx_inputs, tx_outputs, timestamps)
    return inp_out_val_time
def read_weather():
    """ Reads the current weather state, if enabled, and stores it. """
    # Only run when explicitly enabled in settings.
    weather_settings = WeatherSettings.get_solo()
    if not weather_settings.track:
        return

    # Fetch XML from the API.
    request = urllib.request.urlopen(BUIENRADAR_API_URL)
    response_bytes = request.read()
    request.close()
    response_string = response_bytes.decode("utf8")

    # Use the simplified XPath engine to extract the current temperature.
    root = ET.fromstring(response_string)
    xpath = BUIENRADAR_XPATH.format(
        weather_station_id=weather_settings.buienradar_station
    )
    temperature_element = root.find(xpath)
    temperature = temperature_element.text

    # Gas readings trigger these readings, so the 'read at' timestamp should be
    # somewhat in sync. Therefore we align temperature readings with them,
    # grouping them by hour.
    read_at = timezone.now().replace(minute=0, second=0, microsecond=0)
    TemperatureReading.objects.create(read_at=read_at, degrees_celcius=temperature)
def download_dep(url, filename):
    print("- Downloading " + filename + " from: " + url)
    request = urllib.request.urlopen(url)
    data = request.read()
    request.close()
    with open(DEPS_TEMP_DIR + '/' + filename, "wb") as f:
        f.write(data)
def get_item(self, triggerid):
    ret = False
    #hostid = self.get_hostid_with_hostip(hostip)
    #print(hostid)
    #if not hostid:
    #    return ret
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "item.get",
        "params": {
            "output": "extend",
            "triggerids": triggerid,
            "sortfield": "name",
            "search": {
                "key_": "icmppingsec"
            }
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        print("Error as ", e)
        response = {}
    except:
        print("Except")
        response = {}
    else:
        response = json.loads(request.text)
        request.close()
    return response
def host_enable(self, hostip):
    ret = False
    hostid = self.get_hostid_with_hostip(hostip)
    if not hostid:
        return ret
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "host.update",
        "params": {
            "hostid": hostid,
            "status": 0
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        print("Error as ", e)
    except:
        print("Except")
    else:
        request.close()
        ret = True
    return ret
def get_template(self, template_name):
    ret = False
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "template.get",
        "params": {
            "output": "extend",
            "filter": {
                "host": [template_name]
            }
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        print("Error as ", e)
        response = {}
    except:
        print("Except")
        response = {}
    else:
        response = json.loads(request.text)
        request.close()
    return response
def get_problem(self, eventid):
    ret = False
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "problem.get",
        "params": {
            "output": ['eventid', 'clock', 'r_clock', 'name'],
            #"selectAcknowledges": "extend",
            #"selectTags": "extend",
            "eventids": eventid,
            "recent": "true",
            "sortfield": ["eventid"],
            "sortorder": "DESC",
            #"time_from": start,
            #"time_till": stop
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        print("Error as ", e)
        response = {}
    except:
        print("Except")
        response = {}
    else:
        response = json.loads(request.text)
        request.close()
    return response
def get_pn_trigger(self, hostip):
    ret = False
    hostid = self.get_hostid_with_hostip(hostip)
    if not hostid:
        return ret
    data = json.dumps({
        "jsonrpc": "2.0",
        "method": "trigger.get",
        "params": {
            "output": ["triggerid", "description", "priority"],
            "hostids": hostid,
            #"templateids": templateid,
            #"templated": "true",
            "sortfield": "priority",
            "sortorder": "DESC"
        },
        "auth": self.user_login(),
        "id": 1
    })
    try:
        request = requests.post(url=self.url, headers=self.header, data=data)
    except requests.exceptions.RequestException as e:
        print("Error as ", e)
        response = {}
    except:
        response = {}
    else:
        response = json.loads(request.text)
        request.close()
    return response
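# A hedged usage sketch for the Zabbix JSON-RPC helpers above. The enclosing
# class and its constructor are not shown in the source, so `ZabbixApi` and
# its setup here are hypothetical placeholders; only the method names and the
# JSON-RPC response shape ({'result': [...]}) follow the snippets.
def example_zabbix_usage():
    api = ZabbixApi()  # hypothetical constructor, not part of the source
    hostid = api.get_hostid_with_hostip('192.0.2.10')
    if hostid:
        triggers = api.get_pn_trigger('192.0.2.10')
        for trigger in triggers.get('result', []):
            items = api.get_item(trigger['triggerid'])
            print(trigger['description'], len(items.get('result', [])))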
def photo_image(self, url):
    request = urllib.request.urlopen(url)
    data = request.read()
    request.close()
    image = Image.open(io.BytesIO(data))
    return image.resize((self.THUMBNAIL_SIZE, self.THUMBNAIL_SIZE), Image.ANTIALIAS)
def read_weather():
    """ Reads the current weather state, if enabled, and stores it. """
    # Only run when explicitly enabled in settings.
    if not should_sync():
        return

    # For backend logging in Supervisor.
    print(' - Performing temperature reading at Buienradar.')

    weather_settings = WeatherSettings.get_solo()

    # Fetch XML from the API.
    request = urllib.request.urlopen(BUIENRADAR_API_URL)
    response_bytes = request.read()
    request.close()
    response_string = response_bytes.decode("utf8")

    # Use the simplified XPath engine to extract the current temperature.
    root = ET.fromstring(response_string)
    xpath = BUIENRADAR_XPATH.format(
        weather_station_id=weather_settings.buienradar_station
    )
    temperature_element = root.find(xpath)
    temperature = temperature_element.text

    # Gas readings trigger these readings, so the 'read at' timestamp should be
    # somewhat in sync. Therefore we align temperature readings with them,
    # grouping them by hour.
    read_at = timezone.now().replace(minute=0, second=0, microsecond=0)
    TemperatureReading.objects.create(read_at=read_at, degrees_celcius=temperature)

    # Push the next sync back an hour.
    weather_settings.next_sync = read_at + timezone.timedelta(hours=1)
    weather_settings.save()
def commit(self):
    uri = self.solr + self.core + '/update?commit=true'
    if self.verbose:
        print("Committing to {}".format(uri))
    request = urllib.request.urlopen(uri)
    request.close()
def _paypal_s2s_get_access_token(self, cr, uid, ids, context=None):
    """
    Note: see http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
    for an explanation of why we use an Authorization header instead of the
    urllib2 password manager.
    """
    res = dict.fromkeys(ids, False)
    parameters = werkzeug.url_encode({'grant_type': 'client_credentials'})

    for acquirer in self.browse(cr, uid, ids, context=context):
        tx_url = self._get_paypal_urls(cr, uid, acquirer.environment)['paypal_rest_url']
        request = urllib.request.Request(tx_url, parameters.encode('utf-8'))

        # add other headers (https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/)
        request.add_header('Accept', 'application/json')
        request.add_header('Accept-Language', 'en_US')

        # add authorization header (base64.encodestring is gone in Python 3;
        # b64encode is the portable spelling)
        credentials = '%s:%s' % (acquirer.paypal_api_username, acquirer.paypal_api_password)
        base64string = base64.b64encode(credentials.encode('utf-8')).decode('ascii')
        request.add_header("Authorization", "Basic %s" % base64string)

        response = urllib.request.urlopen(request)
        result = response.read()
        res[acquirer.id] = json.loads(result).get('access_token')
        response.close()
    return res
def do_unban_user(text):
    username = clean_username(text.upper().rsplit()[1])
    thresh.check()
    with requests.post(create_unbanurl(), data=create_data(username),
                       headers=create_banheaders()) as request:
        # The context manager already closes the response; the explicit close
        # is redundant but harmless.
        request.close()
def __getUID__(name):
    import urllib.request
    request = urllib.request.urlopen('https://api.mojang.com/users/profiles/minecraft/' + name)
    content = request.read()
    request.close()
    string = content.decode("utf-8")
    # Slice the 32-character UUID out of the JSON response.
    return string[7:39]
def download_file(url, out_path):
    request = urllib.request.urlopen(url)
    data = request.read()
    request.close()
    with open(out_path, "wb") as file:
        file.write(data)
    print('Downloaded {}'.format(out_path))
def proper_urlopen(url, ddecode=True):
    request = urllib.request.Request(url, headers={"User-Agent": "Reddit Python3.4 Code Parser"})
    response = urllib.request.urlopen(request)
    text = response.read()
    if ddecode:
        text = text.decode()
    response.close()
    return text
def solution3() -> None:
    with urllib.request.urlopen("https://jd.com/") as request:
        response = request.read().decode('utf-8')
        # The `with` block closes the connection; no explicit close needed.
    regex = r"<a\s+href\s*=\s*[\"']?([^\"'\s]+)[\"']?>([^<]+)</a>"
    for item in re.findall(regex, response):
        print(item[1], '->', item[0])
def __init__(self, request, client_address, server):
    self.rfile = self.wfile = request.makefile()
    self.is_secure = request.is_secure
    # close this now so that when rfile and wfile are closed, the socket gets closed
    request.close()
    self.client_address = client_address
    self.server = server
    self.set_response_code(None, 200, None)
    self.protocol_version = server.max_http_version
def solution3() -> None:
    # Dot-all mode: (?s) lets `.` match newlines, so multi-line scripts are captured.
    with urllib.request.urlopen('https://dumall.baidu.com/') as request:
        response = request.read().decode('utf-8')
    print('\n')
    regex = r'(?s)<script\s.*?</script>'
    for script in re.findall(regex, response):
        print('script ->', script)
def solution2() -> None:
    url = 'https://baidu.com'
    with urllib.request.urlopen(url) as request:
        response = request.read().decode('utf-8')
    print("\nAll script codes:")
    for code in re.findall(r'<script[\s>][\s\S]+?</script>', response):
        print(code)
def solution3() -> None:
    url = 'https://raw.githubusercontent.com/begeekmyfriend/tash/master/tash.c'
    with urllib.request.urlopen(url) as request:
        response = request.read().decode('utf-8')
    print("\nExtracted comments (e.g. /* ... */):")
    for comment in re.findall(r'/\*[\s\S]*?\*/', response):
        print(comment)
def GetCurrentValue(self):
    try:
        print(self.getUrl())
        request = urllib.request.urlopen(self.getUrl())
        return_val = request.read().decode('utf-8')
        request.close()
        return json.loads(return_val)
    except:
        return None
def getSearch(self, search_query):
    """Gets the HTML code as a string from the Twitter search page for search_query"""
    request = urllib.request.urlopen(
        f'http://twitter.com/search?f=tweets&vertical=news&q={search_query}&src=typd'
    )
    html_as_bytes = request.read()
    html_as_string = html_as_bytes.decode("utf8")
    request.close()
    return html_as_string
def fetch_schedule_actions_from_url(url):
    request = urllib.request.urlopen(url)
    try:
        content = request.read().decode('utf-8')
    except:
        content = None
    finally:
        request.close()
    return content
def query(self, parameters):
    '''Perform a query to api.php with the dict parameters'''
    url = config.api_path + '?action=query&format=json'
    for parameter in parameters:
        url += '&%s=%s' % (parameter, parameters[parameter])
    # Request the URL and parse the results
    request = urllib.request.urlopen(url)
    self.data = json.loads(request.read().decode())
    request.close()
def image_exists(host, url):
    "Checks if a linked image actually exists."
    url = normalise_urls(host, url)
    try:
        request = urllib.request.urlopen(url, None, 1)
        request.close()
    except:
        return False
    return True
def get_album_art_url(self, album_id):
    # Sadly we cannot determine whether the Beets library really contains album
    # art, so we ask for it and check the status code.
    url = "{0}/album/{1}/art".format(self.api_endpoint, album_id)
    try:
        request = urllib.request.urlopen(url)
    except IOError:
        # DNS problem or similar
        return None
    request.close()
    return url if request.getcode() == 200 else None
def check_dbsnp(dicoNiourk, url):
    try:
        # Request the dbSNP FTP listing
        request = urllib.request.urlopen(url)
        output = request.read().decode('UTF-8')
        request.close()
        # Check the latest version
        for line in output.split("\n"):
            search_version = re.search("<A HREF=\"human_9606;type=i\"></A>->\s*human_9606_(.+)_.+", line)
            if search_version:
                version = search_version.group(1)
                return version
    except:
        display_errors(dicoNiourk, ["Could not retrieve dbSNP version from \"" + url + "\""], 2)
def check_clinvar(dicoNiourk, url):
    try:
        # Request the ClinVar FTP listing
        request = urllib.request.urlopen(url)
        output = request.read().decode('UTF-8')
        request.close()
        # Check the latest version
        for line in output.split("\n"):
            search_version = re.search("<A HREF=\"clinvar.vcf.gz;type=i\"></A>->\s*clinvar_([0-9]+).vcf.gz", line)
            if search_version:
                version = search_version.group(1)
                return version
    except:
        display_errors(dicoNiourk, ["Could not retrieve ClinVar version from \"" + url + "\""], 2)
def get_bbc_data():
    request = urllib.request.urlopen("https://api.twitter.com/1/statuses/user_timeline.rss?screen_name=BreakingNews&count=1")
    dom = xml.dom.minidom.parse(request)
    latest_update = dom.getElementsByTagName('item')[0]
    updated = latest_update.getElementsByTagName('pubDate')[0].childNodes[0].data
    description = latest_update.getElementsByTagName('description')[0].childNodes[0].data
    #print description
    updated = datetime.datetime.strptime(updated, "%a, %d %b %Y %H:%M:%S +0000")
    #print updated
    ago = round((datetime.datetime.utcnow() - updated).seconds / 60)
    request.close()
    return description, updated, ago
def get_quake_data():
    request = urllib.request.urlopen("http://earthquake.usgs.gov/earthquakes/catalogs/1day-M2.5.xml")
    dom = xml.dom.minidom.parse(request)
    latest_quakenode = dom.getElementsByTagName('entry')[0]
    updated = latest_quakenode.getElementsByTagName('updated')[0].childNodes[0].data
    qtitle = latest_quakenode.getElementsByTagName('title')[0].childNodes[0].data
    updated = datetime.datetime.strptime(updated, "%Y-%m-%dT%H:%M:%SZ")
    ago = round((datetime.datetime.utcnow() - updated).seconds / 60)
    elevation = int(latest_quakenode.getElementsByTagName('georss:elev')[0].childNodes[0].data)
    depth = float(elevation) / -1000
    depthmi = '{0:.2f}'.format(depth / 1.61)
    depthstring = "%s km (%s mi)" % (depth, depthmi)
    request.close()
    return qtitle, updated, ago, depthstring
def queryCarbonDate(cduri, uri):
    if cduri[-1] != '/':
        cduri += '/'
    sys.stderr.write("Using cduri = " + cduri + "\n")
    sys.stderr.write("Requesting " + cduri + uri + "\n")
    sys.stderr.flush()
    request = urllib.request.urlopen(cduri + uri)
    # HTTPResponse has no readall() on current Python 3; read() returns the full body.
    pagedata = request.read().decode('utf-8')
    request.close()
    data = json.loads(pagedata)
    return data['Estimated Creation Date']
def get_data(self, docid, fields):
    uri = (self.solr + self.core + '/get?id=' + urllib.parse.quote(docid)
           + '&fl=' + ','.join(fields))
    request = urllib.request.urlopen(uri)
    encoding = request.info().get_content_charset('utf-8')
    data = request.read()
    request.close()
    solr_doc = json.loads(data.decode(encoding))
    data = None
    if 'doc' in solr_doc:
        data = solr_doc['doc']
    return data
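# A hedged usage sketch for get_data() above, assuming a client object whose
# `solr` base URL and `core` name are configured elsewhere (the constructor is
# not shown in the source); the document id and field names are illustrative.
def example_solr_get(client):
    doc = client.get_data('doc-1', ['id', 'title'])
    if doc is not None:
        print(doc.get('id'), doc.get('title'))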
def image_quality(host, url, size=(100, 100)):
    "Calculate the potential 'quality' of an image."
    # Get the image
    url = normalise_urls(host, url)
    request = urllib.request.urlopen(url, None, 1)
    data = request.read()
    request.close()
    # Load the image from an in-memory buffer
    virtio = io.BytesIO(data)
    image = Image.open(virtio)
    image.load()
    # Score the image by its dimensions (note PIL's size is (width, height))
    idealwidth, idealheight = size
    width, height = image.size
    return height + width
def probarPermutacion(palabra, fuentes):
    # print("searching for \"{}\"".format(palabra))
    for fuente in fuentes:
        direccion = fuente + palabra
        # If the permutation is found at this source, then this is the word.
        request = None
        try:
            request = urllib.request.urlopen(direccion)
            codigo = request.getcode()
        except urllib.error.HTTPError as error:
            codigo = error.code
        finally:
            if request:
                request.close()
        if codigo == 200:
            print("Found \"{}\" at \"{}\".".format(palabra, direccion))
            return True
    return False
def sendRequestFileToIBC(self, requestFile, responseFile, config):
    hostName = config.batchHost
    hostPort = int(config.batchPort)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    s.settimeout(int(config.batchTcpTimeout))
    if config.batchUseSSL == 'true':
        s = ssl.wrap_socket(s)
    try:
        s.connect((hostName, hostPort))
    except:
        raise Exception("Exception connect to litle")
    # Stream the request file to the socket; sockets take bytes, so read in
    # binary mode rather than sending str objects one character at a time.
    with io.open(requestFile, 'rb') as request:
        chunk = request.read(4096)
        while chunk:
            s.sendall(chunk)
            chunk = request.read(4096)
    # Receive the response and write it out; the file must be opened in binary
    # mode to accept the received bytes.
    with open(responseFile, 'wb') as respFile:
        while True:
            try:
                data = s.recv(2048)
                if not data:
                    break
                respFile.write(data)
            except socket.error as e:
                if e.errno != errno.ECONNRESET:
                    raise Exception("Exception receiving response")
                break
    s.close()
def getTimeMap(uri):
    urit = "http://mementoproxy.cs.odu.edu/aggr/timemap/link/" + uri
    try:
        request = urllib.request.urlopen(urit)
        if request.getcode() == 200:
            # HTTPResponse has no readall() on current Python 3; read() returns the full body.
            timemap = request.read()
        else:
            timemap = None
        request.close()
    except urllib.error.HTTPError as e:
        timemap = None
    except urllib.error.URLError as e:
        timemap = None
    return timemap
def get(self, data=None):
    req = urllib.request.Request(url=self.url, data=data, headers=self.headers)
    try:
        request = urllib.request.urlopen(req, timeout=self.timeout)
    except (urllib.error.HTTPError, CertificateError) as e:
        logger.error("Unavailable url (%s): %s", self.url, e)
    except (socket.timeout, urllib.error.URLError) as e:
        logger.error("Unable to connect to server (%s): %s", self.url, e)
    else:
        try:
            total_size = request.info().get('Content-Length').strip()
            total_size = int(total_size)
        except AttributeError:
            total_size = 0

        chunks = []
        while 1:
            if self.stop_request and self.stop_request.is_set():
                self.content = ''
                return self
            try:
                chunk = request.read(self.buffer_size)
            except socket.timeout as e:
                logger.error("Request timed out")
                self.content = ''
                return self
            self.downloaded_size += len(chunk)
            if self.thread_queue:
                self.thread_queue.put(
                    (chunk, self.downloaded_size, total_size)
                )
            else:
                chunks.append(chunk)
            if not chunk:
                break
        request.close()
        self.content = b''.join(chunks)
    return self
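# A hedged consumption sketch for the chunked get() above: when `thread_queue`
# is set, each (chunk, downloaded_size, total_size) tuple is pushed to the
# queue instead of being buffered, so another thread can report progress. The
# downloader object `dl` and its setup are assumptions for illustration; only
# the queue tuple layout follows the snippet (an empty chunk marks the end).
def example_progress_reader(dl):
    import threading
    threading.Thread(target=dl.get).start()
    while True:
        chunk, downloaded, total = dl.thread_queue.get()
        if not chunk:
            break
        print("%d/%d bytes" % (downloaded, total))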
def run(self):
    now = 0
    length = len(self.datas)
    for data in self.datas:
        if isinstance(data, (list, tuple)):
            # datas is a list of dicts: [{'url': '', 'name': ''}]
            url, filename = data
            if self.urlprefix:
                url = self.urlprefix + url
        else:
            # datas is a list of URLs
            url = data
            if self.urlprefix:
                url = self.urlprefix + url
                filename = data if self.keepdir else os.path.basename(data)
            else:
                filename = os.path.basename(data)
        filepath = filename if not self.filedir else os.path.join(self.filedir, filename)
        try:
            request = self.opener.open(url) if self.opener else urlopen(url)
            # Create the parent directory if needed
            file_parent_dir = os.path.dirname(filepath)
            if file_parent_dir and not os.path.isdir(file_parent_dir):
                print('Creating directory: ' + file_parent_dir)
                os.makedirs(file_parent_dir)
            readed = request.read()
            with open(filepath, 'wb') as file:
                file.write(readed)
            request.close()
        except Exception:
            print('Download failed: %s, %s' % (url, filename))
        now += 1
        print("{0:2d}/{1:2d} {2}".format(now, length, filepath))
    print('finish')
def get_quake_data():
    # Alternate URLs for intensities: http://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson.php
    request = urllib.request.urlopen("http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_hour.geojson")
    quake = json.loads(request.read().decode())
    request.close()
    quake = quake['features'][0]  # select the latest quake
    qtitle = quake['properties']['title']
    updated = round(quake['properties']['time'] / 1000)
    updated = datetime.datetime.fromtimestamp(updated)
    ago = round((datetime.datetime.now() - updated).seconds / 60)
    depth = quake['geometry']['coordinates'][2]
    depthmi = '{0:.2f}'.format(depth / 1.61)
    elevation = "%s km (%s mi)" % (depth, depthmi)
    tsunami = ""
    if quake['properties']['tsunami']:
        tsunami = " - Tsunami Warning!"
    alert = ""
    if quake['properties']['alert']:
        alert = " - Alert level: %s" % quake['properties']['alert']
    quakestring = "Latest Earthquake: %s - Depth: %s (%s minutes ago)%s%s" % (qtitle, elevation, ago, tsunami, alert)
    return updated, quakestring
def get_quake_data():
    try:
        conn = sqlite3.connect('quakes.sqlite')
        c = conn.cursor()
        result = c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='lastquake';").fetchone()
        if not result:
            c.execute('create table lastquake('
                      'ids text UNIQUE ON CONFLICT REPLACE, '
                      'ts NOT NULL default CURRENT_TIMESTAMP)')
        # Alternate URLs for intensities: http://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson.php
        request = urllib.request.urlopen("http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson")
        quake = json.loads(request.read().decode())
        request.close()
        quake = quake['features'][0]  # select the latest quake
        qplace = quake['properties']['place']
        mag = round(float(quake['properties']['mag']), 1)
        ids = quake['properties']['ids']
        updated = round(quake['properties']['time'] / 1000)
        updated = datetime.datetime.fromtimestamp(updated)
        ago = round((datetime.datetime.now() - updated).seconds / 60)
        lon = quake['geometry']['coordinates'][0]
        lat = quake['geometry']['coordinates'][1]
        line_one = "Earthquake: M%s" % (mag)
        line_two = "%s min ago %s" % (ago, qplace)
        return line_one, line_two
    except SerialException as e:
        raise
    except:
        print(traceback.format_exc())
def processWebsocketRequest(self, request, data, websock):
    clients = self.container.clients
    # check if the client is requesting data from a group
    if (data['cmd'] == 'EXECGROUP'):
        group = data['group']
        message = createMessage(cmd=data['remote_cmd'], args=data['remote_args'])
        res = self.container.publishToGroup(group, message)
        request.sendall(websock.encode(Opcode.text, res))
    # check if the client is requesting data from a node
    elif (data['cmd'] == 'EXECNODE'):
        node = (data['ip'], int(data['port']))
        message = createMessage(cmd=data['remote_cmd'], args=data['remote_args'])
        res = self.container.publishToHost(node, message)
        if ('result' in res):
            result = createMessage(result=res['result'])
        else:
            print("Result not in res? ", res)
            result = str(res).encode('UTF8')
        request.sendall(websock.encode(Opcode.text, result))
    # check if the client is requesting a list of clusters available
    elif (data['cmd'] == 'GROUPNAMES'):
        result = websock.encode(Opcode.text,
                                createMessage(clusters=self.container.getClusterList()))
        request.sendall(result)
    # check if the client is requesting a list of nodes in a particular
    # cluster
    elif (data['cmd'] == 'NODESINGROUP'):
        clients = self.container.getClientList(data['cluster'])
        msg = createMessage(clients=clients)
        request.sendall(websock.encode(Opcode.text, msg))
    # check if the client is requesting the server to poll for mac
    # addresses, used for deployment
    elif (data['cmd'] == 'POLLMACS'):
        p = subprocess.Popen('./capturemacs.sh', stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        macs = out.decode().rstrip().split('\n')
        result = []
        for mac in macs:
            if (not mac.startswith(constants.get('default.vmMACPrefix'))):
                result += [mac.strip()]
        msg = createMessage(macs=result)
        request.sendall(websock.encode(Opcode.text, msg))
    elif (data['cmd'] == 'INSTANTIATE'):
        global locks
        self.container.cleanupClients()
        nodes = clients.get('COMPUTE')
        # Message style:
        # INSTANTIATE <VM_NAME> <USER_NAME>
        message = createMessage(cmd=data['cmd'], vm=data['vm'], user=data['user'])
        utilization = createMessage(cmd='UTILIZATION')
        error = createMessage(ip=None)
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        if (self.lbMode == LBMode.CONSOLIDATE):
            weights = [0] * 5
        elif (self.lbMode == LBMode.RAIN):
            weights = myConnector.getWeights('balance')
        vm = myConnector.getProfileData(data['vm'])
        myConnector.disconnect()
        load = [self.container.publishToHost(nodes[0], utilization)]
        for node in nodes[1:]:
            tmp = self.container.publishToHost(node, utilization)
            load += [tmp]
        selected = None
        index = -1
        if (self.lbMode == LBMode.RAIN or self.lbMode == LBMode.CONSOLIDATE):
            while (1):
                # [memTotal, memFree, 1min, 5min, 15min, maxVCore, activeVCore]
                selected = load_balancer.rain_select(load, weights, vm)
                if (selected == None):
                    # Couldn't find a node to instantiate the vm
                    request.sendall(websock.encode(Opcode.text, error))
                    return
                for i in range(0, len(nodes), 1):
                    if (nodes[i][0] == selected[0]):
                        index = i
                if (index == -1):
                    request.sendall(websock.encode(Opcode.text, error))
                    return
                # If we don't have enough locks, double it
                if (index > len(locks)):
                    locks += [0] * len(locks)
                if (locks[index] == 0):
                    locks[index] = 1
                    break
                else:
                    load = [self.container.publishToHost(nodes[0], utilization)]
                    for node in nodes[1:]:
                        load += [self.container.publishToHost(node, utilization)]
        elif (self.lbMode == LBMode.ROUNDROBIN):
            selected = load_balancer.rr_select(load, vm)
            if (selected == None):
                # Couldn't find a node to instantiate the vm
                request.sendall(websock.encode(Opcode.text, error))
                return
            for i in range(0, len(nodes), 1):
                if (nodes[i][0] == selected[0]):
                    index = i
                    break
            if (index == -1):
                request.sendall(websock.encode(Opcode.text, error))
        selectedNode = nodes[index]
        response = self.container.publishToHost(selectedNode, message, False)
        locks[index] = 0
        response = response['result']
        ip = ''
        if ('mac' in response and response['mac'] != ''):
            ip = getIPFromARP(response['mac'])
            myConnector.connect()
            myConnector.updateInstanceIP(response['domain'], ip)
            myConnector.disconnect()
        request.sendall(websock.encode(Opcode.text, createMessage(ip=ip)))
    elif (data['cmd'] == 'GETUSERINSTANCES'):
        username = data['user']
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        instances = myConnector.getUserInstances(username)
        for instance in eval(instances):
            node = myConnector.getNodeByName(instance[2])
            nodeAddr = node[1].split(':')
            nodeAddr = (nodeAddr[0], int(nodeAddr[1]))
            message = createMessage(cmd='CHECKINSTANCE', domain=instance[0])
            response = self.container.publishToHost(nodeAddr, message)
            if (response['result'] == 'error' and instance[1] != '-1'):
                myConnector.deleteInstance(instance[0])
        instances = myConnector.getUserInstances(username)
        myConnector.disconnect()
        message = createMessage(user=username, instances=instances)
        request.sendall(websock.encode(Opcode.text, message))
    # the user has requested that an instance be destroyed.
    # a message should be relayed to the node hosting the instance.
    elif (data['cmd'] == 'DESTROYINSTANCE'):
        username = data['user']
        domain = data['domain']
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        instances = myConnector.getUserInstances(username)
        result = 'error'
        for instance in eval(instances):
            if (instance[0] == domain):
                node = myConnector.getNodeByName(instance[2])
                nodeAddr = node[1].split(':')
                nodeAddr = (nodeAddr[0], int(nodeAddr[1]))
                message = createMessage(cmd='DESTROY', domain=instance[0])
                self.container.publishToHost(nodeAddr, message)
                result = 'success'
                break
        message = createMessage(result=result)
        myConnector.disconnect()
        request.sendall(websock.encode(Opcode.text, message))
    # retrieve the RAIN constants from the database and send them to the
    # caller
    elif (data['cmd'] == 'RAINCONSTANTS'):
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        weights = myConnector.getWeights('balance')
        result = []
        for weight in weights:
            result += [weight]
        myConnector.disconnect()
        message = createMessage(result=result)
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == 'GETLBMODE'):
        try:
            fp = open('lb.conf', 'r')
            mode = fp.read()
            fp.close()
            message = createMessage(lbmode=mode)
        except:
            fp = open('lb.conf', 'w')
            fp.write("RAIN")
            fp.close()
            message = createMessage(lbmode="RAIN")
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == 'CHANGELBMODE'):
        mode = data['mode']
        try:
            self.lbMode = LBMode[mode]
            fp = open('lb.conf', 'w')
            fp.write(mode)
            fp.close()
        except:
            print(mode, "is not a valid load balance mode.")
        message = createMessage(result=0)
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == 'UPDATERAIN'):
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        myConnector.updateWeights('balance', data['constants'])
        myConnector.disconnect()
        # TODO: Failures?
        request.sendall(websock.encode(Opcode.text, createMessage(result=0)))
    elif (data['cmd'] == 'IMAGELIST'):
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        images = myConnector.getImages()
        myConnector.disconnect()
        result = []
        for image in images:
            result += [image[0]]
        message = createMessage(result=result)
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == 'SAVEPROFILE'):
        #name = data[1]
        #title = data[2]
        #desc = data[3]
        #ram = data[4]
        #vcpu = data[5]
        #image = data[6]
        #myConnector = mysql(self.container.addr[0], 3306)
        #myConnector.connect()
        #myConnector.insertProfile(name, title, desc, ram, vcpu, image)
        #myConnector.disconnect()
        print(data)
    elif (data['cmd'] == 'IMPORTIMAGE'):
        url = data['url']
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        storageNodes = myConnector.getStorageNodes()
        myConnector.disconnect()
        # Figure out what to do with multiple storage nodes
        nodeAddr = storageNodes[0][1].split(':')
        nodeAddr = (nodeAddr[0], int(nodeAddr[1]))
        message = createMessage(cmd='IMPORTIMAGE', url=url)
        res = self.container.publishToHost(nodeAddr, message, False)
    elif (data['cmd'] == 'PROFILEINFO'):
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        profile = myConnector.getProfile(data['profile'])
        myConnector.disconnect()
        result = []
        for datum in profile:
            result += [datum]
        message = createMessage(result=result)
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == 'DEPLOY'):
        role = data['role']
        macs = data['macs']
        for mac in macs:
            oldDHCPTime = getDHCPRenewTime(mac)
            tftpDir = constants.get('default.tftpdir')
            shutil.copyfile('pxetemplate.cfg', tftpDir + '/pxelinux.cfg/01-' + mac)
            fname = tftpDir + '/pxelinux.cfg/01-' + mac
            for line in fileinput.input(fname, inplace=True):
                if '<ROLE>' in line:
                    print(line.replace('<ROLE>', role), end='')
                elif '<SERVER_IP>' in line:
                    print(line.replace('<SERVER_IP>', self.container.addr[0]), end='')
                else:
                    print(line, end='')
            t = threading.Thread(target=self.detectDHCPRenew,
                                 args=(mac, oldDHCPTime, ))
            t.start()
        message = createMessage(result=1)
        request.sendall(websock.encode(Opcode.text, message))
    elif (data['cmd'] == "REBOOTNODE"):
        p = subprocess.Popen(['sudo', 'reboot'], stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out = p.communicate()
    else:
        print('DATA:', data)
    request.close()
    return
def processTraditionalRequest(self, request, data):
    client = self.client_address
    events = self.container.events
    clients = self.container.clients
    # check if the request is an event call
    if (events.contains(data['cmd'])):
        func = events.get(data['cmd'])
        response = func(data)
        # send the result to the caller
        request.sendall(createMessage(result=response))
    # check if the request is a query for the service role
    # (PUBLISHER | SUBSCRIBER)
    elif (data['cmd'] == 'ROLE'):
        message = createMessage(role=self.container.role)
        request.sendall(message.encode('UTF8'))
    # check if the caller is requesting a nonce for authorization
    elif (data['cmd'] == 'AUTH'):
        nonce = auth.generateNonce()
        self.container._nonce = nonce
        request.sendall(createMessage(nonce=nonce))
    # check if the caller is requesting authorization for subscription
    elif (data['cmd'] == 'SUBSCRIBE'):
        r = data['nonce'].encode('UTF8')
        m = auth.decrypt(r).decode('UTF8')
        if (m == self.container._nonce):
            # we can consider this subscriber to be authentic
            if (len(data) == 5):  # should be 5 values
                # data[3] is the group name
                if (clients.contains(data['group'])):
                    c = clients.get(data['group'])
                    c.append((data['ip'], data['port']))
                else:
                    c = [(data['ip'], int(data['port']))]
                    clients.append((data['group'], c))
                request.sendall(createMessage(result=0))
    # check if the caller is sending a heartbeat
    elif (data['cmd'] == 'HEARTBEAT'):
        # check if the client is still registered
        response = data['cmd']
        if (len(clients.collection()) == 0):
            response = 'SUBSCRIBE'
        else:
            found = False
            for group in clients.collection():
                for ip in clients.get(group):
                    if (ip[0] == data['ip'] and ip[1] == data['port']):
                        found = True
            if (not found):
                response = 'SUBSCRIBE'
        message = createMessage(result=response)
        request.sendall(message)
    elif (data['cmd'] == 'UPDATERAIN'):
        myConnector = mysql(self.container.addr[0], 3306)
        myConnector.connect()
        myConnector.updateWeights('balance', data['constants'])
        myConnector.disconnect()
        message = createMessage(result=0)
        request.sendall(message)
    elif (data['cmd'] == 'CHANGELBMODE'):
        mode = data['mode']
        try:
            self.lbMode = LBMode[mode]
            fp = open('lb.conf', 'w')
            fp.write(mode)
            fp.close()
        except:
            print(mode, "is not a valid load balance mode.")
        message = createMessage(result=0)
        request.sendall(message)
    # check if an instance is running
    elif (data['cmd'] == 'CHECKINSTANCE'):
        virtcon = libvirt.openReadOnly(None)
        error = createMessage(result='error')
        success = createMessage(result='success')
        if (virtcon == None):
            request.sendall(error)
        try:
            virtcon.lookupByName(data['domain'])
            request.sendall(success)
        except:
            request.sendall(error)
        virtcon.close()
    else:
        print('DATA:', data)
    request.close()
    return