def f1():
    # Assumes a module-level `from urllib.request import urlopen`.
    my_url = "https://vps.beta.ule.com/vpsUzsMobile/yzs/user/testInfo.do"
    client = urlopen(my_url)
    content = client.read()
    print(content)
    client.close()
def get_courses(url, campus, dept_link):
    """Gets courses from a department's course description page.

    Args:
        url: The base URL for course descriptions.
        campus: The name of the department's campus.
        dept_link: A link to the department's course description page.

    Returns:
        A list of courses offered by the department.
    """
    client = http.client.HTTPConnection(url.netloc)
    client.request('GET', '%s%s' % (url.path, dept_link))
    response = client.getresponse()
    if response.status != 200:
        logging.warning('Error reading category (%s): %d %s',
                        dept_link, response.status, response.read())
        return []  # Match the documented return type instead of returning None.
    tree = lxml.html.fromstring(response.read())
    client.close()
    items = tree.xpath('/html/body/a/p')
    courses = []
    for i in items:
        course = parse_course(i, campus)
        if not course:
            logging.warning('Unable to parse course: %s', lxml.html.tostring(i))
            continue
        courses.append(course)
    return courses
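# A minimal usage sketch for get_courses, under the assumption that `url` is a
# urllib.parse.urlparse() result (the function reads url.netloc and url.path).
# The index URL, campus, and department link below are hypothetical.
def demo_get_courses():
    base = urllib.parse.urlparse('http://www.example.edu/students/crscat/')
    return get_courses(base, 'Seattle', 'cse.html')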
def single_page(course, quarter):
    while True:
        try:
            client = http.client.HTTPSConnection(host)
            client.connect()
            body[quarter_key] = quarter
            body[course_key] = course
            data = urllib.parse.urlencode(body)
            client.request("POST", url, data, headers)
            res = client.getresponse()
            raw_codes = res.read()
            # The server gzips the payload, so decompress before decoding.
            html = gzip.decompress(raw_codes).decode("utf-8")
            filepath = "./raw_data/" + course + "_" + quarter + ".html"
            with open(filepath, "w+") as f:
                f.write(html)
            client.close()
            time.sleep(0.1)
            break
        except Exception as e:
            # The original printed raw_codes here, which is unbound when the
            # request itself fails.
            print("error when reading this page ({})! skipping!".format(e))
            break
def main():
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    define("validate_cert", type=bool, default=True)
    args = parse_command_line()
    client = HTTPClient()
    for arg in args:
        try:
            response = client.fetch(
                arg,
                follow_redirects=options.follow_redirects,
                validate_cert=options.validate_cert,
            )
        except HTTPError as e:
            if e.response is not None:
                response = e.response
            else:
                raise
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(response.body)
    client.close()
def create_houseshare(roomies_nb):
    """Push a houseshare and related roomies to the server.

    Returns a 2-tuple whose first element is the created houseshare id and
    whose second is a list of created users belonging to this houseshare.
    """
    field = "h{}_{}{}"
    houseshare_id = 1
    roomies = []
    client = MyRoomiesClient('localhost', 8080)
    for i in range(roomies_nb):
        u = models.User()
        u.login = field.format(houseshare_id, "login", i)
        u.houseshare_id = houseshare_id
        password = field.format(houseshare_id, "password", i)
        u.password = password
        u.firstname = field.format(houseshare_id, "firstname", i)
        u.lastname = field.format(houseshare_id, "lastname", i)
        u.email = field.format(houseshare_id, "email", i) + "@host.com"
        u.date_of_birth = "19810619"
        user_str = json.dumps(u, cls=models.UserEncoder)
        headers = {}
        tools.add_admin_authorization_header(headers)
        status, data = client.request('POST', '/users', user_str, headers=headers)
        u = json.loads(data, object_hook=models.User.from_json)
        u.password = password
        roomies.append(u)
    client.close()
    return (houseshare_id, roomies)
def clientit2():
    """Receive a JSON-wrapped, base64-encoded file over TCP and save it."""
    client = socket()
    client.connect(('127.0.0.1', 4321))
    in_datas = bytes()
    # Receive in batches until the peer closes the connection.
    data = client.recv(4096)
    while data:
        in_datas += data
        data = client.recv(4096)
    my_dict = json.loads(in_datas.decode())
    filename = 'new_{}'.format(my_dict['filename'])
    filedata = my_dict['filedata'].encode()
    with open(filename, 'wb') as fw:
        fw.write(base64.b64decode(filedata))
    client.close()
def listenToClient(self, client, address):
    size = 1024
    print("Connected ", address)
    while True:
        try:
            data = client.recv(size)
            if data:
                # Acknowledge the received data before forwarding it.
                response = 'ok'
                client.send(response.encode())
                obj = pickle.loads(data)
                test = obj.split(',')
                print(test)
                xml_request = str(xml_head + make_body(test[0], test[1]) + xml_tail)
                conn.request("POST", "/epcis/Service/EventCapture",
                             headers=headers, body=xml_request.encode('utf-8'))
                res = conn.getresponse()
                data = res.read()
                print(data.decode("utf-8"))
            else:
                # An empty read means the peer closed the connection.
                raise ConnectionError('Client disconnected')
        except Exception:
            client.close()
            return False
def get_department_links(url):
    """Gets department links from the course index page.

    Args:
        url: The URL of the index page containing a list of departments.

    Returns:
        A set of department links found on the page.

    Raises:
        Exception: If an error occurred fetching the list of department links.
    """
    client = http.client.HTTPConnection(url.netloc)
    client.request('GET', url.path)
    response = client.getresponse()
    if response.status != 200:
        raise Exception('Error reading index: %d %s'
                        % (response.status, response.read()))
    tree = lxml.html.fromstring(response.read())
    client.close()
    depts = tree.xpath(
        '/html/body/*/*/*/*/div[contains(@class, "uw-content")]//li/a')
    return set([i.get('href') for i in depts])
def tweet(self, message):
    authorization_params = {
        "oauth_consumer_key": self.consumer_key,
        "oauth_nonce": self.oauth_nonce,
        "oauth_signature": self.oauth_sign(message),
        "oauth_signature_method": self.oauth_signature_method,
        "oauth_timestamp": self.oauth_timestamp,
        "oauth_token": self.access_token,
        "oauth_version": self.oauth_version,
    }
    oauth_header = ", ".join([
        f'{p}="{encode(authorization_params[p])}"'
        for p in sorted(authorization_params.keys())
    ])
    client = http.client.HTTPSConnection("api.twitter.com")
    client.request(
        "POST",
        "/1.1/statuses/update.json",
        body=f"status={encode(message)}",
        headers={
            "Authorization": f"OAuth {oauth_header}",
            "Content-Type": "application/x-www-form-urlencoded",
        },
    )
    response = client.getresponse()
    print(response.status, response.reason)
    data = response.read()
    print(data)
    client.close()
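# Hypothetical sketch of the `encode` helper that tweet() assumes but does not
# define here. OAuth 1.0a requires strict RFC 3986 percent-encoding, which
# urllib.parse.quote provides with safe=''.
def encode(value):
    # Percent-encode everything outside the unreserved character set.
    return urllib.parse.quote(str(value), safe='')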
def assertHttpContent(self, route: str):
    page_name = route.split("/")[-1]
    print("Connecting to " + self.host + ':' + self.port + '/' + route.lstrip("/"))
    client = http.client.HTTPConnection(self.host + ':' + self.port)
    client.request("GET", route)
    response = client.getresponse()
    print(response.status, response.reason)
    self.assertEqual(response.status, 200,
                     "Should have found page " + self.host + ':' + self.port
                     + '/' + route.lstrip("/"))
    contents = response.read().decode('utf-8')
    client.close()
    self.assertIsNotNone(contents)
    self.assertTrue(page_name in contents)
    self.assertTrue('POM_MODEL_VERSION=4.0.0' in contents)
    self.assertTrue('POM_GROUP_ID=org.example' in contents)
    self.assertTrue('POM_ARTIFACT_ID=mkdocs-pom-parser-plugin' in contents)
    self.assertTrue('POM_PACKAGING=jar' in contents)
    self.assertTrue('POM_VERSION=1.0.0-SNAPSHOT' in contents)
    self.assertTrue('POM_NAME=mkdocs-pom-parser-plugin name' in contents)
    self.assertTrue('POM_DESCRIPTION=Some description' in contents)
    self.assertTrue('POM_URL=https://github.com' in contents)
    self.assertTrue('POM_SCM_CONNECTION=scm:git:git://github.com' in contents)
    self.assertTrue(escape('<groupId>org.example</groupId>') in contents)
    self.assertTrue(escape('<artifactId>mkdocs-pom-parser-plugin</artifactId>') in contents)
    self.assertTrue(escape('<version>1.0.0-SNAPSHOT</version>') in contents)
def _rest_method(self, endpoint, method, data=None):
    encoded_data = urllib.parse.urlencode(data) if data else None
    client = http.client.HTTPSConnection(get_hostname(self.token))
    client.request(method.upper(), endpoint, encoded_data,
                   headers=get_authorization_header(self.token))
    response = client.getresponse()
    data = response.read().decode('utf-8')
    client.close()
    return data
def make_get(client, domain, note):
    '''
    Make an HTTP GET request and return the important bits of information
    as a dictionary.

    :param client: The instance of http.client.HTTPConnection for making
        the request with.
    :param domain: The value of the ``Host`` field of the GET request.
    :param note: The string 'eoff' or 'eon'. Used as part of the keys in
        the returned dictionary.
    '''
    if note not in ['eoff', 'eon']:
        raise ValueError('Unsupported value for note: {}.'.format(note))
    logger = logging.getLogger('default')
    h = {'User-Agent': USER_AGENT, 'Connection': 'close'}
    if domain is not None:
        h['Host'] = domain
    d = {}  # Dictionary of values to be logged to the CSV output file.
    err_name = 'http_err_' + note
    stat_name = 'status_' + note
    hdr_name = 'headers_' + note
    try:
        client.request('GET', '/', headers=h)
        r = client.getresponse()
        client.close()
        logger.debug('Request for {} ({}) returned status code {}.'.format(
            client.host, note, r.status))
        d[stat_name] = r.status
        if ARGS.save_headers:
            d[hdr_name] = r.getheaders()
        else:
            d[hdr_name] = None
        d[err_name] = None
    except OSError as e:
        if e.errno is None:
            logger.error('Request for {} failed (errno None): {}'.format(
                client.host, e))
            d[err_name] = str(e)
        else:
            logger.error('Request for {} failed (with errno): {}'.format(
                client.host, e.strerror))
            d[err_name] = e.strerror
        d[stat_name] = None
        d[hdr_name] = None
    except Exception as e:
        logger.error('Request for {} failed ({}): {}.'.format(
            client.host, type(e), e))
        d[err_name] = str(e)
        d[stat_name] = None
        d[hdr_name] = None
    return d
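# A minimal usage sketch for make_get. The host is hypothetical, and the
# sketch assumes the module globals USER_AGENT and ARGS are set as in the
# original script.
def demo_make_get():
    conn = http.client.HTTPConnection('example.com', timeout=10)
    return make_get(conn, 'example.com', 'eon')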
def do_POST(self):
    print("incoming http: ", self.path)
    content_length = int(self.headers['Content-Length'])  # Size of the posted data
    post_data = self.rfile.read(content_length)  # The data itself
    self.send_response(200)
    # The original ended with a stray `client.close()` on an undefined name;
    # finishing the response headers is what the handler actually needs.
    self.end_headers()
def sendMessage(m, sessionid, type="query"):
    client = http.client.HTTPConnection(janet_host, janet_port)
    client.connect()
    headers = {'Content-type': 'application/json'}
    # If you are wondering why there is an '&' at both the beginning and the
    # end: no idea, but that is how it works.
    query = '&content=' + m + '&user_id=' + sessionid + '&type=' + type + '&'
    json_data = json.dumps(query)
    client.request("POST", "/api", json_data, headers)
    response = client.getresponse()
    responseString = response.read().decode('utf-8')
    client.close()
    return responseString
def get_all_funds():
    o = urllib.parse.urlparse(all_fund_url)
    client = http.client.HTTPConnection(o.hostname, o.port)
    client.request('GET', o.path)
    response = client.getresponse()
    # HTTPResponse exposes .status, not .code.
    if response.status != 200:
        print('get_all_funds got error: {}'.format(response.status))
        raise Exception('http error', response.status)
    r = response.read()
    client.close()
    funds = r.split(b' = ')
    funds = funds[-1].decode('utf-8')
    funds = funds[:-1]  # Drop the trailing ';' from the JavaScript assignment.
    fund_arr = ast.literal_eval(funds)
    cur = sql.cursor()
    try:
        with sql:
            for fund in fund_arr:
                cur.execute('SELECT name FROM fund WHERE code = ?', (fund[0], ))
                name = cur.fetchone()
                if name is None:
                    print('found new fund {0} {1}'.format(fund[0], fund[2]))
                    cur.execute('INSERT INTO fund VALUES(?,?,?,?,?,?,?,?)',
                                (fund[0], fund[1], fund[2], fund[3], fund[4], 0, 0, 0))
                elif name[0] != fund[2]:
                    print('found modified fund {0} {1}'.format(fund[0], fund[2]))
                    cur.execute('INSERT INTO fund VALUES(?,?,?,?,?,?,?,?)',
                                (fund[0], fund[1], fund[2], fund[3], fund[4], 0, 0, 0))
                futures.append(thread_pool.submit(get_fund, fund[2], fund[0]))
    except Exception as err:
        print('got error when getting all funds')
        print(err)
        raise
    print('init funds finished..')
def _http_method(self, endpoint, op, params, data=None, method='GET', headers=None):
    headers = headers if headers else {}
    params.update(authid=self.authid, fmt='json', op=op)
    encoded_params = urllib.parse.urlencode(params)
    url_path = self.url_path.format(endpoint=endpoint, params=encoded_params)
    client = http.client.HTTPSConnection(self.hostname)
    client.request(method.upper(), url_path, data, headers)
    response = client.getresponse()
    data = json.loads(response.read().decode('utf-8'))
    client.close()
    return data
def extract_dump(url):
    data = urllib.request.urlopen(url[1]).read().decode("utf-8", errors='ignore')
    tree = etree.HTML(data)
    date = tree.xpath("//meta[@name='DC.date.issued']")[0].get("content")
    keywords = tree.xpath("//meta[@name='keywords']")[0].get("content")
    data = tree.xpath("//*[@id='article-block']/div/p/text()")
    data = "".join(data)
    client = MongoClient()
    db = client.articles
    coll = db.business_line
    coll.insert_one({"date": date, "keywords": keywords, "article": data,
                     "company_name": url[0]})
    client.close()
def extract_dump(url):
    data = urllib.request.urlopen(url[1]).read().decode("utf-8", errors='ignore')
    tree = etree.HTML(data)
    date = tree.xpath("//meta[@name='DC.date.issued']")[0].get("content")
    keywords = tree.xpath("//meta[@name='keywords']")[0].get("content")
    data = tree.xpath("//*[@id='article-block']/div/p/text()")
    data = "".join(data)
    client = MongoClient()
    db = client.articles
    coll = db.business_line_1
    coll.insert_one({"date": date, "keywords": keywords, "article": data,
                     "company_name": url[0]})
    client.close()
def upload_file(self, folder_id, file_name):
    folder, name = os.path.split(file_name)
    upload_url = self._get_upload_url(folder_id, name)
    added_query = urllib.parse.urlencode({'raw': 1, 'filename': name})
    url = urllib.parse.urlparse(upload_url)
    client = http.client.HTTPSConnection(url.netloc)
    with open(file_name, 'rb') as file_data:
        path = '{}?{}&{}'.format(url.path, url.query, added_query)
        client.request('POST', path, file_data,
                       {'content-type': 'application/octet-stream'})
        response = client.getresponse().read()
    client.close()
    return response
def _streamingProc(self):
    headers = {
        'Authorization': "Bearer {0}".format(self.auth_token),
        'Accept': 'text/event-stream'
    }
    url = NEST_API_URL
    retries = urllib3.util.retry.Retry(remove_headers_on_redirect=[])
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    try:
        response = http.request('GET', url, headers=headers,
                                preload_content=False, retries=retries)
    except Exception as e:
        LOGGER.error('REST Streaming Request Failed: {}'.format(e))
        http.clear()
        return False
    client = sseclient.SSEClient(response)
    for event in client.events():  # returns a generator
        event_type = event.event
        self.stream_last_update = int(time.time())
        if event_type == 'open':  # not always received here
            LOGGER.debug('The event stream has been opened')
        elif event_type == 'put':
            LOGGER.debug('The data has changed (or initial data sent)')
            event_data = json.loads(event.data)
            self.data = event_data['data']
            for node in self.nodes:
                self.nodes[node].update()
        elif event_type == 'keep-alive':
            LOGGER.debug('No data updates. Receiving an HTTP header to keep '
                         'the connection open.')
        elif event_type == 'auth_revoked':
            LOGGER.warning('The API authorization has been revoked. {}'.format(
                event.data))
            self.auth_token = None
            cust_data = {}
            self.saveCustomData(cust_data)
            client.close()
            return False
        elif event_type == 'error':
            LOGGER.error('Error occurred, such as connection closed: {}'.format(
                event.data))
            client.close()
            return False
        elif event_type == 'cancel':
            LOGGER.warning('Cancel event received, restarting the thread')
            client.close()
            return False
        else:
            LOGGER.error('REST Streaming: Unhandled event {} {}'.format(
                event_type, event.data))
            client.close()
            return False
    LOGGER.warning('Streaming Process exited')
def clientit():
    """
    received: 2019-05-28 14:37:07.234834
    :return:
    """
    client = socket()
    client.connect(('127.0.0.1', 4321))
    data = client.recv(4096).decode('utf-8')
    print('received: {}'.format(data))
    client.close()
def RedirectRequest(self, context: RequestContext, endpoint: RoutingEndpoint):
    print("--->In Request Redirect Manager\n Request redirected to http://"
          + endpoint.Host + ":" + str(endpoint.Port) + endpoint.Url)
    client = http.client.HTTPConnection(endpoint.Host, endpoint.Port)
    self._log.LogHttpInfo(endpoint, context.Command)
    payload = None if context.Command == 'GET' else self.AddPayloadToken(
        context.RequestMessage.read(int(context.Header['Content-Length'])))
    client.request(context.Command, endpoint.Url, payload,
                   {'Content-type': 'application/json'})
    response = client.getresponse()
    context.SetResponse(response.status, response.reason, response.read())
    client.close()
    return context
def backupeverday():
    client = None  # Ensure the name exists for the finally block.
    try:
        url = []
        url.append("192.168.0.126")
        url.append(8070)
        data = ("targets=exam,train,vrexam&startday="
                + time.strftime('%Y%m%d',
                                time.localtime(time.time() - 24 * 60 * 60 * 100))
                + "&days=1")
        url.append("schedule/backup?" + data)
        client = http.client.HTTPConnection(url[0], url[1], timeout=30)
        client.request('GET', "/" + url[2])
        response = client.getresponse()
        print(response.status)
    except Exception as e:
        print('except:', repr(e))
    finally:
        if client:
            client.close()
def login():
    global userName
    global userPass
    global storageUrl
    global authToken
    client = http.client.HTTPConnection("ocs-pl.oktawave.com")
    client.request("GET", "/auth/v1.0", None, {
        "X-Auth-User": userName,
        "X-Auth-Key": userPass
    })
    authResp = client.getresponse()
    authToken = authResp.headers["X-Auth-Token"]
    storageUrl = urlparse(authResp.headers["X-Storage-Url"])
    print("Auth: ", authResp.status, authResp.reason)
    print(authResp.headers["X-Auth-Token"])
    print(authResp.headers["X-Storage-Url"])
    client.close()
def get_token(self):
    uri_path = '/oauth/token'
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    params = {
        'grant_type': 'password',
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'username': self.username,
        'password': self.password
    }
    client = http.client.HTTPSConnection(self.hostname)
    client.request('POST', uri_path, urllib.parse.urlencode(params), headers=headers)
    response = client.getresponse()
    if response.status == 200:
        self.token = json.loads(response.read().decode('utf-8'))
    client.close()
    return self.token
def backupold():
    client = None  # Ensure the name exists for the finally block.
    try:
        url = []
        date = _get_date()[0]
        print(date)
        url.append("api.junruizx.com")
        url.append(80)
        data = "targets=exam,train,vrexam&startday=" + date + "&days=1"
        url.append("schedule/backup?" + data)
        client = http.client.HTTPConnection(url[0], url[1], timeout=30)
        client.request('GET', "/" + url[2])
        response = client.getresponse()
        _set_date(_add_date(date))
        print(response.status)
    except Exception as e:
        print('except:', repr(e))
    finally:
        if client:
            client.close()
def extract(url):
    data = None
    for _ in range(5):
        try:
            data = urllib.request.urlopen(url[0]).read()
            break
        except http.client.IncompleteRead:
            pass
    if data is None:
        # All five attempts failed; the original would have raised a NameError here.
        raise RuntimeError('Failed to fetch {} after 5 attempts'.format(url[0]))
    data = data.decode("utf-8", errors='ignore')
    data = data.replace("</p>", "")
    data = data.replace("<p>", "")
    soup = BeautifulSoup(data, 'html.parser')
    d = soup.find_all('div', class_='arti_cont')
    ds = BeautifulSoup(str(d), 'html.parser')
    mt = ds.find_all('div', class_='MT20')
    dmt = BeautifulSoup(str(mt), 'html.parser')
    for i in dmt.find_all('style'):
        i.extract()
    for i in dmt.find_all('script'):
        i.extract()
    text = dmt.getText()
    text = text.replace("[", "").replace("]", "").strip()
    tree = etree.HTML(data)
    key_words = tree.xpath("//meta[@name='news_keywords']")[0].get("content")
    date = tree.xpath("//meta[@http-equiv='Last-Modified']")[0].get("content")
    date = date.split()
    dat = date[0] + date[1]
    parsed = dateparser.parse(dat)
    s = '{}-{}-{}'.format(parsed.year, parsed.month, parsed.day)
    dmm = arrow.get(s).format('YYYY-MM-DD')
    client = MongoClient()
    db = client.articles
    coll = db.money_control
    coll.insert_one({
        "date": dmm,
        "keywords": key_words,
        "article": text,
        "company": url[1]
    })
    client.close()
def top_ten(subreddit):
    """Get the 10 top posts on a Subreddit."""
    path = '/r/' + urllib.parse.quote(subreddit, safe='') + '/hot.json'
    path += '?raw_json=1&limit=10'
    client = http.client.HTTPSConnection('www.reddit.com')
    client.connect()
    client.putrequest('GET', path)
    client.putheader('Connection', 'close')
    client.putheader('User-Agent', 'python:hbtn701t1:1 (by /u/SamHermesBoots)')
    client.endheaders()
    response = client.getresponse()
    if response.status != 200:
        client.close()
        print(None)
        return
    posts = json.load(io.TextIOWrapper(response, encoding='UTF-8'))
    client.close()
    for post in posts['data']['children']:
        print(post['data']['title'])
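# A minimal usage sketch for top_ten; the subreddit name is hypothetical.
# Without OAuth, reddit may answer 302 or 429 rather than 200, in which case
# the function prints None by design.
def demo_top_ten():
    top_ten('programming')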
def test_checking_lack_of_origin_succeeds(self):
    server = self.loop.run_until_complete(
        serve(handler, 'localhost', 8642, origins=['']))
    client = self.loop.run_until_complete(connect('ws://localhost:8642/'))
    self.loop.run_until_complete(client.send("Hello!"))
    self.assertEqual(self.loop.run_until_complete(client.recv()), "Hello!")
    self.loop.run_until_complete(client.close())
    server.close()
    self.loop.run_until_complete(server.wait_closed())
def get_fund_companies():
    o = urllib.parse.urlparse(all_fund_company_url)
    client = http.client.HTTPConnection(o.hostname, o.port)
    client.request('GET', o.path)
    response = client.getresponse()
    # HTTPResponse exposes .status, not .code.
    if response.status != 200:
        print('get_fund_companies got error: {}'.format(response.status))
        raise Exception('http error', response.status)
    r = response.read()
    client.close()
    comps = r.split(b'=')
    comps = comps[-1].decode('utf-8')
    op = 'op'
    comp_dict = eval(comps)
    cur = sql.cursor()
    try:
        with sql:
            for comp in comp_dict[op]:
                cur.execute("SELECT name FROM fund_company WHERE code = ?", (comp[0], ))
                exist_comp = cur.fetchone()
                if exist_comp is None:
                    print('found newly added company {0} {1}'.format(comp[1], comp[0]))
                    cur.execute('INSERT INTO fund_company VALUES(?, ?)',
                                (comp[0], comp[1]))
                elif exist_comp[0] != comp[1]:
                    print('modifying company code: {0} from {1} to {2}'.format(
                        comp[0], exist_comp, comp[1]))
                    cur.execute('INSERT INTO fund_company VALUES(?, ?)',
                                (comp[0], comp[1]))
    except Exception as err:
        print('get_fund_companies error on INSERT INTO')
        print(err)
        raise
    print('init companies finished...')
def test_connection(self):
    """Test connectivity via the selected connector."""
    if self._connector.__name__ == "_tcp_connector":
        try:
            client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            ip, port = self._connection_string.split(":")
            client.connect((ip, int(port)))
            client.close()
            return True
        except Exception as err:
            print(f"Error: Cannot connect to Rspamd: {err} : {RSPAMD_HTTP_SOCKET}")
            return False
    else:
        try:
            client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            client.connect(self._connection_string)
            client.close()
            return True
        except Exception as err:
            print(f"Error: Cannot connect to Rspamd: {err} : {RSPAMD_HTTP_SOCKET}")
            return False
def getF(self, file):
    print("Opening socket")
    client = http.client.HTTPConnection(self.server)
    # The cache does not need custom headers, so a standard request can be
    # used; otherwise use .putrequest, .putheader, .endheaders.
    print("Sending request")
    # Put together the request.
    client.request("GET", "/CS423" + file)
    # Receive the response.
    resp = client.getresponse()
    print("opening file")
    print("Writing response to file")
    print(resp.status, resp.reason)
    data = resp.read()
    with open("./cache/" + file, "wb+") as out:  # Avoid shadowing the `file` parameter.
        out.write(data)
    # Cleanup.
    resp.close()
    client.close()
    return
def main():
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    args = parse_command_line()
    client = HTTPClient()
    for arg in args:
        try:
            response = client.fetch(arg, follow_redirects=options.follow_redirects)
        except HTTPError as e:
            if e.response is not None:
                response = e.response
            else:
                raise
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(response.body)
    client.close()
def _unix_connector(self, message):
    """
    :param message: bytes
    """
    CRLF = "\r\n"
    init_line = ["POST /checkv2 HTTP/1.1"]
    self.add_header("Content-Length", len(message))
    headers = init_line + [f"{header[0]}: {header[1]}" for header in self._headers]
    headers = (CRLF.join(headers) + 2 * CRLF).encode("utf8")
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(self._connection_string)
    raddr = client.getpeername()
    # These prints rely on a project-local print wrapper that accepts on_debug.
    print(f"{self.msg_id}: localhost -> {raddr}: Connected to maild.", on_debug=True)
    print(f"{self.msg_id}: localhost -> {raddr}: Send message to maild.", on_debug=True)
    client.send(headers + message)
    print(f"{self.msg_id}: localhost <- {raddr}: Waiting for response from maild.",
          on_debug=True)
    # Note: a single recv(1024) may truncate responses larger than 1 KiB.
    rspamd_result = client.recv(1024)
    if not rspamd_result:
        return {"error": "Error: Rspamd server is not responding"}
    headers, body = rspamd_result.decode("utf8").split("\r\n\r\n")
    client.close()
    return json.loads(body)
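# A hedged sketch of a drain-the-socket read that _unix_connector could use in
# place of its single recv(1024). The helper name is hypothetical, and it
# assumes the server closes the connection after replying (otherwise the loop
# would block waiting for more data).
def _recv_all(client):
    chunks = []
    while True:
        chunk = client.recv(4096)
        if not chunk:  # Empty read: peer closed the connection.
            break
        chunks.append(chunk)
    return b"".join(chunks)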
def recurse(subreddit, hot_list=[], after=None, client=None):
    """Get all hot posts on a Subreddit."""
    path = '/r/' + urllib.parse.quote(subreddit, safe='') + '/hot.json'
    path += '?raw_json=1'
    if after is not None:
        path += '&after=' + urllib.parse.quote_plus(after)
        path += '&count=' + str(len(hot_list))
    if client is None:
        client = http.client.HTTPSConnection('www.reddit.com')
        client.connect()
    client.putrequest('GET', path)
    client.putheader('Connection', 'keep-alive')
    client.putheader('User-Agent', 'python:hbtn701t2:1 (by /u/SamHermesBoots)')
    client.endheaders()
    response = client.getresponse()
    if response.status != 200:
        client.close()
        return None
    posts = json.load(io.TextIOWrapper(response, encoding='UTF-8'))
    if response.getheader('Connection', 'close') == 'close':
        client.close()
        client = None
    hot_list.extend(p['data']['title'] for p in posts['data']['children'])
    if posts['data']['after'] is None:
        if client is not None:  # May already have been closed and cleared above.
            client.close()
        return hot_list
    return recurse(subreddit, hot_list, posts['data']['after'], client)
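# A minimal usage sketch for recurse (hypothetical subreddit). Pass a fresh
# list explicitly: the mutable default argument accumulates across calls.
def demo_recurse():
    titles = recurse('programming', hot_list=[])
    print(len(titles) if titles is not None else 'subreddit not found')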
def serverit2():
    """Serve a base64-encoded file to each client in a worker thread."""
    class FileTransferHandler(Thread):
        def __init__(self, cclient):
            super().__init__()
            self.cclient = cclient

        def run(self) -> None:
            my_dict = {}
            my_dict['filename'] = 'guido.jpg'
            my_dict['filedata'] = data
            json_str = json.dumps(my_dict)
            self.cclient.send(json_str.encode())
            self.cclient.close()

    # SOCK_STREAM = tcp, SOCK_DGRAM = udp
    server = socket(family=AF_INET, type=SOCK_STREAM)
    server.bind(("127.0.0.1", 4321))
    # queue size
    server.listen(512)
    with open('guido.jpg', 'rb') as f:
        raw_data = f.read()
        data = base64.b64encode(raw_data).decode()
    while True:
        client, addr = server.accept()
        print('{} connected'.format(addr))
        FileTransferHandler(client).start()
        # Do not close `client` here: the handler thread owns the socket and
        # closes it when the transfer finishes (the original closed it
        # immediately, racing the thread).
def connect(self):
    logger.debug("Opening SSH connection to {host}:{port}".format(
        host=self.host, port=self.port))
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(AutoAddPolicy())
    try:
        client.connect(
            self.host,
            port=self.port,
            username=self.username,
            timeout=self.timeout,
        )
    except ValueError as e:
        logger.error(e)
        logger.warning(
            """
            Patching Crypto.Cipher.AES.new and making another attempt.
            See here for the details:
            http://uucode.com/blog/2015/02/20/workaround-for-ctr-mode-needs-counter-parameter-not-iv/
            """)
        client.close()
        import Crypto.Cipher.AES
        orig_new = Crypto.Cipher.AES.new

        def fixed_AES_new(key, *ls):
            if Crypto.Cipher.AES.MODE_CTR == ls[0]:
                ls = list(ls)
                ls[1] = ''
            return orig_new(key, *ls)

        Crypto.Cipher.AES.new = fixed_AES_new
        client.connect(
            self.host,
            port=self.port,
            username=self.username,
            timeout=self.timeout,
        )
    return client
#!/usr/bin/python
import http.client
import sys
import argparse

usageString = "Usage: %(prog)s [options] hostname"
parser = argparse.ArgumentParser(usage=usageString)
# argparse expects a callable for type (type=int), not the string "int".
parser.add_argument("-p", "--port", dest="port", metavar="PORT",
                    default=80, type=int, help="Port to connect to")
# A required positional replaces the original's manual "Hostname is required" check.
parser.add_argument("hostname", help="Host to check")
# parse_args() returns a Namespace, not an (opts, args) tuple.
opts = parser.parse_args()

host = opts.hostname
port = opts.port

client = http.client.HTTPConnection(host, port)
client.request("GET", "/")
resp = client.getresponse()
client.close()

if resp.status == 200:
    print(host + " : OK")
    sys.exit()
print(host + " : DOWN! (" + str(resp.status) + " ," + resp.reason + ")")
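# Example invocation (hypothetical script name and host):
#   python check_host.py -p 8080 example.com
# prints "example.com : OK" on a 200 response, otherwise the DOWN line with
# the status code and reason.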