import json
import time
from urllib import parse

# MultiOp, Get, request and initiate_crawl are assumed to come from the
# test suite's shared helper module.


class StopPost(MultiOp):
    """ Test POST /stop """

    def setUp(self):
        self.get = Get()
        urls, response = initiate_crawl()
        json_response = json.loads(response['content'].decode())
        job_id = json_response['job_id']
        self.query_string = 'job_id=' + str(job_id)
        # Wait until the initiated crawl has begun.
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Running', 'Complete']))
        # Stop the crawl.
        self.response = request('POST', '/stop', self.query_string)

    def test_http_status(self):
        self.assertEqual(self.response['http_status'], '202 Accepted')

    def test_content(self):
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Aborted']))
        # Check that the total number of results does not increase over a
        # 3-second window once the crawl reports as aborted.
        response = request('GET', '/result', self.query_string)
        json_response = json.loads(response['content'].decode())
        len0 = len(json_response)
        time.sleep(3)
        response = request('GET', '/result', self.query_string)
        json_response = json.loads(response['content'].decode())
        len1 = len(json_response)
        self.assertEqual(len0, len1)
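# For reference, a minimal sketch of the `request` helper these tests rely
# on, assuming the service under test runs on a local host/port (both
# hypothetical; the real helper lives in the suite's shared module). It
# returns a dict exposing the 'http_status' line and raw 'content' bytes,
# matching how the tests above consume it.
import http.client


def request(method, path, query_string=''):
    conn = http.client.HTTPConnection('localhost', 8080)  # assumed test host
    conn.request(method, path + ('?' + query_string if query_string else ''))
    resp = conn.getresponse()
    content = resp.read()
    conn.close()
    return {'http_status': '%d %s' % (resp.status, resp.reason),
            'content': content}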
class ResultDelete(MultiOp):
    """ Test DELETE /result """

    def setUp(self):
        self.get = Get()
        urls, response = initiate_crawl()
        json_response = json.loads(response['content'].decode())
        job_id = json_response['job_id']
        self.query_string = 'job_id=' + str(job_id)
        # Wait until the initiated crawl has begun.
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Running', 'Complete']))
        # Stop the crawl and wait for it to report as aborted.
        response = request('POST', '/stop', self.query_string)
        self.assertEqual(response['http_status'], '202 Accepted')
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Aborted']))
        # Delete the results.
        for url in urls:
            self.response = request('DELETE', '/result',
                                    'url=' + parse.quote(url))

    def test_http_status(self):
        self.assertEqual(self.response['http_status'], '204 No Content')

    def test_success(self):
        response = request('GET', '/result', self.query_string)
        json_response = json.loads(response['content'].decode())
        self.assertEqual(0, len(json_response))
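# A hedged sketch of the polling behaviour Get.wait_for_passing_content is
# assumed to implement: GET the endpoint until the supplied response test
# passes or a timeout expires. The timeout and interval are illustrative
# defaults, not values from the real suite.
import time


def wait_for_passing_content(path, query_string, response_test,
                             timeout=30.0, interval=0.5):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        response = request('GET', path, query_string)
        if response_test(response):
            return response
        time.sleep(interval)
    raise AssertionError('timed out waiting on %s?%s' % (path, query_string))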
import json
import os
import time as t

from Get import Get  # assumed module layout: Get lives in Get.py


class Delete:
    """ Deletes a note given its ID """

    ID = float()
    notebook = Get().notebook()

    def delete_entry(self):
        # Archive the deleted note in the note archives, keyed by the
        # deletion timestamp.
        deleted_note = [
            note for note in self.notebook
            if note['ID'] == self.ID
        ]
        if len(deleted_note) > 0:
            with open('../Data/archived_notes.json', 'a') as f:
                json.dump({t.time(): deleted_note[0]}, f)
                f.write(os.linesep)

        # Delete the entry and rewrite the notebook, one JSON object per line.
        self.notebook = [
            note for note in self.notebook
            if note['ID'] != self.ID
        ]
        with open('../Data/notebook.json', 'w+') as f:
            for note in self.notebook:
                json.dump(note, f)
                f.write(os.linesep)


# if __name__ == "__main__":
#     delete = Delete()
#     delete.ID = float()
#     delete.delete_entry()
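# For context, a hedged sketch of what Get.notebook() (used above and by Put
# below) is assumed to do: since notes are stored one JSON object per line,
# the notebook can be re-read line by line. The real reader lives in Get.py;
# this standalone function is illustrative only.
import json


def read_notebook(path='../Data/notebook.json'):
    with open(path) as f:
        return [json.loads(line) for line in f if line.strip()]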
import json
import os

from Delete import Delete  # assumed module layout: one class per file
from Get import Get


class Put:
    """ Update a note entry """

    def __init__(self, ID=float()):
        self.ID = ID
        self.notebook = Get().notebook()
        # Pre-fill every field from the existing note (raises StopIteration
        # if the ID is unknown) so the caller only has to overwrite the
        # fields that actually change before calling edit().
        note = next(n for n in self.notebook if n['ID'] == self.ID)
        self.date = note['date']
        self.time = note['time']
        self.entry_type = note['entry_type']
        self.entry = note['entry']
        self.subnote = note['subnote']
        self.tags = note['tags']

    def edit(self):
        # Delete archives the old version before the new one is appended.
        delete = Delete()
        delete.ID = self.ID
        delete.delete_entry()

        edited_notebook_entry = {
            'ID': self.ID,
            'date': self.date,
            'time': self.time,
            'entry_type': self.entry_type,
            'entry': self.entry,
            'subnote': self.subnote,
            'tags': self.tags,
        }
        # Append to the same path Delete rewrites ('../Data/notebook.json');
        # the original 'Data/notebook.json' looked like a path typo.
        with open('../Data/notebook.json', 'a') as f:
            json.dump(edited_notebook_entry, f)
            f.write(os.linesep)
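# Hypothetical usage sketch (the ID and replacement text are illustrative
# values, not taken from the repo):
#
#     put = Put(ID=1.0)
#     put.entry = 'updated entry text'
#     put.edit()
#
# Only the overwritten fields change; every other field is re-saved as-is.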
import json

from Post import Post
from Get import Get

"""
GetCookie = GetCookie("http://passporttest.tclclouds.com/passport/login.html",
                      {"username_element_id_name": ["email", "*****@*****.**"],
                       "password_element_id_name": ["password", "test"],
                       "confirm_button_class_name": "btn-primary"},
                      "http://usercare-tcl-test.tclclouds.com/boss")
cookie = GetCookie.generateCookie()
GetCookie.close_driver()
"""

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0",
    "Content-Type": "application/json",
}

post = Post()
get = Get()

# Endpoint 1: log in via the SSO user action and print the session ID.
url = "http://10.115.101.239:5001/sso/user.action"
data = {"op": "Login.login", "email": "*****@*****.**", "pwd": "Aa123456"}
json_data = post.post_url_and_get_result(url, data=json.dumps(data), headers=headers)
print(json_data["data"]["sid"])

"""
data = {"name": "testsave222",
        "imageUrl": "http://ep.tclcom.com/_layouts/TCL.EP.GPortal.UI/images/logo.png",
        "contentUrl": "www.github1.com"}
# data = urllib.urlencode(data)
data = json.dumps(data)
print(data)
json_data = post.post_url_and_get_result(url, data, headers)
print(json_data)
"""
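# A minimal sketch of what Post.post_url_and_get_result is assumed to do:
# POST the serialized payload with the given headers and decode the JSON
# response. The real helper lives in Post.py; this version uses only the
# standard library.
import json
import urllib.request


def post_url_and_get_result(url, data, headers):
    req = urllib.request.Request(url, data=data.encode('utf-8'),
                                 headers=headers, method='POST')
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read().decode('utf-8'))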
# -*- coding: utf-8 -*-
from GetCookie import GetCookie
from UploadFile_post import UploadFile_post
from Get import Get

headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0"}

# Log in through the passport admin page and capture the session cookie.
get_cookie = GetCookie("http://passporttest.tclclouds.com/passport/admin/login",
                       {"username_element_id_name": ["email", "*****@*****.**"],
                        "password_element_id_name": ["password", "4ufkr1nd"]},
                       headers,
                       "http://tstream-test.tclclouds.com/cms/static/index.html")
cookie = get_cookie.generateCookie()

get = Get()
upload_file_post = UploadFile_post()

url = "http://tstream-test.tclclouds.com/cms/apk/add"
data = {"versionCode": 10, "versionName": "test", "description": "test", "appName": "test"}
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0",
           "Cookie": cookie}

# List the existing APKs, then upload a new one with its metadata.
print(get.get_json_result("http://tstream-test.tclclouds.com/cms/apk/list", headers=headers))
print(upload_file_post.get_upload_result(url,
                                         r"C:\Users\xiaobo.chi\Desktop\SpacePlus.apk",
                                         "uploadFile", data, headers))
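# A hedged sketch of what UploadFile_post.get_upload_result is assumed to do:
# multipart-upload the file under the given form field name, alongside the
# extra form fields and headers, and return the response body. Shown with the
# third-party `requests` library for brevity; the real helper lives in
# UploadFile_post.py.
import requests


def get_upload_result(url, file_path, field_name, data, headers):
    with open(file_path, 'rb') as f:
        resp = requests.post(url, files={field_name: f}, data=data,
                             headers=headers)
    return resp.text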