def ensure_index(event_mapping=None, base_url='http://localhost:9200'):
    """
    Creates the index from scratch, loading the event mapping if provided.

    :param event_mapping: optional path to a JSON file containing the
        ``event`` type mapping to install on the freshly created index.
    :param base_url: root URL of the Elasticsearch server. Defaults to the
        local development instance, matching the previous hard-coded value.
    :return: a Deferred that fires once the index (and, if requested, the
        mapping) has been set up.
    """
    # Build the index URL once instead of repeating the literal four times.
    index_url = base_url + '/history'

    d = treq.head(index_url)

    def clean(response):
        # A 200 on HEAD means the index already exists: drop it so the
        # subsequent PUT recreates it from scratch.
        if response.code == 200:
            print('deleting and recreating index')
            return treq.delete(index_url)

    def ensure(_):
        # (Re)create the index and print the server's JSON response.
        d = treq.put(index_url)
        d.addCallback(treq.json_content)
        return d.addCallback(print)

    d.addCallback(clean).addCallback(ensure)

    def put_mapping(_):
        print("Putting mapping {0}".format(event_mapping))
        # f.read() is synchronous, so the file is fully read before the
        # 'with' block closes it; the request body is safe.
        with open(event_mapping) as f:
            d = treq.put(index_url + '/event/_mapping', f.read())
            return d.addCallback(treq.json_content).addCallback(print)

    if event_mapping is not None:
        d.addCallback(put_mapping)

    return d
def test_cached_pool(self):
    """
    The first use of the module-level API populates the global
    connection pool, which is used for all subsequent requests.
    """
    pool = SyntacticAbominationHTTPConnectionPool()
    self.patch(
        treq.api, "HTTPConnectionPool", lambda reactor, persistent: pool
    )

    # Every module-level helper should hit the same (patched) pool, and
    # each request should fail with the pool's signature TabError.
    for method in (treq.head, treq.get, treq.post, treq.put, treq.delete):
        self.failureResultOf(method("http://test.com"), TabError)
    self.failureResultOf(
        treq.request("OPTIONS", "http://test.com"), TabError
    )

    self.assertEqual(pool.requests, 6)
def _head_request_to_json_api(self):
    """
    This is the initial request which checks whether the json document is
    even online anymore; during the interval, a lot can happen to the
    thread, most notably of all, it could 404. A head request to the json
    document keeps the overhead minimal as possible as the check, whether
    a download of the json data is necessary, depends on the returned
    headers.

    In the first run we don't pass the 'If-Modified-Since' header to the
    server since we didn't run it before. On all runs it needs to be
    updated before the LoopingCall calls start() again, that is,

    a) after the first run is finished and the thread exists
    b) in all following runs after the head request determines the thread
       exists

    :return: the Deferred for the HEAD request, with the success callback
        and the shared JSON errback already attached.
    """
    d = treq.head(self.api_url, headers=self.headers)
    d.addCallback(self._head_request_to_json_api_success)
    d.addErrback(self._json_failure)
    # Return the Deferred so callers (e.g. a LoopingCall) can wait on or
    # chain the request — consistent with _head_request_json_api. The
    # original dropped it, making the operation fire-and-forget.
    return d
def get_title(self, url):
    """
    Fetch *url* and extract the page title.

    Performs a HEAD request first (headers only), then a GET for the
    body, feeds both through ``handle_response``, and finally pulls the
    first ``<title>`` element's text out of the parsed tree with extra
    whitespace collapsed.

    :return: a Deferred firing with the normalized title string.
    """
    def fetch_body(_ignored):
        # Headers checked out; now download the actual document.
        return treq.get(url, timeout=30, headers=self.headers)

    def finish_parse(_ignored):
        # Close the incremental parser and obtain the document root.
        return self.parser.close()

    def first_title_text(root):
        return root.xpath("//title")[0].text

    def collapse_whitespace(title):
        # Normalize runs of whitespace to single spaces.
        return " ".join(title.split())

    d = treq.head(url, timeout=30, headers=self.headers)
    d.addCallback(self.handle_response, handle_body=False)
    d.addCallback(fetch_body)
    d.addCallback(self.handle_response, handle_body=True)
    d.addCallback(finish_parse)
    d.addCallback(first_title_text)
    d.addCallback(collapse_whitespace)
    return d
def check_status():
    # HEAD the server resource and treat a 404 as the expected outcome.
    url = append_segments(server_endpoint, 'servers', server_id)
    status_d = treq.head(url, headers=headers(auth_token))
    status_d.addCallback(check_success, [404])
    return status_d
def check_status():
    # Verify the server is gone: only a 404 counts as success.
    return treq.head(
        append_segments(server_endpoint, 'servers', server_id),
        headers=headers(auth_token),
    ).addCallback(check_success, [404])
def _head_request_json_api(self):
    """
    Issue a bare HEAD request to the 4chan api and return its Deferred.
    No callbacks are attached here.
    """
    return treq.head(self.api_url, headers=self.headers)