def get_task_log(self):
  """
  get_task_log() -> [stdout_text, stderr_text, syslog_text]

  Retrieve the task attempt log from the TaskTracker, at this url:
    http://<tracker_host>:<port>/tasklog?attemptid=<attempt_id>
  Optional query string:
    &filter=<source>  : where <source> is 'syslog', 'stdout', or 'stderr'.
    &start=<offset>   : specify the start offset of the log section, when using a filter.
    &end=<offset>     : specify the end offset of the log section, when using a filter.

  Returns a list of exactly three log-section strings.  If fewer than three
  <pre> sections are found in the TaskTracker page, the list is padded with
  an error message so callers can always unpack three values.

  Raises urllib2.URLError when the TaskTracker cannot be reached.
  """
  tracker = self.get_tracker()
  # urlunparse takes (scheme, netloc, path, params, query, fragment).
  url = urlunparse(
    ('http', '%s:%s' % (tracker.host, tracker.httpPort),
     'tasklog', None,
     'attemptid=%s' % (self.attemptId, ), None))
  LOG.info('Retrieving %s' % (url, ))
  try:
    data = urllib2.urlopen(url, timeout=REST_CONN_TIMEOUT.get())
  except urllib2.URLError:
    # Re-raise with a user-facing message identifying the tracker.
    raise urllib2.URLError(
      _("Cannot retrieve logs from TaskTracker %(id)s.") %
      {'id': self.taskTrackerId})

  # The tasklog page renders each log source (stdout/stderr/syslog) in its
  # own <pre> element under <body>.
  et = lxml.html.parse(data)
  log_sections = et.findall('body/pre')
  logs = [section.text or '' for section in log_sections]
  if len(logs) < 3:
    # Pad to three entries so the caller's unpacking never fails.
    LOG.warn(
      'Error parsing task attempt log for %s at "%s". Found %d (not 3) log sections' %
      (self.attemptId, url, len(log_sections)))
    err = _("Hue encountered an error while retrieving logs from '%s'.") % (url, )
    logs += [err] * (3 - len(logs))
  return logs
def put(self, relpath=None, params=None, data=None, contenttype=None,
        allow_redirects=False, clear_cookies=False, timeout=None):
  """
  Mock HTTP PUT: mirrors the real client's signature, ignores every
  argument, and always returns a canned MockedResponse.

  The original default was ``timeout=REST_CONN_TIMEOUT.get()``, which is
  evaluated once at import time, so configuration changes made afterwards
  were silently ignored.  ``timeout=None`` now means "resolve the configured
  timeout at call time", which is backward-compatible for all callers.
  """
  if timeout is None:
    timeout = REST_CONN_TIMEOUT.get()  # resolve lazily, per call
  return MockedResponse()
def _do_put(self, uri, data):
  """
  Send a PUT request to ``uri`` with an optional raw payload.

  Returns the response body as a string.  A 412 (Precondition Failed)
  from the server indicates a version-check mismatch and is raised as
  ZooKeeper.WrongVersion; any other HTTPError propagates unchanged.
  """
  try:
    req = RequestWithMethod(uri)
    req.set_method('PUT')
    req.add_header('Content-Type', 'application/octet-stream')
    if data is not None:
      req.add_data(data)
    return urllib2.urlopen(req, timeout=REST_CONN_TIMEOUT.get()).read()
  # 'except ... as e' (PEP 3110) replaces the legacy 'except ..., e' form:
  # valid on Python 2.6+ and required on Python 3, and consistent with the
  # rest of this file.
  except urllib2.HTTPError as e:
    if e.code == 412:  # precondition failed -> stale znode version
      raise ZooKeeper.WrongVersion(uri)
    raise
def _do_get(self, uri):
  """
  Send a GET request to ``uri`` and convert errors to exceptions.

  Returns the parsed JSON response.  A JSON body containing an 'Error' key
  is raised as ZooKeeper.Error; an HTTP 404 is raised as ZooKeeper.NotFound;
  any other HTTPError propagates unchanged.
  """
  try:
    req = urllib2.Request(uri)
    req.add_header("Accept", "application/json")
    r = urllib2.urlopen(req, timeout=REST_CONN_TIMEOUT.get())
    resp = json.load(r)
    if 'Error' in resp:
      raise ZooKeeper.Error(resp['Error'])
    return resp
  # PEP 3110 'as' form: Python 3 compatible, consistent with this file's
  # urllib.request-based variant.
  except urllib2.HTTPError as e:
    if e.code == 404:
      raise ZooKeeper.NotFound(uri)
    raise
def _do_get(self, uri):
  """
  Send a GET request to ``uri`` and convert errors to exceptions.

  Returns the parsed JSON response.  A JSON body containing an 'Error' key
  is raised as ZooKeeper.Error; an HTTP 404 is raised as ZooKeeper.NotFound;
  any other HTTPError propagates unchanged.
  """
  try:
    req = urllib2.Request(uri)
    # (dropped a stray trailing semicolon here -- not idiomatic Python)
    req.add_header("Accept", "application/json")
    r = urllib2.urlopen(req, timeout=REST_CONN_TIMEOUT.get())
    resp = json.load(r)
    if 'Error' in resp:
      raise ZooKeeper.Error(resp['Error'])
    return resp
  # PEP 3110 'as' form: Python 3 compatible, consistent with this file's
  # urllib.request-based variant.
  except urllib2.HTTPError as e:
    if e.code == 404:
      raise ZooKeeper.NotFound(uri)
    raise
def _do_post(self, uri, data=None):
  """
  Send a POST request to ``uri`` and convert errors to exceptions.

  Returns the parsed JSON response (or True on HTTP 201 Created).  A JSON
  body containing an 'Error' key is raised as ZooKeeper.Error; 409 becomes
  ZooKeeper.ZNodeExists and 401 becomes ZooKeeper.InvalidSession; any other
  HTTPError propagates unchanged.
  """
  try:
    # Pass empty bytes (not a dict) so urlopen issues a POST even with no
    # payload; the original ``Request(uri, {})`` raises TypeError on
    # Python 3 because ``data`` must be bytes or a bytes iterable.
    req = urllib.request.Request(uri, data=b'')
    req.add_header('Content-Type', 'application/octet-stream')
    if data is not None:
      # Request.add_data() was removed in Python 3; assign .data instead.
      req.data = data
    resp = json.load(
        urllib.request.urlopen(req, timeout=REST_CONN_TIMEOUT.get()))
    if 'Error' in resp:
      raise ZooKeeper.Error(resp['Error'])
    return resp
  except urllib.error.HTTPError as e:
    if e.code == 201:  # created -- treated as success
      return True
    elif e.code == 409:
      raise ZooKeeper.ZNodeExists(uri)
    elif e.code == 401:
      raise ZooKeeper.InvalidSession(uri)
    raise
def _do_post(self, uri, data=None):
  """
  Send a POST request to ``uri`` and convert errors to exceptions.

  Returns the parsed JSON response (or True on HTTP 201 Created).  A JSON
  body containing an 'Error' key is raised as ZooKeeper.Error; 409 becomes
  ZooKeeper.ZNodeExists and 401 becomes ZooKeeper.InvalidSession; any other
  HTTPError propagates unchanged.
  """
  try:
    # Empty-dict data forces urllib2 to issue a POST even with no payload.
    req = urllib2.Request(uri, {})
    req.add_header('Content-Type', 'application/octet-stream')
    if data is not None:
      req.add_data(data)
    resp = json.load(urllib2.urlopen(req, timeout=REST_CONN_TIMEOUT.get()))
    if 'Error' in resp:
      raise ZooKeeper.Error(resp['Error'])
    return resp
  # PEP 3110 'as' form: Python 3 compatible, consistent with this file's
  # urllib.request-based variant.
  except urllib2.HTTPError as e:
    if e.code == 201:  # created -- treated as success
      return True
    elif e.code == 409:
      raise ZooKeeper.ZNodeExists(uri)
    elif e.code == 401:
      raise ZooKeeper.InvalidSession(uri)
    raise
def _do_delete(self, uri):
  """
  Issue a DELETE request against ``uri`` and return the response body.

  Errors from urlopen propagate to the caller unhandled.
  """
  request = RequestWithMethod(uri)
  request.set_method('DELETE')
  request.add_header('Content-Type', 'application/octet-stream')
  response = urllib2.urlopen(request, timeout=REST_CONN_TIMEOUT.get())
  return response.read()