def extract_file_aod(ts_obj, file_obj, curr_dir, pressure_file, psurf_file, label_prefix, aerosol_re, use_sub_types=None):
    """Compute aerosol optical depth (AOD) values for one data file.

    Reads the pressure column from file_obj, optionally remaps it onto the
    grid from pressure_file and rescales by a surface pressure from
    psurf_file, collects every aerosol column matching aerosol_re, and
    delegates to calculate_aod_values.

    :param ts_obj: cache provider exposing get_cached_file(dir, name, ...)
    :param file_obj: mapping-style data file with .labels_lower and .filename
    :param curr_dir: directory used for cached-file lookups
    :param pressure_file: name of the output-pressure-grid file (required)
    :param psurf_file: name of the surface-pressure file (optional)
    :param label_prefix: label prefix forwarded to calculate_aod_values
    :param aerosol_re: one regex string, or an iterable of regex strings,
        matched case-insensitively against column labels
    :param use_sub_types: forwarded to calculate_aod_values
    :return: result of calculate_aod_values, or None when a required
        column lookup fails (the failure is logged, not raised)
    """
    press_obj = ts_obj.get_cached_file(curr_dir, pressure_file, required=True)
    psurf_obj = ts_obj.get_cached_file(curr_dir, psurf_file)

    try:
        in_press_vals = file_obj[PRESSURE_COLUMN][:, 0]
    except LookupError:
        logger.error('For file "%s" LookupError: %s' % (file_obj.filename, traceback.format_exception_only(*sys.exc_info()[0:2])))
        return None

    # Identity comparison with None instead of != / == (PEP 8).
    if press_obj is not None:
        out_press_vals = press_obj[PRESSURE_COLUMN][:, 0]
    else:
        out_press_vals = in_press_vals

    try:
        if psurf_obj is not None:
            out_press_vals = apply_psurf_to_pressures(out_press_vals, psurf_obj[PSURF_COLUMN][0, 0])
    except LookupError:
        # BUG FIX: the lookup that can fail here is on psurf_obj, so report
        # psurf_obj.filename (the original logged press_obj.filename).
        logger.error('For file "%s" LookupError: %s' % (psurf_obj.filename, traceback.format_exception_only(*sys.exc_info()[0:2])))
        return None

    # Accept a single pattern or an iterable of patterns.
    if not hasattr(aerosol_re, '__iter__'):
        aerosol_re = (aerosol_re,)

    # Gather all aerosol columns, excluding the pressure column itself.
    aer_columns = []
    for col_name in file_obj.labels_lower:
        for curr_re in aerosol_re:
            if re.search(curr_re.lower(), col_name) and not re.search(PRESSURE_COLUMN.lower(), col_name):
                aer_columns.append(file_obj[col_name])

    return calculate_aod_values(in_press_vals, out_press_vals, aer_columns, label_prefix, use_sub_types)
def _cross_reference_masses(self):
    """
    Links the mass to nodes, properties (and materials depending on the card).
    """
    def _attempt_xref(card):
        # Try to cross reference one card; on a known failure type, record
        # (card, formatted error) and flush once the error budget is exceeded.
        try:
            card.cross_reference(self)
        except (SyntaxError, RuntimeError, AssertionError, KeyError, ValueError) as error:
            self._ixref_errors += 1
            formatted = traceback.format_exception_only(type(error), error)
            self._stored_xref_errors.append((card, formatted))
            if self._ixref_errors > self._nxref_errors:
                self.pop_xref_errors()

    for mass in itervalues(self.masses):
        _attempt_xref(mass)
    for prop in itervalues(self.properties_mass):
        _attempt_xref(prop)
def simplify_syntax_error(code, ex_type, value, trace, lineno, username):
    """
    print out a syntax error closely based on showsyntaxerror from the
    code module in the standard library

    :param code: unused here; kept for interface compatibility
    :param ex_type: exception class (normally SyntaxError)
    :param value: exception instance; re-built with a friendlier filename
    :param trace: unused here; kept for interface compatibility
    :param lineno: line number reported to the user in "friendly" mode
    :param username: key into config to check the per-user 'friendly' flag
    :return: the formatted (possibly translated) error text
    """
    filename = _("User's code")  # will most likely not be used
    # Work hard to stuff the correct filename in the exception
    try:
        msg, (filename, lineno, offset, line) = value
    except Exception:
        # Narrowed from a bare except: only unpacking failures are expected
        # here (TypeError/ValueError). Not the format we expect; leave it alone.
        pass
    else:
        # Stuff in the right filename
        value = SyntaxError(msg, (filename, lineno, offset, line))
        sys.last_value = value
    if username and config[username]['friendly']:  # ignore that filename stuff!
        # Drop the first line (the location line) and substitute our own.
        tb_list = traceback.format_exception_only(ex_type, value)[1:]
        tb_list.insert(0, "Error on line %s:\n" % lineno)
    else:
        tb_list = traceback.format_exception_only(ex_type, value)
    retval = StringIO()
    list(map(retval.write, tb_list))
    # Localize the "Error on line" prefix.
    out = retval.getvalue().replace("Error on line", _("Error on line"))
    return out
def _cross_reference_materials(self):
    """
    Links the materials to materials (e.g. MAT1, CREEP)
    often this is a pass statement
    """
    xref_error_types = (SyntaxError, RuntimeError, AssertionError, KeyError, ValueError)

    # MAT1
    for mat in itervalues(self.materials):
        try:
            mat.cross_reference(self)
        except xref_error_types as error:
            self._ixref_errors += 1
            self._stored_xref_errors.append(
                (mat, traceback.format_exception_only(type(error), error)))
            if self._ixref_errors > self._nxref_errors:
                self.pop_xref_errors()

    # CREEP - depends on MAT1
    dependent_tables = [
        self.MATS1, self.MATS3, self.MATS8,
        self.MATT1, self.MATT2, self.MATT3, self.MATT4,
        self.MATT5, self.MATT8, self.MATT9,
    ]
    for material_deps in dependent_tables:
        for mat in itervalues(material_deps):
            try:
                mat.cross_reference(self)
            except xref_error_types as error:
                self._ixref_errors += 1
                self._stored_xref_errors.append(
                    (mat, traceback.format_exception_only(type(error), error)))
                if self._ixref_errors > self._nxref_errors:
                    self.pop_xref_errors()
def _cross_reference_elements(self):
    """
    Links the elements to nodes, properties (and materials depending on the card).
    """
    def _attempt(card):
        # Cross reference one element; store (card, formatted error) on a
        # recognized failure and pop once the error budget is exceeded.
        try:
            card.cross_reference(self)
        except (SyntaxError, RuntimeError, AssertionError, KeyError, ValueError) as error:
            self._ixref_errors += 1
            self._stored_xref_errors.append(
                (card, traceback.format_exception_only(type(error), error)))
            if self._ixref_errors > self._nxref_errors:
                self.pop_xref_errors()

    # Same handling for plain, rigid and plot elements.
    for container in (self.elements, self.rigid_elements, self.plotels):
        for elem in itervalues(container):
            _attempt(elem)
def store_error_details(name, env):
    """Collect post-failure diagnostics for a test environment.

    Best-effort: grabs a diagnostic snapshot, pulls raw logs over SSH, and
    finally snapshots the environment itself. Each step's failure is logged
    and does not prevent the following steps.

    :param name: failed test/method name; also used (last 50 chars) as the
        snapshot name
    :param env: environment object, or None to skip everything
    """
    description = "Failed in method {:s}.".format(name)
    if env is not None:
        try:
            create_diagnostic_snapshot(env, "fail", name)
        # Narrowed from a bare except: a bare clause would also swallow
        # KeyboardInterrupt / SystemExit.
        except Exception:
            logger.error("Fetching of diagnostic snapshot failed: {0}".format(
                traceback.format_exception_only(sys.exc_info()[0],
                                                sys.exc_info()[1])))
            logger.debug("Fetching of diagnostic snapshot failed: {0}".
                         format(traceback.format_exc()))
        try:
            with env.d_env.get_admin_remote() as admin_remote:
                pull_out_logs_via_ssh(admin_remote, name)
        except Exception:
            logger.error("Fetching of raw logs failed: {0}".format(
                traceback.format_exception_only(sys.exc_info()[0],
                                                sys.exc_info()[1])))
            logger.debug("Fetching of raw logs failed: {0}".
                         format(traceback.format_exc()))
        finally:
            try:
                env.make_snapshot(snapshot_name=name[-50:],
                                  description=description,
                                  is_make=True)
            except Exception:
                logger.error(
                    "Error making the environment snapshot: {0}".format(
                        traceback.format_exception_only(sys.exc_info()[0],
                                                        sys.exc_info()[1])))
                logger.debug("Error making the environment snapshot:"
                             " {0}".format(traceback.format_exc()))
def handle_errors(row, exc): self.num_request_done += 1 try: name = type(exc).__name__ self.exception_type[name] += 1 except Exception as e: print traceback.format_exception_only(type(e), e)
def format_exception(filename, should_remove_self):
    """Print the active exception, massaging SyntaxErrors to show *filename*.

    Mirrors code.InteractiveInterpreter.showsyntaxerror: stores the active
    exception in sys.last_type / sys.last_value / sys.last_traceback,
    rewrites a SyntaxError so it reports *filename*, and prints either the
    exception line alone or a full traceback.

    :param filename: filename to substitute into a SyntaxError
    :param should_remove_self: when True, print a traceback with the first
        (our own) frame removed; some compilation errors provide no
        traceback, so the caller passes False for those
    """
    # Renamed from `type`/locals to avoid shadowing the builtin.
    etype, value, tb = sys.exc_info()
    sys.last_type = etype
    sys.last_value = value
    sys.last_traceback = tb
    if etype is SyntaxError:
        try:
            # parse the error message
            msg, (dummy_filename, lineno, offset, line) = value
        # Narrowed from a bare except: only unpacking failures are expected.
        except Exception:
            pass  # Not the format we expect; leave it alone
        else:
            # Stuff in the right filename
            value = SyntaxError(msg, (filename, lineno, offset, line))
            sys.last_value = value
    res = traceback.format_exception_only(etype, value)
    # There are some compilation errors which do not provide traceback so we
    # should not massage it.
    if should_remove_self:
        tblist = traceback.extract_tb(tb)
        del tblist[:1]  # drop our own frame
        res = traceback.format_list(tblist)
        if res:
            res.insert(0, "Traceback (most recent call last):\n")
        res[len(res):] = traceback.format_exception_only(etype, value)
    # traceback.print_exception(etype, value, tb)
    for line in res:
        print(line, end=' ')
def removePymelAll(filepath, p4merge=True, replace='ask', text=None): print "removePymelAll: %s" % filepath # if we have a directory, recurse if os.path.isdir(filepath): if text is not None: raise ValueError("when passing in a directory to removePymelAll, text may not be specified") for root, dirs, files in os.walk(filepath): for f in files: if not PYTHON_FILE_RE.match(f): continue path = os.path.join(root, f) try: with open(path, _READMODE) as filehandle: text = filehandle.read() except (IOError, OSError), e: print '!!!!!!!!!!!!!!!!' print "Error reading %s:" % path print '\n'.join(traceback.format_exception_only(type(e), e)) if FROM_PYMEL_ALL_RE.search(text): try: removePymelAll(os.path.join(root, f), p4merge=p4merge, replace=replace, text=text) except SyntaxError, e: print '!!!!!!!!!!!!!!!!' print "Error parsing %s:" % path print '\n'.join(traceback.format_exception_only(type(e), e))
def run_cmd(self, cmd, kernel_name=None):
    """Runs python command string.

    Builds a one-cell notebook around *cmd*, executes it with
    self.executePreprocessor and returns the post-processed outputs of the
    cell, None when there are no outputs, or a single error NotebookNode
    when execution raises.

    :param cmd: source code to execute
    :param kernel_name: optional kernel spec name to record in the notebook
    """
    if _debugging:
        logging.info('Running command: ' + cmd + ' using kernel: ' + kernel_name)
    notebook = nbformat.v4.new_notebook()
    my_cell = nbformat.v4.new_code_cell(source=cmd)
    notebook.cells = [my_cell]
    if kernel_name:
        notebook.metadata['kernelspec'] = {'name': kernel_name}
    try:
        self.executePreprocessor.preprocess(notebook, {'metadata': {'path': '.'}})
        if _debugging:
            logging.info('Result notebook: ' + nbformat.v4.writes_json(notebook))
        if len(notebook.cells) < 1 or len(notebook.cells[0].outputs) < 1:
            return None
        return self.postprocess_output(notebook.cells[0].outputs)
    # Narrowed from a bare except: a bare clause would also swallow
    # KeyboardInterrupt / SystemExit.
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        if _debugging:
            msg = '\n'.join(traceback.format_exception_only(exc_type, exc_obj)
                            + traceback.format_tb(exc_tb))
        else:
            msg = '\n'.join(traceback.format_exception_only(exc_type, exc_obj))
        out = NotebookNode(output_type='error', html=RClansiconv(msg + '\n'))
        return [out]
def clicks_a_link(self, url, verbose=True, templates_used=None):
    """ Simulate a GET request using the test client

    :param url: url to GET
    :param verbose: when False, return True instead of a parsed document
    :param templates_used: optional list of template names that must have
        been used to render the response
    :return: True when not verbose, otherwise a BeautifulSoup document
    """
    import traceback, sys
    # BUG FIX: the default was a mutable list ([]), shared across every
    # call; use the None sentinel instead.
    if templates_used is None:
        templates_used = []
    # request the page
    # submit the form
    try:
        response = self.client.get(url)
    except TemplateDoesNotExist:
        # sys.exc_type / sys.exc_value are Python-2-only process globals;
        # sys.exc_info() is the supported replacement.
        exc_type, exc_value = sys.exc_info()[:2]
        exception_message = "".join(traceback.format_exception_only(exc_type, exc_value)).strip()
        self.fail("No url to handle (%s) or template does not exist: (%s)" % (url, exception_message))
    except ViewDoesNotExist:
        exc_type, exc_value = sys.exc_info()[:2]
        exception_message = "".join(traceback.format_exception_only(exc_type, exc_value)).strip()
        self.fail("No view to handle (%s): %s" % (url, exception_message))
    # check that this was a valid request
    self.failUnless(response.status_code in [200, 302])
    # if there are templates supplied for checking, make sure they are used to render the page
    for template in templates_used:
        self.assertTemplateUsed(response, template)
    # if we're not going to do anything with the response, don't bother using BeautifulSoup
    if not verbose:
        return True
    # return a BeautifulSoup document for navigating
    return BeautifulSoup(response.content)
def publish(self, request, queryset):
    """Admin action: publish each selected workspace, then push to the repo.

    Per-workspace failures are collected (not fatal); a push failure is
    reported as a warning. Success/warning messages are attached to the
    request via django messages.

    :param request: current admin request
    :param queryset: selected workspace objects
    """
    result = None
    failed_objects = []
    #import ipdb;ipdb.set_trace()
    try_set_push_owner("workspace_admin", enforce=True)
    warning_message = None
    try:
        for workspace in queryset:
            try:
                workspace.publish()
            # Narrowed from a bare except; the handler records the active
            # exception via sys.exc_info().
            except Exception:
                error = sys.exc_info()
                failed_objects.append((workspace.name, traceback.format_exception_only(error[0], error[1])))
                #remove failed, continue to process the next publish
                continue
        try:
            try_push_to_repository('workspace_admin', enforce=True)
        except Exception:
            error = sys.exc_info()
            warning_message = traceback.format_exception_only(error[0], error[1])
            logger.error(traceback.format_exc())
    finally:
        try_clear_push_owner("workspace_admin", enforce=True)
    if failed_objects or warning_message:
        if failed_objects:
            if warning_message:
                messages.warning(request, mark_safe("<ul><li>{0}</li><li>Pushing changes to repository failed:<ul>{1}</ul></li></ul>".format(warning_message, "".join(["<li>{0} : {1}</li>".format(o[0], o[1]) for o in failed_objects]))))
            else:
                messages.warning(request, mark_safe("Publish failed for some selected workspaces:<ul>{0}</ul>".format("".join(["<li>{0} : {1}</li>".format(o[0], o[1]) for o in failed_objects]))))
        else:
            messages.warning(request, mark_safe(warning_message))
    else:
        messages.success(request, "Publish successfully for all selected workspaces")
def test_handle_exception(self, mock_sys):
    # Exercises cli._handle_exception across four scenarios; mock_sys is a
    # patched sys module, so sys.exit is recorded instead of exiting.
    # pylint: disable=protected-access
    from letsencrypt import cli
    mock_open = mock.mock_open()

    # 1) Generic Exception: the formatted traceback is written to the log
    #    file and the exit message mentions an "unexpected error".
    with mock.patch('letsencrypt.cli.open', mock_open, create=True):
        exception = Exception('detail')
        cli._handle_exception(
            Exception, exc_value=exception, trace=None, args=None)
        mock_open().write.assert_called_once_with(''.join(
            traceback.format_exception_only(Exception, exception)))
        error_msg = mock_sys.exit.call_args_list[0][0][0]
        self.assertTrue('unexpected error' in error_msg)

    # 2) A letsencrypt errors.Error while the log file cannot even be
    #    opened (open raises KeyboardInterrupt): exit carries the formatted
    #    error itself.
    with mock.patch('letsencrypt.cli.open', mock_open, create=True):
        mock_open.side_effect = [KeyboardInterrupt]
        error = errors.Error('detail')
        cli._handle_exception(
            errors.Error, exc_value=error, trace=None, args=None)
        # assert_any_call used because sys.exit doesn't exit in cli.py
        mock_sys.exit.assert_any_call(''.join(
            traceback.format_exception_only(errors.Error, error)))

    # 3) args present with debug=False: still the generic message.
    args = mock.MagicMock(debug=False)
    cli._handle_exception(
        Exception, exc_value=Exception('detail'), trace=None, args=args)
    error_msg = mock_sys.exit.call_args_list[-1][0][0]
    self.assertTrue('unexpected error' in error_msg)

    # 4) KeyboardInterrupt: exit message is the formatted interrupt.
    interrupt = KeyboardInterrupt('detail')
    cli._handle_exception(
        KeyboardInterrupt, exc_value=interrupt, trace=None, args=None)
    mock_sys.exit.assert_called_with(''.join(
        traceback.format_exception_only(KeyboardInterrupt, interrupt)))
def google_callback():
    """Handle the redirect request from consumers browser containing
    information from identity provider"""
    # I. Get Inputs and Handle Error returned
    error_message = request.args.get('error')
    auth_code = request.args.get('code', '')
    LOGGER.debug('Google Return Error: %s, Code: %s', str(error_message), auth_code)

    # If Error, throw error message
    if error_message is not None:
        return flaskApp.custom_errors.error_formatter(
            code='403_opt_out', display_format='html', details=error_message)

    # II. Call Google to get credentials
    # If no success return error message
    try:
        flow = google_flow()
        credentials = flow.step2_exchange(auth_code)
        LOGGER.debug('credentials: %s', str(credentials.access_token))
    except Exception:
        # Format the active exception once and reuse it for both the log
        # entry and the rendered error page.
        exc_type, exc_value = sys.exc_info()[:2]
        formatted = traceback.format_exception_only(exc_type, exc_value)
        LOGGER.error("Exception getting credentials: %s", formatted)
        return flaskApp.custom_errors.error_formatter(
            code='500_02', display_format='html', details=formatted)

    # III. Use credentials to get information
    return google_fetch_user_info(str(credentials.access_token))
def get_target_deps(args, sha):
    """Return the dependency set for args.target at commit *sha*.

    A pickle cache under CACHEDIR (keyed by sha) is consulted first; on any
    cache failure the dependencies are introspected and the cache rewritten.
    """
    cache_path = os.path.join(CACHEDIR, 'depcache-{}'.format(sha))

    deps = None
    try:
        with open(cache_path, 'rb') as cache_file:
            deps = pickle.load(cache_file)
    except Exception as exc:
        print('Could not load dependency cache: {}'.format(
            traceback.format_exception_only(type(exc), exc)
        ))
        print('Introspecting dependencies...')
        deps = None

    if deps is None:
        if args.target == 'atom':
            deps = find_atom_deps(args, sha)
        elif args.target == 'vscode':
            deps = find_vscode_deps(args, sha)
        else:
            die('unsupported target: {}'.format(args.target))
        try:
            with open(cache_path, 'wb') as cache_file:
                pickle.dump(deps, file=cache_file)
        except Exception as exc:
            print('Could not store dependency cache: {}'.format(
                traceback.format_exception_only(type(exc), exc)
            ))
    return deps
def get_value_of_var_from_global_sheet(self, parameter):
    """Resolve *parameter* for self.hostname from the workbook.

    When *parameter* names a sheet, its templates are rendered and
    concatenated; otherwise it is looked up as a plain variable on the
    'Global' sheet. Failures increment self.unresolved, append the
    formatted error to self.tb and return the literal "<unresolved>".
    """
    # If the parameter is a title of a sheet, we need to get the data from this sheet
    # (membership tested on the dict directly; `.keys()` built a needless view/list)
    if parameter in self.workbook:
        value_of_the_parameter = self.workbook['Global'].get_value_of_var_by_index_and_param(self.hostname, parameter)
        try:
            data_of_the_parameter = self.workbook[parameter].get_all_param_by_index(value_of_the_parameter)
        except KeyError as err:
            self.unresolved += 1
            self.tb += traceback.format_exception_only(KeyError, err)
            return "<unresolved>"
        template_regex = re.compile('template')
        output = ''
        for _header, _template_name in data_of_the_parameter.items():
            if template_regex.match(_header.lower()) and _template_name:
                template_content = self.workbook[parameter].template_content_by_name(_template_name.lower())
                # NOTE(review): `self` is passed explicitly in addition to the
                # bound call — presumably fill_local_template is a
                # staticmethod; confirm against its definition.
                output += self.fill_local_template(self, data_of_the_parameter, template_content)
        if output == '':
            self.unresolved += 1
            self.tb += traceback.format_exception_only(Warning, "No subtemplates to being picked up for the var '{}'".format(parameter))
            return "<unresolved>"
        else:
            return output
    # If the parameter/feature is only a variable (there is not a sheet for this feature)
    else:
        try:
            value = self.workbook['Global'].get_value_of_var_by_index_and_param(self.hostname, parameter)
            self.resolved += 1
            return value
        except KeyError as err:
            self.unresolved += 1
            self.tb += traceback.format_exception_only(KeyError, err)
            return "<unresolved>"
def test_get_tag_objects_blank(stc):
    """Both tag helpers must reject blank tag names with a clear message."""
    def _capture_failure(call):
        # Run the call, expecting it to raise; return the formatted
        # exception text, or '' when it did not raise.
        # (The bare except is deliberate: ANY raised exception is the
        # behavior under test.)
        try:
            call()
        except:
            exc_info = sys.exc_info()
            fail_list = traceback.format_exception_only(exc_info[0], exc_info[1])
            return fail_list[0] if len(fail_list) == 1 else '\n'.join(fail_list)
        return ''

    fail_msg = _capture_failure(lambda: tag_utils.get_tag_object(''))
    if fail_msg == '':
        raise AssertionError('function did not fail as expected')
    if 'must not be blank' not in fail_msg:
        raise AssertionError('function failed with unexpected exception: "' + fail_msg + '"')

    # BUG FIX: the original did not reset fail_msg before the second probe,
    # so if get_tag_object_list failed to raise, the stale message from the
    # first probe made the test pass silently.
    fail_msg = _capture_failure(lambda: tag_utils.get_tag_object_list(['non-blank', '']))
    if fail_msg == '':
        raise AssertionError('function did not fail as expected')
    if 'must not be blank' not in fail_msg:
        raise AssertionError('function failed with unexpected exception: "' + fail_msg + '"')
def main():
    # Entry point: validate the FDK environment, parse CLI options, then run
    # checkFile on the requested font, optionally teeing output to a log file.
    global gLogFile
    try:
        CheckEnvironment()
    except FDKEnvironmentError:
        return
    try:
        options = getOptions()
    except focusOptionParseError:
        # NOTE: sys.exc_type / sys.exc_value are Python-2-only globals;
        # this module targets Python 2.
        logMsg( traceback.format_exception_only(sys.exc_type, sys.exc_value)[-1])
        return
    # verify that all files exist.
    if not os.path.exists(options.filePath):
        logMsg("File does not exist: <%s>." % options.filePath)
    else:
        if options.logFilePath:
            # openLogFile sets the module-global gLogFile used by logMsg.
            gLogFile, logFilePath = openLogFile(options.logFilePath, options.filePath)
        try:
            checkFile(options.filePath, options)
        except (focusFontError):
            logMsg("\t%s" %(traceback.format_exception_only(sys.exc_type, sys.exc_value)[-1]))
        if gLogFile:
            # Close and detach the log before announcing where it was written,
            # so the announcement goes to stdout only.
            gLogFile.close()
            gLogFile = None
            logMsg("Log file written to %s" % (logFilePath))
    return
def prepareInsert(self,query,fieldList): ## print "preparing insert" ## print fieldList try: fieldDict = {} id = -1 for i in range(fieldList.getLength()): field = fieldList.item(i) if field.nodeType == field.ELEMENT_NODE: if field.firstChild <> None: fieldDict[field.tagName] = field.firstChild.data else: fieldDict[field.tagName] = '' if string.find(query,'$generateId$') > 0: id = self.generateId() query = string.replace(query,'$generateId$',str(id)) ## print "Generated Id : " + str(id) for key in fieldDict.keys(): if string.find(query,"'$" + str(key) + "$'") < 0: if str(fieldDict[key]) == '': query = string.replace(query, '$' + str(key) + '$', 'null') else: query = string.replace(query, '$' + str(key) + '$', str(fieldDict[key])) else: query = string.replace(query, '$' + str(key) + '$', str(fieldDict[key])) ## print "Generated Query : " + query return (id, query) except: traceback.format_exception_only(sys.exc_info()[0],sys.exc_info()[1])[0]
def add(request):
    # Django view (Python 2): handle a torrent-add POST, accepting either a
    # URL to fetch a .torrent from or a direct file upload. Parse failures
    # are accumulated per-field in `errors`.
    # NOTE(review): this block appears truncated — no response is built from
    # `errors` / `name` in the visible code; confirm against the full file.
    if request.POST:
        errors = {}
        paused = request.POST.has_key('paused')
        url = request.POST['url']
        if url:
            try:
                f = urllib.urlopen(url)
                content = f.read()
                f.close()
                metainfo = ConvertedMetainfo(bdecode(content))
                # torrent display name, capped at 50 chars
                name = metainfo.name_fs[:50]
            except Exception, e:
                err = traceback.format_exception_only(e.__class__, e)
                errors.setdefault('url', []).append(''.join(err))
        else:
            try:
                torrent = request.FILES['torrent']
                content = torrent['content']
                metainfo = ConvertedMetainfo(bdecode(content))
                name = metainfo.name_fs[:50]
            except Exception, e:
                err = traceback.format_exception_only(e.__class__, e)
                errors.setdefault('torrent', []).append(''.join(err))
def test_unicode(self):
    # format_exception_only must keep a raw byte string verbatim and
    # escape a non-ASCII unicode message (Python 2 semantics).
    cases = (
        (AssertionError('\xff'), ['AssertionError: \xff\n']),
        (AssertionError(u'\xe9'), ['AssertionError: \\xe9\n']),
    )
    for err, expected in cases:
        self.assertEqual(
            traceback.format_exception_only(type(err), err), expected)
def test_unicode(self):
    # Byte-string message passes through unchanged.
    byte_err = AssertionError("\xff")
    self.assertEqual(
        traceback.format_exception_only(type(byte_err), byte_err),
        ["AssertionError: \xff\n"])
    # Non-ASCII unicode message is backslash-escaped (Python 2 semantics).
    uni_err = AssertionError(u"\xe9")
    self.assertEqual(
        traceback.format_exception_only(type(uni_err), uni_err),
        ["AssertionError: \\xe9\n"])
def _cross_reference_loads(self):
    """
    Links the loads to nodes, coordinate systems, and other loads.
    """
    def _safe_xref(card):
        # Cross reference one card; on a recognized failure, record
        # (card, formatted error) and flush once the error budget is
        # exceeded.
        try:
            card.cross_reference(self)
        except (SyntaxError, RuntimeError, AssertionError, KeyError, ValueError) as e:
            self._ixref_errors += 1
            var = traceback.format_exception_only(type(e), e)
            self._stored_xref_errors.append((card, var))
            if self._ixref_errors > self._nxref_errors:
                self.pop_xref_errors()

    for (lid, sid) in iteritems(self.loads):
        #self.log.debug("lid=%s sid=%s" %(lid, sid))
        for load in sid:
            _safe_xref(load)
    for (lid, sid) in iteritems(self.dloads):
        #self.log.debug("lid=%s sid=%s" %(lid, sid))
        for load in sid:
            _safe_xref(load)
    for (lid, sid) in iteritems(self.dload_entries):
        for load in sid:
            _safe_xref(load)
    # BUG FIX: the original appended the stale loop variable `load` (left
    # over from the dload_entries loop) to _stored_xref_errors when a DAREA
    # or DPHASE failed; the failing card itself is recorded now.
    for key, darea in iteritems(self.dareas):
        _safe_xref(darea)
    for key, dphase in iteritems(self.dphases):
        _safe_xref(dphase)
def print_exception( print_traceback = False ): import traceback, sys if print_traceback: print traceback.print_tb( sys.exc_info()[2] ) # print sys.exc_info()[1] print 'Exception: \n','\n'.join(traceback.format_exception_only(sys.exc_info()[0],sys.exc_info()[1])) else: # print traceback.print_exception(sys.exc_type,None, None) print 'Excception dected: \n' print '\n'.join(traceback.format_exception_only(sys.exc_info()[0],sys.exc_info()[1]))
def test_unicode(self):
    # Skipped under IronPython, which formats these messages differently.
    if due_to_ironpython_bug("http://ironpython.codeplex.com/workitem/28021"):
        return
    for message, expected in [('\xff', ['AssertionError: \xff\n']),
                              (u'\xe9', ['AssertionError: \\xe9\n'])]:
        err = AssertionError(message)
        self.assertEqual(traceback.format_exception_only(type(err), err),
                         expected)
def __init__(self, specifiedconfigfile = None):
    # Build the PyFileServer WSGI application stack from a configuration
    # file (Python 2 code: uses sys.exc_type/sys.exc_value).
    if specifiedconfigfile is None:
        specifiedconfigfile = os.path.abspath('PyFileServer.conf')
    loadconfig = True
    try:
        # Preferred loader: paste.pyconfig, if installed.
        from paste import pyconfig
        servcfg = pyconfig.Config()
        servcfg.load(specifiedconfigfile)
    except ImportError:
        # Fall back to the bundled primitive loader when paste is missing.
        try:
            import loadconfig_primitive
            servcfg = loadconfig_primitive.load(specifiedconfigfile)
        except:
            exceptioninfo = traceback.format_exception_only(sys.exc_type, sys.exc_value)
            exceptiontext = ''
            for einfo in exceptioninfo:
                exceptiontext = exceptiontext + einfo + '\n'
            raise RuntimeError('Failed to read PyFileServer configuration file : ' + specifiedconfigfile + '\nDue to ' + exceptiontext)
    except:
        # Any other load failure (bad syntax, missing file, ...).
        exceptioninfo = traceback.format_exception_only(sys.exc_type, sys.exc_value)
        exceptiontext = ''
        for einfo in exceptioninfo:
            exceptiontext = exceptiontext + einfo + '\n'
        raise RuntimeError('Failed to read PyFileServer configuration file : ' + specifiedconfigfile + '\nDue to ' + exceptiontext)
    self._srvcfg = servcfg
    #add default abstraction layer
    self._srvcfg['resAL_library']['*'] = FilesystemAbstractionLayer()
    self._infoHeader = '<a href="mailto:%s">Administrator</a> at %s' % (servcfg.get('Info_AdminEmail',''), servcfg.get('Info_Organization',''))
    self._verbose = servcfg.get('verbose', 0)
    # Lock/property persistence files; config may supply manager objects
    # directly, otherwise default managers are built around these paths.
    _locksfile = servcfg.get('locksfile', os.path.abspath('PyFileServer.locks'))
    _propsfile = servcfg.get('propsfile', os.path.abspath('PyFileServer.dat'))
    _locksmanagerobj = servcfg.get('locksmanager', None) or LockManager(_locksfile)
    _propsmanagerobj = servcfg.get('propsmanager', None) or PropertyManager(_propsfile)
    _domaincontrollerobj = servcfg.get('domaincontroller', None) or PyFileServerDomainController()
    # authentication fields
    _authacceptbasic = servcfg.get('acceptbasic', False)
    _authacceptdigest = servcfg.get('acceptdigest', True)
    _authdefaultdigest = servcfg.get('defaultdigest', True)
    # Compose the middleware stack, innermost first:
    # RequestServer -> HTTPAuthenticator -> RequestResolver -> ErrorPrinter.
    application = RequestServer(_propsmanagerobj, _locksmanagerobj)
    application = HTTPAuthenticator(application, _domaincontrollerobj,
                                    _authacceptbasic, _authacceptdigest, _authdefaultdigest)
    application = RequestResolver(application)
    application = ErrorPrinter(application, server_descriptor=self._infoHeader)
    self._application = application
def _ERROR(Message,Function): import sys,traceback i=sys.exc_info();T=traceback.extract_tb(i[2])[0] print '-----' print 'Recall: '+Function print print 'File: '+T[0].split('\\')[-1]+', line '+str(T[1]) print "Code: '"+T[3]+"'" print traceback.format_exception_only(i[0], i[1])[0] print Message print '-----'
def copyFile(srcPath, dstPath): if os.path.isfile(srcPath): try: shutil.copy(srcPath, dstPath) shutil.copystat(srcPath, dstPath) os.chmod(dstPath, kPermissions) except IOError: print "Failed to copy file %s to %s." % (srcPath, dstPath) print traceback.format_exception_only(sys.exc_type, sys.exc_value)[-1] print "Quitting - not all files were copied." raise InstallError print "Copied: %s to dir %s" % (srcPath, dstPath)
def fetch(self):
    """Git fetch remote repository."""
    message = 'Fetching latest remote branches'
    self.notify(message)
    try:
        # -p prunes remote-tracking refs that no longer exist upstream.
        self.outputs['git'].fetch('-p', self.inputs['origin'])
        self.notify(message, 'Done')
    except GitCommandError as error:
        self.notify(message, 'Failed')
        print(format_exception_only(type(error), error)[0])
    except Exception as error:
        # Anything unexpected gets the same Failed notification and a
        # one-line exception summary.
        self.notify(message, 'Failed')
        print(format_exception_only(type(error), error)[0])
def checkout(self, branch):
    """Checkout local git repository."""
    message = 'Checking out %s branch' % branch
    self.notify(message)
    try:
        # -f discards local modifications so the checkout cannot be blocked.
        self.outputs['git'].checkout('-f', branch)
        self.notify(message, 'Done')
    except GitCommandError as error:
        self.notify(message, 'Failed')
        print(format_exception_only(type(error), error)[0])
    except Exception as error:
        # Anything unexpected gets the same Failed notification and a
        # one-line exception summary.
        self.notify(message, 'Failed')
        print(format_exception_only(type(error), error)[0])
if '--otros_impuestos' in sys.argv: ret = wslum.ConsultarOtrosImpuestos() print("\n".join(ret)) if '--puntosventa' in sys.argv: ret = wslum.ConsultarPuntosVentas() print("\n".join(ret)) print("hecho.") except SoapFault as e: print("Falla SOAP:", e.faultcode, e.faultstring.encode("ascii", "ignore"), file=sys.stderr) sys.exit(3) except Exception as e: try: print(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1])[0], file=sys.stderr) except BaseException: print("Excepción no disponible:", type(e), file=sys.stderr) if DEBUG: raise sys.exit(5) finally: if XML: open("wslum_request.xml", "w").write(wslum.client.xml_request) open("wslum_response.xml", "w").write(wslum.client.xml_response)
if symlinks and os.path.islink(srcname): linkto = os.readlink(srcname) os.symlink(linkto, dstname) elif os.path.isdir(srcname): copy_directory(srcname, dstname, symlinks, ignore) else: copy2(srcname, dstname) except OSError as why: # we don't want to stop if there is an unreadbable file - just produce an error print('Warning:', why, file=sys.stderr) if __name__ == '__main__': my_name = re.sub(r'\.py$', '', os.path.basename(sys.argv[0])) # there may be other threads running so use os._exit(1) to terminate entire program on interrupt if not debug: signal.signal(signal.SIGINT, lambda signum, frame:os._exit(2)) try: main() except InternalError as e: print("%s: %s" % (my_name, str(e)), file=sys.stderr) if debug: traceback.print_exc(file=sys.stderr) sys.exit(2) except Exception: etype, evalue, etraceback = sys.exc_info() eformatted = "\n".join(traceback.format_exception_only(etype, evalue)) print("%s: internal error: %s" % (my_name, eformatted), file=sys.stderr) if debug: traceback.print_exc(file=sys.stderr) sys.exit(2)
def exception(self):
    """String representation of the exception."""
    formatted = ''.join(
        traceback.format_exception_only(self.exc_type, self.exc_value)
    ).strip()
    # On Python 2 format_exception_only yields byte strings; decode them
    # (replacing undecodable bytes) so callers always get text.
    if PY2:
        return formatted.decode('utf-8', 'replace')
    return formatted
def _download_url(self, url, user):
    # Twisted inlineCallbacks-style generator: download `url` into local
    # media storage on behalf of `user` and return the stored-media
    # metadata dict (via defer.returnValue).
    # TODO: we should probably honour robots.txt... except in practice
    # we're most likely being explicitly triggered by a human rather than a
    # bot, so are we really a robot?
    # Random, date-prefixed id keys both the file on disk and the DB row.
    file_id = datetime.date.today().isoformat() + '_' + random_string(16)
    file_info = FileInfo(
        server_name=None,
        file_id=file_id,
        url_cache=True,
    )
    with self.media_storage.store_into_file(file_info) as (f, fname, finish):
        try:
            logger.debug("Trying to get url '%s'" % url)
            length, headers, uri, code = yield self.client.get_file(
                url, output_stream=f, max_size=self.max_spider_size,
            )
        except SynapseError:
            # Pass SynapseErrors through directly, so that the servlet
            # handler will return a SynapseError to the client instead of
            # blank data or a 500.
            raise
        except DNSLookupError:
            # DNS lookup returned no results
            # Note: This will also be the case if one of the resolved IP
            # addresses is blacklisted
            raise SynapseError(
                502, "DNS resolution failure during URL preview generation",
                Codes.UNKNOWN
            )
        except Exception as e:
            # FIXME: pass through 404s and other error messages nicely
            logger.warn("Error downloading %s: %r", url, e)
            raise SynapseError(
                500, "Failed to download content: %s" % (
                    traceback.format_exception_only(sys.exc_info()[0], e),
                ),
                Codes.UNKNOWN,
            )
        # Flush/commit the stored file before recording it in the DB.
        yield finish()
    try:
        # Headers are bytes here; decode the content type for storage.
        if b"Content-Type" in headers:
            media_type = headers[b"Content-Type"][0].decode('ascii')
        else:
            media_type = "application/octet-stream"
        time_now_ms = self.clock.time_msec()
        download_name = get_filename_from_headers(headers)
        yield self.store.store_local_media(
            media_id=file_id,
            media_type=media_type,
            time_now_ms=self.clock.time_msec(),
            upload_name=download_name,
            media_length=length,
            user_id=user,
            url_cache=url,
        )
    except Exception as e:
        logger.error("Error handling downloaded %s: %r", url, e)
        # TODO: we really ought to delete the downloaded file in this
        # case, since we won't have recorded it in the db, and will
        # therefore not expire it.
        raise
    defer.returnValue({
        "media_type": media_type,
        "media_length": length,
        "download_name": download_name,
        "created_ts": time_now_ms,
        "filesystem_id": file_id,
        "filename": fname,
        "uri": uri,
        "response_code": code,
        # FIXME: we should calculate a proper expiration based on the
        # Cache-Control and Expire headers. But for now, assume 1 hour.
        "expires": 60 * 60 * 1000,
        "etag": headers["ETag"][0] if "ETag" in headers else None,
    })
def authenticate(self, service, certificate, private_key,
                 force=False, cache="", wsdl="", proxy=""):
    """Call AFIP Authentication webservice to get token & sign or error message.

    The access ticket (TA) is cached on disk for DEFAULT_TTL seconds,
    keyed by an md5 of (service, certificate, private_key); a fresh
    ticket is requested from the WSAA service only when the cached one
    is missing or expired.

    Raises UserError with a readable message on any failure.
    """
    # import AFIP webservice authentication helper:
    from pyafipws.wsaa import WSAA
    # create AFIP webservice authentication helper instance:
    wsaa = WSAA()
    # raise python exceptions on any failure
    wsaa.LanzarExcepciones = True
    # five hours
    DEFAULT_TTL = 60 * 60 * 5
    # make md5 hash of the parameter for caching...
    fn = "%s.xml" % hashlib.md5(
        (service + certificate + private_key).encode('utf-8')).hexdigest()
    if cache:
        fn = os.path.join(cache, fn)
    else:
        fn = os.path.join(wsaa.InstallDir, "cache", fn)
    try:
        # read the access ticket (if already authenticated)
        if not os.path.exists(fn) or \
                os.path.getmtime(fn) + (DEFAULT_TTL) < time.time():
            # access ticket (TA) outdated, create new access request
            # ticket (TRA)
            tra = wsaa.CreateTRA(service=service, ttl=DEFAULT_TTL)
            # cryptographically sign the access ticket
            cms = wsaa.SignTRA(tra, certificate, private_key)
            # connect to the webservice:
            wsaa.Conectar(cache, wsdl, proxy)
            # call the remote method
            ta = wsaa.LoginCMS(cms)
            if not ta:
                raise RuntimeError()
            # write the access ticket for further consumption
            # (context manager so the handle is closed even on error --
            # the original leaked it)
            with open(fn, "w") as ta_file:
                ta_file.write(ta)
        else:
            # get the access ticket from the previously written file
            with open(fn, "r") as ta_file:
                ta = ta_file.read()
        # analyze the access ticket xml and extract the relevant fields
        wsaa.AnalizarXml(xml=ta)
        token = wsaa.ObtenerTagXml("token")
        sign = wsaa.ObtenerTagXml("sign")
        expirationTime = wsaa.ObtenerTagXml("expirationTime")
        generationTime = wsaa.ObtenerTagXml("generationTime")
        uniqueId = wsaa.ObtenerTagXml("uniqueId")
    except Exception:
        token = sign = None
        if wsaa.Excepcion:
            # get the exception already parsed by the helper
            err_msg = wsaa.Excepcion
        else:
            # avoid encoding problem when reporting exceptions to the user.
            # BUG FIX: sys.exc_type/sys.exc_value are stale, non-thread-safe
            # globals that were removed in Python 3 -- use sys.exc_info().
            etype, evalue = sys.exc_info()[:2]
            err_msg = traceback.format_exception_only(etype, evalue)[0]
        raise UserError(
            _('Could not connect. This is the what we received: %s') % (
                err_msg))
    return {
        'uniqueid': uniqueId,
        'generationtime': generationTime,
        'expirationtime': expirationTime,
        'token': token,
        'sign': sign,
    }
def report_unexpected_exception(self, out, test, example, exc_info): exf = traceback.format_exception_only(exc_info[0], exc_info[1])[-1] self.results.append(exf)
def trace_code(self, source, load_as=None, module=None, dump=False,
               driver=None, filename=None):
    """Instrument *source*, run it, and return a line-by-line trace report.

    The source is parsed, rewritten by Tracer so each statement reports
    into a ReportBuilder, compiled against PSEUDO_FILENAME, and executed.
    If *driver* is given it is run afterwards (as a module when *module*
    is true, otherwise as a file).  When *dump* is true the report is
    rendered side by side with the original source text.
    """
    builder = ReportBuilder(self.message_limit)
    builder.max_width = self.max_width
    try:
        tree = parse(source)
        new_tree = Tracer().visit(tree)
        fix_missing_locations(new_tree)
        LineNumberCleaner().visit(new_tree)
        # from ast import dump
        # print(dump(new_tree, include_attributes=False))
        code = compile(new_tree, PSEUDO_FILENAME, 'exec')
        self.environment[CONTEXT_NAME] = builder
        self.run_instrumented_module(code, load_as or SCOPE_NAME, filename)
        if driver:
            try:
                with self.swallow_output():
                    if module:
                        self.run_python_module(driver[0], driver)
                    else:
                        self.run_python_file(driver[0], driver)
            except:
                # A driver failure only matters if the traced module never
                # reported anything; otherwise keep the partial report.
                if not (builder.message_count or builder.history):
                    raise
        # Close any generators left in the environment so their cleanup
        # code runs while tracing is still active.
        for value in self.environment.values():
            if isinstance(value, types.GeneratorType):
                value.close()
    except SyntaxError:
        ex = sys.exc_info()[1]
        messages = traceback.format_exception_only(type(ex), ex)
        # attach the syntax error text to the offending source line
        builder.add_message(messages[-1].strip() + ' ', ex.lineno)
    except:
        etype, value, tb = sys.exc_info()
        is_reported = False
        builder.message_limit = None  # make sure we don't hit limit
        builder.max_width = None  # make sure we don't hit limit
        messages = traceback.format_exception_only(etype, value)
        message = messages[-1].strip() + ' '
        entries = traceback.extract_tb(tb)
        # Attribute the exception to every traceback frame that belongs
        # to the instrumented pseudo-file.
        for filename, line_number, _, _ in entries:
            if filename == PSEUDO_FILENAME:
                builder.add_extra_message(message, line_number)
                is_reported = True
        if not is_reported:
            builder.add_message(message, 1)
            # print('=== Unexpected Exception in tracing code ===')
            # traceback.print_exception(etype, value, tb)
    report = builder.report()
    if dump:
        # Render "source | report" columns, padded to the widest source line.
        source_lines = source.splitlines()
        report_lines = report.splitlines()
        dump_lines = []
        source_width = max(map(len, source_lines))
        indent = 4
        for source_line, report_line in izip_longest(source_lines,
                                                     report_lines,
                                                     fillvalue=''):
            line = (indent * ' ' + source_line +
                    (source_width - len(source_line)) * ' ' +
                    ' | ' + report_line)
            dump_lines.append(line)
        report = '\n'.join(dump_lines)
    return report
def post(self, request, namespace, report_slug, widget_slug, format=None): logger.debug("Received POST for report %s, widget %s: %s" % (report_slug, widget_slug, request.POST)) report = get_object_or_404(Report, namespace=namespace, slug=report_slug) widget = get_object_or_404( Widget, slug=widget_slug, section__in=Section.objects.filter(report=report)) req_json = json.loads(request.POST['criteria']) fields = widget.collect_fields() form = TableFieldForm(fields, use_widgets=False, hidden_fields=report.hidden_fields, include_hidden=True, data=req_json, files=request.FILES) if not form.is_valid(): raise ValueError("Widget internal criteria form is invalid:\n%s" % (form.errors.as_text())) if form.is_valid(): logger.debug('Form passed validation: %s' % form) formdata = form.cleaned_data logger.debug('Form cleaned data: %s' % formdata) # parse time and localize to user profile timezone timezone = pytz.timezone(request.user.timezone) form.apply_timezone(timezone) try: form_criteria = form.criteria() logger.debug('Form_criteria: %s' % form_criteria) job = Job.create(table=widget.table(), criteria=form_criteria) job.start() wjob = WidgetJob(widget=widget, job=job) wjob.save() logger.debug("Created WidgetJob %s for report %s (handle %s)" % (str(wjob), report_slug, job.handle)) return Response({ "joburl": reverse( 'report-job-detail', args=[namespace, report_slug, widget_slug, wjob.id]) }) except Exception as e: logger.exception("Failed to start job, an exception occurred") ei = sys.exc_info() resp = {} resp['message'] = "".join( traceback.format_exception_only(*sys.exc_info()[0:2])), resp['exception'] = "".join( traceback.format_exception(*sys.exc_info())) return JsonResponse(resp, status=400) else: logger.error("form is invalid, entering debugger") from IPython import embed embed()
def get(self, request, namespace, report_slug, widget_slug, job_id,
        format=None, status=None):
    """Return the status/data JSON for one widget job.

    While the job is running only its status is returned.  Once it
    completes, the widget module's ``process`` function is applied to
    the table data and the result is embedded in the response; the
    WidgetJob row is deleted once a final (error or complete) response
    has been produced.
    """
    logger.debug("WidgetJobDetail GET %s/%s/%s/%s" %
                 (namespace, report_slug, widget_slug, job_id))
    wjob = WidgetJob.objects.get(id=job_id)
    job = wjob.job
    widget = wjob.widget

    if not job.done():
        # job not yet done
        resp = job.json()
    elif job.status == Job.ERROR:
        resp = job.json()
        logger.debug("%s: Job in Error state, deleting Job" % str(wjob))
        wjob.delete()
    else:
        try:
            i = importlib.import_module(widget.module)
            widget_func = i.__dict__[widget.uiwidget].process

            if widget.rows > 0:
                tabledata = job.values()[:widget.rows]
            else:
                tabledata = job.values()

            if tabledata is None or len(tabledata) == 0:
                resp = job.json()
                resp['status'] = Job.ERROR
                resp['message'] = "No data returned"
                logger.debug("%s marked Error: No data returned" %
                             str(wjob))
            elif hasattr(i, 'authorized') and not i.authorized()[0]:
                _, msg = i.authorized()
                resp = job.json()
                resp['data'] = None
                resp['status'] = Job.ERROR
                resp['message'] = msg
                logger.debug("%s Error: module unauthorized for user %s" %
                             (str(wjob), request.user))
            elif (hasattr(i, 'authorized') and
                    not Location.objects.all() and
                    not LocationIP.objects.all()):
                # we are using a maps widget, but have no locations
                resp = job.json()
                resp['data'] = None
                resp['status'] = Job.ERROR
                msg = '''\
Geolocation data has not been loaded.
See <a href="https://support.riverbed.com/apis/steelscript/appfwk/configuration.html#locations">\
geolocation documentation</a> for more information.'''
                resp['message'] = msg
                # BUG FIX: the original debug call had a %s placeholder
                # but no argument, so it logged the literal format string.
                logger.debug("%s Error: geo location data not loaded." %
                             str(wjob))
            elif status is not None:
                # Only status metadata requested
                resp = job.json()
            else:
                data = widget_func(widget, job, tabledata)
                resp = job.json(data)
                logger.debug("%s complete" % str(wjob))
        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt and
            # SystemExit still propagate.
            logger.exception(("Widget %s (%s) WidgetJob %s, Job %s "
                              "processing failed") %
                             (widget.slug, widget.id, wjob.id, job.id))
            resp = job.json()
            resp['status'] = Job.ERROR
            ei = sys.exc_info()
            resp['message'] = str(
                traceback.format_exception_only(ei[0], ei[1]))

        wjob.delete()

    # Escape the message for safe embedding in HTML.  Guarded with a
    # membership test: not every json() payload is guaranteed to carry
    # 'message' (the original could KeyError here on the success path).
    if 'message' in resp:
        resp['message'] = cgi.escape(resp['message'])

    try:
        return JsonResponse(resp)
    except Exception:
        logger.error('Failed to generate HttpResponse:\n%s' % str(resp))
        raise
def get_exception_format(self, func, exc): try: func() except exc, value: return traceback.format_exception_only(exc, value)
"""Utilities needed to emulate Python's interactive interpreter.
def format_exception(self, type, value, tb): import traceback text = [ 'Traceback (most recent call last):\n' ] + traceback.format_list(tb) + traceback.format_exception_only(type, value) return ''.join(text)
def _BuildFromOther(self, site, exc_type, value, tb):
    """Build self.description from a non-COM-origin traceback.

    Walks the traceback to find the first "<Script...>" frame, hides
    everything above it (engine internals), stops hiding again at the
    r_import/r_reload/r_open helpers below, and formats the remaining
    frames plus the exception itself.  self.lineno/linetext are set to
    the last script-block frame encountered.
    """
    self.colno = -1
    self.lineno = 0
    if debugging:
        # Full traceback if debugging.
        list = traceback.format_exception(exc_type, value, tb)
        self.description = ExpandTabs(''.join(list))
        return
    # Run down the traceback list, looking for the first "<Script..>"
    # Hide traceback above this.  In addition, keep going down
    # looking for a "_*_" attribute, and below hide these also.
    hide_names = ["r_import", "r_reload", "r_open"]
    # hide from these functions down in the traceback.
    depth = None
    tb_top = tb
    while tb_top:
        filename, lineno, name, line = self.ExtractTracebackInfo(
            tb_top, site)
        if filename[:7] == "<Script":
            break
        tb_top = tb_top.tb_next
    format_items = []
    if tb_top:
        # found one.
        depth = 0
        tb_look = tb_top
        # Look down for our bottom
        while tb_look:
            filename, lineno, name, line = self.ExtractTracebackInfo(
                tb_look, site)
            if name in hide_names:
                break
            # We can report a line-number, but not a filename.  Therefore,
            # we return the last line-number we find in one of our script
            # blocks.
            if filename.startswith("<Script"):
                self.lineno = lineno
                self.linetext = line
            format_items.append((filename, lineno, name, line))
            depth = depth + 1
            tb_look = tb_look.tb_next
    else:
        depth = None
        tb_top = tb
    bits = ['Traceback (most recent call last):\n']
    bits.extend(traceback.format_list(format_items))
    if exc_type == pythoncom.com_error:
        # COM errors carry (hresult, text, excepinfo, argerror); prefer
        # the server-supplied description from excepinfo when present.
        desc = "%s (0x%x)" % (value[1], value[0])
        if value[0] == winerror.DISP_E_EXCEPTION and value[2] and value[2][
                2]:
            desc = value[2][2]
        bits.append("COM Error: " + desc)
    else:
        bits.extend(traceback.format_exception_only(exc_type, value))
    # XXX - this utf8 encoding seems bogus.  From well before py3k,
    # we had the comment:
    # > all items in the list are utf8 courtesy of Python magically
    # > converting unicode to utf8 before compilation.
    # but that is likely just confusion from early unicode days;
    # Python isn't doing it, pywin32 probably was, so 'mbcs' would
    # be the default encoding.  We should never hit this these days
    # anyway, but on py3k, we *never* will, and str objects there
    # don't have a decode method...
    if sys.version_info < (3, ):
        for i in xrange(len(bits)):
            if type(bits[i]) is str:
                #assert type(bits[i]) is str, type(bits[i])
                bits[i] = bits[i].decode('utf8')
    self.description = ExpandTabs(u''.join(bits))
    # Clear tracebacks etc.
    tb = tb_top = tb_look = None
def __updateActivations(self): if self.getReadOnly(): # nothing should be activated, regardless return parametersPlug = self.node()["parameters"] annotations = _shaderAnnotations(self.node()) activators = {} for name, value in annotations.items(): if name.startswith("activator.") and name.endswith(".expression"): activator = activators.setdefault(name.split(".")[1], {}) activator["expression"] = value.value elif name.endswith(".activator"): plugName = name.split(".")[0] plug = parametersPlug.getChild(plugName) if plug is not None: activator = activators.setdefault(value.value, {}) activatorPlugs = activator.setdefault("plugs", []) activatorPlugs.append(plug) class ExpressionVariables: def __init__(self, parametersPlug): self.__parametersPlug = parametersPlug def connected(self, key): return self.__parametersPlug[key].getInput() is not None def __getitem__(self, key): if key == "connected": return self.connected else: return self.__parametersPlug[key].getValue() for activator in activators.values(): expression = activator.get("expression", None) if not expression: continue plugs = activator.get("plugs", None) if not plugs: continue try: active = eval(expression, globals(), ExpressionVariables(parametersPlug)) except Exception, e: IECore.msg( IECore.Msg.Level.Error, "Parameter activator", "".join(traceback.format_exception_only(type(e), e))) continue for plug in plugs: plugValueWidget = self.plugValueWidget(plug, lazy=False) if plugValueWidget is not None: plugValueWidget.setReadOnly(not active) plugWidget = plugValueWidget.ancestor(GafferUI.PlugWidget) if plugWidget is not None: plugWidget.labelPlugValueWidget().setReadOnly( not active)
def format_exception(ex): return format_exception_only(ex.__class__, ex)[-1].strip()
def __init__(self, e_type, value, tbacklist):
    """Build and display the OOF error dialog for the given exception.

    Any previously saved-but-unreported exceptions (in the global
    _savedExceptions list) are folded into the same dialog, oldest
    first.  *tbacklist* is a traceback.extract_tb-style list and may be
    empty; the traceback pane/buttons are disabled when no tracebacks
    are available.
    """
    debug.mainthreadTest()
    errorstrings = []  # list of strings
    self.tracebacks = []  # list of lists of \n-terminated strings
    # If there are previous unprocessed exceptions, print them
    # too.  The oldest exception is the first in the
    # _savedExceptions list.
    global _savedExceptions
    _savedExceptions.append((e_type, value, tbacklist))
    for e_type, value, tbacklist in _savedExceptions:
        # format_exception_only returns a list of string, each
        # terminated whith a newline.  The list has length 1,
        # except for syntax errors.
        errorstrings.extend(
            [line.rstrip() for line in
             traceback.format_exception_only(e_type, value)])
        if isinstance(value, ooferror.ErrErrorPtr):
            moreinfo = value.details()
            if moreinfo:
                errorstrings.append(moreinfo)
        errorstrings.append("")  # blank line
        if tbacklist:
            self.tracebacks.append(traceback.format_list(tbacklist))
    _savedExceptions = []
    self.answer = None
    self.datestampstring = time.strftime("%Y %b %d %H:%M:%S %Z")
    self.gtk = gtklogger.Dialog()
    self.gtk.set_keep_above(True)
    # self.gtk = gtk.Dialog()
    gtklogger.newTopLevelWidget(self.gtk, "Error")
    self.gtk.set_title("%s Error" % subWindow.oofname())
    self.gtk.vbox.set_spacing(3)
    # NOTE(review): classname is computed but never used below --
    # confirm whether it was meant to appear in the dialog title.
    classname = string.split(str(e_type), '.')[-1]
    self.gtk.vbox.pack_start(gtk.Label("ERROR"), expand=0, fill=0)
    # Frame + scrolled text view holding the error summary text.
    self.errframe = gtk.Frame()
    self.errframe.set_border_width(6)
    self.errframe.set_shadow_type(gtk.SHADOW_IN)
    self.gtk.vbox.pack_start(self.errframe, expand=1, fill=1)
    fd = pango.FontDescription(mainmenuGUI.getFixedFont())
    errscroll = gtk.ScrolledWindow()
    gtklogger.logScrollBars(errscroll, "ErrorScroll")
    errscroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    self.errframe.add(errscroll)
    self.errbox = gtk.TextView()  # error text goes here
    gtklogger.setWidgetName(self.errbox, "ErrorText")
    errscroll.add(self.errbox)
    self.errbox.set_editable(0)
    self.errbox.set_wrap_mode(gtk.WRAP_WORD)
    self.errbox.get_buffer().set_text("\n".join(errorstrings))
    self.errbox.modify_font(fd)
    # Action-area buttons: OK / Abort plus report & traceback helpers.
    self.gtk.add_button(gtk.STOCK_OK, self.OK)
    self.gtk.add_button("Abort", self.ABORT)
    self.gtk.set_default_response(self.OK)
    self.reportbutton = gtk.Button("Report")
    gtklogger.setWidgetName(self.reportbutton, "ReportFromError")
    gtklogger.connect(self.reportbutton, "clicked", self.report)
    self.gtk.action_area.add(self.reportbutton)
    self.tracebutton = gtk.Button("View Traceback")
    gtklogger.setWidgetName(self.tracebutton, "ViewTraceback")
    gtklogger.connect(self.tracebutton, "clicked", self.trace)
    self.gtk.action_area.add(self.tracebutton)
    self.savebutton = gtkutils.StockButton(gtk.STOCK_SAVE, "Save Traceback")
    gtklogger.setWidgetName(self.savebutton, "SaveTraceback")
    gtklogger.connect(self.savebutton, "clicked", self.savetrace)
    self.gtk.action_area.add(self.savebutton)
    # Collapsible traceback pane (shown on demand by the trace button).
    self.scroll = gtk.ScrolledWindow()
    gtklogger.logScrollBars(self.scroll, "TraceScroll")
    self.scroll.set_border_width(3)
    self.scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    self.scroll.set_shadow_type(gtk.SHADOW_IN)
    self.tracepane = gtk.TextView()
    self.tracepane.set_editable(0)
    self.tracepane.set_wrap_mode(gtk.WRAP_WORD)
    self.tracepane.modify_font(fd)
    self.traceframe = gtk.Frame()
    self.traceframe.set_shadow_type(gtk.SHADOW_NONE)
    self.gtk.vbox.pack_start(self.traceframe, expand=0, fill=0)
    # Scroll is not added to the frame until the traceback button
    # is pressed.
    self.scroll.add(self.tracepane)
    if self.tracebacks:
        # Interleave each error summary with its formatted traceback.
        tbtext = ""
        for err, tb in zip(errorstrings, self.tracebacks):
            if tbtext:
                tbtext += '\n----------\n\n'
            tbtext += err + '\n'
            tbtext += "".join(tb)
        self.tracepane.get_buffer().set_text(tbtext)
    else:
        self.savebutton.set_sensitive(0)
        self.tracebutton.set_sensitive(0)
    self.gtk.show_all()
def format_traceback(): t, v = sys.exc_info()[:2] message = ''.join(traceback.format_exception_only(t, v)).replace('\n', ' ') return message.strip()
def do_pyafipws_request_cae(self):
    """Request to AFIP the invoices' Authorization Electronic Code (CAE).

    For each invoice in the recordset that does not yet have an
    authorization code: connect to the configured AFIP webservice
    (wsfe / wsmtxca / wsfex / wsbfe), validate the next invoice number,
    build the invoice in the pyafipws helper (totals, taxes, optional
    data, line detail where required) and request authorization.
    """
    for inv in self:
        # Ignore invoices with cae
        if inv.afip_auth_code and inv.afip_auth_code_due:
            continue
        afip_ws = inv.journal_id.afip_ws
        # Ignore invoice if not ws on point of sale
        if not afip_ws:
            continue
        # get the electronic invoice type, point of sale and afip_ws:
        commercial_partner = inv.commercial_partner_id
        country = commercial_partner.country_id
        journal = inv.journal_id
        pos_number = journal.point_of_sale_number
        doc_afip_code = inv.document_type_id.code
        # authenticate against AFIP:
        ws = inv.company_id.get_connection(afip_ws).connect()
        # get the last invoice number registered in AFIP
        if afip_ws in ["wsfe", "wsmtxca"]:
            ws_invoice_number = ws.CompUltimoAutorizado(
                doc_afip_code, pos_number)
        elif afip_ws in ['wsfex', 'wsbfe']:
            ws_invoice_number = ws.GetLastCMP(doc_afip_code, pos_number)
            # foreign-trade services require full country data on the
            # partner
            if not country:
                raise UserError(_(
                    'For WS "%s" country is required on partner' % (
                        afip_ws)))
            elif not country.code:
                raise UserError(_(
                    'For WS "%s" country code is mandatory'
                    'Country: %s' % (afip_ws, country.name)))
            elif not country.afip_code:
                raise UserError(_(
                    'For WS "%s" country afip code is mandatory'
                    'Country: %s' % (afip_ws, country.name)))
        ws_next_invoice_number = int(ws_invoice_number) + 1
        # verify that the invoice is the next one to be registered in AFIP
        if inv.invoice_number != ws_next_invoice_number:
            raise UserError(_(
                'Error!'
                'Invoice id: %i'
                'Next invoice number should be %i and not %i' % (
                    inv.id, ws_next_invoice_number, inv.invoice_number)))
        partner_id_code = commercial_partner.main_id_category_id.afip_code
        tipo_doc = partner_id_code or '99'
        nro_doc = partner_id_code and int(
            commercial_partner.main_id_number) or "0"
        cbt_desde = cbt_hasta = cbte_nro = inv.invoice_number
        concepto = tipo_expo = int(inv.afip_concept)
        fecha_cbte = inv.date_invoice
        if afip_ws != 'wsmtxca':
            # wsmtxca wants ISO dates; the other services want YYYYMMDD
            fecha_cbte = fecha_cbte.replace("-", "")
        # due and billing dates only for concept "services"
        if int(concepto) != 1:
            fecha_venc_pago = inv.date_due
            fecha_serv_desde = inv.afip_service_start
            fecha_serv_hasta = inv.afip_service_end
            if afip_ws != 'wsmtxca':
                fecha_venc_pago = fecha_venc_pago.replace("-", "")
                fecha_serv_desde = fecha_serv_desde.replace("-", "")
                fecha_serv_hasta = fecha_serv_hasta.replace("-", "")
        else:
            fecha_venc_pago = fecha_serv_desde = fecha_serv_hasta = None
        if inv.mipymesf and doc_afip_code in ('201', '206', '211'):
            # MiPyME (FCE) invoices always carry a due date
            fecha_venc_pago = inv.date_due.replace('-', '')
        # invoice amount totals:
        imp_total = str("%.2f" % abs(inv.amount_total))
        # ImpTotConc es el iva no gravado
        imp_tot_conc = str("%.2f" % abs(inv.vat_untaxed_base_amount))
        imp_neto = str("%.2f" % abs(inv.vat_taxable_amount))
        imp_iva = str("%.2f" % abs(inv.vat_amount))
        imp_subtotal = str("%.2f" % abs(inv.amount_untaxed))
        imp_trib = str("%.2f" % abs(inv.other_taxes_amount))
        if doc_afip_code in ['11', '12', '13']:
            # inovice C type
            imp_tot_conc = 0
            imp_neto = str(
                "%.2f" % (abs(inv.amount_total) -
                          abs(inv.other_taxes_amount)))
        imp_op_ex = str("%.2f" % abs(inv.vat_exempt_base_amount))
        moneda_id = inv.currency_id.afip_code
        moneda_ctz = inv.currency_rate
        # foreign trade data: export permit, country code, etc.:
        if inv.afip_incoterm_id:
            incoterms = inv.afip_incoterm_id.afip_code
            incoterms_ds = inv.afip_incoterm_id.name
        else:
            incoterms = incoterms_ds = None
        if int(doc_afip_code) in [19] and tipo_expo == 1:
            permiso_existente = "N" or "S"  # not used now
        else:
            permiso_existente = ""
        obs_generales = inv.comment
        if inv.payment_term_id:
            forma_pago = inv.payment_term_id.name
            obs_comerciales = inv.payment_term_id.name
        else:
            forma_pago = obs_comerciales = None
        idioma_cbte = 1  # invoice language: spanish / español
        # customer data (foreign trade):
        nombre_cliente = commercial_partner.name
        # If argentinian and cuit, then use cuit
        # NOTE(review): tipo_doc is a string ('99' or an afip_code) but
        # is compared to the int 80 here -- this branch may never match;
        # confirm the expected type of afip_code.
        if country.code == 'AR' and tipo_doc == 80 and nro_doc:
            id_impositivo = nro_doc
            cuit_pais_cliente = None
        # If not argentinian and vat, use vat
        elif country.code != 'AR' and nro_doc:
            id_impositivo = nro_doc
            cuit_pais_cliente = None
        # else use cuit pais cliente
        else:
            id_impositivo = None
            if commercial_partner.is_company:
                cuit_pais_cliente = country.cuit_juridica
            else:
                cuit_pais_cliente = country.cuit_fisica
            if not cuit_pais_cliente:
                raise UserError(_(
                    'No vat defined for the partner and also no CUIT set '
                    'on country'))
        domicilio_cliente = " - ".join([
            commercial_partner.name or '',
            commercial_partner.street or '',
            commercial_partner.street2 or '',
            commercial_partner.zip or '',
            commercial_partner.city or '',
        ])
        pais_dst_cmp = commercial_partner.country_id.afip_code
        # create the invoice internally in the helper
        if afip_ws == 'wsfe':
            ws.CrearFactura(
                concepto, tipo_doc, nro_doc, doc_afip_code, pos_number,
                cbt_desde, cbt_hasta, imp_total, imp_tot_conc, imp_neto,
                imp_iva, imp_trib, imp_op_ex, fecha_cbte, fecha_venc_pago,
                fecha_serv_desde, fecha_serv_hasta, moneda_id, moneda_ctz)
        elif afip_ws == 'wsmtxca':
            ws.CrearFactura(
                concepto, tipo_doc, nro_doc, doc_afip_code, pos_number,
                cbt_desde, cbt_hasta, imp_total, imp_tot_conc, imp_neto,
                imp_subtotal,  # difference with wsfe
                imp_trib, imp_op_ex, fecha_cbte, fecha_venc_pago,
                fecha_serv_desde, fecha_serv_hasta, moneda_id, moneda_ctz,
                obs_generales  # difference with wsfe
            )
        elif afip_ws == 'wsfex':
            ws.CrearFactura(
                doc_afip_code, pos_number, cbte_nro, fecha_cbte, imp_total,
                tipo_expo, permiso_existente, pais_dst_cmp, nombre_cliente,
                cuit_pais_cliente, domicilio_cliente, id_impositivo,
                moneda_id, moneda_ctz, obs_comerciales, obs_generales,
                forma_pago, incoterms, idioma_cbte, incoterms_ds)
        elif afip_ws == 'wsbfe':
            ws.CrearFactura(
                tipo_doc, nro_doc, 0, doc_afip_code, pos_number, cbte_nro,
                fecha_cbte, imp_total, imp_neto, imp_iva, imp_tot_conc,
                0.0, imp_op_ex, 0.0, 0.0, 0.0, 0.0, moneda_id, moneda_ctz)
        # TODO ver si en realidad tenemos que usar un vat pero no lo
        # subimos
        if afip_ws != 'wsfex' and afip_ws != 'wsbfe':
            # register VAT and other-tax detail with the helper
            for vat in inv.vat_taxable_ids:
                _logger.info('Adding VAT %s' % vat.tax_id.tax_group_id.name)
                ws.AgregarIva(
                    vat.tax_id.tax_group_id.afip_code,
                    "%.2f" % abs(vat.base),
                    # "%.2f" % abs(vat.base_amount),
                    "%.2f" % abs(vat.amount),
                )
            for tax in inv.not_vat_tax_ids:
                _logger.info('Adding TAX %s' % tax.tax_id.tax_group_id.name)
                ws.AgregarTributo(
                    tax.tax_id.tax_group_id.afip_code,
                    tax.tax_id.tax_group_id.name,
                    "%.2f" % abs(tax.base),
                    # "%.2f" % abs(tax.base_amount),
                    # TODO pasar la alicuota
                    # como no tenemos la alicuota pasamos cero, en v9
                    # podremos pasar la alicuota
                    0,
                    "%.2f" % abs(tax.amount),
                )
        if afip_ws == 'wsfe' and inv.mipymesf:
            # MiPyME (FCE) optional fields: CBU / alias on issue,
            # revocation code on credit/debit notes
            if doc_afip_code in ('201', '206', '211'):
                if inv.cbu:
                    ws.AgregarOpcional(2101, inv.cbu)
                if inv.cbu_alias:
                    ws.AgregarOpcional(2102, inv.cbu_alias)
            if doc_afip_code in ('202', '203', '207', '208', '212', '213'):
                ws.AgregarOpcional(22, inv.revocation_code)
        CbteAsoc = inv.get_related_invoices_data()
        if CbteAsoc:
            ws.AgregarCmpAsoc(
                CbteAsoc.document_type_id.code,
                CbteAsoc.journal_id.point_of_sale_number,
                CbteAsoc.invoice_number,
            )
        # analize line items - invoice detail
        # wsfe do not require detail
        if afip_ws != 'wsfe':
            for line in inv.invoice_line_ids:
                codigo = line.product_id.code
                # unidad de referencia del producto si se comercializa
                # en una unidad distinta a la de consumo
                if not line.uom_id.afip_code:
                    raise UserError(_(
                        'Not afip code con producto UOM %s' % (
                            line.uom_id.name)))
                cod_mtx = line.uom_id.afip_code
                ds = line.name
                qty = line.quantity
                umed = line.uom_id.afip_code
                precio = line.price_unit
                importe = line.price_subtotal
                bonif = line.discount or None
                if not line.product_id.mercosur_code.isdigit():
                    raise Warning(_(
                        'just numbers in Mercosur Code %s' % (
                            line.product_id.mercosur_code)))
                pro_codigo_ncm = line.product_id.mercosur_code
                if line.product_id.secretaria_code:
                    pro_codigo_sec = line.product_id.secretaria_code
                else:
                    pro_codigo_sec = ''
                if afip_ws == 'wsmtxca':
                    if not line.product_id.uom_id.afip_code:
                        raise Warning(_(
                            'Not afip code con producto UOM %s' % (
                                line.product_id.uom_id.name)))
                    u_mtx = (line.product_id.uom_id.afip_code
                             or line.uom_id.afip_code)
                    # dummy true to avoid pylint error
                    if True:
                        raise Warning(_('WS wsmtxca Not implemented yet'))
                    # TODO en las lineas no tenemos vat_tax_ids todavia
                    if self.invoice_id.type in ('out_invoice', 'in_invoice'):
                        iva_id = line.vat_tax_ids.tax_code_id.afip_code
                    else:
                        iva_id = line.vat_tax_ids.ref_tax_code_id.afip_code
                    vat_taxes_amounts = line.vat_tax_ids.compute_all(
                        line.price_unit, line.quantity,
                        product=line.product_id, partner=inv.partner_id)
                    imp_iva = vat_taxes_amounts[
                        'total_included'] - vat_taxes_amounts['total']
                    ws.AgregarItem(
                        u_mtx, cod_mtx, codigo, ds, qty, umed, precio,
                        bonif, iva_id, imp_iva, importe + imp_iva)
                elif afip_ws == 'wsfex':
                    ws.AgregarItem(
                        codigo, ds, qty, umed, precio, importe, bonif)
                elif afip_ws == 'wsbfe':
                    for tribute in line.product_id.taxes_id:
                        if tribute.tax_group_id.tax == 'vat':
                            iva_id = tribute.tax_group_id.afip_code
                            break
                    ws.AgregarItem(
                        pro_codigo_ncm, pro_codigo_sec, str(ds), qty, umed,
                        precio, bonif, iva_id, importe)
        # Request the authorization! (call the AFIP webservice method)
        vto = None
        msg = False
        try:
            if afip_ws == 'wsfe':
                ws.CAESolicitar()
                vto = ws.Vencimiento
            elif afip_ws == 'wsmtxca':
                ws.AutorizarComprobante()
                vto = ws.Vencimiento
            elif afip_ws == 'wsfex':
                ws.Authorize(inv.id)
                vto = ws.FchVencCAE
            elif afip_ws == 'wsbfe':
                ws.Authorize(inv.id)
                vto = ws.FchVencCAE
        except SoapFault as fault:
            self.update_sql(self.id, ws.XmlRequest, ws.XmlResponse)
            msg = 'Falla SOAP %s: %s' % (fault.faultcode, fault.faultstring)
        except Exception, e:
            self.update_sql(self.id, ws.XmlRequest, ws.XmlResponse)
            msg = e
        except Exception:
            # NOTE(review): this handler is unreachable -- the clause
            # above already catches Exception.  Also, sys.exc_type /
            # sys.exc_value used below are stale globals removed in
            # Python 3; sys.exc_info() should be used instead.
            self.update_sql(self.id, ws.XmlRequest, ws.XmlResponse)
            if ws.Excepcion:
                # get the exception already parsed by the helper
                msg = ws.Excepcion
            else:
                # avoid encoding problem when raising error
                msg = traceback.format_exception_only(
                    sys.exc_type,
                    sys.exc_value)[0]
def RunWithCrashHandler(f): try: exit_code = f() sys.exit(exit_code) except (SystemExit, KeyboardInterrupt): raise except: import inspect import traceback # Save trace and exception now. These calls look at the most recently # raised exception. The code that makes the report might trigger other # exceptions. original_trace = inspect.trace(3)[1:] formatted_exception = traceback.format_exception_only(*(sys.exc_info()[:2])) apology = """Yikes, the program threw an unexpected exception! Hopefully a complete report has been saved to transitfeedcrash.txt, though if you are seeing this message we've already disappointed you once today. Please include the report in a new issue at https://github.com/google/transitfeed/issues or an email to the public group [email protected]. Sorry! """ dashes = '%s\n' % ('-' * 60) dump = [] dump.append(apology) dump.append(dashes) try: import transitfeed dump.append("transitfeed version %s\n\n" % __version__) except NameError: # Oh well, guess we won't put the version in the report pass for (frame_obj, filename, line_num, fun_name, context_lines, context_index) in original_trace: dump.append('File "%s", line %d, in %s\n' % (filename, line_num, fun_name)) if context_lines: for (i, line) in enumerate(context_lines): if i == context_index: dump.append(' --> %s' % line) else: dump.append(' %s' % line) for local_name, local_val in list(frame_obj.f_locals.items()): try: truncated_val = str(local_val)[0:500] except Exception as e: dump.append(' Exception in str(%s): %s' % (local_name, e)) else: if len(truncated_val) >= 500: truncated_val = '%s...' % truncated_val[0:499] dump.append(' %s = %s\n' % (local_name, truncated_val)) dump.append('\n') dump.append(''.join(formatted_exception)) open('transitfeedcrash.txt', 'w').write(''.join(dump)) print(''.join(dump)) print() print(dashes) print(apology) try: input('Press enter to continue...') except EOFError: # Ignore stdin being closed. This happens during some tests. pass sys.exit(127)
def __call__(self, obj, p, cycle): r""" Format matrix. INPUT: - ``obj`` -- anything. Object to format. - ``p`` -- PrettyPrinter instance. - ``cycle`` -- boolean. Whether there is a cycle. OUTPUT: Boolean. Whether the representer is applicable to ``obj``. If ``True``, the string representation is appended to ``p``. EXAMPLES:: sage: from sage.repl.display.fancy_repr import PlainPythonRepr sage: pp = PlainPythonRepr() sage: pp.format_string(type(1)) "<type 'sage.rings.integer.Integer'>" Do not swallow a trailing newline at the end of the output of a custom representer. Note that it is undesirable to have a trailing newline, and if we don't display it you can't fix it:: sage: class Newline(object): ....: def __repr__(self): ....: return 'newline\n' sage: n = Newline() sage: pp.format_string(n) 'newline\n' sage: pp.format_string([n, n, n]) '[newline\n, newline\n, newline\n]' sage: [n, n, n] [newline , newline , newline ] """ klass = _safe_getattr(obj, '__class__', None) or type(obj) klass_repr = _safe_getattr(klass, '__repr__', None) if klass_repr in _baseclass_reprs: p.text(klass_repr(obj)) else: # A user-provided repr. Find newlines and replace them with p.break_() try: output = repr(obj) except Exception: import sys, traceback objrepr = object.__repr__(obj).replace("object at", "at") exc = traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]) exc = (''.join(exc)).strip() output = "<repr({}) failed: {}>".format(objrepr, exc) for idx, output_line in enumerate(output.split('\n')): if idx: p.break_() p.text(output_line) return True
def format_exception(error): # type: (BaseException) -> str return "\n".join(traceback.format_exception_only(type(error), error))
def subprocessFetch(): """ Used for invocation in fetcher subprocess. Reads cPickled FetcherInArgs from stdin and write FetcherOutArgs to stdout. Implementation of the subprocess URL fetch. """ outArgs = None try: inArgs = cPickle.load(sys.stdin) inArgs.check() outArgs = HTTPFetcher.staticFetch(inArgs.url, inArgs.options, inArgs.platformPath) except BaseException, e: #this will trap KeyboardInterrupt as well errorStr = traceback.format_exc() shortError = str(e) outArgs = FetcherOutArgs(errorStr=errorStr, shortError=shortError) if outArgs is None: shortError = "Subprocess logic error - no output args" errorStr = traceback.format_exception_only( HTTPFetcherError, HTTPFetcherError(shortError)) outArgs = FetcherOutArgs(errorStr=errorStr, shortError=shortError) try: cPickle.dump(outArgs, sys.stdout) except: cPickle.dump(None, sys.stdout) #catch-all case
def logging_excepthook(exc_type, value, tb): extra = {} if CONF.verbose or CONF.debug: extra['exc_info'] = (exc_type, value, tb) getLogger(product_name).critical( "".join(traceback.format_exception_only(exc_type, value)), **extra)
def logging_excepthook(exc_type, value, tb): extra = {'exc_info': (exc_type, value, tb)} getLogger(product_name).critical( "".join(traceback.format_exception_only(exc_type, value)), **extra)
def exception_str(etype=None, value=None, sep='\n'): if etype is None: etype, value = sys.exc_info()[:2] return sep.join(traceback.format_exception_only(etype, value))
def trace(self, frame, event, arg):
    """Trace callback installed via sys.settrace; logs var changes and
    executed lines for the decorated function (and, when ``self.depth > 1``,
    for frames up to that many levels below it).

    Returns ``self.trace`` so tracing continues into nested frames, or
    ``None`` to stop tracing a frame we are not interested in.
    """
    ### Checking whether we should trace this line: #######################
    # We should trace this line either if it's in the decorated function,
    # or the user asked to go a few levels deeper and we're within that
    # number of levels deeper.
    if not (frame.f_code in self.target_codes or frame in self.target_frames):
        if self.depth == 1:
            # We did the most common and quickest check above, because the
            # trace function runs so incredibly often, therefore it's
            # crucial to hyper-optimize it for the common case.
            return None
        elif self._is_internal_frame(frame):
            return None
        else:
            # Walk up to self.depth-1 parent frames looking for a target.
            _frame_candidate = frame
            for i in range(1, self.depth):
                _frame_candidate = _frame_candidate.f_back
                if _frame_candidate is None:
                    return None
                elif _frame_candidate.f_code in self.target_codes or _frame_candidate in self.target_frames:
                    break
            else:
                return None

    # Per-thread call-depth counter drives the output indentation.
    thread_global.__dict__.setdefault('depth', -1)
    if event == 'call':
        thread_global.depth += 1
    indent = ' ' * 4 * thread_global.depth
    ### Finished checking whether we should trace this line. ##############

    ### Reporting newish and modified variables: ##########################
    # Compare this frame's current local reprs against the ones we cached
    # the last time we saw this frame.
    old_local_reprs = self.frame_to_local_reprs.get(frame, {})
    self.frame_to_local_reprs[frame] = local_reprs = \
        get_local_reprs(frame, watch=self.watch)

    newish_string = ('Starting var:.. ' if event == 'call' else
                     'New var:....... ')

    for name, value_repr in local_reprs.items():
        if name not in old_local_reprs:
            self.write('{indent}{newish_string}{name} = {value_repr}'.format(
                **locals()))
        elif old_local_reprs[name] != value_repr:
            self.write('{indent}Modified var:.. {name} = {value_repr}'.format(
                **locals()))
    ### Finished newish and modified variables. ###########################

    now_string = datetime_module.datetime.now().time().isoformat()
    line_no = frame.f_lineno
    source_line = get_source_from_frame(frame)[line_no - 1]
    thread_info = ""
    if self.thread_info:
        # Prefix each line with "<ident>-<name> " identifying the thread.
        current_thread = threading.current_thread()
        thread_info = "{ident}-{name} ".format(
            ident=current_thread.ident, name=current_thread.getName())
        thread_info = self.set_thread_info_padding(thread_info)

    ### Dealing with misplaced function definition: #######################
    if event == 'call' and source_line.lstrip().startswith('@'):
        # If a function decorator is found, skip lines until an actual
        # function definition is found.
        for candidate_line_no in itertools.count(line_no):
            try:
                candidate_source_line = \
                    get_source_from_frame(frame)[candidate_line_no - 1]
            except IndexError:
                # End of source file reached without finding a function
                # definition. Fall back to original source line.
                break
            if candidate_source_line.lstrip().startswith('def'):
                # Found the def line!
                line_no = candidate_line_no
                source_line = candidate_source_line
                break
    ### Finished dealing with misplaced function definition. ##############

    # If a call ends due to an exception, we still get a 'return' event
    # with arg = None. This seems to be the only way to tell the difference
    # https://stackoverflow.com/a/12800909/2482744
    code_byte = frame.f_code.co_code[frame.f_lasti]
    if not isinstance(code_byte, int):
        # On Python 2 co_code yields 1-char strings; normalize to int.
        code_byte = ord(code_byte)
    ended_by_exception = (
        event == 'return'
        and arg is None
        and (opcode.opname[code_byte]
             not in ('RETURN_VALUE', 'YIELD_VALUE'))
    )

    if ended_by_exception:
        self.write('{indent}Call ended by exception'.
                   format(**locals()))
    else:
        self.write(u'{indent}{now_string} {thread_info}{event:9} '
                   u'{line_no:4} {source_line}'.format(**locals()))

    if event == 'return':
        # Frame is finished: drop its cached locals and unindent.
        del self.frame_to_local_reprs[frame]
        thread_global.depth -= 1

        if not ended_by_exception:
            return_value_repr = utils.get_shortish_repr(arg)
            self.write('{indent}Return value:.. {return_value_repr}'.
                       format(**locals()))

    if event == 'exception':
        # arg is (exc_type, exc_value, tb); log a truncated one-liner.
        exception = '\n'.join(traceback.format_exception_only(*arg[:2])).strip()
        exception = utils.truncate(exception, utils.MAX_EXCEPTION_LENGTH)
        self.write('{indent}{exception}'.
                   format(**locals()))

    return self.trace
def get_context(self, record):
    """Build the template context for notifying about a log *record*.

    Extracts a sanitized request representation, a reversed ("most recent
    call first") stack trace when ``record.exc_info`` is set, and — when a
    request is attached — its sanitized URL/GET/POST details, also bumping
    the error counter in datadog.  Returns a ``defaultdict`` whose missing
    keys render as ''.
    """
    from corehq.util.datadog.gauges import datadog_counter

    # record.request may raise arbitrarily when absent; treat any failure
    # as "no request attached".
    try:
        request = record.request
    except Exception:
        request = None
    request_repr = get_sanitized_request_repr(request)

    tb_list = []
    code = None
    if record.exc_info:
        etype, raw_value, tb = record.exc_info
        cleaned_value = clean_exception(raw_value)
        formatted_exception = traceback.format_exception_only(
            etype, cleaned_value)
        # Frames are shown newest-first, hence the reversed extract_tb.
        tb_list = ['Traceback (most recent call first):\n']
        tb_list += formatted_exception
        frames_newest_first = list(reversed(traceback.extract_tb(tb)))
        code = self.get_code(frames_newest_first)
        tb_list += traceback.format_list(frames_newest_first)
        stack_trace = '\n'.join(tb_list)
        if formatted_exception:
            summary = formatted_exception[0].strip()
        else:
            summary = record.getMessage()
        subject = '%s: %s' % (record.levelname, summary)
    else:
        stack_trace = 'No stack trace available'
        subject = '%s: %s' % (record.levelname, record.getMessage())

    context = defaultdict(lambda: '')
    context.update({
        'subject': self.format_subject(subject),
        'message': record.getMessage(),
        'details': getattr(record, 'details', None),
        'tb_list': tb_list,
        'request_repr': request_repr,
        'stack_trace': stack_trace,
        'code': code,
    })

    if request:
        sanitized_url = sanitize_url(request.build_absolute_uri())
        datadog_counter(
            ERROR_COUNT,
            tags=[
                'url:{}'.format(sanitized_url),
                'group:{}'.format(get_url_group(sanitized_url)),
                'domain:{}'.format(
                    getattr(request, 'domain', DATADOG_UNKNOWN)),
            ])
        context.update({
            'get': list(request.GET.items()),
            'post': SafeExceptionReporterFilter().get_post_parameters(request),
            'method': request.method,
            'username': request.user.username
                        if getattr(request, 'user', None) else "",
            'url': request.build_absolute_uri(),
        })
    return context
def autoreload(watchers, args, old_static, reader_descs, excqueue=None):
    """Watch content/theme/settings for changes and re-run pelican forever.

    Source-dir watching is restricted to the configured extensions; the
    theme dir is checked recursively regardless of extension.  When the
    settings file changes, the set of [static] watchers is reconciled with
    the new STATIC_PATHS.  Exceptions are pushed onto *excqueue* (when
    given) instead of propagating; KeyboardInterrupt ends the loop.
    """
    while True:
        try:
            # Poll every watcher once; truthy value == something changed,
            # None == the watched folder produced nothing valid.
            modified = {key: next(watcher)
                        for key, watcher in watchers.items()}

            if modified['settings']:
                pelican, settings = get_instance(args)

                # Reconcile static watchers with the (possibly changed)
                # STATIC_PATHS setting.
                new_static = settings.get("STATIC_PATHS", [])

                # Newly added static paths: watch them and mark modified.
                for static_path in set(new_static).difference(old_static):
                    static_key = '[static]%s' % static_path
                    watchers[static_key] = folder_watcher(
                        os.path.join(pelican.path, static_path),
                        [''],
                        pelican.ignore_files)
                    modified[static_key] = next(watchers[static_key])

                # Removed static paths: drop their watchers and values.
                for static_path in set(old_static).difference(new_static):
                    static_key = '[static]%s' % static_path
                    watchers.pop(static_key)
                    modified.pop(static_key)

                old_static = new_static

            if any(modified.values()):
                changed = ', '.join(k for k, v in modified.items() if v)
                print('\n-> Modified: {}. re-generating...'.format(changed))

                if modified['content'] is None:
                    logger.warning(
                        'No valid files found in content for ' +
                        'the active readers:\n' +
                        '\n'.join(reader_descs))

                if modified['theme'] is None:
                    logger.warning('Empty theme folder. Using `basic` '
                                   'theme.')

                pelican.run()

        except KeyboardInterrupt as e:
            logger.warning("Keyboard interrupt, quitting.")
            if excqueue is not None:
                excqueue.put(traceback.format_exception_only(type(e), e)[-1])
            return

        except Exception as e:
            if (args.verbosity == logging.DEBUG):
                if excqueue is not None:
                    excqueue.put(
                        traceback.format_exception_only(type(e), e)[-1])
                else:
                    raise
            logger.warning('Caught exception "%s". Reloading.', e)

        finally:
            time.sleep(.5)  # sleep to avoid cpu load
if isinstance(e, SystemExit): raise # "sanitize" the traceback under the assumption that it is the user's fault import traceback exc_info = sys.exc_info() tb = traceback.extract_tb(exc_info[2]) short_tb = [] for frame_info in tb: if 'AthenaCommon' not in frame_info[0]: short_tb.append(frame_info) print('Shortened traceback (most recent user call last):') print(''.join(traceback.format_list(short_tb)), end=' ') print(''.join(traceback.format_exception_only(exc_info[0], exc_info[1])), end='') sys.stdout.flush() # additional processing to get right error codes import AthenaCommon.ExitCodes as ExitCodes if isinstance(e, IncludeError): theApp._exitstate = ExitCodes.INCLUDE_ERROR sys.exit(theApp._exitstate) elif isinstance(e, ImportError): theApp._exitstate = ExitCodes.IMPORT_ERROR sys.exit(theApp._exitstate) # Throw exception to stop application theApp._exitstate = ExitCodes.UNKNOWN_EXCEPTION raise
def fetch(self, handler, url):
    """Fetch *url* from the master server and feed the response to *handler*.

    Returns True when the loop should continue normally (data processed,
    202 "try again", or a recoverable misconfiguration) and False on fetch
    or header-parsing errors.  Calls os._exit(3) when the master's UUID no
    longer matches the one this replica expects.
    """
    log = self.log
    config = self.xom.config
    log.info("fetching %s", url)
    uuid, master_uuid = make_uuid_headers(config.nodeinfo)
    assert uuid != master_uuid
    try:
        self.master_contacted_at = time.time()
        token = self.auth_serializer.dumps(uuid)
        r = self.session.get(url, auth=self.master_auth, headers={
            H_REPLICA_UUID: uuid,
            H_EXPECTED_MASTER_ID: master_uuid,
            H_REPLICA_OUTSIDE_URL: config.args.outside_url,
            str('Authorization'): 'Bearer %s' % token},
            timeout=self.REPLICA_REQUEST_TIMEOUT)
    except Exception as e:
        # Network-level failure: log the one-line exception summary.
        msg = ''.join(traceback.format_exception_only(e.__class__, e)).strip()
        log.error("error fetching %s: %s", url, msg)
        return False
    if r.status_code not in (200, 202):
        log.error("%s %s: failed fetching %s", r.status_code, r.reason, url)
        return False
    # we check that the remote instance
    # has the same UUID we saw last time
    master_uuid = config.get_master_uuid()
    remote_master_uuid = r.headers.get(H_MASTER_UUID)
    if not remote_master_uuid:
        # we don't fatally leave the process because
        # it might just be a temporary misconfiguration
        # for example of a nginx frontend
        log.error(
            "remote provides no %r header, running "
            "<devpi-server-2.1?"
            " headers were: %s", H_MASTER_UUID, r.headers)
        self.thread.sleep(self.ERROR_SLEEP)
        return True
    if master_uuid and remote_master_uuid != master_uuid:
        # we got a master_uuid and it is not the one we
        # expect, we are replicating for -- it's unlikely this heals
        # itself.  It's thus better to die and signal we can't operate.
        log.error(
            "FATAL: master UUID %r does not match "
            "expected master UUID %r. EXITTING.",
            remote_master_uuid, master_uuid)
        # force exit of the process
        os._exit(3)
    try:
        remote_serial = int(r.headers["X-DEVPI-SERIAL"])
    except Exception as e:
        # Missing/non-integer serial header: treat like a fetch error.
        msg = ''.join(traceback.format_exception_only(e.__class__, e)).strip()
        log.error("error fetching %s: %s", url, msg)
        return False
    if r.status_code == 200:
        try:
            handler(r)
        except Exception:
            log.exception("could not process: %s", r.url)
        else:
            # we successfully received data so let's
            # record the master_uuid for future consistency checks
            if not master_uuid:
                self.xom.config.set_master_uuid(remote_master_uuid)
            # also record the current master serial for status info
            self.update_master_serial(remote_serial)
            return True
    elif r.status_code == 202:
        # NOTE(review): remote_serial was already parsed above; this
        # re-parse looks redundant — confirm before simplifying.
        remote_serial = int(r.headers["X-DEVPI-SERIAL"])
        log.debug("%s: trying again %s\n", r.status_code, url)
        # also record the current master serial for status info
        self.update_master_serial(remote_serial)
        return True
    return False