def lex(self):
    """Tokenize the scanner's input, returning the accumulated token list.

    Drives self.scan over the input: emits 'bparen'/'eparen' tokens for
    parentheses (tracking nesting via self.paren_depth), skips '#' line
    comments, and reports any other non-whitespace text as unknown.
    """
    scan = self.scan
    self.list = []
    while scan.next():
        pos = scan.get_pos()
        if scan.get() == '(':
            # Open paren: record token, bump nesting depth, recurse into
            # list lexing.
            self.list.append(token.Token('bparen', scan.emit(), pos))
            self.paren_depth += 1
            self.lex_list()
        elif scan.get() == ')':
            self.list.append(token.Token('eparen', scan.emit(), pos))
            self.paren_depth -= 1
            if self.paren_depth > 0:
                # Still inside a nested list: keep lexing it.
                self.lex_list()
            elif self.paren_depth < 0:
                # Unbalanced close paren; restore depth before raising.
                # NOTE(review): this raises the *return value* of .error() —
                # confirm error.Error.error() returns an exception instance.
                self.paren_depth += 1
                raise error.Error('too many end parens').error()
            else:
                # Depth back to zero: the top-level list is complete.
                return self.list
        elif scan.get() == '#':
            # Line comment: consume up to end of line and discard the text.
            # NOTE(review): assumes a '\n' is always reachable — confirm
            # scan.get() behavior at end of input.
            while scan.get() != '\n':
                scan.next()
            scan.emit()
        else:
            # Anything else that is not pure whitespace is an unknown token;
            # report it truncated to at most 10 characters.
            dump = scan.emit()
            if len(dump.strip()) > 0:
                error.Error(
                    'unknown \'' +
                    (dump if len(dump) <= 10 else dump[0:10 - 3] + '...') +
                    '\'',
                    pos=scan.get_pos()).error()
    return self.list
def CheckErrors(self, index=None):
    """Check the coordinated video timing block for errors.

    Args:
      index: The index of the CoordinatedVideoTiming object within the EDID.

    Returns:
      A list of error.Error objects; empty if no errors were found.
    """
    suffix = '#%d' % index if index else ''
    loc = 'Coordinated video timing block %s' % suffix
    found = []
    # The preferred refresh rate must be among the supported rates.
    if self.preferred_vertical_rate not in self.supported_vertical_rates:
        found.append(error.Error(
            loc, 'Preferred refresh rate not supported',
            '%s supported' % self.preferred_vertical_rate,
            'Supported: %s' % self.supported_vertical_rates))
    # Reserved bits 1-0 of byte 2 must be '00'.
    low_bits = self._block[1] & 0x03
    if low_bits:
        found.append(error.Error(
            loc, 'Bits 1-0 of byte 2 incorrectly set', 0x00, low_bits))
    # Reserved bit 7 of byte 3 must be '0'.
    top_bit = self._block[2] & 0x80
    if top_bit:
        found.append(error.Error(
            loc, 'Bit 7 of byte 3 incorrectly set', 0x00, top_bit))
    return found
def parseBody(self, body, xmpp_elements):
    """Validate a BOSH <body/> wrapper and dispatch its contents.

    Checks the session id, per-request key chain, and request id window
    before handing the payload to self._parse.

    Returns:
        A (session, deferred) tuple. The deferred fails with error.NotFound,
        error.Error('item-not-found'), or error.InternalServerError when a
        check does not pass; session is None when no session was resolved.
    """
    try:
        # grab session
        if body.hasAttribute('sid'):
            sid = str(body['sid'])
        else:
            if self.v:
                log.msg('Session ID not found')
            return None, defer.fail(error.NotFound)
        if self.inSession(body):
            s = self.sessions[sid]
            s.touch()  # any connection should be a renew on wait
        else:
            if self.v:
                log.msg('session does not exist?')
            return None, defer.fail(error.NotFound)
        if bool(s.key) != body.hasAttribute('key'):
            # This session is keyed, but there's no key in this packet; or there's
            # a key in this packet, but the session isn't keyed.
            return s, defer.fail(error.Error('item-not-found'))
        # If this session is keyed, validate the next key.
        if s.key:
            # The SHA-1 digest of the presented key must equal the stored key
            # (BOSH key-sequencing); on success the presented key becomes the
            # new stored key.
            key = hashlib.sha1(body['key']).hexdigest()
            next_key = body['key']
            if key != s.key:
                if self.v:
                    log.msg('Error in key')
                return s, defer.fail(error.Error('item-not-found'))
            s.key = next_key
        # If there's a newkey in this packet, save it. Do this after validating the
        # previous key.
        if body.hasAttribute('newkey'):
            s.key = body['newkey']
        # need to check if this is a valid rid (within tolerance)
        if body.hasAttribute('rid') and body['rid'] != '':
            if s.cache_data.has_key(int(body['rid'])):
                s.touch()
                # implements issue 32 and returns the data returned on a dropped connection
                return s, defer.succeed(s.cache_data[int(body['rid'])])
            if abs(int(body['rid']) - int(s.rid)) > s.window:
                if self.v:
                    log.msg('This rid is invalid %s %s ' % (
                        str(body['rid']),
                        str(s.rid),
                    ))
                return s, defer.fail(error.NotFound)
        else:
            if self.v:
                log.msg('There is no rid on this request')
            return s, defer.fail(error.NotFound)
        return s, self._parse(s, body, xmpp_elements)
    except:
        # NOTE(review): if the exception fired before `s` was bound (e.g. in
        # inSession), this handler itself raises NameError — confirm intended.
        log.err()
        return s, defer.fail(error.InternalServerError)
def LoadRevisionFile(self, revision_file, skip_hash_verify=False):
    """Loads a revision JSON file into this object.

    Args:
      revision_file: File name for a revision JSON file.
      skip_hash_verify: If True, will skip the hash validation check. This
        should only be used if a field has been added or removed in order
        to recalculate the revision hash.

    Raises:
      error.Error: If the file is malformed, missing required fields, or
        fails the revision hash check.
    """
    try:
        with open(revision_file, 'rt') as f:
            revision_json = json.load(f)
        self._package_name = revision_json[FIELD_PACKAGE_NAME]
        self._revision_num = revision_json[FIELD_REVISION]
        self._package_targets = {}
        package_targets = revision_json[FIELD_PACKAGE_TARGETS]
        # iteritems() is Python 2 only; each target maps to its archive list.
        for package_target, archive_list in package_targets.iteritems():
            self._package_targets[
                package_target] = package_info.PackageInfo(archive_list)
    except (TypeError, KeyError) as e:
        raise error.Error('Invalid revision file [%s]: %s' % (revision_file, e))
    # Cross-check the loaded fields against the package description before
    # trusting the hash.
    self._ValidateRevisionComplete()
    if not skip_hash_verify:
        hash_value = revision_json[FIELD_REVISION_HASH]
        if self._GetRevisionHash() != hash_value:
            raise error.Error(
                'Invalid revision file [%s] - revision hash check '
                'failed' % revision_file)
def _ValidateRevisionComplete(self): """Validate packages to make sure it matches the packages description.""" if self._package_name is None: raise error.Error('Invalid revision information - ' 'no package name.') elif self._revision_num is None: raise error.Error('Invalid revision information - ' 'no revision identifier') package_targets = self._packages_desc.GetPackageTargetsForPackage( self._package_name ) if package_targets: package_targets = set(package_targets) revision_targets = set(self._package_targets.keys()) if package_targets != revision_targets: raise error.Error('Invalid revision information - ' 'target mismatch:' + '\n%s:' % self._package_name + '\n Required Target Packages:' + '\n\t' + '\n\t'.join(sorted(package_targets)) + '\n Supplied Target Packages:' + '\n\t' + '\n\t'.join(sorted(revision_targets)))
def LoadPackageFile(self, package_file, skip_missing=False):
    """Loads a package file into this object.

    Args:
      package_file: Filename or JSON dictionary.
      skip_missing: If True, a missing archive file yields a name-only
        ArchiveInfo instead of raising error.Error.

    Raises:
      error.Error: If an archive file is missing (and skip_missing is False),
        or package_file has an unsupported type.
    """
    archive_names = None
    self._archive_list = []
    # TODO(dyen): Support old format temporarily when it was a list of archives.
    if isinstance(package_file, list) or isinstance(package_file, dict):
        if isinstance(package_file, list):
            # Old format: a bare list of archives, implicitly version 0.
            self._package_version = 0
            archive_list = package_file
        else:
            self._package_version = package_file[PACKAGE_KEY_VERSION]
            archive_list = package_file[PACKAGE_KEY_ARCHIVES]
        if archive_list:
            if isinstance(archive_list[0], archive_info.ArchiveInfo):
                # Setting a list of ArchiveInfo objects, no need to interpret JSON.
                self._archive_list = archive_list
            else:
                # Assume to be JSON.
                for archive_json in archive_list:
                    archive = archive_info.ArchiveInfo(
                        archive_info_file=archive_json)
                    self._archive_list.append(archive)
    elif isinstance(package_file, str) or isinstance(package_file, unicode):
        # Filename case (unicode check is Python 2 only): archive info files
        # live in a directory named after the package, next to the file.
        package_data = ReadPackageFile(package_file)
        self._package_version = package_data[PACKAGE_KEY_VERSION]
        archive_names = package_data[PACKAGE_KEY_ARCHIVES]
        package_name = GetLocalPackageName(package_file)
        archive_dir = os.path.join(os.path.dirname(package_file), package_name)
        for archive in archive_names:
            arch_file = archive + '.json'
            arch_path = os.path.join(archive_dir, arch_file)
            if not os.path.isfile(arch_path):
                if not skip_missing:
                    raise error.Error(
                        'Package (%s) points to invalid archive file (%s).' %
                        (package_file, arch_path))
                # Missing but tolerated: record the archive by name only.
                archive_desc = archive_info.ArchiveInfo(name=archive)
            else:
                archive_desc = archive_info.ArchiveInfo(
                    archive_info_file=arch_path)
            self._archive_list.append(archive_desc)
    else:
        raise error.Error('Invalid load package file type (%s): %s.' %
                          (type(package_file), package_file))
def Reset():
    # Load the source program named on the command line into the module
    # global _src.
    global _src
    if len(sys.argv) < 2:
        # Usage message (Russian): "Run: python O.py <program file>"
        error.Error("Запуск: python O.py <файл программы>")
    else:
        try:
            _f = open(sys.argv[1])
        except:
            # Message (Russian): "File open error".
            # NOTE(review): if error.Error() returns instead of exiting, _f is
            # unbound below and _f.read() raises NameError — confirm that
            # error.Error terminates the program.
            error.Error("Ошибка открытия файла")
        _src = _f.read()
        _f.close()
def check(self):
    """Validate the user's variable choice against the menu and its kind.

    Returns False (after displaying an error) when the choice is unknown or
    its qualitative/quantitative kind does not match what is requested;
    True otherwise.
    """
    if str(self.user_ch) not in self.choices.keys():
        return False
    idx = int(self.user_ch) - 1
    if idx != len(self.varlist):
        # Evaluate the selected variable eagerly (the original used the
        # non-short-circuiting '&', so both operands were always evaluated).
        chosen = self.varlist[idx]
        if (self.quali == False) & (chosen == 'type'):
            err.Error("Choisissez une variable quantitative").display()
            return False
        if (self.quanti == False) & (chosen != 'type'):
            err.Error("Choisissez une variable qualitative").display()
            return False
    return True
def CheckErrors(self, index=None):
    """Check this range-limits descriptor for errors.

    Errors may include maximum values being less than minimum values, or
    invalid support flag values.

    Args:
      index: The integer index of this descriptor (1-4).

    Returns:
      A list of error.Error objects.
    """
    found = []
    loc = '%s %s' % (self._type, '#%d' % index if index else '')
    # Vertical range must be ordered min <= max.
    if self.max_vertical_rate < self.min_vertical_rate:
        found.append(error.Error(
            loc, 'Maximum vertical rate less than minimum',
            'Max vert: %d\tMin vert: %d' % (self.max_vertical_rate,
                                            self.min_vertical_rate)))
    # Horizontal range must be ordered min <= max.
    if self.max_horizontal_rate < self.min_horizontal_rate:
        found.append(error.Error(
            loc, 'Maximum horizontal rate less than minimum', '',
            'Max hor: %d\tMin hor: %d' % (self.max_horizontal_rate,
                                          self.min_horizontal_rate)))
    # Pixel clock of zero is invalid.
    if not self.pixel_clock:
        found.append(error.Error(
            loc, 'Pixel clock value invalid', 'Non-zero', self.pixel_clock))
    # Byte 10 holds the video timing support flags; only four values are
    # defined.
    valid_flags = (0x00, 0x01, 0x02, 0x04)
    if self._block[10] not in valid_flags:
        found.append(error.Error(
            loc + '- byte 10',
            'Invalid value for Video Timing Support Flags',
            '0x00 0x01 0x02 0x04',
            '0x%02X' % self._block[10]))
    return found
def connectError(self, xs): """called when we get disconnected""" # FIXME: we should really only send the error event back if # attempts to reconnect fail. There's no reason temporary # connection failures should be exposed upstream if self.verbose: log.msg('connect ERROR') try: log.msg(xs) except: pass if self.waiting_requests: if len(self.waiting_requests) > 0: wr = self.waiting_requests.pop(0) wr.doErrback(error.Error('remote-connection-failed')) if self.pint and self.pint.sessions.has_key(self.sid): try: self.expire() except: self.onExpire()
def _ExtensionErrors(edid, version):
    """Checks all extensions for errors.

    Args:
      edid: The EDID being checked.
      version: A string indicating the EDID's version.

    Returns:
      A list of error.Error objects.
    """
    # Byte 0x7E of the base block holds the declared extension count.
    num_ext = edid[0x7E]
    errors = []
    # Each extension is a 128-byte block; the base block accounts for one.
    # NOTE: '/' here is Python 2 integer division (xrange below confirms
    # the Python 2 context).
    if (num_ext + 1) != (len(edid) / 128):
        errors.append(
            error.Error('Extensions', 'Extension count does not match '
                        'EDID length', '%d extensions' % num_ext,
                        '%d extensions' % ((len(edid) / 128) - 1)))
    # Collect errors from each extension block (1-based indices).
    for x in xrange(1, num_ext + 1):
        ext = extensions.GetExtension(edid, x, version)
        err = ext.CheckErrors(x)
        if err:
            errors.extend(err)
    return errors
def buildRemoteError(self, err_elem=None):
    """Build a remote-stream-error wrapping an optional stream error element.

    Args:
        err_elem: Optional error element to attach as a child.

    Returns:
        An error.Error configured as a remote-stream-error.
    """
    remote_err = error.Error('remote-stream-error')
    remote_err.error_stanza = 'remote-stream-error'
    remote_err.children = [err_elem] if err_elem else []
    return remote_err
def streamStart(self, xs): """ A xmpp stream has started """ # This is done to fix the stream id problem, I should submit a bug to twisted bugs try: self.authid = self.xmlstream.sid if not self.attrs.has_key('no_events'): self.xmlstream.addOnetimeObserver("/auth", self.stanzaHandler) self.xmlstream.addOnetimeObserver("/response", self.stanzaHandler) self.xmlstream.addOnetimeObserver("/success", self._saslSuccess) self.xmlstream.addOnetimeObserver("/failure", self._saslError) self.xmlstream.addObserver("/iq/bind", self.bindHandler) self.xmlstream.addObserver("/bind", self.stanzaHandler) self.xmlstream.addObserver("/challenge", self.stanzaHandler) self.xmlstream.addObserver("/message", self.stanzaHandler) self.xmlstream.addObserver("/iq", self.stanzaHandler) self.xmlstream.addObserver("/presence", self.stanzaHandler) # TODO - we should do something like this # self.xmlstream.addObserver("/*", self.stanzaHandler) except: log.err(traceback.print_exc()) self._wrError(error.Error("remote-connection-failed")) self.disconnect()
def visit_complex_expr(self, complex_expr):
    """Type-check a complex expression: both sides must have the same type.

    Visits the right-hand side first, then the first operand, leaving the
    result type in self.current_type. Raises error.Error on a mismatch.
    """
    complex_expr.rest.accept(self)
    rhs_type = self.current_type
    complex_expr.first_operand.accept(self)
    lhs_type = self.current_type
    if rhs_type == lhs_type:
        return
    err_msg = "expecting " + lhs_type + ", found " + rhs_type
    line, column = self.__first_token(complex_expr.rest)
    raise error.Error(err_msg, line, column)
def __init__(self, *args, **kwargs):
    """Initialize the window from keyword arguments.

    Requires a 'window_dimension' keyword; raises error.InputError when it
    is missing and error.Error when instantiated directly on the abstract
    MovingWindow base class.
    """
    dimension = kwargs.get('window_dimension')
    if dimension is None:
        raise error.InputError('Window dimension', 'is a required parameter')
    self.window_dimension = dimension
    # Direct instantiation of the abstract base is forbidden.
    if self.__class__ is MovingWindow:
        raise error.Error('MovingWindow is an abstract base class')
def make_error_response_with_details(error_code, **kwargs):
    """ Create an error response from a single error code.

    Builds an error.Error with detail kwargs and converts it into the
    project's standard error response tuple.
    """
    err_obj = error.Error()
    err_obj.set_error_with_details(error_code, **kwargs)
    status = err_obj.get_html_status()
    status_msg = err_obj.get_html_status_msg()
    return error_response(status, status_msg, err_obj.error_code,
                          err_obj.get_message())
def check(self):
    """Validate the chosen comparison operator for the current variable.

    For the qualitative 'type' variable only choices outside {1, 2}
    ('==' / '!=') are allowed; returns False (after displaying an error)
    on an invalid choice, True otherwise.
    """
    if str(self.user_ch) not in self.choices.keys():
        return False
    qualitative = self.var == 'type'
    if qualitative and int(self.user_ch) in (1, 2):
        err.Error("Choisir '==' ou '!=' pour une variable qualitative"
                  ).display()
        return False
    return True
def DownloadPackageInfoFiles(local_package_file, remote_package_file,
                             downloader=None):
    """Downloads all package info files from a downloader.

    Downloads a package file from the cloud along with all of the archive
    info files. Archive info files are expected to be in a directory with the
    name of the package along side the package file. Files will be downloaded
    in the same structure.

    Args:
      local_package_file: Local package file where root file will live.
      remote_package_file: Remote package URL to download from.
      downloader: Optional downloader if standard HTTP one should not be used.

    Raises:
      error.Error: If the package file or any archive file fails to download.
    """
    if downloader is None:
        downloader = pynacl.gsd_storage.HttpDownload
    # Fetch the root package file first.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
    downloader(remote_package_file, local_package_file)
    if not os.path.isfile(local_package_file):
        raise error.Error('Could not download package file: %s.' %
                          remote_package_file)
    # The package file lists archive names; their JSON files live in a
    # sibling directory named after the package, both locally and remotely.
    package_data = ReadPackageFile(local_package_file)
    archive_list = package_data[PACKAGE_KEY_ARCHIVES]
    local_package_name = GetLocalPackageName(local_package_file)
    remote_package_name = GetRemotePackageName(remote_package_file)
    local_archive_dir = os.path.join(os.path.dirname(local_package_file),
                                     local_package_name)
    # posixpath keeps remote URLs using forward slashes regardless of OS.
    remote_archive_dir = posixpath.join(posixpath.dirname(remote_package_file),
                                        remote_package_name)
    pynacl.file_tools.MakeDirectoryIfAbsent(local_archive_dir)
    # Mirror every archive info file into the local archive directory.
    for archive in archive_list:
        archive_file = archive + '.json'
        local_archive_file = os.path.join(local_archive_dir, archive_file)
        remote_archive_file = posixpath.join(remote_archive_dir, archive_file)
        downloader(remote_archive_file, local_archive_file)
        if not os.path.isfile(local_archive_file):
            raise error.Error('Could not download archive file: %s.' %
                              remote_archive_file)
def visit_complex_bool_expr(self, complex_bool_expr):
    """Type-check a binary boolean expression.

    Both operands must have the same type (left in self.current_type);
    raises error.Error on a mismatch. When a boolean connector is present,
    continues checking the rest of the expression.
    """
    complex_bool_expr.second_expr.accept(self)
    rhs_type = self.current_type
    complex_bool_expr.first_expr.accept(self)
    types_match = rhs_type == self.current_type
    if not types_match:
        err_msg = "expecting " + self.current_type + ", found " + rhs_type
        line, column = self.__first_token(complex_bool_expr.second_expr)
        raise error.Error(err_msg, line, column)
    if complex_bool_expr.has_bool_connector:
        complex_bool_expr.rest.accept(self)
def enable_blkio_and_cfq(device):
    """Enable blkio and cfq, when not done by boot command.

    Args:
      device: Name of the block device under /sys/block (e.g. 'sda').

    Raises:
      error.Error: If the device does not exist, or the kernel was not
        compiled with blkio cgroup support.
    """
    # Ensure that the required device is valid block device.
    disk = os.path.join('/sys/block', device)
    if not os.path.exists(disk):
        raise error.Error('Machine does not have disk device ' + device)
    # Ensure the io cgroup is mounted.
    if not cgroup.mount_point(BLKIO_CGROUP_NAME):
        raise error.Error('Kernel not compiled with blkio support')
    # Enable cfq scheduling on the block device. (Local renamed from 'file'
    # to stop shadowing the builtin.)
    scheduler_file = os.path.join(disk, 'queue/scheduler')
    if '[cfq]' in utils.read_one_line(scheduler_file):
        logging.debug('cfq scheduler is already enabled on drive %s', device)
        return
    logging.info('Enabling cfq scheduler on drive %s', device)
    utils.write_one_line(scheduler_file, 'cfq')
def np_random(seed=None):
    """Create a seeded numpy RandomState.

    Args:
        seed: A non-negative integer, or None to derive a seed.

    Returns:
        A (RandomState, seed) tuple, where seed is the value actually used.

    Raises:
        error.Error: If seed is neither None nor a non-negative integer.
    """
    if seed is not None:
        seed_ok = isinstance(seed, int) and 0 <= seed
        if not seed_ok:
            raise error.Error(
                'Seed must be a non-negative integer or omitted, not {}'.format(
                    seed))
    seed = create_seed(seed)
    random_state = np.random.RandomState()
    random_state.seed(_int_list_from_bigint(hash_seed(seed)))
    return random_state, seed
def check(self):
    """Validate the comparison value for the current variable.

    Quantitative variables require a numeric value (an error is displayed
    otherwise); for the qualitative 'type' variable the value is quoted
    in place. Returns True when the value is acceptable.
    """
    if self.var == 'type':
        # Qualitative variable: wrap the value in single quotes.
        self.user_ch = "'" + self.user_ch + "'"
        return True
    if self.is_number(self.user_ch) == False:
        err.Error(
            "Veuillez entrer un nombre pour une variable quantitative"
        ).display()
        return False
    return True
def set_value(self, value):
    """
    Assign a new value to the variable.

    The new value must have same type as current value. Read-only
    variable cannot be set.

    :param value: New value as :class:`bkl.expr.Expr` object.
    """
    if self.readonly:
        raise error.Error('variable "%s" is read-only' % self.name)
    # FIXME: type checks
    self.value = value
def _int_list_from_bigint(bigint): # Special case 0 if bigint < 0: raise error.Error('Seed must be non-negative, not {}'.format(bigint)) elif bigint == 0: return [0] ints = [] while bigint > 0: bigint, mod = divmod(bigint, 2**32) ints.append(mod) return ints
def _startup_timeout(self, d):
    """Handle a startup timeout for the request behind deferred d."""
    # this can be called if connection failed, or if we connected
    # but never got a stream features before the timeout
    if self.pint.v:
        log.msg('================================== %s %s startup timeout ==================================' % (str(self.sid), str(time.time()),))
    # Find the waiting request that owns this deferred.
    for i in range(len(self.waiting_requests)):
        if self.waiting_requests[i].deferred == d:
            # check if we really failed or not
            if self.authid:
                # Stream id was assigned, so startup actually succeeded:
                # flush the pending elements instead of failing.
                self._wrPop(self.elems, i=i)
            else:
                self._wrError(error.Error("remote-connection-failed"), i=i)
def setlayer(self, layername, layer, force=False):
    """
    Put an existing map layer to the layer collection

    @param string name of the layer
    @param list a map layer
    @param boolean optional, whether to overwrite values if key exists
    @raise error.Error if the layer exists and force is False
    """
    # 'in' replaces the Python-2-only dict.has_key() with identical
    # semantics, keeping this portable to Python 3.
    if not force and layername in self.layers:
        raise error.Error(
            "r.agent::libagent.playground.Playground.setlayer()",
            "May not overwrite existing layer.")
    self.layers[layername] = layer
def visit_simple_expr(self, simple_expr):
    """Infer the type of a simple (single-term) expression.

    Identifiers are looked up in the symbol table (raising error.Error when
    undefined); any other term uses its token type directly. The result is
    left in self.current_type.
    """
    term = simple_expr.term
    if term.tokentype != mytoken.ID:
        # Literal term: its token type is its type.
        self.current_type = term.tokentype
        return
    var_name = term.lexeme
    if not self.sym_table.variable_exists(var_name):
        err_msg = term.lexeme + " is undefined"
        raise error.Error(err_msg, term.line, term.column)
    self.current_type = self.sym_table.get_variable_type(var_name)
def buildRemoteError(self, err_elem=None):
    """Build an error.Error describing a remote failure.

    Args:
        err_elem: Optional stream error element. When present the error is a
            remote-stream-error wrapping it; otherwise a generic
            remote-connection-failed (e.g. for XML parsing failures).
    """
    # This may not be a stream error, such as an XML parsing error.
    # So expose it as remote-connection-failed.
    if err_elem is None:
        condition = 'remote-connection-failed'
        children = []
    else:
        # This is an actual stream:error. Create a remote-stream-error to
        # encapsulate it.
        condition = 'remote-stream-error'
        children = [err_elem]
    e = error.Error(condition)
    e.error_stanza = condition
    e.children = children
    return e
def CheckErrors(self, index=None):
    """Check the DummyDescriptor for errors.

    A dummy descriptor must begin with the fixed header bytes
    0x00 0x00 0x00 0x10 0x00 and have an all-zero body.

    Args:
      index: The integer index of this descriptor (1-4).

    Returns:
      A list of error.Error objects.
    """
    found = []
    loc = '%s %s' % (self._type, '#%d' % index if index else '')
    header = self._block[0:5]
    if header != [0x00, 0x00, 0x00, 0x10, 0x00]:
        found_header = '0x%02X ' * 5 % tuple(header)
        found.append(error.Error(loc, 'Bytes 0-4',
                                 '0x00 0x00 0x00 0x10 0x00', found_header))
    body = self._block[5:18]
    if body != [0x00] * 13:
        found_body = '0x%02X ' * 13 % tuple(body)
        found.append(error.Error(loc, 'Bytes 5-18', 'All 0x00', found_body))
    return found
def getPlugin(self, name):
    """Return a PluginInfo for the plugin with the given name.

    Args:
        name: Name attribute of the <plugin> element to look up.

    Raises:
        error.Error: If no single plugin with that name exists in the
            repository listing.
    """
    plugins = self.listDOM.getElementsByTagName("plugin")
    matches = [
        elem for elem in plugins
        if elem.attributes["name"].value == name
    ]
    # Exactly one match is required; zero (or duplicates) is an error.
    if len(matches) == 1:
        return PluginInfo(matches[0], self)
    # Fixed message: original read "Plugin <name>doesn't exists! in ..."
    # (missing space and broken grammar).
    raise error.Error("Plugin " + name + " doesn't exist in " +
                      self.repository)