def signature(self):
    # compute the result one time, and suppose the scan_signature will give the good result
    """
    Compute and cache a composite task signature.

    Four md5 digests are computed in sequence, each into a fresh hash object
    bound to ``self.m`` (the ``sig_*`` helpers feed data into ``self.m``):

    * ``id_sig``: the task's hcode
    * ``exp_sig``: explicit dependencies (``sig_explicit_deps``)
    * ``var_sig``: environment variables (``sig_vars``)
    * ``impl_sig``: implicit/scanned dependencies (``sig_implicit_deps``)

    The concatenation ``impl_sig + id_sig + exp_sig + var_sig`` is cached in
    ``self.cache_sig`` and returned.
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5()
    self.m.update(self.hcode)
    id_sig = self.m.digest()
    # explicit deps
    self.m = Utils.md5()
    self.sig_explicit_deps()
    exp_sig = self.m.digest()
    # env vars
    self.m = Utils.md5()
    self.sig_vars()
    var_sig = self.m.digest()
    # implicit deps / scanner results
    self.m = Utils.md5()
    if self.scan:
        try:
            self.sig_implicit_deps()
        except Errors.TaskRescan:
            # the scanner invalidated its cache: restart the whole computation
            return self.signature()
    # NOTE: when self.scan is falsy, impl_sig is the digest of an empty md5
    impl_sig = self.m.digest()
    ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
    return ret
def h_file(filename):
    """now folders can have a signature too"""
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        # a directory is identified by its name alone
        return Utils.md5(filename).digest()
    digest = Utils.md5()
    for part in (str(st.st_mtime), str(st.st_size), filename):
        digest.update(part)
    return digest.digest()
def hash_env_vars(self, env, vars_lst):
    """Hash the listed env variables, stripping the source dir path from the values."""
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL
    idx = str(id(env)) + str(vars_lst)
    cache = getattr(self, 'cache_env', None)
    if cache is None:
        cache = self.cache_env = {}
    elif idx in cache:
        return cache[idx]
    data = str([env[a] for a in vars_lst])
    data = data.replace(self.srcnode.abspath(), '')
    h = Utils.md5()
    h.update(data.encode())
    ret = h.digest()
    Logs.debug('envhash: %r %r', ret, data)
    cache[idx] = ret
    return ret
def h_file(filename):
    """
    Return a signature for *filename*, cached by modification time.

    In STRONGEST mode the full file contents are hashed; otherwise a cheap
    hash of (mtime, size, name) is used.

    :param filename: path of a regular file
    :return: md5 digest bytes
    :raises IOError: if *filename* is a directory
    """
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError('not a file')
    if filename in Build.hashes_md5_tstamp:
        # cache hit only when the modification time is unchanged
        if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
            return Build.hashes_md5_tstamp[filename][1]
    m = Utils.md5()
    if STRONGEST:
        # hash the whole file contents in chunks; the context manager
        # guarantees the handle is closed even on read errors
        with open(filename, 'rb') as f:
            read = 1
            while read:
                read = f.read(100000)
                m.update(read)
    else:
        # fast path: metadata only
        m.update(str(st.st_mtime))
        m.update(str(st.st_size))
        m.update(filename)
    # ensure that the cache is overwritten
    sig = m.digest()
    Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), sig)
    return sig
def hash_env_vars(self, env, vars_lst):
    """Hash env variables; the quoted source path is removed from the values first."""
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL
    idx = str(id(env)) + str(vars_lst)
    cache = getattr(self, 'cache_env', None)
    if cache is None:
        cache = self.cache_env = {}
    elif idx in cache:
        return cache[idx]
    data = str([env[a] for a in vars_lst])
    # repr()[:-1] drops the closing quote so both quoted prefixes match
    data = data.replace(self.srcnode.abspath().__repr__()[:-1], '')
    h = Utils.md5()
    h.update(data.encode())
    ret = h.digest()
    Logs.debug('envhash: %r %r', ret, data)
    cache[idx] = ret
    return ret
def hash_env_vars(self, env, vars_lst):
    """
    Override env signature computation, and make it to be engine path and
    3rdParty path independent.
    """
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL
    idx = str(id(env)) + str(vars_lst)
    cache = getattr(self, 'cache_env', None)
    if cache is None:
        cache = self.cache_env = {}
    elif idx in cache:
        return cache[idx]
    # path-like variables are excluded from the signature entirely
    skip = ['INCLUDES', 'INCPATHS', 'LIBPATH', 'STLIBPATH']
    lst = [env[x] for x in vars_lst if x not in skip]
    env_str = replace_engine_path_and_tp_root_in_string(self, str(lst))
    h = Utils.md5()
    h.update(env_str.encode())
    ret = h.digest()
    Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
    cache[idx] = ret
    return ret
def hash_env_vars(self, env, vars_lst):
    # reimplement so that the resulting hash does not depend on local paths
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL
    idx = str(id(env)) + str(vars_lst)
    try:
        return self.cache_env[idx]
    except AttributeError:
        cache = self.cache_env = {}
    except KeyError:
        cache = self.cache_env
    values = str([env[a] for a in vars_lst])
    values = values.replace(self.srcnode.abspath().__repr__()[:-1], "")
    digest = Utils.md5()
    digest.update(values.encode())
    ret = digest.digest()
    Logs.debug("envhash: %r %r", ret, values)
    cache[idx] = ret
    return ret
def hash_env_vars(self, env, vars_lst):
    # reimplement so that the resulting hash does not depend on local paths
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL
    idx = str(id(env)) + str(vars_lst)
    cache = getattr(self, 'cache_env', None)
    if cache is None:
        cache = self.cache_env = {}
    elif idx in cache:
        return cache[idx]
    data = str([env[a] for a in vars_lst])
    # drop the repr'd source path (minus the trailing quote) from the values
    data = data.replace(self.srcnode.abspath().__repr__()[:-1], '')
    h = Utils.md5()
    h.update(data.encode())
    ret = h.digest()
    Logs.debug('envhash: %r %r', ret, data)
    cache[idx] = ret
    return ret
def uid(self):
    """
    Returns an identifier used to determine if tasks are up-to-date. Since the
    identifier will be stored between executions, it must be:

    - unique for a task: no two tasks return the same value (for a given build context)
    - the same for a given task instance

    The node paths, the class name, and the function are used as inputs to
    compute a hash; the pointer to the object (python built-in 'id') would
    change between build executions and must be avoided in such hashes.

    :return: hash value
    :rtype: string
    """
    if not hasattr(self, 'uid_'):
        h = Utils.md5(self.__class__.__name__)
        for node in self.inputs + self.outputs:
            h.update(node.abspath())
        self.uid_ = h.digest()
    return self.uid_
def uid(self):
    """
    Return an identifier used to determine if tasks are up-to-date.

    The identifier is stored between executions, so it must be unique for a
    given build context and stable for a given task instance. It is hashed
    from the class name and the node paths; object ids are deliberately not
    used because they change between build executions.

    :return: hash value
    :rtype: string
    """
    if not hasattr(self, 'uid_'):
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.abspath().encode())
        self.uid_ = h.digest()
    return self.uid_
def signature(self):
    """
    Task signatures are stored between build executions, they are used to
    track the changes made to the input nodes (not to the outputs!).
    The signature hashes data from various sources:

    * files listed in the inputs (list of node objects)
    * list of nodes returned by scanner methods (when present)
    * variables/values read from task.__class__.vars/task.env

    If the signature is expected to give a different result, clear the
    result stored in self.cache_sig.
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5()
    self.m.update(self.hcode.encode())
    # explicit deps
    self.sig_explicit_deps()
    # env vars
    self.sig_vars()
    # implicit deps / scanner results
    if self.scan:
        try:
            # the scanner accumulates into self.m; its return value is unused
            self.sig_implicit_deps()
        except Errors.TaskRescan:
            return self.signature()
    ret = self.cache_sig = self.m.digest()
    return ret
def cached_hash_file(self):
    """
    Hash this node's file contents, using two caches:

    * a per-parent-directory listing of last-write timestamps obtained in one
      Win32 FindFirstFile/FindNextFile pass
    * ``Build.hashes_md5_tstamp``, mapping file path to (timestamp, digest)

    :raises IOError: if the node is not a listed file or cannot be read
    """
    try:
        cache = self.ctx.cache_listdir_cache_hash_file
    except AttributeError:
        cache = self.ctx.cache_listdir_cache_hash_file = {}
    if id(self.parent) in cache:
        # the parent folder was already listed during this build
        try:
            t = cache[id(self.parent)][self.name]
        except KeyError:
            raise IOError('Not a file')
    else:
        # an opportunity to list the files and the timestamps at once
        findData = ctypes.wintypes.WIN32_FIND_DATAW()
        find = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
        if find == INVALID_HANDLE_VALUE:
            cache[id(self.parent)] = {}
            raise IOError('Not a file')
        cache[id(self.parent)] = lst_files = {}
        try:
            while True:
                if findData.cFileName not in UPPER_FOLDERS:
                    thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
                    if not thatsadir:
                        ts = findData.ftLastWriteTime
                        # pack the FILETIME words into one int; only compared
                        # for equality below, so the word order is immaterial
                        d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime
                        lst_files[str(findData.cFileName)] = d
                if not FindNextFile(find, ctypes.byref(findData)):
                    break
        except Exception:
            # listing failed midway: invalidate the partial cache entry
            cache[id(self.parent)] = {}
            raise IOError('Not a file')
        finally:
            FindClose(find)
        t = lst_files[self.name]
    fname = self.abspath()
    if fname in Build.hashes_md5_tstamp:
        # cache hit only when the stored timestamp matches
        if Build.hashes_md5_tstamp[fname][0] == t:
            return Build.hashes_md5_tstamp[fname][1]
    try:
        fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
    except OSError:
        raise IOError('Cannot read from %r' % fname)
    f = os.fdopen(fd, 'rb')
    m = Utils.md5()
    rb = 1
    try:
        while rb:
            rb = f.read(200000)
            m.update(rb)
    finally:
        f.close()
    # ensure that the cache is overwritten
    Build.hashes_md5_tstamp[fname] = (t, m.digest())
    return m.digest()
def uid(self):
    """Unique task id hashed from the class name and SUBMODULE_PATH."""
    try:
        return self.uid_
    except AttributeError:
        h = Utils.md5()
        h.update(self.__class__.__name__)
        h.update(self.env.get_flat('SUBMODULE_PATH'))
        self.uid_ = h.digest()
        return self.uid_
def h_file(filename):
    """Hash (mtime, size, name) of a regular file; directories are rejected."""
    import stat
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError('not a file')
    digest = Utils.md5()
    for part in (str(st.st_mtime), str(st.st_size), filename):
        digest.update(part)
    return digest.digest()
def unique_id(self):
    """Cache and return the uid for this ns-3 module-header task."""
    try:
        return self.uid
    except AttributeError:
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update("ns-3-module-header-%s" % self.module)
        self.uid = h.digest()
        return self.uid
def uid(self):
    """Identifier hashed from the class name and the input/output node paths."""
    if not hasattr(self, 'uid_'):
        md = Utils.md5(self.__class__.__name__)
        for node in self.inputs + self.outputs:
            md.update(node.abspath())
        self.uid_ = md.digest()
    return self.uid_
def uid(self):
    """Compute once and cache an id from the class name and node paths."""
    if not hasattr(self, 'uid_'):
        digest = Utils.md5(self.__class__.__name__)
        for node in self.inputs + self.outputs:
            digest.update(node.abspath())
        self.uid_ = digest.digest()
    return self.uid_
def uid(self):
    """Identifier from class name and node paths, latin-1 encoded with xml fallbacks."""
    if not hasattr(self, 'uid_'):
        def enc(s):
            # non-latin-1 characters become xml character references
            return s.encode('latin-1', 'xmlcharrefreplace')
        h = Utils.md5(enc(self.__class__.__name__))
        for node in self.inputs + self.outputs:
            h.update(enc(node.abspath()))
        self.uid_ = h.digest()
    return self.uid_
def config_sig(self):
    """Hash the source/build paths and every configuration variable (key then value)."""
    parts = [self.srcnode.abspath(), self.bldnode.abspath()]
    for k in self.vars_keys():
        parts.append(k)
        parts.append(self.vars[k])
    h = Utils.md5()
    for s in parts:
        h.update(s.encode('utf-8'))
    return h.digest()
def uid(self):
    """Identifier from the class name and node paths, iso8859-1 encoded."""
    if not hasattr(self, 'uid_'):
        h = Utils.md5()
        h.update(self.__class__.__name__.encode('iso8859-1'))
        for node in self.inputs + self.outputs:
            h.update(node.abspath().encode('iso8859-1'))
        self.uid_ = h.digest()
    return self.uid_
def uid(self):
    """Return the cached task identifier, computing it on first use."""
    try:
        return self.uid_
    except AttributeError:
        digest = Utils.md5()
        digest.update(self.__class__.__name__.encode('iso8859-1'))
        for node in self.inputs + self.outputs:
            digest.update(node.abspath().encode('iso8859-1'))
        self.uid_ = digest.digest()
        return self.uid_
def uid(self):
    """Identifier hashed from the class name and the cmake build dir/target."""
    try:
        return self.uid_
    except AttributeError:
        h = Utils.md5()
        for s in (self.__class__.__name__,
                  self.env.get_flat('CMAKE_BLD_DIR'),
                  self.env.get_flat('CMAKE_TARGET')):
            h.update(s.encode('utf-8'))
        self.uid_ = h.digest()
        return self.uid_
def hash_range(filename, start, size):
    """
    Return the md5 digest of a byte range of *filename*.

    :param filename: path of the file to read
    :param start: byte offset at which hashing begins (0 reads from the start)
    :param size: number of bytes to read and hash
    :return: md5 digest bytes
    """
    m = Utils.md5()
    # the context manager guarantees the handle is closed on any error
    with open(filename, 'rb') as f:
        if start:
            f.seek(start)
        m.update(f.read(size))
    return m.digest()
def uid(self):
    """Identifier hashed from the class name and node paths relative to srcnode."""
    if not hasattr(self, 'uid_'):
        src = self.generator.bld.srcnode
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.path_from(src).encode())
        self.uid_ = h.digest()
    return self.uid_
def make_uuid(v, prefix=None):
    """Derive a stable uppercase UUID string from *v* (dicts are key-sorted)."""
    if isinstance(v, dict):
        tmp = str([(k, v[k]) for k in sorted(v.keys())])
    else:
        tmp = str(v)
    d = Utils.md5(tmp).hexdigest().upper()
    if prefix:
        d = '%s%s' % (prefix, d[8:])
    return str(uuid.UUID(d, version=4)).upper()
def make_uuid(v, prefix=None):
    """Build a deterministic uppercase UUID from a value; dict input is key-sorted."""
    if isinstance(v, dict):
        seed = str([(k, v[k]) for k in sorted(v.keys())])
    else:
        seed = str(v)
    digest = Utils.md5(seed).hexdigest().upper()
    if prefix:
        digest = '%s%s' % (prefix, digest[8:])
    gid = uuid.UUID(digest, version=4)
    return str(gid).upper()
def uid(self):
    """Identifier from the class name and node paths relative to each node's srcnode."""
    if not hasattr(self, 'uid_'):
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.path_from(node.ctx.srcnode).encode())
        self.uid_ = h.digest()
    return self.uid_
def uid(self):
    # reimplement so that the signature does not depend on local paths
    try:
        return self.uid_
    except AttributeError:
        root = self.generator.bld.srcnode
        digest = Utils.md5()
        digest.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            digest.update(node.path_from(root).encode())
        self.uid_ = digest.digest()
        return self.uid_
def fast_hash(fname):
    """
    Computes a hash value for a file by using md5 on the file name,
    modified timestamp and file size
    """
    st = os.stat(fname)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError('not a file')
    h = Utils.md5()
    for part in (str(st.st_mtime), str(st.st_size), fname):
        h.update(part)
    return h.digest()
def uid(self):
    """Get a unique id: hash the node paths, the class, the function."""
    if not hasattr(self, 'uid_'):
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.abspath().encode())
        self.uid_ = h.digest()
    return self.uid_
def h_file(filename):
    """
    Hash a file by content; a directory is hashed by recursively hashing
    each of its entries.

    The directory listing is sorted so the resulting signature is stable:
    os.listdir returns entries in arbitrary filesystem order, which would
    otherwise make the digest nondeterministic across runs and platforms.

    :param filename: path of a file or directory
    :return: md5 digest bytes
    """
    m = Utils.md5()
    if os.path.isdir(filename):
        for entry in sorted(os.listdir(filename)):
            m.update(h_file(os.path.join(filename, entry)))
    else:
        # read in chunks; 'with' guarantees the handle is closed
        with open(filename, 'rb') as f:
            chunk = f.read(100000)
            while chunk:
                m.update(chunk)
                chunk = f.read(100000)
    return m.digest()
def h_file(self):
    """Node-level file hash, cached in the build context keyed on mtime."""
    filename = self.abspath()
    st = os.stat(filename)
    cache = self.ctx.hashes_md5_tstamp
    entry = cache.get(filename)
    if entry is not None and entry[0] == st.st_mtime:
        return entry[1]
    if STRONGEST:
        sig = Utils.h_file(filename)
    else:
        if stat.S_ISDIR(st[stat.ST_MODE]):
            raise IOError('Not a file')
        sig = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
    cache[filename] = (st.st_mtime, sig)
    return sig
def uid(self):
    """Identifier from class name, node paths, mode, and (for removals) the header path."""
    if not hasattr(self, 'uid_'):
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.abspath().encode())
        h.update(self.mode.encode())
        if self.mode == 'remove':
            # the removed header also distinguishes the task
            h.update(self.header_to_remove.abspath().encode())
        self.uid_ = h.digest()
    return self.uid_
def h_file(filename):
    """Hash (mtime, size, name) of a file, cached in Build.hash_cache by mtime."""
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError('not a file')
    mtime = str(st.st_mtime)
    entry = Build.hash_cache.get(filename)
    if entry is not None and entry[0] == mtime:
        return entry[1]
    h = Utils.md5()
    h.update(mtime)
    h.update(str(st.st_size))
    h.update(filename)
    sig = h.digest()
    # ensure that the cache is overwritten
    Build.hash_cache[filename] = (mtime, sig)
    return sig
def h_file(filename):
    """
    Hash a file by content; a directory is hashed by recursively hashing
    its entries.

    The listing is sorted because os.listdir returns entries in arbitrary
    filesystem order, which would otherwise make the digest nondeterministic
    across runs and platforms.

    :param filename: path of a file or directory
    :return: md5 digest bytes
    """
    m = Utils.md5()
    if os.path.isdir(filename):
        for entry in sorted(os.listdir(filename)):
            m.update(h_file(os.path.join(filename, entry)))
    else:
        # chunked read; the context manager closes the handle on any error
        with open(filename, 'rb') as f:
            chunk = f.read(100000)
            while chunk:
                m.update(chunk)
                chunk = f.read(100000)
    return m.digest()
def signature(self):
    """Compute and cache the task signature from the hcode, explicit deps,
    env vars and (optionally) scanned implicit deps."""
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5()
    self.m.update(self.hcode)
    # explicit dependencies
    self.sig_explicit_deps()
    # environment variables
    self.sig_vars()
    # implicit dependencies / scanner results
    if self.scan:
        try:
            self.sig_implicit_deps()
        except Errors.TaskRescan:
            # scanner state was invalidated: start over
            return self.signature()
    ret = self.cache_sig = self.m.digest()
    return ret
def h_file(self):
    """Node file hash with an mtime-keyed cache on the build context."""
    filename = self.abspath()
    st = os.stat(filename)
    cache = self.ctx.hashes_md5_tstamp
    entry = cache.get(filename)
    if entry is not None and entry[0] == st.st_mtime:
        return entry[1]
    global STRONGEST
    if STRONGEST:
        sig = Utils.h_file(filename)
    else:
        if stat.S_ISDIR(st[stat.ST_MODE]):
            raise IOError('Not a file')
        sig = Utils.md5(str((st.st_mtime, st.st_size))).digest()
    cache[filename] = (st.st_mtime, sig)
    return sig
def h_file(filename):
    """Metadata hash of a file, memoized in Build.hash_cache keyed on mtime."""
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError("not a file")
    mtime = str(st.st_mtime)
    cached = Build.hash_cache.get(filename)
    if cached is not None and cached[0] == mtime:
        return cached[1]
    digest = Utils.md5()
    for part in (mtime, str(st.st_size), filename):
        digest.update(part)
    sig = digest.digest()
    # ensure that the cache is overwritten
    Build.hash_cache[filename] = (mtime, sig)
    return sig
def make_uuid(v, prefix=None):
    """
    simple utility function
    """
    if isinstance(v, dict):
        # key-sort so dict ordering cannot change the result
        tmp = str([(k, v[k]) for k in sorted(v.keys())])
    else:
        tmp = str(v)
    d = Utils.md5(tmp.encode()).hexdigest().upper()
    if prefix:
        d = "{}{}".format(prefix, d[8:])
    return str(uuid.UUID(d, version=4)).upper()
def make_uuid(v, prefix=None):
    """
    simple utility function
    """
    if isinstance(v, dict):
        seed = str([(k, v[k]) for k in sorted(v.keys())])
    else:
        seed = str(v)
    digest = Utils.md5(seed.encode()).hexdigest().upper()
    if prefix:
        digest = "%s%s" % (prefix, digest[8:])
    gid = uuid.UUID(digest, version=4)
    return str(gid).upper()
def h_file(filename):
    """
    Hash a path. A directory is hashed from the (mtime, size, path) of every
    directory reached by os.walk; a regular file is hashed by content.

    The sub-directory lists are sorted in place so that the walk order — and
    therefore the signature — does not depend on the filesystem's arbitrary
    listing order.

    :param filename: path of a file or directory
    :return: md5 digest bytes
    """
    m = Utils.md5()
    if os.path.isdir(filename):
        for dp, ds, fs in os.walk(filename):
            ds.sort()  # deterministic traversal order
            st = os.stat(dp)
            m.update(str(st.st_mtime))
            m.update(str(st.st_size))
            m.update(dp)
    else:
        # chunked content read; 'with' guarantees the handle is closed
        with open(filename, 'rb') as f:
            chunk = f.read(100000)
            while chunk:
                m.update(chunk)
                chunk = f.read(100000)
    return m.digest()
def uid(self):
    """
    Override uid computation, and make it to be engine path and 3rdParty
    path independent
    """
    if not hasattr(self, 'uid_'):
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        bld = self.generator.bld
        for node in self.inputs + self.outputs:
            h.update(replace_engine_path_and_tp_root_in_string(bld, node.abspath()))
        self.uid_ = h.digest()
    return self.uid_
def h_file(filename):
    """File signature cached by mtime; strong content hash when STRONGEST is set."""
    st = os.stat(filename)
    if stat.S_ISDIR(st[stat.ST_MODE]):
        raise IOError('not a file')
    mtime = str(st.st_mtime)
    entry = Build.hashes_md5_tstamp.get(filename)
    if entry is not None and entry[0] == mtime:
        return entry[1]
    if STRONGEST:
        sig = Utils.h_file_no_md5(filename)
    else:
        h = Utils.md5()
        h.update(mtime)
        h.update(str(st.st_size))
        h.update(filename)
        sig = h.digest()
    Build.hashes_md5_tstamp[filename] = (mtime, sig)
    return sig
def signature(self):
    """
    Task signatures are stored between build executions, they are used to
    track the changes made to the input nodes (not to the outputs!).
    The signature hashes data from various sources:

    * explicit dependencies: files listed in the inputs (list of node objects)
      :py:meth:`waflib.Task.Task.sig_explicit_deps`
    * implicit dependencies: list of nodes returned by scanner methods (when present)
      :py:meth:`waflib.Task.Task.sig_implicit_deps`
    * hashed data: variables/values read from task.vars/task.env
      :py:meth:`waflib.Task.Task.sig_vars`

    If the signature is expected to give a different result, clear the cache
    kept in ``self.cache_sig``::

        from waflib import Task
        class cls(Task.Task):
            def signature(self):
                sig = super(Task.Task, self).signature()
                delattr(self, 'cache_sig')
                return super(Task.Task, self).signature()

    :return: the signature value
    :rtype: string or bytes
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5(self.hcode)
    # explicit deps
    self.sig_explicit_deps()
    # env vars
    self.sig_vars()
    # implicit deps / scanner results
    if self.scan:
        try:
            self.sig_implicit_deps()
        except Errors.TaskRescan:
            # the scanner invalidated its cache: recompute from scratch
            return self.signature()
    ret = self.cache_sig = self.m.digest()
    return ret
def uid(self):
    """
    Obtain a unique id which will be identical in different build instances.
    The node paths, the class name, and the function are inputs for the hash.

    :return: hash value
    :rtype: string
    """
    if not hasattr(self, 'uid_'):
        # not a real hot zone, but avoid surprises here
        h = Utils.md5()
        h.update(self.__class__.__name__.encode())
        for node in self.inputs + self.outputs:
            h.update(node.abspath().encode())
        self.uid_ = h.digest()
    return self.uid_
def signature(self):
    """
    override signature method and add dictionary to hash
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5()
    self.m.update(self.hcode.encode())
    # explicit deps
    self.sig_explicit_deps()
    # env vars
    self.sig_vars()
    # dict: encoded because md5.update requires bytes on Python 3 (the other
    # updates in this method already feed encoded data)
    self.m.update(repr(sorted(self.resources.items())).encode())
    # implicit deps / scanner results
    if self.scan:
        try:
            self.sig_implicit_deps()
        except Errors.TaskRescan:
            return self.signature()
    ret = self.cache_sig = self.m.digest()
    return ret
def scan(self):
    """
    Create (and mkdir) a signature-named node under the build directory,
    stored as ``self.signode``, and return an empty dependency tuple.

    The signature is taken from the first input node's sig when available,
    falling back to the output directory's sig, then to a hash of the
    distribution's name and version.
    """
    env = self.env  # NOTE(review): env/zpy appear unused below — confirm before removing
    gen = self.generator
    bld = gen.bld
    zpy = bld.zpy
    out = self.outputs[0].parent
    sig = Utils.to_hex(
        (self.inputs and getattr(self.inputs[0], 'sig', None))
        or getattr(out, 'sig', None)
        or Utils.md5(self.dist.name_and_version).digest()
    )
    # no implicit dependencies are reported: (nodes, names)
    deps = ([], [])
    #self.signode = out.make_node(sig)
    self.signode = bld.bldnode.make_node(
        str('.%s.%s' % (out.name, sig)),
    )
    self.signode.mkdir()
    #deps[0].append(self.signode)
    return deps
def signature(self):
    """
    Compute the task signature once and cache it in ``self.cache_sig``.

    The hash combines the task's hcode, the explicit dependencies, the
    environment variables and (when a scanner is present) the implicit
    dependencies.

    :return: the signature value
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    self.m = Utils.md5()
    self.m.update(self.hcode.encode())
    # explicit deps
    self.sig_explicit_deps()
    # env vars
    self.sig_vars()
    # implicit deps
    if self.scan:
        try:
            # the scanner accumulates into self.m; its return value is unused
            self.sig_implicit_deps()
        except ValueError:
            # scanner results changed: recompute from scratch
            return self.signature()
    ret = self.cache_sig = self.m.digest()
    return ret