def _generate_words(self):
    """Create words for the current session

    We generate one for the current 5 minutes, plus one for the previous 5.
    This way captcha sessions have a livespan of 10 minutes at most.
    """
    session = self.request[COOKIE_ID]
    # "nowish" identifies the current 5-minute slot: int(time.time() / 300)
    # increments by one every 300 seconds.  _TEST_TIME lets tests pin it.
    nowish = _TEST_TIME or int(time.time() / 300)
    secret = getUtility(IKeyManager).secret()
    # Two seeds: one for the current slot, one for the previous slot, so a
    # previously-issued word stays valid for up to 10 minutes total.
    seeds = [sha(secret + session + str(nowish)).digest(),
             sha(secret + session + str(nowish - 1)).digest()]
    words = []
    for seed in seeds:
        word = []
        for i in range(WORDLENGTH):
            # Map each digest byte onto the captcha alphabet (Python 2:
            # digest bytes are str characters, hence ord()).
            index = ord(seed[i]) % len(CHARS)
            word.append(CHARS[index])
        words.append(''.join(word))
    return words
def save(self):
    """Persist this Person, lazily filling in a hash and the client IP.

    NOTE(review): the hash is derived from random.random(), which is not
    cryptographically secure -- if this value guards anything sensitive,
    it should come from os.urandom/secrets instead.
    """
    if not self.hash:
        random.seed()
        # The random salt is hashed, then hashed again to form the stored id.
        salt = sha(str(random.random())).hexdigest()
        self.hash = sha(salt).hexdigest()
    if not self.ip:
        # Capture the requesting client's IP from thread-local state.
        self.ip = threadlocals.get_current_ip()
    super(Person, self).save()
def credentials(self, endpoint, user, password):
    """Build and store a WSSE ``UsernameToken`` header for *endpoint*.

    Sets ``self.endpoint`` and ``self.wsse``; returns nothing.
    PasswordDigest = base64(SHA1(nonce + created + password)), per the
    WSSE UsernameToken profile.
    """
    # Nonce from time + random.random() -- NOTE(review): not a CSPRNG;
    # acceptable for a nonce, not for secrets.
    nonce = sha(str(time.time() + random.random())).digest()
    now = datetime.datetime.now().isoformat() + "Z"
    digest = sha(nonce + now + password).digest()
    wsse = 'UsernameToken Username="******", PasswordDigest="%(p)s", Nonce="%(n)s", Created="%(c)s"'
    # NOTE(review): the 'u' key is unused by the template above -- the
    # Username field is a fixed literal; confirm this is intentional.
    value = dict(u = user,
                 p = base64.encodestring(digest).strip(),
                 n = base64.encodestring(nonce).strip(),
                 c = now)
    self.endpoint = endpoint
    self.wsse = wsse % value
def render_sdx(self, code, options, format, prefix='sdedit'):
    """
    Render sequence diagram into a PNG or PDF output file.

    Returns (relative filename, output filename), or (None, None) when
    the sdedit tool cannot be run.
    """
    # Content-addressed cache key: source + per-diagram options + global args.
    hashkey = code.encode('utf-8') + str(options) + \
              str(self.builder.config.sdedit_args)
    ofname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format)
    ifname = '%s-%s.sd' % (prefix, sha(hashkey).hexdigest())
    infn = os.path.join(self.builder.outdir, ifname)
    if hasattr(self.builder, 'imgpath'):
        # HTML
        relfn = posixpath.join(self.builder.imgpath, ofname)
        outfn = os.path.join(self.builder.outdir, '_images', ofname)
    else:
        # LaTeX
        relfn = ofname
        outfn = os.path.join(self.builder.outdir, ofname)
    if os.path.isfile(outfn):
        # Already rendered on a previous pass -- reuse the cached image.
        return relfn, outfn
    if hasattr(self.builder, '_sdedit_warned'):
        # A previous failure already warned; don't retry for every diagram.
        return None, None
    ensuredir(os.path.dirname(outfn))
    ensuredir(os.path.dirname(infn))
    inputfile = open(infn, "w")
    if isinstance(code, unicode):
        code = code.encode('utf-8')
    inputfile.write(code)
    inputfile.close()
    path = self.builder.config.sdedit_path
    if path.endswith(".jar"):
        # A .jar path is run through the configured JVM.
        sdedit_args = [self.builder.config.sdedit_java_path, "-jar", path]
    else:
        sdedit_args = [path]
    sdedit_args.extend(self.builder.config.sdedit_args)
    sdedit_args.extend(['-t', format, '-o', outfn, infn])
    if options.get("linewrap"):
        sdedit_args.extend(['--lineWrap', 'true'])
    if options.get("threadnumber"):
        sdedit_args.extend(['--threadNumbersVisible', 'true'])
    try:
        p = Popen(sdedit_args, stdout=PIPE, stdin=None, stderr=PIPE)
    except OSError, err:
        if err.errno != 2:   # No such file or directory
            raise
        self.builder.warn('sdedit command %r cannot be run (needed for '
                          'sequence diagram output), check the sdedit_path '
                          ' setting' % self.builder.config.sdedit_path)
        self.builder._sdedit_warned = True
        return None, None
def render_scruffy(self, code, options, format, prefix='scruffy'):
    """Render scruffy code into a PNG output file.

    :param self: Sphinx main class object
    :param code: `string` of the code to be rendered
    :param options: `list` of rendering options
    :param format: `string` image format: png, jpg, etc
    :param prefix: `string` image file name prefix
    :return: `tuple` in form (<source image file name>,
             <output image file name>)
    """
    # yUML input is comma-separated: fold newlines into commas first.
    code = code.replace('\n', ',')
    # Content-addressed cache key so unchanged diagrams are not re-rendered.
    hashkey = code.encode('utf-8') + str(options)
    image_filename = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format)
    assert hasattr(self.builder, 'imgpath'), "Only HTML output is supported!"
    source_image_file_name = posixpath.join(self.builder.imgpath, image_filename)
    output_image_file_name = path.join(self.builder.outdir, '_images', image_filename)
    if not path.isfile(output_image_file_name):
        ensuredir(path.dirname(output_image_file_name))
        with open(output_image_file_name, 'wb') as stream:
            scruffy_options = ScruffyOptions(dict((k, True) for k in options))
            # Sequence diagrams and class diagrams use different backends.
            if scruffy_options.sequence:
                suml.suml2pic.transform(code, stream, scruffy_options)
            else:
                suml.yuml2dot.transform(code, stream, scruffy_options)
    return source_image_file_name, output_image_file_name
def authenticate(self, username, password):
    """Send the wire-protocol login message and read the server response.

    The message is: protocol version byte, service name, username and a
    SHA-1 hash of the password, all length-prefixed.
    """
    # Requires sending a length preceded username and password even if
    # authentication is turned off.
    # protocol version
    self.writeByte(0)
    # service requested
    self.writeString("database")
    if username:
        # utf8 encode supplied username
        self.writeString(username)
    else:
        # no username, just output length of 0
        self.writeString("")
    # password supplied, sha-1 hash it
    m = sha()
    m.update(password)
    pwHash = m.digest()
    # The raw digest bytes are appended directly (no length prefix here).
    self.wbuf.extend(pwHash)
    self.prependLength()
    self.flush()
    # A length, version number, and status code is returned
    try:
        self.bufferForRead()
    except IOError, e:
        print "ERROR: Connection failed. Please check that the host and port are correct."
        raise e
def _property_hash(self):
    """Return a hash() of this Unit's properties (for use with dirty)."""
    try:
        return sha(pickle.dumps(self._properties)).digest()
    except TypeError, x:
        # An unpicklable property: annotate the error with the class name
        # and the property keys before re-raising, so the failing Unit is
        # identifiable from the traceback.
        x.args += (self.__class__.__name__, self._properties.keys())
        raise
def checksum(path):
    """Return the SHA-1 digest of the file at *path*, or None on IOError.

    The file is opened in binary mode so the digest is byte-accurate on
    every platform -- the original opened it in text mode, which mangles
    line endings on Windows and breaks on non-text content in Python 3.
    """
    try:
        with open(path, 'rb') as f:
            return sha(f.read()).digest()
    except IOError:
        # Missing/unreadable file: signal "no checksum" instead of raising.
        return None
def loadTileset(cls, name):
    """Load (or reload) the named tileset from disk into cls.loadedTilesets.

    Skips work when the tileset is already loaded, or when the file's
    SHA-256 matches the hash recorded at the previous load.
    """
    from hashlib import sha256 as sha
    if name in cls.loadedTilesets:
        return
    filepath = cls.knownTilesets[name]['path']
    data = open(filepath, 'rb').read()
    tsInfo = cls.knownTilesets[name]
    newHash = sha(data).hexdigest()
    if 'hash' in tsInfo and tsInfo['hash'] == newHash:
        # file hasn't changed
        return
    tsInfo['hash'] = newHash
    from tileset import KPTileset
    import time
    # The clock() pair only measures load duration for the log line below.
    b = time.clock()
    cls.loadedTilesets[name] = KPTileset.loadFromArc(data)
    e = time.clock()
    print "Loading set: {0} in {1}".format(name, e-b)
def split_file(self, file, chunk_size):
    """Split *file* into sequential parts of at most *chunk_size* bytes.

    Each part is written to '<file>.NN' and SHA-1 hashed as it is written.
    Returns a tuple (part_filenames, part_hex_digests).
    """
    parts = []
    parts_digest = []
    file_size = os.path.getsize(file)
    in_file = open(file, 'rb')
    number_parts = int(file_size / chunk_size)
    number_parts += 1
    bytes_read = 0
    for i in range(0, number_parts, 1):
        filename = '%s.%02d' % (file, i)
        part_digest = sha()
        file_part = open(filename, 'wb')
        print('Part:', self.euca.get_relative_filename(filename))
        part_bytes_written = 0
        # BUG FIX: bound each part by the requested chunk_size, not the
        # unrelated module constant IMAGE_SPLIT_CHUNK (the original made
        # the chunk_size parameter a no-op).
        while part_bytes_written < chunk_size:
            data = in_file.read(IMAGE_IO_CHUNK)
            file_part.write(data)
            part_digest.update(data)
            data_len = len(data)
            part_bytes_written += data_len
            bytes_read += data_len
            if bytes_read >= file_size:
                break
        file_part.close()
        parts.append(filename)
        parts_digest.append(hexlify(part_digest.digest()))
    in_file.close()
    return (parts, parts_digest)
def test_authenticateWithOldPasswords(self):
    """Accounts stored with the legacy unsalted-SHA1 hexdigest scheme must
    still authenticate alongside users created through addUser()."""
    try:
        from hashlib import sha1 as sha
    except:
        from sha import sha
    zum = self._makeOne()
    # synthesize an older account
    old_password = sha('old_password').hexdigest()
    zum._user_passwords['old_user'] = old_password
    zum._login_to_userid['*****@*****.**'] = 'old_user'
    zum._userid_to_login['old_user'] = '******'
    # create a new user
    zum.addUser('new_user', '*****@*****.**', 'new_password')
    # Legacy account authenticates via the naive hexdigest comparison.
    user_id, login = zum.authenticateCredentials(
        {'login': '******', 'password': '******' })
    self.assertEqual(user_id, 'old_user')
    self.assertEqual(login, '*****@*****.**')
    # New account authenticates via the current encoding scheme.
    user_id, login = zum.authenticateCredentials(
        {'login': '******', 'password': '******' })
    self.assertEqual(user_id, 'new_user')
    self.assertEqual(login, '*****@*****.**')
def load(self):
    """Load the compiled template module from self.module_filename and
    validate its registry version; purge the cache on any failure."""
    filename = self.module_filename
    try:
        if imp is not None:
            # Hash-derived unique module name so reloads never collide
            # with an older module object in sys.modules.
            module_name = "chameleon_%s" % sha(filename).hexdigest()
            f = open(filename, 'r')
            try:
                if module_name in sys.modules:
                    del sys.modules[module_name]
                module = imp.load_source(module_name, filename, f)
            finally:
                f.close()
            registry = module.registry
        else:
            # No imp module available: exec the file and pull 'registry'
            # out of the resulting namespace instead.
            _locals = {}
            execfile(filename, _locals)
            registry = _locals['registry']
        version = registry['version']
        if version != self.version:
            raise ValueError("Version mismatch: %s != %s" % (
                version, self.version))
    except (AttributeError, ValueError, TypeError), e:
        # Any malformed/stale cache module is discarded, not fatal.
        logger.debug(
            "Error loading cache for %s (%s)." % (self.filename, str(e)))
        self.purge()
def test_invited_view(self): """ Test that the invited view invite the user from a valid key and fails if the key is invalid or has expired. """ # Valid key puts use the invited template. response = self.client.get(reverse('invitation_invited', kwargs={'invitation_key': self.sample_key.key})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'invitation/invited.html') wrong_key_template = 'invitation/wrong_invitation_key.html' # Expired key use the wrong key template. response = self.client.get(reverse('invitation_invited', kwargs={'invitation_key': self.expired_key.key})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, wrong_key_template) # Invalid key use the wrong key template. response = self.client.get(reverse('invitation_invited', kwargs={'invitation_key': 'foo'})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, wrong_key_template) # Nonexistent key use the wrong key template. response = self.client.get(reverse('invitation_invited', kwargs={'invitation_key': sha('foo'.encode()).hexdigest()})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, wrong_key_template)
def resetPeerIDs():
    """Re-seed the module-level peer id (_idrandom[0]) from /dev/urandom
    plus several timing-jitter entropy sources."""
    try:
        f = open('/dev/urandom','rb')
        x = f.read(20)
        f.close()
    except:
        # No urandom available: fall back to timing entropy alone.
        x = ''
    # Count busy-loop iterations inside one clock() tick and inside one
    # 10ms / 100ms wall-clock slice -- cheap jitter-based entropy.
    l1 = 0
    t = clock()
    while t == clock():
        l1 += 1
    l2 = 0
    t = long(time()*100)
    while t == long(time()*100):
        l2 += 1
    l3 = 0
    if l2 < 1000:
        t = long(time()*10)
        while t == long(time()*10):
            l3 += 1
    x += ( repr(time()) + '/' + str(time()) + '/' + str(l1) + '/' +
           str(l2) + '/' + str(l3) + '/' + str(getpid()) )
    # Map the low 6 bits of the digest's last 11 bytes through the
    # base64-style alphabet to form the printable peer id suffix.
    s = ''
    for i in sha(x).digest()[-11:]:
        s += mapbase64[ord(i) & 0x3F]
    _idrandom[0] = s
def write_inituser(fn, user, password):
    """Write an init-user file in ``user:{SHA}<base64 sha1(password)>`` form."""
    import binascii
    from hashlib import sha1 as sha
    # Hash the password, base64-encode it, and drop b2a_base64's trailing
    # newline before embedding it in the record.
    digest = sha(password.encode('utf-8')).digest()
    encoded = binascii.b2a_base64(digest)[:-1]
    with open(fn, "wb") as handle:
        handle.write(user.encode('utf-8') + b':{SHA}' + encoded + b'\n')
    os.chmod(fn, 0o644)
def archive(self):
    """
    Create the archive.

    Builds a tar.* or zip archive of self.get_files() relative to
    self.base_path, then logs its name and SHA-1.
    """
    import tarfile
    arch_name = self.get_arch_name()
    try:
        self.base_path
    except:
        # Default the archive root to the project path when unset.
        self.base_path = self.path
    node = self.base_path.make_node(arch_name)
    try:
        node.delete()
    except:
        pass
    files = self.get_files()
    if self.algo.startswith('tar.'):
        tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))
        for x in files:
            tinfo = tar.gettarinfo(name=x.abspath(),
                                   arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
            # Normalize ownership so archives are reproducible across hosts.
            tinfo.uid = 0
            tinfo.gid = 0
            tinfo.uname = 'root'
            tinfo.gname = 'root'
            fu = None
            try:
                fu = open(x.abspath(), 'rb')
                tar.addfile(tinfo, fileobj=fu)
            finally:
                fu.close()
        tar.close()
    elif self.algo == 'zip':
        import zipfile
        zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)
        for x in files:
            archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
            zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
        zip.close()
    else:
        self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
    # Python 2.4 compatibility: fall back to the old sha module.
    try:
        from hashlib import sha1 as sha
    except ImportError:
        from sha import sha
    # Digest is best-effort; an unreadable archive only loses the log detail.
    try:
        digest = " (sha=%r)" % sha(node.read()).hexdigest()
    except:
        digest = ''
    Logs.info('New archive created: %s%s' % (self.arch_name, digest))
def _hashPassword(self, password, scheme, salt=None):
    """
    Return hashed password (including salt).

    Supported schemes: crypt, md5, smd5, sha, ssha; anything else in
    AVAIL_USERPASSWORD_SCHEMES is passed through unhashed.

    Raises ValueError for a scheme not in AVAIL_USERPASSWORD_SCHEMES.
    """
    scheme = scheme.lower()
    if not scheme in AVAIL_USERPASSWORD_SCHEMES.keys():
        # BUG FIX: the original had a second, unconditional
        # "raise ValueError" immediately after this one, which was dead
        # code (or made everything below unreachable).  One informative
        # raise suffices; call-form raise also works on Python 3.
        raise ValueError('Hashing scheme %s not supported for class %s.' % (
            scheme, self.__class__.__name__))
    if salt is None:
        if scheme=='crypt':
            salt = _salt(saltLen=2, saltAlphabet=CRYPT_ALPHABET)
        elif scheme in ['smd5', 'ssha']:
            # Salted schemes get a 4-byte random salt appended to the digest.
            salt = _salt(saltLen=4, saltAlphabet=None)
        else:
            salt = ''
    if scheme=='crypt':
        return crypt.crypt(password, salt)
    elif scheme in ['md5', 'smd5']:
        return base64.encodestring(md5(password.encode()+salt).digest()+salt).strip()
    elif scheme in ['sha', 'ssha']:
        return base64.encodestring(sha(password.encode()+salt).digest()+salt).strip()
    else:
        # Known scheme without a hash step (e.g. plain text): store as-is.
        return password
def run(self):
    """Render the directive body as highlighted source preceded by a
    rendered snapshot image, returning the docutils node list."""
    self.assert_has_content()
    text = '\n'.join(self.content)
    parsed = highlight(text, PythonLexer(), HtmlFormatter())
    result = [nodes.raw('', parsed, format='html')]
    if True:
        # If we want a snapshot - this should check the 'snapshot argument'
        fn = '{}.png'.format(sha(text).hexdigest())
        env = self.state.document.settings.env
        rel_filename, filename = env.relfn2path(fn)
        outfn = os.path.join(env.app.builder.outdir, '_static', rel_filename)
        ensuredir(os.path.dirname(outfn))
        script_to_render = BOT_HEADER + text
        try:
            subprocess.call(['sbot', '-o', '%s' % outfn, script_to_render])
        except Exception as e:
            raise ShoebotError(str(e))
        # TODO - Support other output formats
        image_node = nodes.raw('', html_img_tag(rel_filename), format='html')
        result.insert(0, image_node)
    # BUG FIX: a docutils directive must return its node list; the
    # original fell off the end and returned None, dropping the output.
    return result
def auth (self, user, password, client_ip):
    """Validate *user*/*password*, set session cookies, record the session.

    Returns True on success, False otherwise.

    NOTE(review): the SQL below is assembled by string interpolation from
    request-supplied values -- SQL injection risk; Query() should accept
    bound parameters instead.  Passwords are compared as unsalted MD5,
    and the session token uses random.choice() rather than a CSPRNG --
    all three deserve hardening.
    """
    # Sanity check
    if not user or not password:
        return False
    hashed_password = md5(password).hexdigest()
    # Check the database
    q = "SELECT COUNT(*) FROM users " + \
        "WHERE username='******' AND password='******';" %(locals())
    if Query(q)['COUNT(*)'][0] != 1:
        return False
    # Set cookie: random string + client IP, SHA1-hashed, as the validator.
    random_str = ''.join ([random.choice(string.letters) for x in range(30)])
    cookie_raw = random_str + client_ip
    cookie = sha(cookie_raw).hexdigest()
    CTK.cookie['validation'] = cookie + "; path=/; HttpOnly"
    CTK.cookie['user'] = user + "; path=/; HttpOnly"
    # Save the session
    exp_time = time.time() + EXPIRATION
    expiration = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(exp_time))
    q = "REPLACE INTO sessions (user, validation, expiration) " +\
        "VALUES ('%(user)s','%(cookie)s','%(expiration)s');" %(locals())
    Query(q)
    return True
def who(request, encoding="utf-8"):
    """Create a SHA1 Hash of the User's IP Address and User-Agent"""
    address = request.remote.ip
    user_agent = request.headers.get("User-Agent", "")
    fingerprint = "{0:s}{1:s}".format(address, user_agent)
    return sha(fingerprint.encode(encoding)).hexdigest()
def get_image_filename(self, code, format, options, prefix='rackdiag'):
    """
    Get path of output file.

    Returns (relative filename, absolute output filename) for the given
    diagram source; raises RackdiagError for unsupported formats or a
    missing PDF backend.
    """
    if format not in ('PNG', 'PDF'):
        raise RackdiagError('rackdiag error:\nunknown format: %s\n' % format)
    if format == 'PDF':
        # PDF output needs reportlab; fail early with a clear message.
        try:
            import reportlab
        except ImportError:
            msg = 'rackdiag error:\n' + \
                  'colud not output PDF format; Install reportlab\n'
            raise RackdiagError(msg)
    # Content-addressed filename: diagram source plus rendering options.
    hashkey = code.encode('utf-8') + str(options)
    fname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format.lower())
    if hasattr(self.builder, 'imgpath'):
        # HTML
        relfn = posixpath.join(self.builder.imgpath, fname)
        outfn = os.path.join(self.builder.outdir, '_images', fname)
    else:
        # LaTeX
        relfn = fname
        outfn = os.path.join(self.builder.outdir, fname)
    if os.path.isfile(outfn):
        # Already rendered -- reuse the cached file.
        return relfn, outfn
    ensuredir(os.path.dirname(outfn))
    return relfn, outfn
def read_all_log(app_log_config, max_line, start_datetime="", end_datetime="", keyword=""):
    """Merge entries from every configured log file, sort them by their
    parsed timestamp, and return at most *max_line* lines.

    Returns False when any log file is missing or unreadable.
    """
    lines = []
    logs = {}
    log_times = {}
    for log in app_log_config['logs']:
        log_path = "/var/log/%s/%s" % (log["dir"], log["filename"])
        if os.path.isfile(log_path) is False:
            return False
        lines = read_log(log_path, max_line, log, start_datetime, end_datetime, keyword)
        if lines is False:
            return False
        for line in lines:
            # Key each line by its SHA1 so identical lines collapse and
            # the timestamp index can reference lines cheaply.
            key = sha(line).hexdigest()
            logs.update({key: line})
            pattern = re.compile(log["time_pattern"])
            time_format = log["time_format"]
            # Parse the timestamp out of the line for sorting.
            now = time.strptime(pattern.findall(line)[0], time_format)
            log_times.update({key:int(time.mktime(now))})
    # Python 2: items() yields a list that can be sorted in place.
    array = log_times.items()
    array.sort(key=lambda a:int(a[1]))
    lines = []
    for sorted_log in array[:max_line]:
        key = sorted_log[0]
        lines.append(logs.get(key))
    return lines
def run(self):
    """Render the directive body as highlighted source plus a snapshot
    image rendered via the external 'sbot' tool; return the node list."""
    self.assert_has_content()
    text = "\n".join(self.content)
    parsed = highlight(text, PythonLexer(), HtmlFormatter())
    result = [nodes.raw("", parsed, format="html")]
    options_dict = dict(self.options)
    opt_size = options_dict.get("size", (100, 100))
    # Explicit filename wins; otherwise derive one from the source hash.
    fn = options_dict.get("filename") or "{}.png".format(sha(text).hexdigest())
    env = self.state.document.settings.env
    rel_filename, filename = env.relfn2path(fn)
    outfn = os.path.join(env.app.builder.outdir, "_static", rel_filename)
    ensuredir(os.path.dirname(outfn))
    script_to_render = BOT_HEADER.format(size=opt_size) + text
    if os.path.isfile(outfn):
        # BUG FIX: the filename placeholder was never interpolated.
        raise ShoebotError("File %s exists, not overwriting." % outfn)
    try:
        cmd = ["sbot", "-o", "%s" % outfn, script_to_render]
        subprocess.call(cmd)
    except Exception as e:
        # BUG FIX: "%e" is a float format specifier and raised TypeError
        # when fed the exception object; use %s.
        print("oops %s" % e)
        print("cmd: ")
        print(" ".join(cmd))
        raise ShoebotError(str(e))
    image_node = nodes.image(uri="_static/{}".format(rel_filename), alt="test")
    result.insert(0, image_node)
    return result
def authenticateCredentials( self, credentials ):
    """ See IAuthenticationPlugin.

    o We expect the credentials to be those returned by
      ILoginPasswordExtractionPlugin.

    Returns (userid, login) on success, None otherwise.
    """
    login = credentials.get( 'login' )
    password = credentials.get( 'password' )
    if login is None or password is None:
        return None
    # Map login -> userid; an unknown login falls back to itself.
    userid = self._login_to_userid.get( login, login )
    reference = self._user_passwords.get(userid)
    if reference is None:
        return None
    if AuthEncoding.is_encrypted( reference ):
        if AuthEncoding.pw_validate( reference, password ):
            return userid, login
    # Support previous naive behavior: legacy accounts stored the bare
    # unsalted SHA1 hexdigest of the password.  Note this also runs when
    # pw_validate fails above, preserving the historical fall-through.
    digested = sha( password ).hexdigest()
    if reference == digested:
        return userid, login
    return None
def split_file(path, chunk_size=10 * 1024 * 1024, IMAGE_IO_CHUNK=10 * 1024):
    """Split the file at *path* into numbered '<path>.N' parts of at most
    *chunk_size* bytes, reading IMAGE_IO_CHUNK bytes at a time.

    Returns (part_filenames, part_hex_digests), where each digest is the
    hex-encoded SHA-1 of the corresponding part's content.
    """
    total_size = os.path.getsize(path)
    part_count = int(total_size / chunk_size) + 1
    part_names = []
    part_hashes = []
    consumed = 0
    source = open(path, "rb")
    for index in range(part_count):
        part_path = '%s.%d' % (path, index)
        digest = sha()
        sink = open(part_path, "wb")
        written = 0
        # Fill this part up to chunk_size, or until the input is exhausted.
        while written < chunk_size:
            block = source.read(IMAGE_IO_CHUNK)
            sink.write(block)
            digest.update(block)
            block_len = len(block)
            written += block_len
            consumed += block_len
            if consumed >= total_size:
                break
        sink.close()
        part_names.append(part_path)
        part_hashes.append(hexlify(digest.digest()))
    source.close()
    return part_names, part_hashes
def get_basename(text, options, prefix="aafig"):
    """Return a unique basename '<prefix>-<sha1>' for *text* + *options*.

    The 'format' option does not change the figure content, so it is
    excluded from the hash key.
    """
    options = options.copy()
    if "format" in options:
        del options["format"]
    # BUG FIX: the original concatenated bytes (text.encode) with str
    # (str(options)), which raises TypeError on Python 3.  Concatenate
    # first, then encode -- identical bytes, and consistent with the
    # sibling get_basename implementation in this file.
    hashkey = (text + str(options)).encode("utf-8")
    id = sha(hashkey).hexdigest()
    return "%s-%s" % (prefix, id)
def auth_by_password(name, password, session=None): ''' def auth_by_password(name, password, session=None): Authenticate an user by name and password. If user exists and password is correct, return a cookie In other case return None ''' # python 2.6 compatible if sys.version_info[1] == 6: from hashlib import sha1 as sha else: import sha sha = sha.new password_hash = sha(password).hexdigest() try: db_user = session.query(db.User).\ filter(db.User.name == name).one() except db.InvalidRequestError: return None if db_user.password == password_hash: # si el usuario ya tiene una cookie, devolver esta if db_user.cookie and\ db_user.cookie.expiration > datetime.datetime.now(): return db_user.cookie.id cookie = db.Cookie() db_user.cookie = cookie session.commit() return cookie.id else: return None
def get_torrent_info_hash(path):
    """get_torrent_info_hash(path)

    Return the SHA-1 info-hash (raw digest) of the torrent file at *path*.

    NOTE: Important. These OS functions can throw IOError or OSError.
    Make sure you catch these in the caller.
    """
    if os.path.getsize(path) > MAX_TORRENT_SIZE:
        # file is too large, bailout. (see #12301)
        raise ValueError("%s is not a valid torrent" % path)
    f = open(path, "rb")
    try:
        import libtorrent
        data = f.read(MAX_TORRENT_SIZE)
        if not data or data[0] != "d":
            # File doesn't start with 'd' (bencoded dict), bailout (see #12301)
            raise ValueError("%s is not a valid torrent" % path)
        metainfo = libtorrent.bdecode(data)
        try:
            infohash = metainfo["info"]
        except StandardError:
            raise ValueError("%s is not a valid torrent" % path)
        # The info-hash is the SHA-1 of the re-bencoded 'info' dictionary.
        infohash = sha(libtorrent.bencode(infohash)).digest()
        return infohash
    finally:
        f.close()
def process_graphviz_nodes(app, doctree, docname):
    """Replace each graphviz node in *doctree* with an image node,
    rendering the DOT source once per configured output format."""
    for node in doctree.traverse(graphviz):
        try:
            content = '\n'.join(node['graphviz_content'])
            # Content-addressed filename so identical graphs share output.
            filename = '%s' % sha(content).hexdigest()
            outfn = os.path.join(app.builder.outdir, '_images', 'graphviz', filename)
            if not os.path.exists(os.path.dirname(outfn)):
                os.makedirs(os.path.dirname(outfn))
            # iterate over the above-listed types
            for format_mime, format_ext in _output_formats.iteritems():
                graphviz_process = Popen([
                    getattr(app.builder.config, 'graphviz_dot', 'dot'),
                    '-T%s' % (format_ext,),
                    '-o', '%s.%s' % (outfn, format_ext),
                ], stdin=PIPE)
                # Feed the DOT source on stdin and wait for completion.
                graphviz_process.stdin.write(content)
                graphviz_process.stdin.close()
                graphviz_process.wait()
            relfn = '_images/graphviz/%s' % (filename,)
            newnode = nodes.image()
            # One candidate per mime type so the builder can pick a format.
            newnode['candidates'] = dict(
                [
                    (format_mime, '%s.%s' % (relfn, format_ext))
                    for (format_mime, format_ext) in _output_formats.iteritems()
                ]
            )
            # and that's all, folks!
            node.replace_self(newnode)
        except Exception, err:
            # Render the failure into the document instead of aborting.
            from traceback import format_exception_only
            msg = ''.join(format_exception_only(err.__class__, err))
            newnode = doctree.reporter.error('Exception occured evaluating '
                                             'graphviz expression: \n%s' % msg,
                                             base_node=node)
            node.replace_self(newnode)
def get_basename(text, options, prefix='aafig'):
    """Return '<prefix>-<sha1 of text + options>'.

    The 'format' option is stripped first, since it does not affect the
    rendered figure content.
    """
    opts = options.copy()
    opts.pop('format', None)
    digest = sha((text + str(opts)).encode('utf-8')).hexdigest()
    return '%s-%s' % (prefix, digest)
def parsedir(directory, parsed, files, blocked, exts = ['.torrent'],
             return_metainfo = False, errfunc = _errfunc):
    """Scan *directory* (recursing only into directories that contain no
    torrents themselves) and reconcile against the previous scan state.

    Inputs carry the previous state:
      parsed  -- {infohash: torrent-info-dict}
      files   -- {path: [(mtime, size), infohash-or-0]}
      blocked -- {path: 1} for known duplicates / unparseable files

    Returns (new_parsed, new_files, new_blocked, added, removed).
    """
    if NOISY:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}
    new_blocked = {}
    torrent_type = {}
    while dirs_to_check:   # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        newtorrents = False
        for f in os.listdir(directory):
            newtorrent = None
            for ext in exts:
                if f.endswith(ext):
                    newtorrent = ext[1:]
                    break
            if newtorrent:
                newtorrents = True
                p = os.path.join(directory, f)
                new_files[p] = [(os.path.getmtime(p), os.path.getsize(p)), 0]
                torrent_type[p] = newtorrent
        if not newtorrents:
            # No torrents at this level: descend into subdirectories.
            for f in os.listdir(directory):
                p = os.path.join(directory, f)
                if os.path.isdir(p):
                    dirs_to_check.append(p)
    new_parsed = {}
    to_add = []
    added = {}
    removed = {}
    # files[path] = [(modification_time, size), hash], hash is 0 if the file
    # has not been successfully parsed
    for p,v in new_files.items():   # re-add old items and check for changes
        oldval = files.get(p)
        if not oldval:        # new file
            to_add.append(p)
            continue
        h = oldval[1]
        if oldval[0] == v[0]:   # file is unchanged from last parse
            if h:
                if blocked.has_key(p):  # parseable + blocked means duplicate
                    to_add.append(p)    # other duplicate may have gone away
                else:
                    new_parsed[h] = parsed[h]
                new_files[p] = oldval
            else:
                new_blocked[p] = 1  # same broken unparseable file
            continue
        if parsed.has_key(h) and not blocked.has_key(p):
            if NOISY:
                errfunc('removing '+p+' (will re-add)')
            removed[h] = parsed[h]
        to_add.append(p)
    to_add.sort()
    for p in to_add:     # then, parse new and changed torrents
        new_file = new_files[p]
        v,h = new_file
        if new_parsed.has_key(h):    # duplicate
            if not blocked.has_key(p) or files[p][0] != v:
                errfunc('**warning** '+ p +' is a duplicate torrent for '+new_parsed[h]['path'])
            new_blocked[p] = 1
            continue
        if NOISY:
            errfunc('adding '+p)
        try:
            ff = open(p, 'rb')
            d = bdecode(ff.read())
            check_info(d['info'])
            # Info-hash = SHA1 of the re-bencoded 'info' dict.
            h = sha(bencode(d['info'])).digest()
            new_file[1] = h
            if new_parsed.has_key(h):
                # Duplicate discovered only after hashing the content.
                errfunc('**warning** '+ p +' is a duplicate torrent for '+new_parsed[h]['path'])
                new_blocked[p] = 1
                continue
            # Build the summary record for this torrent.
            a = {}
            a['path'] = p
            f = os.path.basename(p)
            a['file'] = f
            a['type'] = torrent_type[p]
            i = d['info']
            l = 0
            nf = 0
            if i.has_key('length'):
                # Single-file torrent.
                l = i.get('length',0)
                nf = 1
            elif i.has_key('files'):
                # Multi-file torrent: total the per-file lengths.
                for li in i['files']:
                    nf += 1
                    if li.has_key('length'):
                        l += li['length']
            a['numfiles'] = nf
            a['length'] = l
            a['name'] = i.get('name', f)
            def setkey(k, d = d, a = a):
                # Copy optional top-level keys through when present.
                if d.has_key(k):
                    a[k] = d[k]
            setkey('failure reason')
            setkey('warning message')
            setkey('announce-list')
            if return_metainfo:
                a['metainfo'] = d
        except:
            # Any parse/IO failure marks the file blocked, not fatal.
            errfunc('**warning** '+p+' has errors')
            new_blocked[p] = 1
            continue
        try:
            ff.close()
        except:
            pass
        if NOISY:
            errfunc('... successful')
        new_parsed[h] = a
        added[h] = a
    for p,v in files.items():    # and finally, mark removed torrents
        if not new_files.has_key(p) and not blocked.has_key(p):
            if NOISY:
                errfunc('removing '+p)
            removed[v[1]] = parsed[v[1]]
    if NOISY:
        errfunc('done checking')
    return (new_parsed, new_files, new_blocked, added, removed)
def parse(self, filetree=1):
    """Decode the raw torrent data into metadata, info-hash, display name,
    a file tree (format 1 or 2, chosen by *filetree*) and a flat file list.
    """
    if not self.__m_filedata:
        log.error("No data to process!")
        return
    try:
        self.__m_metadata = bencode.bdecode(self.__m_filedata)
    except Exception as e:
        log.warning("Failed to decode torrent data %s: %s",
                    self.filename if self.filename else "", e)
        raise e
    # Info-hash = SHA1 of the re-bencoded 'info' dictionary.
    self.__m_info_hash = sha(bencode.bencode(
        self.__m_metadata["info"])).hexdigest()
    # Get encoding from torrent file if available
    self.encoding = None
    if "encoding" in self.__m_metadata:
        self.encoding = self.__m_metadata["encoding"]
    elif "codepage" in self.__m_metadata:
        self.encoding = str(self.__m_metadata["codepage"])
    if not self.encoding:
        self.encoding = "UTF-8"
    # Check if 'name.utf-8' is in the torrent and if not try to decode the string
    # using the encoding found.
    if "name.utf-8" in self.__m_metadata["info"]:
        self.__m_name = decode_string(
            self.__m_metadata["info"]["name.utf-8"])
    else:
        self.__m_name = decode_string(self.__m_metadata["info"]["name"],
                                      self.encoding)
    # Get list of files from torrent info
    paths = {}
    dirs = {}
    if "files" in self.__m_metadata["info"]:
        # Multi-file torrent: paths are prefixed with the torrent name
        # when there is more than one file.
        prefix = ""
        if len(self.__m_metadata["info"]["files"]) > 1:
            prefix = self.__m_name
        for index, f in enumerate(self.__m_metadata["info"]["files"]):
            if "path.utf-8" in f:
                path = os.path.join(prefix, *f["path.utf-8"])
            else:
                path = decode_string(
                    os.path.join(
                        prefix,
                        decode_string(os.path.join(*f["path"]),
                                      self.encoding)), self.encoding)
            f["index"] = index
            paths[path] = f
            # Accumulate directory sizes for every ancestor directory.
            dirname = os.path.dirname(path)
            while dirname:
                dirinfo = dirs.setdefault(dirname, {})
                dirinfo["length"] = dirinfo.get("length", 0) + f["length"]
                dirname = os.path.dirname(dirname)
        if filetree == 2:
            def walk(path, item):
                if item["type"] == "dir":
                    item.update(dirs[path])
                else:
                    item.update(paths[path])
                    item["download"] = True
            file_tree = FileTree2(paths.keys())
            file_tree.walk(walk)
        else:
            def walk(path, item):
                if type(item) is dict:
                    return item
                return [paths[path]["index"], paths[path]["length"], True]
            file_tree = FileTree(paths)
            file_tree.walk(walk)
        self.__m_files_tree = file_tree.get_tree()
    else:
        # Single-file torrent: build the one-entry tree directly.
        if filetree == 2:
            self.__m_files_tree = {
                "contents": {
                    self.__m_name: {
                        "type": "file",
                        "index": 0,
                        "length": self.__m_metadata["info"]["length"],
                        "download": True
                    }
                }
            }
        else:
            self.__m_files_tree = {
                self.__m_name: (0, self.__m_metadata["info"]["length"], True)
            }
    # Flat file list mirroring the tree construction above.
    self.__m_files = []
    if "files" in self.__m_metadata["info"]:
        prefix = ""
        if len(self.__m_metadata["info"]["files"]) > 1:
            prefix = self.__m_name
        for f in self.__m_metadata["info"]["files"]:
            if "path.utf-8" in f:
                path = os.path.join(prefix, *f["path.utf-8"])
            else:
                path = decode_string(
                    os.path.join(
                        prefix,
                        decode_string(os.path.join(*f["path"]),
                                      self.encoding)), self.encoding)
            self.__m_files.append({
                'path': path,
                'size': f["length"],
                'download': True
            })
    else:
        self.__m_files.append({
            "path": self.__m_name,
            "size": self.__m_metadata["info"]["length"],
            "download": True
        })
def gitsha(path):
    """Return the git blob SHA-1 (hex) of the file at *path*.

    Git hashes a blob as sha1(b"blob <size>\\0" + contents); feeding the
    header and the data to update() separately is equivalent to hashing
    the concatenation.
    """
    h = sha()
    # BUG FIX: the Python-2-only file() builtin is replaced by open() in
    # binary mode with a context manager (the original also leaked the
    # file handle); byte-for-byte identical reads on Python 2.
    with open(path, 'rb') as fp:
        data = fp.read()
    # Encode the header explicitly so the digest input is bytes on Py3.
    h.update(("blob %d\0" % len(data)).encode('ascii'))
    h.update(data)
    return h.hexdigest()
def hashSHA(string):
    """Return the SHA-1 of *string* (default-encoded) as a hex string."""
    digest = sha(string.encode()).digest()
    return hexlify(digest).decode()
def generate_id(self):
    """Return a new session id.

    NOTE(review): derived from random.random(), which is predictable --
    session ids should come from a CSPRNG (os.urandom/secrets) to resist
    session guessing.
    """
    return sha('%s' % random.random()).hexdigest()
def check(stamp, resource=None, bits=None, check_expiration=None,
          ds_callback=None):
    """Check whether a stamp is valid

    Optionally, the stamp may be checked for a specific resource, and/or
    it may require a minimum bit value, and/or it may be checked for
    expiration, and/or it may be checked for double spending.

    If 'check_expiration' is specified, it should contain the number of
    seconds old a date field may be.  Indicating days might be easier in
    many cases, e.g.

      >>> from hashcash import DAYS
      >>> check(stamp, check_expiration=28*DAYS)

    NOTE: Every valid (version 1) stamp must meet its claimed bit value
    NOTE: Check floor of 4-bit multiples (overly permissive in acceptance)

    NOTE(review): in the elif chains below, a stamp that passes the
    expiration test (and had check_expiration set) falls out of the chain
    and returns None rather than a boolean -- confirm callers treat None
    as intended.
    """
    if stamp.startswith('0:'):
        # Version 0: "0:date:resource:suffix"
        try:
            date, res, suffix = stamp[2:].split(':')
        except ValueError:
            #ERR.write("Malformed version 0 hashcash stamp!\n")
            return False
        if resource is not None and resource != res:
            return False
        elif check_expiration is not None:
            good_until = strftime("%y%m%d%H%M%S",
                                  localtime(time() - check_expiration))
            if date < good_until:
                return False
        elif callable(ds_callback) and ds_callback(stamp):
            # Double-spend detected by the caller's callback.
            return False
        elif type(bits) is not int:
            # No bit requirement: format alone is enough.
            return True
        else:
            hex_digits = int(floor(bits / 4))
            return sha(stamp).hexdigest().startswith('0' * hex_digits)
    elif stamp.startswith('1:'):
        # Version 1: "1:claim:date:resource:ext:rand:counter"
        try:
            claim, date, res, ext, rand, counter = stamp[2:].split(':')
        except ValueError:
            #ERR.write("Malformed version 1 hashcash stamp!\n")
            return False
        if resource is not None and resource != res:
            return False
        elif type(bits) is int and bits > int(claim):
            # Claimed difficulty is below what the caller requires.
            return False
        elif check_expiration is not None:
            good_until = strftime("%y%m%d%H%M%S",
                                  localtime(time() - check_expiration))
            if date < good_until:
                return False
        elif callable(ds_callback) and ds_callback(stamp):
            return False
        else:
            # Version 1 stamps must meet their own claimed bit value.
            hex_digits = int(floor(int(claim) / 4))
            return sha(stamp).hexdigest().startswith('0' * hex_digits)
    else:
        # Unknown ver or generalized hashcash
        #ERR.write("Unknown hashcash version: Minimal authentication!\n")
        if type(bits) is not int:
            return True
        elif resource is not None and stamp.find(resource) < 0:
            return False
        else:
            hex_digits = int(floor(bits / 4))
            return sha(stamp).hexdigest().startswith('0' * hex_digits)
def render_tikz(self,tikz,libs='',stringsubst=False):
    """Render *tikz* source to a PNG under _images/ and return its
    builder-relative filename, or None when rendering failed (after
    emitting a warning).

    The output name is keyed on a hash of the tikz source, so an unchanged
    picture is rendered only once.  Pipeline: pdflatex -> pdftoppm ->
    (ImageMagick ``convert`` | Netpbm ``pnmcrop``/``pnmtopng``), selected
    by the ``tikz_proc_suite`` config value.
    """
    hashkey = tikz.encode('utf-8')
    fname = 'tikz-%s.png' % (sha(hashkey).hexdigest())
    relfn = posixpath.join(self.builder.imgpath, fname)
    outfn = path.join(self.builder.outdir, '_images', fname)

    # Already rendered in a previous run?
    if path.isfile(outfn):
        return relfn
    # An earlier failure means the toolchain is broken; do not retry.
    if hasattr(self.builder, '_tikz_warned'):
        return None

    ensuredir(path.dirname(outfn))
    curdir = getcwd()

    latex = DOC_HEAD % libs
    latex += self.builder.config.tikz_latex_preamble
    if stringsubst:
        tikz = tikz % {'wd': curdir}
    latex += DOC_BODY % tikz
    if isinstance(latex, str):
        latex = latex.encode('utf-8')

    # One shared tempdir per build; cleaned up elsewhere.
    if not hasattr(self.builder, '_tikz_tempdir'):
        tempdir = self.builder._tikz_tempdir = tempfile.mkdtemp()
    else:
        tempdir = self.builder._tikz_tempdir

    chdir(tempdir)
    # BUG FIX: ``latex`` was just encoded to bytes, so the file must be
    # opened in binary mode -- writing bytes to a text-mode file raises
    # TypeError on Python 3.
    tf = open('tikz.tex', 'wb')
    tf.write(latex)
    tf.close()

    try:
        try:
            p = Popen(['pdflatex', '--interaction=nonstopmode', 'tikz.tex'],
                      stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:   # No such file or directory
                raise
            self.builder.warn('LaTeX command cannot be run')
            self.builder._tikz_warned = True
            return None
    finally:
        chdir(curdir)

    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise TikzExtError('Error (tikz extension): latex exited with error:\n'
                           '[stderr]\n%s\n[stdout]\n%s' % (stderr, stdout))

    chdir(tempdir)

    # the following does not work for pdf patterns
    # p1 = Popen(['convert', '-density', '120', '-colorspace', 'rgb',
    #             '-trim', 'tikz.pdf', outfn], stdout=PIPE, stderr=PIPE)
    # stdout, stderr = p1.communicate()
    try:
        p = Popen(['pdftoppm', '-r', '120', 'tikz.pdf', 'tikz'],
                  stdout=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:   # No such file or directory
            raise
        self.builder.warn('pdftoppm command cannot be run')
        # BUG FIX: the exception was bound as ``e`` but ``err`` was passed to
        # warn(), raising NameError instead of reporting the failure; the
        # handler now binds and reports the same name.
        self.builder.warn(err)
        self.builder._tikz_warned = True
        chdir(curdir)
        return None
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        self.builder._tikz_warned = True
        raise TikzExtError('Error (tikz extension): pdftoppm exited with error:'
                           '\n[stderr]\n%s\n[stdout]\n%s' % (stderr, stdout))

    if self.builder.config.tikz_proc_suite == 'ImageMagick':
        convert_args = []
        if self.builder.config.tikz_transparent:
            convert_args = ['-fuzz', '2%', '-transparent', 'white']
        try:
            p1 = Popen(['convert', '-trim'] + convert_args +
                       ['tikz-1.ppm', outfn], stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:   # No such file or directory
                raise
            self.builder.warn('convert command cannot be run')
            # BUG FIX: same ``e``/``err`` mixup as above.
            self.builder.warn(err)
            self.builder._tikz_warned = True
            chdir(curdir)
            return None
        stdout, stderr = p1.communicate()
        if p1.returncode != 0:
            self.builder._tikz_warned = True
            chdir(curdir)
            raise TikzExtError('Error (tikz extension): convert exited with '
                               'error:\n[stderr]\n%s\n[stdout]\n%s'
                               % (stderr, stdout))
    elif self.builder.config.tikz_proc_suite == 'Netpbm':
        try:
            p1 = Popen(['pnmcrop', 'tikz-1.ppm'], stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:   # No such file or directory
                raise
            self.builder.warn('pnmcrop command cannot be run:')
            self.builder.warn(err)
            self.builder._tikz_warned = True
            chdir(curdir)
            return None
        pnm_args = []
        if self.builder.config.tikz_transparent:
            pnm_args = ['-transparent', 'white']
        try:
            p2 = Popen(['pnmtopng'] + pnm_args, stdin=p1.stdout,
                       stdout=PIPE, stderr=PIPE)
        except OSError as err:
            if err.errno != ENOENT:   # No such file or directory
                raise
            self.builder.warn('pnmtopng command cannot be run:')
            self.builder.warn(err)
            self.builder._tikz_warned = True
            chdir(curdir)
            return None
        pngdata, stderr2 = p2.communicate()
        dummy, stderr1 = p1.communicate()
        if p1.returncode != 0:
            self.builder._tikz_warned = True
            raise TikzExtError('Error (tikz extension): pnmcrop exited with '
                               'error:\n[stderr]\n%s' % (stderr1))
        if p2.returncode != 0:
            self.builder._tikz_warned = True
            raise TikzExtError('Error (tikz extension): pnmtopng exited with '
                               'error:\n[stderr]\n%s' % (stderr2))
        f = open(outfn, 'wb')
        f.write(pngdata)
        f.close()
    else:
        self.builder._tikz_warned = True
        chdir(curdir)
        raise TikzExtError('Error (tikz extension): Invalid configuration '
                           'value for tikz_proc_suite')

    chdir(curdir)
    return relfn
# NOTE(review): this line begins mid-method -- the leading "else: ... return
# stats" is the tail of a file-stat helper whose start lies outside this chunk.
# _getSHA1 streams the file at *pathString* through a SHA digest in 1024-byte
# blocks and returns the hex digest; open/read failures are logged and
# re-raised as a generic Exception (Python 2 "except Exception, e" syntax).
# The file handle is closed in the finally clause even on digest failure.
else: stats.type = monitors.FileType.Unknown return stats def _getSHA1(self, pathString): """docstring for _getSHA1""" try: file = open(pathString, 'rb') except Exception, e: self.log.error('Failed to open file ' + pathString + ' : ' + str(e)) raise Exception('Failed to open file ' + pathString + ' : ' + str(e)) digest = sha() try: try: block = file.read(1024) while block: digest.update(block) block = file.read(1024) except Exception, e: self.log.error('Failed to SHA1 digest file ' + pathString + ' : ' + str(e)) raise Exception(('Failed to SHA1 digest file ' + pathString + ' : ' + str(e))) finally: file.close() return digest.hexdigest()
def __init__(self, *args, **kwargs):
    """Initialize the auth context and give it a random hex identifier."""
    super(KeyAuthContext, self).__init__(*args, **kwargs)
    # Draw from the OS-backed CSPRNG, then hash the stringified float
    # down to a fixed-length hex token.
    seed = SystemRandom().random()
    self._id = sha(str(seed)).hexdigest()
# NOTE(review): Python 2 code (has_key, "except Exception, e").  __init__
# bdecodes the .torrent file, computes the info-hash (SHA of the bencoded
# "info" dict), resolves the torrent's text encoding ("encoding"/"codepage"
# keys, defaulting to UTF-8, preferring the *.utf-8 variants of name/path),
# then builds both a per-file list (self.__m_files) and a nested file tree
# (self.__m_files_tree, FileTree or FileTree2 depending on *filetree*),
# accumulating per-directory lengths and hex-encoding any per-file
# sha1/ed2k checksums.  Single-file torrents get a one-entry tree/list.
class TorrentInfo(object): """ Collects information about a torrent file. :param filename: The path to the torrent :type filename: string """ def __init__(self, filename, filetree=1): # Get the torrent data from the torrent file try: log.debug("Attempting to open %s.", filename) self.__m_filedata = open(filename, "rb").read() self.__m_metadata = bencode.bdecode(self.__m_filedata) except Exception, e: log.warning("Unable to open %s: %s", filename, e) raise e self.__m_info_hash = sha(bencode.bencode( self.__m_metadata["info"])).hexdigest() # Get encoding from torrent file if available self.encoding = None if "encoding" in self.__m_metadata: self.encoding = self.__m_metadata["encoding"] elif "codepage" in self.__m_metadata: self.encoding = str(self.__m_metadata["codepage"]) if not self.encoding: self.encoding = "UTF-8" # Check if 'name.utf-8' is in the torrent and if not try to decode the string # using the encoding found. if "name.utf-8" in self.__m_metadata["info"]: self.__m_name = utf8_encoded( self.__m_metadata["info"]["name.utf-8"]) else: self.__m_name = utf8_encoded(self.__m_metadata["info"]["name"], self.encoding) # Get list of files from torrent info paths = {} dirs = {} if self.__m_metadata["info"].has_key("files"): prefix = "" if len(self.__m_metadata["info"]["files"]) > 1: prefix = self.__m_name for index, f in enumerate(self.__m_metadata["info"]["files"]): if "path.utf-8" in f: path = os.path.join(prefix, *f["path.utf-8"]) else: path = utf8_encoded( os.path.join( prefix, utf8_encoded(os.path.join(*f["path"]), self.encoding)), self.encoding) f["index"] = index if "sha1" in f and len(f["sha1"]) == 20: f["sha1"] = f["sha1"].encode('hex') if "ed2k" in f and len(f["ed2k"]) == 16: f["ed2k"] = f["ed2k"].encode('hex') paths[path] = f dirname = os.path.dirname(path) while dirname: dirinfo = dirs.setdefault(dirname, {}) dirinfo["length"] = dirinfo.get("length", 0) + f["length"] dirname = os.path.dirname(dirname) if filetree == 2: def walk(path, item): if item["type"] == "dir": item.update(dirs[path]) else: item.update(paths[path]) item["download"] = True file_tree = FileTree2(paths.keys()) file_tree.walk(walk) else: def walk(path, item): if type(item) is dict: return item return [paths[path]["index"], paths[path]["length"], True] file_tree = FileTree(paths) file_tree.walk(walk) self.__m_files_tree = file_tree.get_tree() else: if filetree == 2: self.__m_files_tree = { "contents": { self.__m_name: { "type": "file", "index": 0, "length": self.__m_metadata["info"]["length"], "download": True } } } else: self.__m_files_tree = { self.__m_name: (0, self.__m_metadata["info"]["length"], True) } self.__m_files = [] if self.__m_metadata["info"].has_key("files"): prefix = "" if len(self.__m_metadata["info"]["files"]) > 1: prefix = self.__m_name for f in self.__m_metadata["info"]["files"]: if "path.utf-8" in f: path = os.path.join(prefix, *f["path.utf-8"]) else: path = utf8_encoded( os.path.join( prefix, utf8_encoded(os.path.join(*f["path"]), self.encoding)), self.encoding) self.__m_files.append({ 'path': path, 'size': f["length"], 'download': True }) else: self.__m_files.append({ "path": self.__m_name, "size": self.__m_metadata["info"]["length"], "download": True })
def verify(t):
    """Proof-of-work check for a hex-encoded 80-byte block header *t*.

    Double-SHA the raw header and compare the little-endian hash value
    against the compact target encoded in the header's "bits" field
    (3-byte little-endian mantissa at chars 144:150, exponent byte at
    chars 150:152, target = mantissa * 256**(exponent - 3)).
    """
    header = bytes.fromhex(t)
    digest = sha(sha(header).digest()).digest()
    hash_value = int.from_bytes(digest, 'little')
    mantissa = int.from_bytes(bytes.fromhex(t[144:150]), 'little')
    exponent = int(t[150:152], 16)
    target = mantissa * 256 ** (exponent - 3)
    return hash_value <= target
# NOTE(review): this snippet appears to be a redacted paste -- a string
# literal after getpass("Password: ") was masked out ("******") and fused
# with the PBKDF2 key-derivation arguments, so the line cannot parse or run
# as-is.  The apparent intent: prompt for a password, derive deterministic
# bytes via PBKDF2 through a counter-based custom randfunc, generate a
# 2048-bit RSA key with it, and print either the private or public key
# (argv[1] == "Private_key") plus its SHA-256 hex digest (Python 2 print
# syntax).  Restore the original source before attempting to use this.
from Crypto.Protocol.KDF import PBKDF2 from Crypto.PublicKey import RSA from getpass import getpass from hashlib import sha256 as sha import sys stdout = sys.stdout sys.stdout = sys.stderr keych = 0 if len(sys.argv) != 1 and sys.argv[1] == "Private_key": keych = 1 password = getpass("Password: "******"my_rand:%d" % my_rand.counter, dkLen=n, count=1) my_rand.counter = 0 print "Gerando chave..." key = RSA.generate(2048, randfunc=my_rand) if keych == 1: dkey = key.export_key() dkey_hash = sha(dkey).hexdigest() else: dkey = key.publickey().exportKey() dkey_hash = sha(dkey).hexdigest() print >> stdout, dkey print "SHA256: "+dkey_hash
# NOTE(review): Python 2 ("raise Error, ..." syntax).  Validates a downloaded
# file against the repository metadata for *url*: existence, exact size, then
# MD5, then SHA-256, falling back to SHA-1 only when no SHA-256 is published.
# Each digest is streamed in BLOCKSIZE chunks.  On success nothing is
# returned explicitly (None); on failure returns False, or (False, reason)
# when *withreason* is set.  The hashlib imports fall back to the legacy
# md5/sha modules for old interpreters.  Files are opened in text mode and
# never explicitly closed -- relies on CPython refcounting; TODO confirm
# whether binary mode is required on non-POSIX platforms.
def validate(self, url, localpath, withreason=False): try: if not os.path.isfile(localpath): raise Error, _("File not found") size = self.getSize(url) if size: lsize = os.path.getsize(localpath) if lsize != size: raise Error, _("Unexpected size (expected %d, got %d)") % \ (size, lsize) filemd5 = self.getMD5(url) if filemd5: try: from hashlib import md5 except ImportError: from md5 import md5 digest = md5() file = open(localpath) data = file.read(BLOCKSIZE) while data: digest.update(data) data = file.read(BLOCKSIZE) lfilemd5 = digest.hexdigest() if lfilemd5 != filemd5: raise Error, _("Invalid MD5 (expected %s, got %s)") % \ (filemd5, lfilemd5) filesha256 = self.getSHA256(url) if filesha256: try: from hashlib import sha256 except ImportError: from smart.util.sha256 import sha256 digest = sha256() file = open(localpath) data = file.read(BLOCKSIZE) while data: digest.update(data) data = file.read(BLOCKSIZE) lfilesha256 = digest.hexdigest() if lfilesha256 != filesha256: raise Error, _("Invalid SHA256 (expected %s, got %s)") % \ (filesha256, lfilesha256) else: filesha = self.getSHA(url) if filesha: try: from hashlib import sha1 as sha except ImportError: from sha import sha digest = sha() file = open(localpath) data = file.read(BLOCKSIZE) while data: digest.update(data) data = file.read(BLOCKSIZE) lfilesha = digest.hexdigest() if lfilesha != filesha: raise Error, _("Invalid SHA (expected %s, got %s)") % \ (filesha, lfilesha) except Error, reason: if withreason: return False, reason return False
def add_key(tracker):
    """Derive and cache a 6-character peer key for *tracker*.

    The key is built from the last six bytes of the SHA digest of
    basekeydata + tracker, each byte masked to 6 bits and mapped through
    the base64 alphabet; the result is stored in the module-level
    ``keys`` dict.
    """
    tail = sha(basekeydata + tracker).digest()[-6:]
    keys[tracker] = ''.join(mapbase64[ord(ch) & 0x3F] for ch in tail)
def on_dialog_response(response_id):
    # Closure over self/dialog/quit_gtkui: quit only when the user pressed
    # OK and the SHA digest of the entered password matches the stored
    # tray_password digest.
    if response_id != Gtk.ResponseType.OK:
        return
    entered = decode_bytes(dialog.get_password()).encode()
    if self.config['tray_password'] == sha(entered).hexdigest():
        quit_gtkui()
# NOTE(review): Python 2 (0L long literals).  Builds the "info" dict for a
# .torrent: for a directory, concatenates all contained files into a single
# SHA piece stream (pieces may span file boundaries, so the running digest
# carries across files); for a single file, hashes it piece by piece.
# *flag* is an Event-like abort switch checked inside the hash loops, and
# *progress* is called either with a completed fraction (progress_percent
# truthy) or with the byte count just hashed.  Returns the metainfo dict
# with 'pieces' as the concatenated 20-byte digests.  The statement order in
# the hashing loops (update digest, then advance counters, then maybe flush
# a piece) is load-bearing -- do not reorder.
def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1): file = abspath(file) if isdir(file): subs = subfiles(file) subs.sort() pieces = [] sh = sha() done = 0L fs = [] totalsize = 0.0 totalhashed = 0L for p, f in subs: totalsize += getsize(f) for p, f in subs: pos = 0L size = getsize(f) fs.append({'length': size, 'path': uniconvertl(p, encoding)}) h = open(f, 'rb') while pos < size: a = min(size - pos, piece_length - done) sh.update(h.read(a)) if flag.isSet(): return done += a pos += a totalhashed += a if done == piece_length: pieces.append(sh.digest()) done = 0 sh = sha() if progress_percent: progress(totalhashed / totalsize) else: progress(a) h.close() if done > 0: pieces.append(sh.digest()) return {'pieces': ''.join(pieces), 'piece length': piece_length, 'files': fs, 'name': uniconvert(split(file)[1], encoding) } else: size = getsize(file) pieces = [] p = 0L h = open(file, 'rb') while p < size: x = h.read(min(piece_length, size - p)) if flag.isSet(): return pieces.append(sha(x).digest()) p += piece_length if p > size: p = size if progress_percent: progress(float(p) / size) else: progress(min(piece_length, size - p)) h.close() return {'pieces': ''.join(pieces), 'piece length': piece_length, 'length': size, 'name': uniconvert(split(file)[1], encoding) }
def hash_sha1(password, salt='', n=12):
    """Hash *password* with SHA1 and *salt*, strengthened by *n* extra
    rounds, and return it in '$sha1$<rounds>$<salt>$<hash>' form.

    When no salt is supplied, a fresh one is derived from 32 bytes of
    OS randomness.
    """
    if not salt:
        salt = sha(urandom(32)).hexdigest()
    hashed = sha(salt + password).hexdigest()
    # Key stretching: re-hash the digest together with the salt n times.
    rounds = 0
    while rounds < n:
        hashed = sha(hashed + salt).hexdigest()
        rounds += 1
    return '$sha1$%i$%s$%s' % (n, salt, hashed)
def random_password():
    """Generate a random secure password.

    Returns the hex digest of 40 bytes of OS-level randomness.
    """
    entropy = urandom(40)
    return sha(entropy).hexdigest()
# NOTE(review): truncated fragment -- it starts inside a try block of a
# file-scanning routine whose def is outside this chunk, and it ends
# mid-expression (the blacklist format tuple is unclosed).  Visible logic:
# read the file, close it (reporting I/O errors as a formatted string),
# compute the SHA1 of the contents, short-circuit with a FILE-WHITELIST
# line when the digest is whitelisted, and start building FILE-HITS /
# FILE-RESULT output when it is blacklisted.  Python 2 ("except ..., e").
file_contents = f.read() if f is not None: logging.debug('Closing file: %s', file_printed) f.close() except (IOError, OSError), io_error: return 'I/O error({0}): {1}: File:{2}'.format( io_error.errno, io_error.strerror, file_printed ) logging.debug('Scanning file: %s', file_printed) start_time = time.time() score = 0 output_hits = '' output_ir = '' output_res = '' sha1_sum = sha(file_contents).hexdigest() logging.debug('sha sum: %s file: %s', sha1_sum, file_printed) if sha1_sum in sha1_whitelist: output_wl = 'FILE-WHITELIST::%s::SHA1_WL::%s' % (file_printed, sha1_sum) return output_wl if sha1_sum in sha1_blacklist: output_bl = 'FILE-HITS::%s::%s::SHA1_BL::%s\nFILE-RESULT::%s::%s::SHA1_BL::%s' % ( file_printed, datetime.datetime.fromtimestamp( os.lstat(file_name).st_ctime ).strftime('%Y-%m-%d %H:%M:%S'), sha1_sum, file_printed, datetime.datetime.fromtimestamp( os.lstat(file_name).st_ctime ).strftime('%Y-%m-%d %H:%M:%S'), sha1_sum
def on_dialog_response(response_id):
    # Closure over self/dialog/quit_gtkui (GTK2 variant): quit only when
    # OK was pressed and the entered password's SHA digest matches the
    # configured tray password digest.
    if response_id != gtk.RESPONSE_OK:
        return
    if self.config['tray_password'] == sha(dialog.get_password()).hexdigest():
        quit_gtkui()
# NOTE(review): Python 2 variant ("except OSError, err") of the tikz
# renderer, truncated -- the chunk ends right after the pdftoppm failure
# handler, so the post-processing and success return lie outside this view.
# Flow: hash the tikz source into a cached PNG name, bail out early on a
# cache hit or a previously recorded toolchain failure, write the LaTeX
# wrapper into a per-build tempdir, run pdflatex (restoring cwd in the
# finally clause), then start pdftoppm at 120 dpi with -png output.
def render_tikz(self,tikz,libs='',stringsubst=False): hashkey = tikz.encode('utf-8') fname = 'tikz-%s.png' % (sha(hashkey).hexdigest()) relfn = posixpath.join(self.builder.imgpath, fname) outfn = path.join(self.builder.outdir, '_images', fname) if path.isfile(outfn): return relfn if hasattr(self.builder, '_tikz_warned'): return None ensuredir(path.dirname(outfn)) curdir = getcwd() latex = DOC_HEAD % libs latex += self.builder.config.tikz_latex_preamble if stringsubst: tikz = tikz % {'wd': curdir} latex += DOC_BODY % tikz if isinstance(latex, unicode): latex = latex.encode('utf-8') if not hasattr(self.builder, '_tikz_tempdir'): tempdir = self.builder._tikz_tempdir = tempfile.mkdtemp() else: tempdir = self.builder._tikz_tempdir chdir(tempdir) tf = open('tikz.tex', 'w') tf.write(latex) tf.close() try: try: p = Popen(['pdflatex', '--interaction=nonstopmode', 'tikz.tex'], stdout=PIPE, stderr=PIPE) except OSError, err: if err.errno != ENOENT: # No such file or directory raise self.builder.warn('LaTeX command cannot be run') self.builder._tikz_warned = True return None finally: chdir(curdir) stdout, stderr = p.communicate() if p.returncode != 0: raise TikzExtError('Error (tikz extension): latex exited with error:\n' '[stderr]\n%s\n[stdout]\n%s' % (stderr, stdout)) chdir(tempdir) # the following does not work for pdf patterns # p1 = Popen(['convert', '-density', '120', '-colorspace', 'rgb', # '-trim', 'tikz.pdf', outfn], stdout=PIPE, stderr=PIPE) # stdout, stderr = p1.communicate() try: p = Popen(['pdftoppm', '-r', '120', '-png', 'tikz.pdf', 'tikz'], stdout=PIPE, stderr=PIPE) except OSError, e: if e.errno != ENOENT: # No such file or directory raise self.builder.warn('pdftoppm command cannot be run({0})'.format(e)) self.builder._tikz_warned = True chdir(curdir) return None
def render_ditaa(self, code, options, prefix='ditaa'):
    """Render ditaa code into a PNG output file.

    Returns (relative_filename, full_filename) of the generated PNG, or
    (None, None) when the ditaa command cannot be run.  Output names are
    keyed on a hash of the source, options and ditaa configuration, so an
    unchanged diagram is not re-rendered.

    Raises DitaaError when ditaa runs but exits non-zero.
    """
    hashkey = code.encode('utf-8') + str(options).encode('utf-8') + \
        str(self.builder.config.ditaa).encode('utf-8') + \
        str(self.builder.config.ditaa_args).encode('utf-8')
    infname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "ditaa")
    outfname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "png")

    imgpath = self.builder.imgpath if hasattr(self.builder, 'imgpath') else ''
    inrelfn = posixpath.join(imgpath, infname)
    infullfn = path.join(self.builder.outdir, '_images', infname)
    outrelfn = posixpath.join(imgpath, outfname)
    outfullfn = path.join(self.builder.outdir, '_images', outfname)

    # Cached from a previous run?
    if path.isfile(outfullfn):
        return outrelfn, outfullfn

    ensuredir(path.dirname(outfullfn))

    # ditaa expects UTF-8 by default
    if isinstance(code, str):
        code = code.encode('utf-8')

    ditaa_args = [self.builder.config.ditaa]
    ditaa_args.extend(self.builder.config.ditaa_args)
    ditaa_args.extend(options)
    ditaa_args.extend([infullfn])
    ditaa_args.extend([outfullfn])

    f = open(infullfn, 'wb')
    f.write(code)
    f.close()

    try:
        # BUG FIX: removed a leftover debug statement that warned the full
        # ditaa command line (self.builder.warn(ditaa_args)) on every render.
        p = Popen(ditaa_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError as err:
        if err.errno != ENOENT:   # No such file or directory
            raise
        self.builder.warn('ditaa command %r cannot be run (needed for ditaa '
                          'output), check the ditaa setting' %
                          self.builder.config.ditaa)
        self.builder._ditaa_warned_dot = True
        return None, None

    wentWrong = False
    try:
        # Ditaa may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except OSError as err:
        if err.errno != EPIPE:
            raise
        wentWrong = True
    except IOError as err:
        if err.errno != EINVAL:
            raise
        wentWrong = True
    if wentWrong:
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise DitaaError('ditaa exited with error:\n[stderr]\n%s\n'
                         '[stdout]\n%s' % (stderr, stdout))
    return outrelfn, outfullfn
# NOTE(review): sphinx pngmath pipeline.  Hashes the full LaTeX document
# (preamble + math body, optionally wrapped for preview_latex) into a cached
# PNG name; on a cache hit returns (relfn, depth-from-png-metadata).  A prior
# latex/dvipng failure short-circuits to (None, None).  Otherwise it writes
# math.tex into a per-build tempdir, runs latex there (chdir because old
# latex lacks --output-directory, cwd restored in the finally clause), then
# runs dvipng with -T tight -z9 plus configured args; with preview enabled,
# --depth is parsed from dvipng's stdout and stored back into the PNG so the
# baseline offset survives caching.  Tool-missing (ENOENT) produces a
# warning and (None, None); a non-zero exit raises MathExtError.
use_preview = self.builder.config.pngmath_use_preview latex = DOC_HEAD + self.builder.config.pngmath_latex_preamble latex += (use_preview and DOC_BODY_PREVIEW or DOC_BODY) % math shasum = "%s.png" % sha(latex.encode('utf-8')).hexdigest() relfn = posixpath.join(self.builder.imgpath, 'math', shasum) outfn = path.join(self.builder.outdir, '_images', 'math', shasum) if path.isfile(outfn): depth = read_png_depth(outfn) return relfn, depth # if latex or dvipng has failed once, don't bother to try again if hasattr(self.builder, '_mathpng_warned_latex') or \ hasattr(self.builder, '_mathpng_warned_dvipng'): return None, None # use only one tempdir per build -- the use of a directory is cleaner # than using temporary files, since we can clean up everything at once # just removing the whole directory (see cleanup_tempdir) if not hasattr(self.builder, '_mathpng_tempdir'): tempdir = self.builder._mathpng_tempdir = tempfile.mkdtemp() else: tempdir = self.builder._mathpng_tempdir tf = codecs.open(path.join(tempdir, 'math.tex'), 'w', 'utf-8') tf.write(latex) tf.close() # build latex command; old versions of latex don't have the # --output-directory option, so we have to manually chdir to the # temp dir to run it. 
ltx_args = [self.builder.config.pngmath_latex, '--interaction=nonstopmode'] # add custom args from the config file ltx_args.extend(self.builder.config.pngmath_latex_args) ltx_args.append('math.tex') curdir = getcwd() chdir(tempdir) try: try: p = Popen(ltx_args, stdout=PIPE, stderr=PIPE) except OSError as err: if err.errno != ENOENT: # No such file or directory raise self.builder.warn( 'LaTeX command %r cannot be run (needed for math ' 'display), check the pngmath_latex setting' % self.builder.config.pngmath_latex) self.builder._mathpng_warned_latex = True return None, None finally: chdir(curdir) stdout, stderr = p.communicate() if p.returncode != 0: raise MathExtError('latex exited with error', stderr, stdout) ensuredir(path.dirname(outfn)) # use some standard dvipng arguments dvipng_args = [self.builder.config.pngmath_dvipng] dvipng_args += ['-o', outfn, '-T', 'tight', '-z9'] # add custom ones from config value dvipng_args.extend(self.builder.config.pngmath_dvipng_args) if use_preview: dvipng_args.append('--depth') # last, the input file name dvipng_args.append(path.join(tempdir, 'math.dvi')) try: p = Popen(dvipng_args, stdout=PIPE, stderr=PIPE) except OSError as err: if err.errno != ENOENT: # No such file or directory raise self.builder.warn('dvipng command %r cannot be run (needed for math ' 'display), check the pngmath_dvipng setting' % self.builder.config.pngmath_dvipng) self.builder._mathpng_warned_dvipng = True return None, None stdout, stderr = p.communicate() if p.returncode != 0: raise MathExtError('dvipng exited with error', stderr, stdout) depth = None if use_preview: for line in stdout.splitlines(): m = depth_re.match(line) if m: depth = int(m.group(1)) write_png_depth(outfn, depth) break return relfn, depth
# NOTE(review): truncated fragment of a torrent-directory rescan loop (the
# enclosing def and the loop header are outside this chunk, and it ends
# mid-statement at "if i.has_key('length'):").  Visible logic, Python 2:
# skip and block duplicate torrent files (matching by the bdecoded info
# dict's SHA digest), otherwise bdecode the file, validate it with
# check_message, record its info-hash, and start assembling the per-torrent
# metadata dict (path, basename, info).  Do not edit in isolation.
new_file = new_files[p] v = new_file[0] if new_file[1] in new_parsed: # duplicate if p not in blocked or files[p][0] != v: errfunc(_("**warning** %s is a duplicate torrent for %s") % (p, new_parsed[new_file[1]]['path'])) new_blocked[p] = None continue if NOISY: errfunc('adding '+p) try: ff = open(p, 'rb') d = bdecode(ff.read()) check_message(d) h = sha(bencode(d['info'])).digest() new_file[1] = h if new_parsed.has_key(h): errfunc(_("**warning** %s is a duplicate torrent for %s") % (p, new_parsed[h]['path'])) new_blocked[p] = None continue a = {} a['path'] = p f = os.path.basename(p) a['file'] = f i = d['info'] l = 0 nf = 0 if i.has_key('length'):
# NOTE(review): PAS authentication plugin method.  Resolves login -> userid
# via the explicit mapping only (deliberately NOT falling back to treating
# the login as a userid -- see inline comment), fetches the stored password
# reference, and validates with AuthEncoding; as a legacy fallback it also
# accepts a bare SHA hex digest comparison (pre-AuthEncoding storage).
# Fires ValidPasswordEntered / InvalidPasswordEntered events, looking the
# user up via plone.api.  The bare "except:" clauses around api.user.get
# swallow all lookup errors and skip event notification -- intentional
# best-effort, but consider narrowing them.  Returns (userid, login) on
# success, None on failure.
def authenticateCredentials(self, credentials): """ See IAuthenticationPlugin. o We expect the credentials to be those returned by ILoginPasswordExtractionPlugin. """ login = credentials.get('login') password = credentials.get('password') if login is None or password is None: return None # Do we have a link between login and userid? Do NOT fall # back to using the login as userid when there is no match, as # that gives a high chance of seeming to log in successfully, # but in reality failing. userid = self._login_to_userid.get(login) if userid is None: # Someone may be logging in with a userid instead of a # login name and the two are not the same. We could try # turning those around, but really we should just fail. # # userid = login # login = self._userid_to_login.get(userid) # if login is None: # return None return None reference = self._user_passwords.get(userid) if reference is None: return None is_authenticated = False if AuthEncoding.is_encrypted(reference): if AuthEncoding.pw_validate(reference, password): is_authenticated = True if not is_authenticated: # Support previous naive behavior digested = sha(password).hexdigest() if reference == digested: is_authenticated = True if is_authenticated: try: user = api.user.get(username=login) except: return userid, login event = ValidPasswordEntered(user) notify(event) return userid, login else: try: user = api.user.get(username=login) except: return None event = InvalidPasswordEntered(user) notify(event) return None
# NOTE(review): PlanetLab NodeManager plugin, Python 2 (xmlrpclib,
# "except ..., e").  For every sliver with an 'interface' tag it either
# fetches a custom config file (filename+url pair, via curlwrapper) or
# synthesizes an ifcfg-<DEVICE> file from the tag's key/value pairs
# (skipping 'bridge' and 'vlan').  The new contents' SHA digest is compared
# against checksum(dest) so unchanged files are not rewritten; on change it
# writes the file and restarts the sliver's network service through lxcsu.
# The tag value is parsed with eval() -- it comes from the PLC API, but this
# is still an injection hazard if that channel is ever untrusted; consider
# ast.literal_eval.  Bare "except:" clauses log and continue per sliver.
def GetSlivers(data, config=None, plc=None): if 'slivers' not in data: logger.log_missing_data("interfaces.GetSlivers", 'slivers') return for sliver in data['slivers']: slicename = sliver['name'] for tag in sliver['attributes']: if tag['tagname'] == 'interface': interfaces = eval(tag['value']) if not isinstance(interfaces, (list, tuple)): # if interface is not a list, then make it into a singleton list interfaces = [interfaces] for mydict in interfaces: contents = "" # First look for filename/url combination for custom config files if 'filename' in mydict and 'url' in mydict: dest = "/vservers/%s/%s" % (slicename, mydict['filename']) url = mydict['url'] try: contents = curlwrapper.retrieve(url) except xmlrpclib.ProtocolError, e: logger.log( 'interfaces (%s): failed to retrieve %s' % (slicename, url)) continue else: # Otherwise generate /etc/sysconfig/network-scripts/ifcfg-<device> try: dest = "/vservers/%s/etc/sysconfig/network-scripts/ifcfg-%s" % ( slicename, mydict['DEVICE']) except: logger.log('interfaces (%s): no DEVICE specified' % slicename) continue for key, value in mydict.items(): if key in ['bridge', 'vlan']: continue contents += '%s="%s"\n' % (key, value) if sha(contents).digest() == checksum(dest): logger.log('interfaces (%s): no changes to %s' % (slicename, dest)) continue logger.log('interfaces (%s): installing file %s' % (slicename, dest)) try: os.makedirs(os.path.dirname(dest)) except OSError: pass try: f = open(dest, "w") f.write(contents) f.close() except: logger.log('interfaces (%s): error writing file %s' % (slicename, dest)) continue try: subprocess.check_call([ '/usr/sbin/lxcsu', slicename, '/sbin/service', 'network', 'restart' ]) except: logger.log( 'interfaces (%s): error restarting network service' % slicename)
def hash_SHA(byte_string):
    """Return the SHA digest of *byte_string*, hex-encoded as bytes."""
    digest = sha(byte_string).digest()
    return hexlify(digest)
# NOTE(review): Python 2 (print statements).  The package_id literal was
# masked by a redaction pass ('*****@*****.**') -- restore the real add-on
# id before use.  Builds nppanda3d.xpi: writes a templated install.rdf,
# walks the per-platform plugin directories (win32/osx go into plugins/,
# everything else under platform/<name>/plugins), zips the right plugin
# binary for each, then hashes the finished xpi and emits update.rdf with
# that hash.  mktemp() is race-prone; NamedTemporaryFile would be safer.
def makeXpiFile(): """ Creates a Firefox XPI file, based on the various platform version files. """ if not options.host_url: print "Cannot generate xpi file without --host-url." sys.exit(1) print "Generating xpi file" root = options.plugin_root if os.path.isdir(os.path.join(root, 'plugin')): root = os.path.join(root, 'plugin') xpi = zipfile.ZipFile('nppanda3d.xpi', 'w') package_id = '*****@*****.**' #TODO: maybe more customizable? tempFile = tempfile.mktemp('.txt', 'p3d_') rdf = open(tempFile, 'w') rdf.write(install_rdf % { 'package_id' : package_id, 'version' : options.version, 'host_url' : options.host_url, }) rdf.close() xpi.write(tempFile, 'install.rdf') os.unlink(tempFile) subdirs = os.listdir(root) for subdir in subdirs: platform = FirefoxPlatformMap.get(subdir, None) path = os.path.join(root, subdir) if platform and os.path.isdir(path): if subdir in ['win32', 'osx_i386']: pluginsXpiDir = 'plugins' else: # Create the XPI directory platform/<platform name>/plugins pluginsXpiDir = 'platform/%s/plugins' % (platform) # Copy the Firefox plugin into this directory. if subdir.startswith('win32'): pluginFilename = 'nppanda3d.dll' elif subdir.startswith('osx'): pluginFilename = 'nppanda3d.plugin' else: pluginFilename = 'nppanda3d.so' addZipTree(xpi, os.path.join(path, pluginFilename), pluginsXpiDir + '/' + pluginFilename) xpi.close() # Now that we've generated the xpi file, get its hash. data = open('nppanda3d.xpi', 'rb').read() xpi_hash = sha(data).hexdigest() # And now we can generate the update.rdf file. update = open('update.rdf', 'w') update.write(update_rdf % { 'package_id' : package_id, 'version' : options.version, 'host_url' : options.host_url, 'xpi_hash' : xpi_hash, }) update.close()
def random20():
    """Return the hex SHA digest of a stringified random float.

    NOTE(review): seeded from random.random(), which is not
    cryptographically secure -- fine for peer ids, not for secrets.
    """
    seed = '%s' % random.random()
    return sha(seed).hexdigest()
# NOTE(review): modern variant of the tikz renderer using project helpers
# (cleanup_tikzcode, system, changedir, which).  Output extension depends on
# tikz_proc_suite (png for ImageMagick/Netpbm/GhostScript, svg for pdf2svg);
# the shasum of the cleaned tikz source keys the cache, and per-hash .tex
# filenames make parallel builds (sphinx -j) safe.  Compiles with the
# configured latex_engine, then post-processes the PDF with the selected
# suite; an unknown suite value raises TikzExtError.  In the collapsed form
# below the leading '#' comment swallows the rest of the first line -- this
# chunk is kept byte-identical rather than re-flowed.
def render_tikz(self, node, libs='', stringsubst=False): # must use unique filenames for all tmpfiles to support sphinx -j tikz = cleanup_tikzcode(self, node) shasum = sha(tikz.encode('utf-8')).hexdigest() fname = 'tikz-%s.%s' % (shasum, OUT_EXTENSION[self.builder.config.tikz_proc_suite]) relfn = posixpath.join(self.builder.imgpath, fname) outfn = path.join(self.builder.outdir, '_images', fname) if path.isfile(outfn): return relfn if hasattr(self.builder, '_tikz_warned'): return None ensuredir(path.dirname(outfn)) latex = DOC_HEAD % libs latex += self.builder.config.tikz_latex_preamble latex += DOC_BODY % tikz latex = latex.encode('utf-8') with changedir(self.builder._tikz_tempdir): tf = open('tikz-%s.tex' % shasum, 'wb') tf.write(latex) tf.close() system([ self.builder.config.latex_engine, '--interaction=nonstopmode', 'tikz-%s.tex' % shasum ], self.builder) resolution = str(self.builder.config.tikz_resolution) if self.builder.config.tikz_proc_suite in ['ImageMagick', 'Netpbm']: system([ 'pdftoppm', '-r', resolution, 'tikz-%s.pdf' % shasum, 'tikz-%s' % shasum ], self.builder) ppmfilename = glob('tikz-%s*.ppm' % shasum)[0] if self.builder.config.tikz_proc_suite == "ImageMagick": if self.builder.config.tikz_transparent: convert_args = ['-fuzz', '2%', '-transparent', 'white'] else: convert_args = [] system([which('convert'), '-trim'] + convert_args + [ppmfilename, outfn], self.builder) elif self.builder.config.tikz_proc_suite == "Netpbm": if self.builder.config.tikz_transparent: pnm_args = ['-transparent', 'rgb:ff/ff/ff'] else: pnm_args = [] system(['pnmtopng'] + pnm_args + [ppmfilename], self.builder, outfile=outfn) elif self.builder.config.tikz_proc_suite == "GhostScript": ghostscript = which('ghostscript') or which('gs') or which( 'gswin64') if self.builder.config.tikz_transparent: device = "pngalpha" else: device = "png256" system([ ghostscript, '-dBATCH', '-dNOPAUSE', '-sDEVICE=%s' % device, '-sOutputFile=%s' % outfn, '-r' + resolution + 'x' + resolution, '-f', 
'tikz-%s.pdf' % shasum ], self.builder) elif self.builder.config.tikz_proc_suite == "pdf2svg": system(['pdf2svg', 'tikz-%s.pdf' % shasum, outfn], self.builder) else: self.builder._tikz_warned = True raise TikzExtError('Error (tikz extension): Invalid configuration ' 'value for tikz_proc_suite') return relfn