class pgctl(Normalized):
    """normalize pgctl's output"""
    rules = timestamp.rules + (
        (Regex(r'\(pid \d+\)'), '(pid {PID})'),
        (Regex(r'\bpid: \d+'), 'pid: {PID}'),
        (Regex(r' [\d.]+ seconds'), ' {TIME} seconds'),
        (
            Regex(
                r'(?m)^UID +PID +PPID +PGID +SID +C +STIME +TTY +STAT +TIME +CMD'
            ),
            '{PS-HEADER}',
        ),
        (
            Regex(r'(?m)^\S+ +\d+ +\d+ +\d+ +\d+ +\d+ +\S+ +\S+ +\S+ +\S+ +'),
            '{PS-STATS} ',
        ),
        # TODO-TEST: the slow-fuser case:
        (Regex(r' \(it took [\d.]+s to poll\)'), ''),
        (Regex(r'\B%s\b' % escape(prefix)), '${PREFIX}'),
        (Regex(r'/(pypy3?|python[23])(\.[0-9]+)? '), '/python '),
        (Regex(r' LOCK: \d+\n'), ' LOCK: ${LOCK}\n'),
        (Regex(r' loop: check_time [0-9.]+\n'), ' loop: check_time $TIME\n'),
    )
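# A minimal sketch (not part of pgctl) of how the (pattern, replacement) rule
# tuples above could be applied; the real Normalized base class may work
# differently, and `apply_rules` is a hypothetical helper name.
def apply_rules(text, rules):
    """Run every (compiled regex, replacement) pair over the text, in order."""
    for pattern, replacement in rules:
        text = pattern.sub(replacement, text)
    return text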
# I wish I didn't need this =/
# surely there's a better way -.-
# NOTE: `pip install TOP` causes an infinite copyfiles loop, under tox >.<
from venv_update import __file__ as venv_update_path, dotpy

# symlink so that we get coverage, where possible
venv_update_path = Path(dotpy(venv_update_path))
local_vu = Path(venv_update_path.basename)
local_vu.mksymlinkto(venv_update_path)


# coverage.py adds some helpful warnings to stderr, with no way to quiet them.
coverage_warnings_regex = Regex(
    r'^Coverage.py warning: (%s)\n' % '|'.join((
        r'Module .* was never imported\.',
        r'No data was collected\.',
        r'Module venv_update was previously imported, but not measured\.',
    )),
    flags=MULTILINE,
)


def strip_coverage_warnings(stderr):
    return coverage_warnings_regex.sub('', stderr)


# pip adds some helpful warnings to stderr, with no way to quiet them.
pip_warnings_regex = Regex(
    '|'.join((
        r"^ Url '[^']*/\.cache/pip-faster/wheelhouse' is ignored: it is neither a file nor a directory\.\n",
        r'^You are using pip version [0-9.]+, however version [0-9.]+ is available\.\n',
        r"^You should consider upgrading via the 'pip install --upgrade pip' command\.\n",
#! /usr/bin/env python
# $ ./foo.py
# BEFORE: A \\" \\\" Z
# AFTER : A \\ \\\" Z
#
# BEFORE: A \\\" \\" Z
# AFTER : A \\\" \\ Z
from re import compile as Regex


def remove_first_group(m):
    start = m.start(1) - m.start(0)
    end = m.end(1) - m.start(0)
    whole_match = m.group(0)
    return whole_match[:start] + whole_match[end:]


unescaped_doublequote = Regex(r'(?<!\\)(?:\\\\)*(")')

for test in (
        r'A \\" \\\" Z',
        r'A \\\" \\" Z',
):
    print('BEFORE:', test)
    print('AFTER :', unescaped_doublequote.sub(remove_first_group, test))
    print()
"""Run a python script that imports venv_update""" # symlink so that we get coverage, where possible venv_update_symlink_pwd() # write it to a file so we get more-reasonable stack traces testscript = Path('testscript.py') testscript.write(pyscript) return run('%s/bin/python' % venv, testscript.strpath) # coverage.py adds some helpful warnings to stderr, with no way to quiet them. coverage_warnings_regex = Regex( r'^Coverage.py warning: (%s)\n' % '|'.join(( r'Module .* was never imported\.', r'No data was collected\.', r'Module venv_update was previously imported, but not measured\.', )), flags=MULTILINE, ) def strip_coverage_warnings(stderr): return coverage_warnings_regex.sub('', stderr) def strip_pip_warnings(stderr): return stderr.replace( ''.join(( 'DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python. ', 'A future version of pip will drop support for Python 2.6\n', )),
        def destroy(self):
            self.text.unbind('<1>')
            super().destroy()

    class CodeMirrorView(ShellMirrorView):
        def __init__(self, *args, **kwargs):
            kwargs['line_numbers'] = True
            kwargs['font'] = 'EditorFont'
            super().__init__(*args, **kwargs)

except ImportError:
    # We're probably running unit tests outside of Thonny, so it's fine.
    pass

copyablePattern = Regex(r'#\s*COPYABLE.*?#\s*END\s*COPYABLE', DOTALL | IGNORECASE)
blurCharPattern = Regex(r'\w')
blurLinePattern = Regex(r'^(.+)#\s*BLUR(\s*)$', IGNORECASE | MULTILINE)

# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv #
# Logs go to Thonny/frontend.log in ~/Library (mac) or ~\AppData\Roaming (win)
# This file gets installed in ~\AppData\Roaming\Python\Python37\site-packages\thonnycontrib (win)
# or in /Applications/Thonny.app/Contents/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages
#
# To Install:
# 1a - Windows: Need to install git first - can get it from here: https://git-scm.com/download/win
# 1b - Mac: Prefix the below command with sudo. It will prompt for the password (which won't be shown) after. May have to install Xcode command line tools if prompted.
# 2 - Everyone: pip3 install git+https://github.com/TaylorSMarks/classroom_sync.git
#
# BUGS SOMETIMES SEEN:
class timestamp(Normalized):
    """normalize pgctl's output"""
    rules = (
        # 2015-10-16 17:05:56.635827500
        (Regex(r'(?m)^\d{4}(-\d\d){2} (\d\d:){2}\d\d\.\d{6,9} '), '{TIMESTAMP} '),
    )
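# Hypothetical usage sketch (not from pgctl's test suite): applying the single
# timestamp rule above by hand to a made-up log line.
_pattern, _replacement = timestamp.rules[0]
assert _pattern.sub(_replacement, '2015-10-16 17:05:56.635827500 [pgctl] poll\n') == '{TIMESTAMP} [pgctl] poll\n'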
class User(db.Model, UserMixin):
    # should be sufficient
    EMAIL_LENGTH = 128
    EMAIL_REGEX = Regex(r'([A-Za-z._0-9]+)@([A-Za-z._0-9]{2,}\.[a-z]{2,})')

    # maximum hash length: 512 b == to bytes => 64 B == base16 => 128 B
    # this will allow for quite nice changing between hash algorithms
    PASSWORD_LENGTH = 64 * 2

    # size (in bytes) of salt+password's hash
    HASH_SIZE = 32

    # used by SQLAlchemy if native enums is supported by database
    USER_TYPE_ENUM_NAME = "UserType"

    # allowed user types keys
    USER_TYPE_COMMON = "common"
    USER_TYPE_ADMIN = "admin"

    # allowed user values
    USER_TYPES = {
        USER_TYPE_COMMON: "COMMON",
        USER_TYPE_ADMIN: "ADMINISTRATOR"
    }

    TOKEN_LENGTH = 1024

    __tablename__ = "User"

    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(EMAIL_LENGTH), unique=True, nullable=False)
    salt = db.Column(db.String(PASSWORD_LENGTH), nullable=False)
    password = db.Column(db.String(PASSWORD_LENGTH), nullable=False)
    type = db.Column(db.Enum(*tuple(USER_TYPES.values()), name=USER_TYPE_ENUM_NAME),
                     default=USER_TYPES[USER_TYPE_COMMON])
    register_date = db.Column(db.DateTime)
    last_seen_date = db.Column(db.DateTime)
    # token = db.Column(db.String(TOKEN_LENGTH), default=None)
    favourites = db.relationship('Recipe', secondary=favour_recipes, lazy='dynamic')

    @staticmethod
    def gen_salt(length=HASH_SIZE):
        return urandom(length)

    @staticmethod
    def combine(salt, pwd):
        return salt + pwd

    @staticmethod
    def hash_pwd(bytes):
        return sha256(bytes).hexdigest()

    @staticmethod
    def is_valid_email(text):
        return User.EMAIL_REGEX.match(text) is not None

    def __init__(self, email, password, type=None, register_date=None, last_seen_date=None):
        # validate arguments
        if not User.is_valid_email(email):
            raise ValueError()

        # set fields values
        self.email = email
        self.salt = hexlify(User.gen_salt())
        self.password = User.hash_pwd(User.combine(self.salt, password.encode()))
        if type is not None and type in User.USER_TYPES:
            self.type = User.USER_TYPES[type]
        if register_date is None:
            self.register_date = datetime.now()
        if last_seen_date is None:
            self.last_seen_date = self.register_date

    def to_json(self):
        photo = Photo.query.filter(Photo.avatar_user_id == self.id).first()
        return {
            "id": self.id,
            "email": self.email,
            "register_date": self.register_date.isoformat(),
            "last_seen_date": self.last_seen_date.isoformat(),
            "recipes": self.recipes.count(),
            "comments": self.comments.count(),
            "average_rate": self.count_average_rate(),
            "avatar": photo.id if photo is not None else ""
        }

    def get_id(self):
        return self.id

    def count_average_rate(self):
        sum_rate = 0
        sum_count = 0
        for recipe in self.recipes:
            sum_rate += recipe.taste_comments * recipe.comments.count()
            sum_count += recipe.comments.count()
        if sum_count == 0:
            return 0
        else:
            return sum_rate / sum_count

    def __repr__(self):
        return '<User: %r, with password: %r and email: %r>' % (self.id, self.password, self.email)
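# Hypothetical sanity check for the validator above; the addresses are made up
# and this assumes the surrounding Flask-SQLAlchemy module imports cleanly.
# Note that EMAIL_REGEX is deliberately loose and is not a full RFC 5322 parser.
assert User.is_valid_email('alice@example.com')
assert not User.is_valid_email('not-an-email')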
# I wish I didn't need this =/
# surely there's a better way -.-
# NOTE: `pip install TOP` causes an infinite copyfiles loop, under tox >.<
from venv_update import __file__ as venv_update_path, dotpy

# symlink so that we get coverage, where possible
venv_update_path = Path(dotpy(venv_update_path))
local_vu = Path(venv_update_path.basename)
local_vu.mksymlinkto(venv_update_path)


# coverage.py adds some helpful warnings to stderr, with no way to quiet them.
coverage_warnings_regex = Regex(
    r'^Coverage.py warning: (%s)\n' % '|'.join((
        r'Module .* was never imported\.',
        r'No data was collected\.',
        r'Module venv_update was previously imported, but not measured\.',
    )),
    flags=MULTILINE,
)


def strip_coverage_warnings(stderr):
    return coverage_warnings_regex.sub('', stderr)


# pip adds some helpful warnings to stderr, with no way to quiet them.
pip_warnings_regex = Regex(
    '|'.join((
        (r'^DEPRECATION: Python 2\.6 is no longer supported by the Python core team, please upgrade your Python\. '
         r'A future version of pip will drop support for Python 2\.6\n'),
        r"^ Url '[^']*/\.cache/pip-faster/wheelhouse' is ignored: it is neither a file nor a directory\.\n",
import asyncio
import logging
from re import compile as Regex
from typing import Optional

from youtube_dl import YoutubeDL as YoutubeDLClient

from discodo.exceptions import NoSearchResults

log = logging.getLogger("discodo.extractor")

YOUTUBE_PLAYLIST_ID_REGEX = Regex(
    r"(?:http|https|)(?::\/\/|)(?:www.|)(?:music.|)(?:youtu\.be\/|youtube\.com(?:\/embed\/|\/v\/|\/watch\?v=|\/ytscreeningroom\?v=|\/feeds\/api\/videos\/|\/user\S*[^\w\-\s]|\S*[^\w\-\s]))([\w\-]{12,})[a-z0-9;:@#?&%=+\/\$_.-]*(?:&index=|)([0-9]*)?"
)


def _extract(query: str) -> Optional[dict]:
    option = {
        "format": "(bestaudio[ext=opus]/bestaudio/best)[protocol!=http_dash_segments]",
        "nocheckcertificate": True,
        "ignoreerrors": True,
        "no_warnings": True,
        "default_search": "auto",
        "source_address": "0.0.0.0",
        "logger": log,
        "skip_download": True,
        "writesubtitles": True,
    }

    YoutubePlaylistMatch = YOUTUBE_PLAYLIST_ID_REGEX.match(query)
    if YoutubePlaylistMatch and not YoutubePlaylistMatch.group(1).startswith(
def venv_update_script(pyscript, venv='virtualenv_run'):
    """Run a python script that imports venv_update"""
    # symlink so that we get coverage, where possible
    venv_update_symlink_pwd()

    # write it to a file so we get more-reasonable stack traces
    testscript = Path('testscript.py')
    testscript.write(pyscript)
    return run('%s/bin/python' % venv, testscript.strpath)


# coverage.py adds some helpful warnings to stderr, with no way to quiet them.
from re import compile as Regex, MULTILINE
coverage_warnings_regex = Regex(
    r'^Coverage.py warning: (Module .* was never imported\.|No data was collected\.)\n',
    flags=MULTILINE,
)


def strip_coverage_warnings(stderr):
    return coverage_warnings_regex.sub('', stderr)


def uncolor(text):
    # the colored_tty, uncolored_pipe tests cover this pretty well.
    from re import sub
    return sub('\033\\[[^A-z]*[A-z]', '', text)
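# Hypothetical quick check for uncolor() above (not part of the original test
# suite): ANSI SGR color sequences are stripped, the visible text is preserved.
assert uncolor('\033[1;32mok\033[0m') == 'ok'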
class ChatServerInterface(Service):
    ENCODING = 'utf-8'
    BUFFER_SIZE = 1024
    REGEX = Regex(r'^(?::(?P<prefix>\S+)\s+)?(?P<command>\w+)\s*(?P<arguments>[^:]*)\s*(?::(?P<trailing>.*))?$')
    PREFIX_REGEX = Regex(r'')

    outgoing_data = b''
    incoming_data = b''
    has_identified = False
    has_connected = False

    def __init__(self, host, nick, events):
        super().__init__(events, host, ['irc'])
        self.nick = nick
        self.events = events

    def respond(self, addr, command):
        prefix = command.get("prefix", None)
        trailing = command.get("trailing", None)
        arguments = command.get("arguments", [])
        command = command['command']
        self.enqueue_message(command, trailing=trailing, prefix=prefix, args=arguments)

    def write_data(self, sock):
        if len(self.outgoing_data) > 0:
            sock.send(self.outgoing_data)
            self.outgoing_data = b''

    def read_data(self, sock):
        if not self.has_connected:
            self.has_connected = True
            self.broadcast({
                'kind': "info",
                'meta': {
                    'event': 'connected'
                }
            })
        bs = self.BUFFER_SIZE
        data = sock.recv(bs)
        while len(data) >= bs:
            self.incoming_data = self.incoming_data + data
            data = sock.recv(bs)
        self.incoming_data = self.incoming_data + data
        last_line = self.incoming_data.find(b"\n", 1)
        while last_line >= 0:
            line = self.incoming_data[:last_line]
            try:
                line = line.decode(self.ENCODING).strip()
                print(line)
                self.process(line)
            except:
                print("Unable to process line:\n->", line)
            self.incoming_data = self.incoming_data[last_line + 1:]
            last_line = self.incoming_data.find(b"\n")
        if not self.has_identified:
            self.enqueue_message("NICK", args=[self.nick])
            self.enqueue_message("USER", args=[self.nick, "0", "*"], trailing=self.nick)

    def parse_prefix(self, prefix):
        excite = prefix.find("!")
        nick = prefix
        where = None
        user = None
        if excite >= 0:
            nick = prefix[:excite]
            where = prefix.find("@", excite)
            if where >= 0:
                user = prefix[excite + 1:where]
                where = prefix[where + 1:]
            else:
                user = prefix[excite + 1:]
                where = None
        else:
            where = None
            user = None
        return {
            'nick': nick,
            'user': user,
            'host': where
        }

    def enqueue_message(self, command, trailing=None, prefix=None, args=None):
        print("Sending message:\n->", command, *(args if args else []))
        msg = self.build_message(command, trailing=trailing, prefix=prefix, args=args)
        self.outgoing_data = self.outgoing_data + msg

    def process(self, line):
        m = self.REGEX.match(line)
        if m:
            prefix = m.group("prefix")
            r = {
                "kind": "irc",
                'prefix': self.parse_prefix(prefix) if prefix else None,
                'command': m.group('command'),
                'arguments': list(map(str.strip, m.group('arguments').split())),
                'trailing': m.group("trailing")
            }
            if r['command'] == 'PING':
                self.enqueue_message("PONG", trailing=r['trailing'])
            else:
                if r['command'] == '001':
                    self.broadcast({
                        "kind": 'info',
                        "meta": {
                            "event": "identified"
                        }
                    })
                    self.has_identified = True
                self.broadcast(r)

    def build_message(self, command, trailing=None, prefix=None, args=None):
        args = args or []
        prefix = prefix or ""
        trailing = ":" + trailing if trailing else ""
        return ("{} {} {} {}".format(prefix, command, " ".join(args), trailing).strip() + "\r\n").encode(self.ENCODING)
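# Hypothetical illustration (not part of the service) of how REGEX above splits
# a raw IRC line; the nick, host and channel are made up, and this assumes the
# module's Service base class is importable so the class body evaluates.
_m = ChatServerInterface.REGEX.match(':alice!al@example.org PRIVMSG #room :hello there')
assert _m.group('prefix') == 'alice!al@example.org'
assert _m.group('command') == 'PRIVMSG'
assert _m.group('trailing') == 'hello there'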
from viewers.common import *
from re import compile as Regex
from collections import namedtuple
from math import log, exp, sqrt
from datetime import datetime
from html import escape

from viewers.default import view  # also exported!

REQUEST_SEEN_PATTERN = Regex(r'\[(.*?)\]')

CompilationEvent = namedtuple('CompilationEvent', ['started', 'duration', 'request', 'recomp'])


def pretty_duration(d):
    digits = str(int(d))
    result = 'µs'
    while digits:
        result = digits[-3:] + ' ' + result
        digits = digits[:-3]
    return result


def html_aggregate(total_duration, duration_mean, duration_variance, duration_geometric_mean, events):
    yield 'Valid entries: <b>%s</b><br>' % len(events)
    yield 'Total duration: <b>%s</b><br>' % pretty_duration(total_duration)
    yield 'Duration mean: <b>%s</b> (σ = <b>%s</b>)<br>' % (pretty_duration(duration_mean), pretty_duration(sqrt(duration_variance)))
    yield 'Duration variance (σ²): <b>%s²</b><br>' % pretty_duration(duration_variance)
    yield '<del>Duration geometric mean</del>: <b>%s</b><br>' % pretty_duration(duration_geometric_mean)
    yield '<table class="mono">'