def deserialize(self, l):
    """Restore this identity from the serialized triple *l*.

    ``l`` is a ``(seed, pub, sig)`` sequence of encoded fields.  After the
    fields are decoded, ``owner`` and ``id`` are recomputed as base32-encoded
    SHA-1 fingerprints of the public key and of the full serialization.
    """
    encoded_seed, encoded_pub, encoded_sig = l
    self.seed = decode(encoded_seed)
    self.pub = fixPub(rsa.key.PublicKey.load_pkcs1(decode(encoded_pub), 'DER'))
    self.sig = decode(encoded_sig)
    # owner: fingerprint of the public key alone
    self.owner = base64.b32encode(hashlib.sha1(self.pub.save_pkcs1('DER')).digest())
    # id: fingerprint of the complete serialized record (must come after the
    # fields above are set, since serialize() reads them)
    self.id = base64.b32encode(hashlib.sha1(self.serialize()).digest())
def readPlayers(url):
    """
    Reads a player list from the given URL.

    @type url: string
    @param url: The URL that holds the player list information.
    @rtype: list of strings
    @return: List of players in a special format that can be parsed into a player object.
    """
    # Load the player list from URL into a string
    txt = util.getUrlAsString(url)
    if txt is None:  # FIX: "if not txt is None" -> guard clause with "is None"
        print("readPlayers: Couldn't open {}, exiting.".format(url))
        # Maybe something better to do than exit. Consider in future version.
        sys.exit(1)
    players = []
    for line in util.lineIter(txt):
        # Decode the player list from known format of tennischannel.com files:
        # "<rank>\t<lastName>, <firstName> (<country>..."
        line = util.decode(line)
        strings = line.split("\t")
        rank = int(strings[0])
        strings = strings[1].split(",")
        lastName = strings[0]
        strings = strings[1].split("(")
        firstName = strings[0].strip()
        country = strings[1][0:3]
        # Creating list of easily parsed strings representative of player objects.
        # These strings are "pretty" enough to use in dropdown list.
        players.append('{}. {}, {} ({})'.format(rank, lastName, firstName, country))
    return players
def load(self, keepUndo=False):
    """Load the current url into the document.

    Returns True on success, False when reading the file failed, and None
    when the current url is empty or non-local (only local files are
    supported).  With keepUndo=True the load is inserted through a
    QTextCursor so it can be undone with Ctrl-Z.
    """
    fileName = self.url().toLocalFile()
    if not fileName:
        return  # None: empty or non-local url
    try:
        with open(fileName) as f:
            data = f.read()
    except (IOError, OSError):
        return False  # errors are caught in MainWindow.openUrl()
    text = util.decode(data)
    if keepUndo:
        cursor = QTextCursor(self)
        cursor.select(QTextCursor.Document)
        cursor.insertText(text)
    else:
        self.setPlainText(text)
    self.setModified(False)
    self.loaded()
    app.documentLoaded(self)
    return True
def math_clean(form):
    """
    Cleans a form, validating answer to math question in the process.

    The given ``form`` must be an instance of either ``MathCaptchaModelForm``
    or ``MathCaptchaForm``.  Answer keys are communicated in the
    ``math_captcha_question`` field which is evaluated to give the correct
    answer after being validated against the ``SECRET_KEY``.
    """
    try:
        value = form.cleaned_data['math_captcha_field']
        test_secret, question = decode(
            form.cleaned_data['math_captcha_question'])
        assert len(test_secret) == 40 and question
    except (TypeError, AssertionError):
        # problem decoding, junky data
        form._errors['math_captcha_field'] = form.error_class(["Invalid token"])
        del form.cleaned_data['math_captcha_field']
        # BUG FIX: must stop here -- 'question' (and possibly 'value') is
        # unbound, so falling through raised NameError instead of reporting
        # the form error.
        return
    except KeyError:
        # fields absent (failed earlier validation); nothing to check
        return
    if encode(question) != form.cleaned_data['math_captcha_question']:
        # security problem, hack attempt
        form._errors['math_captcha_field'] = form.error_class(["Invalid token"])
        del form.cleaned_data['math_captcha_field']
        # BUG FIX: return so a tampered question never reaches eval() below,
        # and so the field is not deleted a second time (KeyError).
        return
    # NOTE(security): eval() only runs on questions whose signature
    # round-trips through encode() above; do not relax that check.
    if eval(question) != value:
        form._errors['math_captcha_field'] = form.error_class(["Wrong answer, try again"])
        del form.cleaned_data['math_captcha_field']
def handle_change(self, thread_name, check_id, check_name, lock_uid, status, check_result):
    """Persist a confirmed status flip and alert the check's contacts.

    Called once a check has accumulated enough confirmations.  The UPDATE
    only succeeds if we still hold ``lock_uid``; alerts and the event row
    are written only in that case, so concurrent workers cannot double-alert.
    """
    if status == 'offline':
        updown = 'down'
    elif status == 'online':
        updown = 'up'
    else:
        # BUG FIX: 'updown' was unbound for any other status and crashed
        # later with a confusing NameError; fail fast with a clear message.
        raise ValueError("handle_change: unexpected status %r" % (status,))
    safe_print("[%s] ... confirmed, target is %s state now", (thread_name, status))
    # NOTE(review): this variant issues the UPDATE through Check.objects.raw
    # while reads go through database.query -- confirm both use the same
    # connection/transaction.
    update_result = Check.objects.raw("UPDATE checks SET status = %s, confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (status, check_id, lock_uid))
    if update_result.rowcount == 1:
        # we still had the lock at the point where status was toggled
        # then, send the alert
        alert_result = database.query("SELECT contacts.id, contacts.type, contacts.data FROM contacts, alerts WHERE contacts.id = alerts.contact_id AND alerts.check_id = %s AND alerts.type IN ('both', %s)", (check_id, updown))
        for alert_row in alert_result.fetchall():
            safe_print("[%s] ... alerting contact %d", (thread_name, alert_row['id']))
            alert_func = getattr(alerts, alert_row['type'], None)
            if not alert_func:
                util.die("Invalid alert handler [%s]!" % (alert_row['type']))
            # build context passed to the alert handler
            context = {}
            context['check_id'] = check_id
            context['check_name'] = check_name
            context['contact_id'] = alert_row['id']
            context['title'] = "Check %s: %s" % (status, check_name)
            context['status'] = status
            context['updown'] = updown
            context['message'] = check_result['message']
            alert_func(util.decode(alert_row['data']), context)
        # also add an event
        database.query("INSERT INTO check_events (check_id, type) VALUES (%s, %s)", (check_id, updown))
def slotButtonShowDiff(self):
    """Called when the user clicks Show Difference."""
    documents = self.selectedDocuments() or self.allDocuments()
    if not documents:
        return
    doc = documents[0]
    if documentwatcher.DocumentWatcher.instance(doc).isdeleted():
        return
    filename = doc.url().toLocalFile()
    try:
        with open(filename) as f:
            disk_text = util.decode(f.read())
    except (IOError, OSError):
        return
    current_text = doc.toPlainText()
    # render an HTML side-by-side diff of editor contents vs. on-disk file
    html = htmldiff.htmldiff(
        current_text, disk_text,
        _("Current Document"), _("Document on Disk"),
        numlines=5)
    dlg = widgets.dialog.Dialog(self, buttons=('close',))
    view = QTextBrowser(lineWrapMode=QTextBrowser.NoWrap)
    view.setHtml(html)
    dlg.setMainWidget(view)
    dlg.setWindowTitle(app.caption("Differences"))
    dlg.setMessage(_(
        "Document: {url}\n"
        "Difference between the current document and the file on disk:").format(
        url=filename))
    dlg.setWindowModality(Qt.NonModal)
    dlg.setAttribute(Qt.WA_QuitOnClose, False)
    dlg.setAttribute(Qt.WA_DeleteOnClose)
    qutil.saveDialogSize(dlg, "externalchanges/diff/dialog/size", QSize(600, 300))
    dlg.show()
def version(filename):
    """Return the LilyPond version set in the file as a tuple of ints.

    Lookup order: real LilyPond syntax first, then the 'version' document
    variable, and finally -- for non-LilyPond documents -- a bare \\version
    command string anywhere in the text (possibly inside a comment).
    Returns None when no version can be found.
    """
    def mkver(parts):
        return tuple(int(p) for p in parts)

    with open(filename) as f:
        text = util.decode(f.read())
    mode = textmode(text)
    tokens_ = list(ly.lex.state(mode).tokens(text))
    found = ly.parse.version(tokens_)
    if found:
        return mkver(re.findall(r"\d+", found))
    # look at document variables
    found = variables.variables(text).get("version")
    if found:
        return mkver(re.findall(r"\d+", found))
    # parse whole document for a \version string (e.g. in comments)
    if mode != "lilypond":
        m = re.search(r'\\version\s*"(\d+\.\d+(\.\d+)*)"', text)
        if m:
            return mkver(m.group(1).split('.'))
def _serve_players(self):
    """Poll each connected player socket once and dispatch received events.

    Sockets that raise (e.g. non-blocking recv with no data) are skipped.
    Only the first event of the decoded batch selects the handler.
    """
    for sock, player_id in self.connected_sockets:  # renamed: avoid shadowing the socket module
        try:
            data = sock.recv(MAX_DATA_LEN)
        except error:
            continue
        # FIX: decode once instead of twice per iteration
        decoded = decode(data)
        events = get_event_list(decoded)
        # print("received ", events)
        if decoded != '':
            if events[0] == "start":
                self._handle_start_event(sock, player_id)
            elif events[0] == "action":
                self._handle_player_action_events(
                    sock, player_id, events)
def post(self):
    """Handle user-dashboard POSTs: start/stop a working session or save a report.

    Requires ROLE_USER; raises HTTPError(401) otherwise.  Always redirects
    back to the 'user' page when done.
    """
    if "ROLE_USER" not in self.current_user['roles']:
        raise tornado.web.HTTPError(401)
    action = self.get_argument('action')
    user_id = self.current_user['_id']
    time = datetime.datetime.now()
    working_model = models.Workings(self.db)
    report_model = models.Reports(self.db)
    if action == 'working':
        if self.get_argument('do') == 'stop':
            wid = util.decode(str(self.get_argument('wid')), 'url')
            res = yield working_model.end(wid, time)
            # TODO: show a message?
            self.redirect(self.reverse_url('user'))
            return
        # BUG FIX: the original dict literal spelled the 'end' key twice
        # ('end': {'$exists': True} then 'end': None); Python keeps only
        # the last value, so the '$exists' clause was always dead code.
        # The effective query (open session for this user) is preserved.
        query = {'user_id': user_id, 'end': None}
        started = yield working_model.get(query)
        if started:
            wid = util.decode(str(self.get_argument('wid')), 'url')
            res = yield working_model.end(wid, time)
            # TODO: show a message?
            self.redirect(self.reverse_url('user'))
            return
        # NOTE(review): start() is not yielded, unlike end()/get();
        # confirm whether it is a coroutine that should be awaited.
        res = working_model.start(time, user_id)
        # TODO: show a message?
        self.redirect(self.reverse_url('user'))
        return
    elif action == 'report':
        report = self.get_argument('report')
        if len(report) < 10:
            self.notifications.set('user_messages', ['Length Of Report Is Very Low'])
            self.redirect(self.reverse_url('user'))
            return
        res = yield report_model.create(report, user_id, time)
        if res:
            self.notifications.set('user_messages', ['Report Is Saved!'])
        else:
            self.notifications.set('user_messages', ['Report Is not Saved!'])
        self.redirect(self.reverse_url('user'))
        return
def get_persist(pfx, key):
    """Load a cached 'persist' object from memcached.

    Looks up ``<pfx>_persist_<key>`` and decodes it with util.decode.
    Returns None when the entry is absent or any error occurs (errors are
    logged, never raised).
    """
    try:
        client = memcache.Client(etc.memcached_addr, debug=0)
        raw = client.get(pfx + '_persist_' + str(key))
        if not raw:
            return None
        return util.decode(raw)
    except Exception as exc:
        log.exp(exc)
        return None
def get_node(pfx, key):
    """Load a cached node object from memcached.

    Looks up ``<pfx>_node_<key>``; returns the decoded object, or None when
    missing or on any (logged) error.
    """
    try:
        client = memcache.Client(etc.memcached_addr, debug=0)
        raw = client.get(pfx + '_node_' + str(key))
        if not raw:
            return None
        return util.decode(raw)
    except Exception as exc:
        log.exp(exc)
        return None
def get_item(pfx, key):
    """Load a cached item from memcached.

    Looks up ``<pfx>_item_<key>``; returns the decoded object, or None when
    missing or on any (logged) error.
    """
    try:
        client = memcache.Client(etc.memcached_addr, debug=0)
        raw = client.get(pfx + '_item_' + str(key))
        if not raw:
            return None
        return util.decode(raw)
    except Exception as exc:
        log.exp(exc)
        return None
def get_ss(ss_id):
    """Load a cached session/snapshot object from memcached.

    Looks up ``ss_<ss_id>`` (ss_id must already be a string); returns the
    decoded object, or None when missing or on any (logged) error.
    """
    try:
        client = memcache.Client(etc.memcached_addr, debug=0)
        raw = client.get('ss_' + ss_id)
        if not raw:
            return None
        return util.decode(raw)
    except Exception as exc:
        log.exp(exc)
        return None
def get_item(pfx, key):
    """Load a cached item from redis.

    Looks up ``<pfx>_item_<key>`` through the shared connection pool;
    returns the decoded object, or None when missing or on any (logged)
    error.
    """
    try:
        conn = redis.Redis(connection_pool=pool)
        raw = conn.get(pfx + '_item_' + str(key))
        if not raw:
            return None
        return util.decode(raw)
    except Exception as exc:
        log.exp(exc)
        return None
def parse(self, reader: BGLReader, reset: bool = True):
    """Parse a Babylon glossary stream and dispatch its records.

    Walks every record in *reader*: term records (TERM_A / TERM_1) are
    unpacked, decoded with the glossary's source charset, and passed to
    ``self.handle_term``; resource records go to ``self.handle_res``.
    TERM_B records are not supported and raise.  When *reset* is true the
    reader is rewound before parsing.  Calls ``handle_parse_complete`` at
    the end and closes the reader.
    """
    if reset:
        reader.reset()
    self.reader = reader
    # properties may be stored at the end of the file, so read them in a
    # first pass, then rewind for the record loop
    self._read_properties(reader)
    # props at end sometimes
    reader.reset()
    charset_s = self.properties[gls.P_S_CHARSET]
    # NOTE(review): charset_t is read but never used below -- definitions
    # are decoded with the *source* charset; confirm that is intended.
    charset_t = self.properties[gls.P_T_CHARSET]
    while not reader.eof():
        rec = reader.next_rec()  # rec is a (type, payload) pair
        if rec[0] == gls.TERM_A or rec[0] == gls.TERM_1:
            (title_r, definition_r, alternatives_r, properties_r) = unpack_term(rec[1])
            title = util.decode(bytes(title_r), charset_s)
            definition = util.decode(bytes(definition_r), charset_s)
            alternatives = []
            for alt in alternatives_r:
                alternatives.append(util.decode(bytes(alt), charset_s))
            properties = self._parse_term_properties(properties_r)
            self.handle_term(title, definition, alternatives, properties)
        elif rec[0] == gls.RESOURCE:
            # resource names are raw bytes; latin1 maps them 1:1 to text
            (name_r, data) = unpack_res(rec[1])
            self.handle_res(bytes(name_r).decode('latin1'), bytes(data))
        elif rec[0] == gls.TERM_B:
            raise Exception("TERM_B not implemented")
    self.reader.close()
    self.handle_parse_complete()
    return
def _cached(filename):
    """Return the _CachedDocument for *filename*, creating and caching it on first use."""
    filename = os.path.realpath(filename)
    try:
        return _document_cache[filename]
    except KeyError:
        pass
    with open(filename, 'rb') as f:
        text = util.decode(f.read())
    cached = _document_cache[filename] = _CachedDocument()
    cached.variables = doc_vars = variables.variables(text)
    cached.document = ly.document.Document(text, doc_vars.get("mode"))
    cached.filename = cached.document.filename = filename
    return cached
def deserialize(self, l):
    """Restore transaction fields from the serialized list *l*.

    Layout: ``[sig, pub, time, cmd, coin, (optional args)]``.  For the
    'create' command the optional sixth element is stored verbatim in
    ``args``; for any other command it is loaded as a public key.
    """
    self.sig = decode(l[0])
    self.pub = l[1]
    self.pub = loadPublic(self.pub)
    self.time = l[2]
    self.cmd = l[3]
    self.coin = Coin()
    self.coin.deserialize(l[4])
    if len(l) == 6:
        self.args = l[5] if self.cmd == 'create' else loadPublic(l[5])
def get_item_dict(pfx, key_arr):
    """Batch-fetch items from memcached.

    Fetches ``<pfx>_item_<k>`` for every k in *key_arr* with one
    ``get_multi`` round trip and returns ``{numeric_key: decoded_item}``.
    Returns {} for an empty key list or on any (logged) error.

    NOTE(review): uses ``long()``, so this module targets Python 2.
    """
    item_dict = {}
    if len(key_arr) <= 0:
        return item_dict
    try:
        client = memcache.Client(etc.memcached_addr, debug=0)
        prefix = pfx + '_item_'
        raw_datas = client.get_multi([prefix + str(k) for k in key_arr])
        for cache_key, raw in raw_datas.items():
            # strip the cache-key prefix to recover the numeric item key
            item_dict[long(cache_key.replace(prefix, ''))] = util.decode(raw)
        return item_dict
    except Exception as exc:
        log.exp(exc)
        return {}
def get_item_dict(pfx, key_arr):
    """Fetch items from redis one key at a time.

    Looks up ``<pfx>_item_<k>`` for every k in *key_arr* and returns
    ``{key: decoded_item_or_None}``.  A failing key is logged and simply
    omitted; the remaining keys are still fetched.
    """
    item_dict = {}
    if len(key_arr) <= 0:
        return item_dict
    conn = redis.Redis(connection_pool=pool)
    for key in key_arr:
        try:
            raw = conn.get(pfx + '_item_' + str(key))
            item_dict[key] = util.decode(raw) if raw else None
        except Exception as exc:
            log.exp(exc)
    return item_dict
def worker(self):
    """Worker-thread main loop.

    Pulls (check_id, check_name, check_type, check_data, status,
    max_confirmations, confirmations, lock_uid) tuples off ``self.q``,
    runs the check, and either bumps the confirmation counter or hands a
    confirmed state flip to ``handle_change``.  Every DB update is guarded
    by the row lock (`lock` = lock_uid) acquired by the scheduler.
    """
    thread_name = threading.currentThread().getName()
    while True:
        check_id, check_name, check_type, check_data, status, max_confirmations, confirmations, lock_uid = self.q.get()
        safe_print("[%s] processing check %d: calling checks.%s", (thread_name, check_id, check_type))
        check_result = checks.run_check(check_type, util.decode(check_data), check_id)
        safe_print("[%s] check %d result: %s", (thread_name, check_id, str(check_result)))
        if not type(check_result) is dict or 'status' not in check_result:
            util.die("[%s] bad check handler [%s]: returned non-dict or missing status" % (thread_name, check_type))
        elif 'message' not in check_result:
            # supply a default message when the handler gave none
            if check_result['status'] == 'fail':
                check_result['message'] = "Check offline: %s" % (check_name)
            else:
                check_result['message'] = "Check online: %s" % (check_name)
        if check_result['status'] == 'fail':
            safe_print("[%s] ... got failure!", (thread_name,))
            if status == 'online':
                with recent_failures_lock:
                    recent_failures.add((check_id, util.time()))
                if confirmations + 1 >= max_confirmations:
                    # target has failed
                    self.handle_change(thread_name, check_id, check_name, lock_uid, 'offline', check_result)
                else:
                    # increase confirmations
                    database.query("UPDATE checks SET confirmations = confirmations + 1, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (check_id, lock_uid))
            else:
                # already offline: reset confirmations and release the lock
                database.query("UPDATE checks SET confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (check_id, lock_uid))
        elif check_result['status'] == 'success':
            safe_print("[%s] ... got success", (thread_name,))
            if status == 'offline':
                if confirmations + 1 >= max_confirmations:
                    # target has come back online
                    self.handle_change(thread_name, check_id, check_name, lock_uid, 'online', check_result)
                else:
                    # increase confirmations
                    database.query("UPDATE checks SET confirmations = confirmations + 1, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (check_id, lock_uid))
            else:
                database.query("UPDATE checks SET confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (check_id, lock_uid))
        else:
            # BUG FIX: the original applied % to util.die()'s return value
            # (util.die("...") % (...)), so the message was never formatted
            # -- format the string *before* passing it to die().
            util.die("Check handler [%s] returned invalid status code [%s]!" % (check_type, check_result['status']))
def insertFromFile(self):
    """Let the user pick a file and insert its decoded text at the cursor.

    The file dialog starts in the current document's directory (or the
    application base dir).  Read errors are reported in a warning box.
    """
    ext = os.path.splitext(self.currentDocument().url().path())[1]
    filetypes = app.filetypes(ext)
    caption = app.caption(_("dialog title", "Insert From File"))
    directory = os.path.dirname(self.currentDocument().url().toLocalFile()) or app.basedir()
    filename = QFileDialog.getOpenFileName(self, caption, directory, filetypes)
    if filename:
        try:
            # FIX: context manager -- the original open(filename).read()
            # leaked the file handle
            with open(filename) as f:
                data = f.read()
        except (IOError, OSError) as err:
            QMessageBox.warning(self, app.caption(_("Error")),
                _("Can't read from source:\n\n{url}\n\n{error}").format(
                    url=filename, error=err.strerror))
        else:
            text = util.decode(data)
            self.currentView().textCursor().insertText(text)
def _play(self, song, seek=None):
    """Switch playback to *song* under the condition lock, optionally seeking.

    Any previously playing song has its temp file cleared and its mp3source
    released; waiters on ``self.condition`` are notified when done.
    """
    with self.condition:
        logger.info(u'播放歌曲 %s (%d)', util.decode(song.oneline()), seek or 0)
        if self.song and self.song != song:
            # a different song was playing: release its resources first
            self._clearTmpfile()
            self.song.mp3source = None
        self.song = song
        self.songs = []
        player = self.player
        player.pause()
        player.next()
        player.queue(song.mp3source)
        if seek:
            player.seek(seek)
        player.play()
        self.condition.notifyAll()
def readCountries(self):
    """Read in IOC country codes (key) from URL, and set corresponding
    country and flag name (values)."""
    # Read in the country information as a string from the URL
    txt = util.getUrlAsString(countryUrl)
    if txt is None:  # FIX: "if not txt is None" -> guard clause with "is None"
        # Error in reading string from URL
        # Is there something better to do than exit? Later version.
        print("readCountries: Couldn't open {}, exiting.".format(countryUrl))
        sys.exit(1)
    self.countries = {}
    for line in util.lineIter(txt):
        # Parse each line, key = IOC code, value = tuple of country name,
        # flag file name prefix.
        line = util.decode(line)  # Handle unicode
        strings = line.split(",")
        key = strings[0]
        val = (strings[1], strings[2].strip())
        self.countries[key] = val
def load_data(cls, url, encoding=None):
    """Class method to load document contents from an url.

    Lets callers read a document's text without instantiating one when the
    load might fail.  Only local files are supported; a non-local url
    raises IOError.  Returns the decoded contents as a unicode string.
    """
    filename = url.toLocalFile()
    # currently, we do not support non-local files
    if not filename:
        raise IOError("not a local file")
    with open(filename, 'rb') as f:
        raw = f.read()
    return util.decode(raw, encoding)
def insertFromFile(self):
    """Ask the user for a file and insert its decoded contents at the text cursor."""
    ext = os.path.splitext(self.currentDocument().url().path())[1]
    filetypes = app.filetypes(ext)
    caption = app.caption(_("dialog title", "Insert From File"))
    directory = os.path.dirname(self.currentDocument().url().toLocalFile()) or app.basedir()
    filename = QFileDialog.getOpenFileName(self, caption, directory, filetypes)
    if not filename:
        return
    try:
        with open(filename) as f:
            data = f.read()
    except IOError as e:
        # show a detailed error with strerror and errno
        msg = _("{message}\n\n{strerror} ({errno})").format(
            message=_("Could not read from: {url}").format(url=filename),
            strerror=e.strerror,
            errno=e.errno)
        QMessageBox.critical(self, app.caption(_("Error")), msg)
    else:
        self.currentView().textCursor().insertText(util.decode(data))
def load(self):
    """Load the current url into the document.

    Returns True on success, False when reading failed (errors are caught
    in MainWindow.openUrl()), and None for an empty or non-local url.
    Only local files are supported.
    """
    fileName = self.url().toLocalFile()
    if not fileName:
        return  # None: empty or non-local url
    try:
        with open(fileName) as f:
            data = f.read()
    except (IOError, OSError):
        return False
    self.setPlainText(util.decode(data))
    self.setModified(False)
    self.loaded()
    app.documentLoaded(self)
    return True
def handle_change(self, thread_name, check_id, check_name, lock_uid, status, check_result):
    """Persist a confirmed up/down transition and notify the check's contacts.

    Before alerting about an offline target, pings Google: if our own host
    has no connectivity the alert is suppressed entirely.
    """
    if status == 'offline':
        import subprocess
        googleResult = subprocess.Popen(['ping', 'ipv4.google.com', '-c', '3', '-w', '3'], stdout=subprocess.PIPE).stdout.read()
        if '100% packet loss' in googleResult:
            # FIX: parenthesized print call (the bare Python-2 print
            # statement broke this module under Python 3; with a single
            # argument the call form behaves identically on Python 2)
            print("What?! Google down? Aborting alert sending as it seems like the monitor host has no connectivity")
            return
        updown = 'down'
    elif status == 'online':
        updown = 'up'
    else:
        # BUG FIX: 'updown' was unbound for any other status value,
        # producing a NameError later; fail fast instead.
        raise ValueError("handle_change: unexpected status %r" % (status,))
    safe_print("[%s] ... confirmed, target is %s state now", (thread_name, status))
    update_result = database.query("UPDATE checks SET status = %s, confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s", (status, check_id, lock_uid))
    if update_result.rowcount == 1:
        # we still had the lock at the point where status was toggled
        # then, send the alert
        alert_result = database.query("SELECT contacts.id, contacts.type, contacts.data FROM contacts, alerts WHERE contacts.id = alerts.contact_id AND alerts.check_id = %s AND alerts.type IN ('both', %s)", (check_id, updown))
        for alert_row in alert_result.fetchall():
            safe_print("[%s] ... alerting contact %d", (thread_name, alert_row['id']))
            alert_func = getattr(alerts, alert_row['type'], None)
            if not alert_func:
                util.die("Invalid alert handler [%s]!" % (alert_row['type']))
            # build context passed to the alert handler
            context = {}
            context['check_id'] = check_id
            context['check_name'] = check_name
            context['contact_id'] = alert_row['id']
            context['title'] = "Check %s: %s" % (status, check_name)
            context['status'] = status
            context['updown'] = updown
            context['message'] = check_result['message']
            alert_func(util.decode(alert_row['data']), context)
        # also add an event
        database.query("INSERT INTO check_events (check_id, type) VALUES (%s, %s)", (check_id, updown))
def math_clean(form):
    """
    Cleans a form, validating answer to math question in the process.

    The given ``form`` must be an instance of either ``MathCaptchaModelForm``
    or ``MathCaptchaForm``.  Answer keys are communicated in the
    ``math_captcha_question`` field which is evaluated to give the correct
    answer after being validated against the ``SECRET_KEY``.

    Raises ``forms.ValidationError`` for junk data, tampered tokens, or a
    wrong answer.
    """
    try:
        value = form.cleaned_data['math_captcha_field']
        test_secret, question = decode(form.cleaned_data['math_captcha_question'])
        assert len(test_secret) == 40 and question
    except (TypeError, AssertionError):
        # problem decoding, junky data
        raise forms.ValidationError('Invalid token')
    except KeyError:
        # fields absent (failed earlier validation); nothing to check
        return
    if encode(question) != form.cleaned_data['math_captcha_question']:
        # security problem, hack attempt
        # FIX: typo in the user-facing message ("відповіль" -> "відповідь")
        raise forms.ValidationError(u'Будь ласка, введіть правильну відповідь цифрою')
    # NOTE(security): eval() only runs on questions whose signature
    # round-trips through encode() above; do not relax that check.
    if eval(question) != value:
        raise forms.ValidationError(u'Будь ласка, введіть правильну відповідь цифрою')
def run(self):
    """Download the current song to a temp file so pyglet can seek in it.

    Skips .m4a files (pyglet cannot seek them).  The temp file path is
    recorded on the song both while downloading (tmpfile) and when
    complete (file).  All errors are logged, never raised.
    """
    # BUG FIX: bind url before the try block -- if self.song.url itself
    # raised, the except-handler's logger call crashed with NameError.
    url = None
    try:
        url = self.song.url
        logger.debug(u'下载歌曲 %s', util.decode(url))
        suffix = util.getSuffix(url)
        if suffix == '.m4a':
            logger.debug(u'文件类型无法被 pyglet seek %s' % url)
            return
        fd, tmpfile = tempfile.mkstemp(suffix)
        self.song.tmpfile = tmpfile
        response = urllib2.urlopen(url)
        # stream the body to disk in 4 KiB chunks
        while True:
            data = response.read(4096)
            if not data:
                break
            os.write(fd, data)
        response.close()
        os.close(fd)
        self.song.file = tmpfile
        logger.debug(u'下载完成 <%s> %s', tmpfile, url)
    except Exception:
        logger.exception(u'下载文件出错 %s', url)
def run(self):
    """Download and import the WiiTDB game database XML.

    Downloads and unzips the WiiTDB XML, then walks it with a libxml2
    cursor (``nodo``), creating/updating JuegoWIITDB rows plus their
    descriptions, genres, ratings, online features, input accessories,
    roms and companies through the sqlalchemy ``session``.  Progress is
    reported via the ``callback_*`` hooks; ``self.salir`` aborts the walk.

    NOTE(review): this is a whitespace-mangled block whose original
    indentation had to be reconstructed -- the nesting below follows the
    apparent control flow but should be diffed against the upstream file.
    """
    self.limpiarTemporales()
    descargado_y_ok = False
    try:
        self.descargarZIP()
        self.descomprimirZIP()
        descargado_y_ok = True
    except ErrorDescargando:
        self.error_importando(
            _("Error descargando la informacion WiiTDB desde %s") % self.url)
    if os.path.exists(self.fichXML) and descargado_y_ok:
        xmldoc = libxml2.parseFile(self.fichXML)
        ctxt = xmldoc.xpathNewContext()
        nodo = ctxt.xpathEval("//*[name() = 'datafile']")[0]
        if self.callback_empieza_importar:
            self.callback_empieza_importar(self.fichXML)
        cont = 0
        while not self.salir and nodo != None:
            if nodo.type == "element":
                if nodo.name == "datafile":
                    nodo = nodo.children
                elif nodo.name == "WiiTDB":
                    self.version = int(self.leerAtributo(nodo, 'version'))
                    self.games = int(
                        self.leerAtributo(nodo, 'games').split("/")[0])
                elif nodo.name == "game":
                    if nodo.type == "element":
                        iniciado = False
                        juego = None
                        name = self.leerAtributo(nodo, 'name')
                        if nodo.children is not None:
                            nodo = nodo.children
                            saltado = False
                            while not saltado and nodo.next is not None:
                                if nodo.type == "element":
                                    # id, region, locale, developer, publisher, date, genre, rating, wi-fi, input, rom
                                    if not iniciado:
                                        if nodo.name == "id":
                                            idgame = nodo.content
                                            sql = util.decode(
                                                'idgame == "%s"' % idgame)
                                            juego_wbfs = session.query(
                                                Juego).filter(
                                                    util.sql_text(
                                                        sql)).first()
                                            # si no los tienes y estas descargando en masivo -> se lo salta
                                            saltado = juego_wbfs is None and self.todos
                                            if not saltado:
                                                sql = util.decode(
                                                    "idgame=='%s'" % (idgame))
                                                try:
                                                    juego = session.query(
                                                        JuegoWIITDB
                                                    ).filter(
                                                        util.sql_text(
                                                            sql)).first()
                                                except:
                                                    self.error_importando(
                                                        _("XML invalido"))
                                                if juego == None:
                                                    juego = JuegoWIITDB(
                                                        nodo.content, name)
                                            iniciado = True
                                    # ya se ha iniciado
                                    else:
                                        if nodo.name == "region":
                                            juego.region = nodo.content
                                        elif nodo.name == "locale":
                                            lang = self.leerAtributo(
                                                nodo, 'lang')
                                            sql = util.decode(
                                                "lang=='%s' and idJuegoWIITDB='%s'"
                                                % (lang, juego.idJuegoWIITDB))
                                            descripcion = session.query(
                                                JuegoDescripcion).filter(
                                                    util.sql_text(
                                                        sql)).first()
                                            if descripcion == None:
                                                descripcion = JuegoDescripcion(
                                                    lang)
                                                if self.callback_nuevo_descripcion:
                                                    self.callback_nuevo_descripcion(
                                                        descripcion)
                                            if nodo.children is not None:
                                                nodo = nodo.children
                                                while nodo.next is not None:
                                                    if nodo.type == "element":
                                                        if nodo.name == "title":
                                                            descripcion.title = nodo.content
                                                        elif nodo.name == "synopsis":
                                                            descripcion.synopsis = nodo.content
                                                    nodo = nodo.next
                                                nodo = nodo.parent
                                            # añadimos la descripcion al juego
                                            juego.descripciones.append(
                                                descripcion)
                                        elif nodo.name == "developer":
                                            juego.developer = nodo.content
                                        elif nodo.name == "publisher":
                                            juego.publisher = nodo.content
                                        elif nodo.name == "date":
                                            try:
                                                year = int(
                                                    self.leerAtributo(
                                                        nodo, 'year'))
                                                month = int(
                                                    self.leerAtributo(
                                                        nodo, 'month'))
                                                day = int(
                                                    self.leerAtributo(
                                                        nodo, 'day'))
                                                fecha = date(
                                                    year, month, day)
                                                juego.fecha_lanzamiento = fecha
                                            except ValueError:
                                                pass
                                        elif nodo.name == "genre":
                                            valores = nodo.content
                                            for valor in valores.split(","):
                                                valor = valor.strip(
                                                ).replace("'", "`")
                                                sql = util.decode(
                                                    "nombre=='%s'" % (valor))
                                                genero = session.query(
                                                    Genero).filter(
                                                        util.sql_text(
                                                            sql)).first()
                                                if genero == None:
                                                    genero = Genero(valor)
                                                    if self.callback_nuevo_genero:
                                                        self.callback_nuevo_genero(
                                                            genero)
                                                juego.genero.append(genero)
                                        elif nodo.name == "rating":
                                            # crear un tipo de rating si es nuevo
                                            tipo = self.leerAtributo(
                                                nodo, 'type')
                                            sql = util.decode(
                                                "tipo=='%s'" % (tipo))
                                            rating_type = session.query(
                                                RatingType).filter(
                                                    util.sql_text(
                                                        sql)).first()
                                            if rating_type == None:
                                                rating_type = RatingType(
                                                    tipo)
                                            juego.rating_type = rating_type
                                            # crea una relacion si es un nuevo valor del tipo
                                            valor = self.leerAtributo(
                                                nodo, 'value')
                                            sql = util.decode(
                                                "idRatingType=='%s' and valor=='%s'"
                                                % (rating_type.idRatingType, valor))
                                            rating_value = session.query(
                                                RatingValue).filter(
                                                    util.sql_text(
                                                        sql)).first()
                                            if rating_value == None:
                                                rating_value = RatingValue(
                                                    valor)
                                                rating_type.valores.append(
                                                    rating_value)
                                            juego.rating_value = rating_value
                                            if nodo.children is not None:
                                                nodo = nodo.children
                                                while nodo.next is not None:
                                                    if nodo.type == "element":
                                                        if nodo.name == "descriptor":
                                                            valores = nodo.content
                                                            for valor in valores.split(","):
                                                                valor = valor.strip()
                                                                sql = util.decode(
                                                                    "idRatingType=='%s' and valor=='%s'"
                                                                    % (rating_type.idRatingType, valor))
                                                                rating_content = session.query(
                                                                    RatingContent).filter(
                                                                        util.sql_text(sql)).first()
                                                                if rating_content == None:
                                                                    rating_content = RatingContent(
                                                                        valor)
                                                                    rating_type.contenidos.append(
                                                                        rating_content)
                                                                juego.rating_contents.append(
                                                                    rating_content)
                                                    nodo = nodo.next
                                                nodo = nodo.parent
                                        elif nodo.name == "wi-fi":
                                            juego.wifi_players = self.leerAtributo(
                                                nodo, 'players')
                                            try:
                                                int(juego.wifi_players)
                                            except:
                                                juego.wifi_players = 0
                                            if nodo.children is not None:
                                                nodo = nodo.children
                                                while nodo.next is not None:
                                                    if nodo.type == "element":
                                                        if nodo.name == "feature":
                                                            valores = nodo.content
                                                            for valor in valores.split(","):
                                                                valor = valor.strip()
                                                                sql = util.decode(
                                                                    "valor=='%s'" % (valor))
                                                                online_feature = session.query(
                                                                    OnlineFeatures).filter(
                                                                        util.sql_text(sql)).first()
                                                                if online_feature == None:
                                                                    online_feature = OnlineFeatures(
                                                                        valor)
                                                                    if self.callback_nuevo_online_feature:
                                                                        self.callback_nuevo_online_feature(
                                                                            online_feature)
                                                                juego.features.append(
                                                                    online_feature)
                                                    nodo = nodo.next
                                                nodo = nodo.parent
                                        elif nodo.name == "input":
                                            juego.input_players = self.leerAtributo(
                                                nodo, 'players')
                                            if nodo.children is not None:
                                                nodo = nodo.children
                                                while nodo.next is not None:
                                                    if nodo.type == "element":
                                                        if nodo.name == "control":
                                                            nombres = self.leerAtributo(
                                                                nodo, 'type').split(",")
                                                            obligatorio = self.leerAtributo(
                                                                nodo, 'required')
                                                            ''' wiimotenunchuk, wiimote:nunchuk '''
                                                            if nombres[0] == "wiimotenunchuk" or nombres[0] == "wiimote:nunchuk":
                                                                nombres = [
                                                                    'wiimote',
                                                                    'nunchuck'
                                                                ]
                                                            for nombre in nombres:
                                                                nombre = nombre.strip()
                                                                ''' wiimote = wimmote nunchuk = nunchuck gamecube = gamegube, gamecube controller classiccontroller = calssiccontroller, classccontroller, classic controller, classic balanceboard = wii balance board, balance board motionplus = motion.plus, wii motionplus wheel = steering wheel zapper = wii zapper microphone = mic '''
                                                                if nombre == "wimmote":
                                                                    nombre = "wiimote"
                                                                elif nombre == "nunchuck":
                                                                    nombre = "nunchuk"
                                                                elif nombre == "gamegube" or nombre == "gamecube controller":
                                                                    nombre = "gamecube"
                                                                elif nombre == "calssiccontroller" or nombre == "classccontroller" or nombre == "classic controller" or nombre == "classic":
                                                                    nombre = "classiccontroller"
                                                                elif nombre == "wii balance board" or nombre == "balance board":
                                                                    nombre = "balanceboard"
                                                                elif nombre == "motion.plus" or nombre == "wii motionplus":
                                                                    nombre = "motionplus"
                                                                elif nombre == "steering wheel":
                                                                    nombre = "wheel"
                                                                elif nombre == "wii zapper":
                                                                    nombre = "zapper"
                                                                elif nombre == "mic":
                                                                    nombre = "microphone"
                                                                sql = util.decode(
                                                                    "nombre=='%s'" % (nombre))
                                                                accesorio = session.query(
                                                                    Accesorio).filter(
                                                                        util.sql_text(sql)).first()
                                                                if accesorio == None:
                                                                    accesorio = Accesorio(
                                                                        nombre)
                                                                    if self.callback_nuevo_accesorio:
                                                                        self.callback_nuevo_accesorio(
                                                                            accesorio,
                                                                            obligatorio == 'true')
                                                                if obligatorio == 'true':
                                                                    juego.obligatorio.append(
                                                                        accesorio)
                                                                else:
                                                                    juego.opcional.append(
                                                                        accesorio)
                                                    # siguiente control
                                                    nodo = nodo.next
                                                # volvemos a input
                                                nodo = nodo.parent
                                        elif nodo.name == "rom":
                                            version = self.leerAtributo(
                                                nodo, 'version')
                                            name = self.leerAtributo(
                                                nodo, 'name')
                                            size = self.leerAtributo(
                                                nodo, 'size')
                                            crc = self.leerAtributo(
                                                nodo, 'crc')
                                            md5 = self.leerAtributo(
                                                nodo, 'md5')
                                            sha1 = self.leerAtributo(
                                                nodo, 'sha1')
                                            rom = Rom(
                                                version, name, size, crc, md5, sha1)
                                            juego.roms.append(rom)
                                # siguiente hijo de game
                                nodo = nodo.next
                            # volver a game
                            nodo = nodo.parent
                            if not saltado:
                                if iniciado:
                                    # for compatibility with sqlalchemy
                                    try:
                                        session.add(juego)
                                    except:
                                        session.save(juego)
                                    if self.callback_nuevo_juego:
                                        self.callback_nuevo_juego(juego)
                                else:
                                    self.error_importando(_("XML invalido"))
                    else:
                        self.error_importando(_("XML invalido"))
                    cont += 1
                    # callback cada 1%
                    try:
                        llamarCallback = (cont % (self.games / 100) == 0)
                    except ZeroDivisionError:
                        llamarCallback = True
                    if llamarCallback and self.callback_spinner:
                        self.callback_spinner(cont, self.games)
                    nodo = nodo.next
                elif nodo.name == "companies":
                    if nodo.children is not None:
                        nodo = nodo.children
                        while nodo.next is not None:
                            if nodo.type == "element":
                                if nodo.name == "company":
                                    code = self.leerAtributo(nodo, 'code')
                                    name = self.leerAtributo(nodo, 'name')
                                    sql = util.decode("code=='%s'" % (code))
                                    companie = session.query(
                                        Companie).filter(
                                            util.sql_text(sql)).first()
                                    if companie == None:
                                        companie = Companie(code, name)
                                        # for compatibility with sqlalchemy
                                        try:
                                            session.add(companie)
                                        except:
                                            session.save(companie)
                                        if self.callback_nuevo_companie:
                                            self.callback_nuevo_companie(
                                                companie)
                            nodo = nodo.next
                        nodo = nodo.parent
                else:
                    self.error_importando(_("XML invalido"))
            nodo = nodo.next
        # libera el xml
        ctxt.xpathFreeContext()
        xmldoc.freeDoc()
        # hacemos efectivas las transacciones
        session.commit()
        self.limpiarTemporales()
        if self.callback_termina_importar:
            self.callback_termina_importar(self.fichXML, self.todos)
    else:
        self.error_importando(_("No existe el XML"))
def walk_doc(metadata, text):
    """Yield rule records extracted from one document's raw bytes.

    Yields dicts of the form ``{'meta': ..., 'data': ...}``.  Documents
    that look like a Logical Ruleset are split into per-rule chunks; a
    closing ``no_rules_except`` record lists every rule number seen, and
    any trailing text after the ruleset is walked recursively.  Documents
    that are not rulesets are handed to ``find_rules`` wholesale.
    """
    new_metadata = metadata.copy()
    # the RCS bookkeeping keys are only consumed here, not propagated
    if 'rcslog' in new_metadata:
        del new_metadata['rcslog']
        del new_metadata['rcsauthor']
    assert isinstance(text, bytes)
    #print(metadata['path'])
    # a ruleset starts with an optional short preamble then a FLR/SLR header
    m = regex.match(b'(.{,2048}\n)?THE (FULL |SHORT |)LOGICAL RULESET\n\n',
                    text, regex.S)
    if m:
        # this is a ruleset!
        lr_start = m.end()
        n = regex.search(b'\nEND OF THE [^ ]* LOGICAL RULESET', text,
                         pos=lr_start)
        if n:
            lr_end = n.end()
        else:
            # no explicit terminator: treat the rest of the file as ruleset
            lr_end = len(text)
        ruleset = m.group(0)
        # rules are separated by long dashed or double-ruled dividers
        ruleset_bits = regex.split(
            b'\n------------------------------+|====================+\n',
            text[lr_start:lr_end])
        have_rulenums = []
        mode = find_rules_mode_for_path(metadata['path'])
        for datas in map(partial(find_rules_in_flr_bit, mode), ruleset_bits):
            for data in datas:
                if data['number'] is not None:
                    have_rulenums.append(data['number'])
                yield {'meta': new_metadata, 'data': data}
        # explicit repeal annotations in RCS?
        if 'rcslog' in metadata and metadata['rcsauthor'] == 'comex':
            # split by semicolon, but not semicolons in parens
            logs = regex.findall(br';\s*((?:\([^\)]*\)|[^;\(]+)*)',
                                 metadata['rcslog'])
            for log in logs:
                log = log.strip()
                if log in {
                        b'formatting', b'update xrefs',
                        b'lots of formatting fixes'
                }:
                    continue  # old stuff I put in
                n = regex.match(b'Rule ([0-9]+) (?:\([^\)]*\) )?repealed',
                                log)
                if not n:
                    raise Exception('unknown RCS annotation %r' % log)
                number = int(n.group(1))
                # synthesize a record whose only content is the repeal note
                yield {
                    'meta': new_metadata,
                    'data': {
                        'number': number,
                        'revnum': None,
                        'title': None,
                        'header': None,
                        'extra': None,
                        'text': None,
                        'annotations': None,
                        'history': [decode(log)],
                    }
                }
        # repeals?
        yield {
            'meta': new_metadata,
            'data': {
                'no_rules_except': have_rulenums
            }
        }
        # handle any remaining data
        rest = text[lr_end:].lstrip()
        if rest:
            yield from walk_doc(metadata, rest)
        return
    elif b'THE RULES OF INTERNOMIC' in text:
        # this is ... a fake ruleset!
        return
    else:
        # not a ruleset
        if 'rcslog' in metadata and 'current_flr.txt,v' in metadata['path']:
            with warnx():
                print(repr(text))
                print("this should be a flr but doesn't match")
        for data in find_rules(find_rules_mode_for_path(metadata['path']),
                               text):
            yield {'meta': new_metadata, 'data': data}
def worker(self):
    """Worker-thread loop: pull checks off the queue, run them, and record results.

    Each queue item describes one scheduled check.  The check handler must
    return a dict with at least a 'status' key ('success' or 'fail'); a
    'message' is synthesized when missing.  Status transitions are confirmed
    `config['confirmations']` times before `handle_change` fires; otherwise
    the confirmation counter is updated and the row lock released.
    Runs forever; never returns.
    """
    thread_name = threading.currentThread().getName()
    while True:
        check_id, check_name, check_type, check_data, status, confirmations, lock_uid = self.q.get(
        )
        safe_print("[%s] processing check %d: calling checks.%s",
                   (thread_name, check_id, check_type))
        check_result = checks.run_check(check_type, util.decode(check_data))
        safe_print("[%s] check %d result: %s",
                   (thread_name, check_id, str(check_result)))
        # Validate the handler's return shape before trusting it.
        if not isinstance(check_result, dict) or 'status' not in check_result:
            util.die(
                "[%s] bad check handler [%s]: returned non-dict or missing status"
                % (thread_name, check_type))
        elif 'message' not in check_result:
            # Provide a default human-readable message per outcome.
            if check_result['status'] == 'fail':
                check_result['message'] = "Check offline: %s" % (check_name)
            else:
                check_result['message'] = "Check online: %s" % (check_name)
        if check_result['status'] == 'fail':
            safe_print("[%s] ... got failure!", (thread_name))
            if status == 'online':
                # Track failures for flap detection / reporting.
                with recent_failures_lock:
                    recent_failures.add((check_id, util.time()))
                if confirmations + 1 >= config['confirmations']:
                    # target has failed
                    self.handle_change(thread_name, check_id, check_name,
                                       lock_uid, 'offline', check_result)
                else:
                    # increase confirmations
                    database.query(
                        "UPDATE checks SET confirmations = confirmations + 1, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s",
                        (check_id, lock_uid))
            else:
                # Already offline: reset confirmations and release the lock.
                database.query(
                    "UPDATE checks SET confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s",
                    (check_id, lock_uid))
        elif check_result['status'] == 'success':
            safe_print("[%s] ... got success", (thread_name))
            if status == 'offline':
                if confirmations + 1 >= config['confirmations']:
                    # target has come back online
                    self.handle_change(thread_name, check_id, check_name,
                                       lock_uid, 'online', check_result)
                else:
                    # increase confirmations
                    database.query(
                        "UPDATE checks SET confirmations = confirmations + 1, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s",
                        (check_id, lock_uid))
            else:
                # Already online: reset confirmations and release the lock.
                database.query(
                    "UPDATE checks SET confirmations = 0, `lock` = '', last_checked = NOW() WHERE id = %s AND `lock` = %s",
                    (check_id, lock_uid))
        else:
            # BUG FIX: the original applied `%` to util.die's return value
            # (util.die("...!") % (...)), so the message was never formatted.
            util.die(
                "Check handler [%s] returned invalid status code [%s]!"
                % (check_type, check_result['status']))
def getParticion(self, DEVICE): sql = util.decode("device=='%s'" % (DEVICE)) particion = session.query(Particion).filter(util.sql_text(sql)).first() return particion
def __setattr__(self, name, value): if isinstance(value, str): value = util.decode(value) object.__setattr__(self, name, value)
def getJuegoWIITDB(self): sql = util.decode("juego_wiitdb.idgame=='%s'" % (self.idgame)) return session.query(JuegoWIITDB).filter(util.sql_text(sql)).first()
async def setup(message, args, **kwargs):
    """Guild setup command.

    Without args: export the current config and guild role/channel data for
    the external setup site.  With args: parse an import code (base-encoded
    JSON), preview the resulting settings, and persist them on confirmation.
    Requires the caller to have MANAGE_GUILD.
    """
    if not message.author.guild_permissions.manage_guild:
        await message.channel.send(
            "You require the `MANAGE_GUILD` permission to use this command!"
        )
        return
    configs = await get_guild_configs(message.guild.id)
    if not args:
        # Export flow: needs embeds plus file attachments for oversized data.
        if not (
            message.channel.permissions_for(message.guild.me).embed_links
            and message.channel.permissions_for(message.guild.me).attach_files
        ):
            await message.channel.send(
                "I require the `EMBED_LINKS` and `ATTACH_FILES` permissions to use this command!"
            )
            return
        embed = discord.Embed(color=util.get_color(message.guild.me))
        config_export = "None generated."
        files = []
        if configs.guild_id:
            config_export = configs.export()
            # Embed field values cap at 1024 chars; spill to a file.
            if len(config_export) > 1024:
                b = BytesIO()
                b.write(config_export.encode("utf-8"))
                b.seek(0)
                config_export = (
                    "This string was too long to send. Please check the uploaded file."
                )
                files += [discord.File(b, "config_export.txt")]
        embed.add_field(name="Config Export", value=config_export)
        guild_file = None  # NOTE(review): assigned but never used
        # Roles (highest first, excluding @everyone) and writable text channels.
        guild_export = {
            "roles": [
                [i.name, str(i.id), str(i.color)]
                for i in sorted(
                    message.guild.roles, key=lambda x: x.position, reverse=True
                )
                if i.id != message.guild.id
            ],
            "channels": [
                [i.name, str(i.id)]
                for i in message.guild.text_channels
                if i.permissions_for(message.guild.me).send_messages
            ],
        }
        guild_export = encode(json.dumps(guild_export))
        full_guild_export = guild_export
        # > 2048: too big even for the embed description -> attach as file.
        if len(guild_export) > 2048:
            b = BytesIO()
            b.write(guild_export.encode("utf-8"))
            b.seek(0)
            guild_export = (
                "This string was too long to send. Please check the uploaded file."
            )
            files += [discord.File(b, "guild_data_export.txt")]
        elif len(guild_export) > 1024:
            # Fits in the description (2048) but not a field (1024).
            embed.title = "Guild Data Export (Full code)"
            embed.description = guild_export
            guild_export = "This string was too long to put in here. Please check the long bit of text above."
        embed.add_field(name="Guild Data Export", value=guild_export)
        ret = "Welcome to the ⌚ setup!\nPlease go to https://sink.discord.bot/⌚ to generate an import code!\nRun this command with the Import config to set up the bot on this guild."
        # DMs cap at 2000 chars, so only offer the DM shortcut below that.
        if len(full_guild_export) <= 2000 and message.author.is_on_mobile():
            ret += "\n\nI am detecting that you are currently on a mobile device. React to this message with ☎ (`telephone`) to receive a DM with the data that can easily be copied."
        msg = await message.channel.send(ret, embed=embed, files=files)
        if len(full_guild_export) <= 2000:

            def check(reaction, user):
                # Match ☎ (with or without variation selectors) from the author.
                return (
                    reaction.message.id == msg.id
                    and reaction.emoji.strip("\uFE0F\uFE0E") == "☎"
                    and user.id == message.author.id
                )

            try:
                reaction, user = await bot.wait_for("reaction_add", check=check)
            except asyncio.TimeoutError:
                return
            if reaction:
                try:
                    await message.author.send(full_guild_export)
                except:  # NOTE(review): bare except; likely guarding discord.Forbidden
                    await message.channel.send(
                        "DM failed. Please ensure your DMs are enabled and run the command again."
                    )
        return True
    else:
        # Import flow: needs embeds plus reactions for the confirm step.
        if not (
            message.channel.permissions_for(message.guild.me).embed_links
            and message.channel.permissions_for(message.guild.me).add_reactions
        ):
            await message.channel.send(
                "I require the `EMBED_LINKS` and `ADD_REACTIONS` permissions to use this command!"
            )
            return
        channel = None
        try:
            # Decode and normalize the import code into a config row dict.
            args = json.loads(decode(args))
            args["guild_id"] = message.guild.id
            args["post_channel"] = configs.post_channel
            args["special_roles"] = [int(r) for r in args["roles"]]
            args["prefix"] = args["prefix"].strip()[:32] if args["prefix"] else None
            args["options"] = int(args["options"])
            offset = 0 if not args["offset"] else args["offset"]
            # Clamp into signed-int32 range; stored zero-based.
            args["_offset"] = max(0, min(2147483647, int(offset)) - 1)
            if not configs.guild_id:
                # First-time setup: the import must name a valid channel.
                args["post_channel"] = int(args["channel"])
                channel = message.guild.get_channel(args["post_channel"])
                if not channel:
                    raise ValueError
        except:  # NOTE(review): bare except; any malformed import lands here
            await message.channel.send("Invalid input!")
            return
        if configs:
            # Preserve the existing event offset on re-import.
            args["offset"] = configs.offset
        emotes = ["✅", "❎"]
        args = Configs.from_row(args)
        msg = await message.channel.send(
            "Here are your imported settings! Please react with ✅ to confirm them. (You can check then again later with the `settings` command)",
            embed=args.as_embed(message.guild),
        )
        for e in emotes:
            await msg.add_reaction(e)

        def check(reaction, user):
            return (
                reaction.message.id == msg.id
                and reaction.emoji in emotes
                and user.id == message.author.id
            )

        try:
            reaction, user = await bot.wait_for("reaction_add", check=check)
        except asyncio.TimeoutError:
            return
        if reaction.emoji == "✅":
            # Upsert; only prefix/options/special_roles change on conflict.
            await bot.db.execute(
                """
    INSERT INTO guild_configs (
        guild_id, post_channel, prefix, options, latest_event_count, special_roles, recent_events, _offset
    ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
    ON CONFLICT (guild_id) DO UPDATE
    SET prefix = EXCLUDED.prefix, options = EXCLUDED.options, special_roles = EXCLUDED.special_roles
    ;""",
                *args.db_insert(),
            )
            bot._guild_prefix_cache[message.guild.id] = args.prefix
            await message.channel.send("Your settings have been updated.")
        else:
            await message.channel.send("Process aborted.")
        return True
def __init__(self, tipo): self.tipo = util.decode(tipo).strip()
def __init__(self, valor): self.valor = util.decode(valor).strip()
def __init__(self, lang='', title='', synopsis=''): self.lang = lang self.title = util.decode(title) self.synopsis = util.decode(synopsis)
def __init__(self, idgame, title, size): self.idgame = util.decode(idgame) self.title = util.decode(title) self.size = float(size)
def text(self): """The text of the file (as unicode string).""" with open(self.filename) as f: return util.decode(f.read())
def __init__(self, nombre): self.nombre = util.decode(nombre).strip()
def __init__(self, nombre, descripcion=''): self.nombre = util.decode(nombre).strip() self.descripcion = util.decode(descripcion).strip()
def sincronizarParticiones(self, detector=config.DETECTOR_WBFS): salida = util.getSTDOUT_NOERROR_iterador(detector) listaParticiones = [] for linea in salida: if linea.find("/dev/") != -1: cachos = linea.strip().split(config.SEPARADOR) device = util.decode(cachos[0]) particion = self.getParticion(device) if particion is not None: # borrar TODOS los juegos de esta particion query = session.query(Juego) query = query.filter( util.sql_text("idParticion = %d" % particion.idParticion)) for juego in query: session.delete(juego) # ya borro la particion session.delete(particion) # guardar cambios session.commit() try: particion = Particion(cachos) # for compatibility with sqlalchemy try: session.add(particion) except: session.save(particion) listaParticiones.append(particion) session.commit() except SintaxisInvalida: continue if len(listaParticiones) > 0: # borrar TODOS los juegos que no sean de las particiones encontradas query = session.query(Juego) for particion in listaParticiones: query = query.filter( util.sql_text("idParticion <> %d" % particion.idParticion)) for juego in query: session.delete(juego) # borrar TODAS las particiones que no sean de las particiones encontradas query = session.query(Particion) for particion in listaParticiones: query = query.filter( util.sql_text("idParticion <> %d" % particion.idParticion)) for particion in query: session.delete(particion) # subimos cambios session.commit() return listaParticiones