def _fetch_http(self, url, params):
    """Standard HTTP request handler for this class with gzip and cookie
    support.

    This was separated out of :py:func:`MediaWiki.call` to make
    :py:func:`MediaWiki.normalize_api_url` useful.

    .. note:: This function should not be used.  Use
        :py:func:`MediaWiki.call` instead.

    :param url: URL to send POST request to
    :param params: dictionary of query string parameters
    """
    # Always request JSON output from the API.
    params['format'] = 'json'
    # urllib.urlencode expects str objects, not unicode
    fixed = dict([(to_bytes(b[0]), to_bytes(b[1])) for b in params.items()])
    request = urllib2.Request(url, urllib.urlencode(fixed))
    request.add_header('Accept-encoding', 'gzip')
    response = self._opener.open(request)
    # Persist cookies when a file-backed jar is in use.
    if isinstance(self._cj, cookielib.MozillaCookieJar):
        self._cj.save()
    # Transparently decompress a gzip-encoded response body.
    if response.headers.get('Content-Encoding') == 'gzip':
        compressed = StringIO(response.read())
        gzipper = gzip.GzipFile(fileobj=compressed)
        data = gzipper.read()
    else:
        data = response.read()
    return data
def urbandictionary_cmd(self, protocol, caller, source, command, raw_args,
                        parsed_args):
    """Look up a term on Urban Dictionary and respond with its definition.

    :param raw_args: raw argument string; the whole string (re-joined)
        is used as the search term.
    """
    args = raw_args.split()  # Quick fix for new command handler signature
    # FIX: removed unused `username = None` and the stale comment about
    # LastFM usernames (copy/paste leftover from another command).
    if len(args) == 0:
        caller.respond("Usage: {CHARS}urbandictionary <term>")
        return
    term = " ".join(args)
    try:
        definition, permalink = self.get_definition(term)
        if definition is None:
            source.respond('[UD] "%s" is not defined yet' % term)
        else:
            # TODO: Limit definition length
            # Strip CRs and collapse newlines so the reply fits one line.
            source.respond('[UD] "%s" - %s - (%s)'
                           % (term,
                              to_bytes(definition)
                              .replace('\r', '')
                              .replace('\n', ' '),
                              to_bytes(permalink)))
    except Exception:
        # Narrowed from a bare `except:`; still logs and keeps running.
        self.logger.exception("Cannot get definition for '%s'" % term)
        source.respond("There was an error while fetching the definition -"
                       " please try again later, or alert a bot admin.")
def enterClassID(self, ctx):
    """Handle a class identifier: emit the edge endpoint (and arrow
    style) when inside a relation, otherwise open a new class node in
    the DOT output and remember its number.
    """
    print("--------IDStart")  ##################
    if self.inRelation == True:
        # Emit the node number previously assigned to this class.
        self.output.write(to_bytes(self.label2class[ctx.getText()]))
        if self.secondRelationPart == True:
            # Choose the arrowhead/arrowtail for the relation kind.
            if self.inReal == True:
                self.output.write(
                    "[arrowhead=\"none\", style=\"dashed\"]\n")
            if self.inGen == True:
                self.output.write("[arrowhead=\"none\"]\n")
            if self.inAgg == True:
                self.output.write(
                    "[arrowhead=\"none\", arrowtail=\"odiamond\"]\n")
            if self.inComp == True:
                self.output.write(
                    "[arrowhead=\"vee\", arrowtail=\"diamond\"]\n")
            # BUG FIX: was `self.secondRelationPart == False`, a no-op
            # comparison; the flag must be reset with an assignment.
            self.secondRelationPart = False
        return
    if ctx.getText() in self.label2class.keys():
        return
    toPlace = ctx.getText()
    if toPlace.find(';') > -1:
        # A ';' marks an interface; prefix the guillemet stereotype.
        indx = toPlace.find(';')
        toPlace = " " + unichr(171) + "interface" + unichr(
            187) + "\\" + "n" + toPlace[indx + 1:]
    self.output.write(
        to_bytes(str(self.labeler) + "[label = \"{" + toPlace + "|"))
    self.label2class[ctx.getText(
    )] = self.labeler  # maps the number to the classID for later recall
    self.labeler += 1
def test_exception_to_bytes_custom(self):
    """exception_to_bytes with custom converter lists must not mangle
    the exception's byte payload, with or without the default
    converters appended."""
    # euc_jp converters, first alone, then with the defaults appended
    custom = [
        lambda e: converters.to_bytes(e.args[0], encoding='euc_jp'),
        lambda e: converters.to_bytes(e, encoding='euc_jp'),
    ]
    result = converters.exception_to_bytes(self.exceptions['euc_jpn'],
                                           converters=custom)
    tools.eq_(result, self.euc_jp_japanese)
    custom.extend(converters.EXCEPTION_CONVERTERS)
    result = converters.exception_to_bytes(self.exceptions['euc_jpn'],
                                           converters=custom)
    tools.eq_(result, self.euc_jp_japanese)
    # latin1 converters, same two configurations
    custom = [
        lambda e: converters.to_bytes(e.args[0], encoding='latin1'),
        lambda e: converters.to_bytes(e, encoding='latin1'),
    ]
    result = converters.exception_to_bytes(self.exceptions['latin1_spanish'],
                                           converters=custom)
    tools.eq_(result, self.latin1_spanish)
    custom.extend(converters.EXCEPTION_CONVERTERS)
    result = converters.exception_to_bytes(self.exceptions['latin1_spanish'],
                                           converters=custom)
    tools.eq_(result, self.latin1_spanish)
def validate_password(self, user, user_name, password):
    '''Check the supplied user_name and password against existing
    credentials.

    Note: user_name is not used here, but is required by external
    password validation schemes that might override this method.
    If you use SqlAlchemyIdentityProvider, but want to check the
    passwords against an external source (i.e. PAM, LDAP, Windows
    domain, etc), subclass SqlAlchemyIdentityProvider, and override
    this method.

    :arg user: User information.
    :arg user_name: Given username.  Not used.
    :arg password: Given, plaintext password.
    :returns: True if the password matches the username.  Otherwise
        False.  Can return False for problems within the Account System
        as well.
    '''
    # pylint: disable-msg=R0201,W0613
    # :R0201: TG identity providers must instantiate this method.
    # :W0613: TG identity providers have this method
    # crypt.crypt(stuff, '') == '' so an empty stored hash or empty
    # password must short-circuit to False before we ever call crypt.
    if not (user.password and password):
        return False
    hashed = crypt.crypt(to_bytes(password), to_bytes(user.password))
    return to_bytes(user.password) == hashed
def validate_identity(self, user_name, password, visit_key): ''' Look up the identity represented by user_name and determine whether the password is correct. Must return either None if the credentials weren't valid or an object with the following properties: :user_name: original user name :user: a provider dependant object (TG_User or similar) :groups: a set of group IDs :permissions: a set of permission IDs ''' # pylint: disable-msg=R0201 # TG identity providers have this method so we can't get rid of it. try: user = JsonFasIdentity(visit_key, username=user_name, password=password) except FedoraServiceError as e: log.warning('Error logging in %(user)s: %(error)s' % { 'user': to_bytes(user_name), 'error': to_bytes(e) }) return None return user
def triple_value_2(value, datatype): """ This function takes as input the predicate's value and returns it in the write format, be it a integer, decimal, double, boolean, date, time or dateTime datatype or whether it is a URI """ # Check whether the value is null or empty if value is None: return "" else: value = value.strip() # Return an empty string if the value is an empty string if value == "": return "" if value == "\\": value += "\\" # Replace double quote with a single quote value = to_bytes(value) value = value.replace('"', "'") # TEXT \u005c if re.search("[“”’`\r\n'\"]+", value, re.IGNORECASE): return b"\"\"\"{0}\"\"\"".format(value).replace("\\", "\\\\") else: return to_bytes(b"\"{0}\"^^xsd:{1}".format(value, datatype)).replace( "\\", "\\\\")
def __save_ids(self, save):
    '''Persist the cached username -> id mapping to disk.

    :arg save: The dict of usernames to ids to save.
    '''
    # Create the session directory on first use; warn (don't raise) if
    # that fails, since the open() below will report the real problem.
    if not path.isdir(b_SESSION_DIR):
        try:
            os.mkdir(b_SESSION_DIR, 0o755)
        except OSError as e:
            self.log.warning('Unable to create %(dir)s: %(error)s'
                             % {'dir': b_SESSION_DIR,
                                'error': to_bytes(e)})
    try:
        with open(b_SESSION_FILE, 'wb') as session_file:
            # Owner-only permissions: the cache is per-user state.
            os.chmod(b_SESSION_FILE, stat.S_IRUSR | stat.S_IWUSR)
            pickle.dump(save, session_file)
    except Exception as e:  # pylint: disable-msg=W0703
        # A failed save is non-fatal: the cache only spares the user
        # from re-typing their password, so just warn and move on.
        self.log.warning('Unable to write to session file %(session)s:'
                         ' %(error)s' % {'session': b_SESSION_FILE,
                                         'error': to_bytes(e)})
def validate_identity(self, user_name, password, visit_key):
    '''
    Look up the identity represented by user_name and determine whether
    the password is correct.

    Must return either None if the credentials weren't valid or an
    object with the following properties:

    :user_name: original user name
    :user: a provider dependant object (TG_User or similar)
    :groups: a set of group IDs
    :permissions: a set of permission IDs

    :arg user_name: user_name we're authenticating.  If None, we'll try
        to lookup a username from SSL variables
    :arg password: password to authenticate user_name with
    :arg visit_key: visit_key from the user's session
    '''
    using_ssl = False
    # Fall back to the verified SSL client certificate CN when no
    # username was supplied.
    if not user_name and config.get('identity.ssl'):
        if cherrypy.request.headers['X-Client-Verify'] == 'SUCCESS':
            user_name = cherrypy.request.headers['X-Client-CN']
            cherrypy.request.fas_provided_username = user_name
            using_ssl = True
    # pylint: disable-msg=R0201
    # TG identity providers have this method so we can't get rid of it.
    try:
        user = JsonFasIdentity(visit_key, username=user_name,
                               password=password, using_ssl=using_ssl)
    except FedoraServiceError as e:
        # FIX: `except X as e` replaces the Python-2-only `except X, e`
        # form, matching the sibling validate_identity implementations.
        self.log.warning(b_('Error logging in %(user)s: %(error)s') % {
            'user': to_bytes(user_name),
            'error': to_bytes(e)})
        return None
    # FIX: return the authenticated identity on success; previously the
    # function fell through and always returned None, unlike the
    # non-SSL sibling implementation.
    return user
def __add_entries(self, entries, config, all_lang=True):
    """ Add sections and their content from a dict of dicts as would be
    returned by the get_classes or get_properties functions.

    :arg entries, dict of dict containing the sections and their
        content to be added to the configuration.
    :arg config, the configuration object as obtained by ConfigParser
    :arg all_lang, a boolean specifying if the configuration should
        contain all the languages available or just english.
    :returns: the same config object, mutated in place.
    """
    for entry in entries:
        config.add_section(entry)
        for key in entries[entry]:
            value = entries[entry][key]
            # NOTE(review): exact type() checks (not isinstance) are
            # used throughout -- subclasses are deliberately excluded;
            # keep it that way unless callers are audited.
            if type(value) == list:
                values = []
                for val in value:
                    lang = None
                    if type(val) == rdflib.Literal and val.language:
                        lang = val.language
                    # Keep untagged values and, unless all_lang, only
                    # the English-tagged literals.
                    if all_lang or lang is None or lang == 'en':
                        try:
                            # n3() gives the literal's N3 serialization.
                            tmp = val.n3()
                        except AttributeError:
                            tmp = val
                        tmp = to_bytes(tmp)
                        values.append(tmp)
                value = values
            # Unwrap single-element lists to a scalar.
            if len(value) == 1:
                value = value[0]
            # Empty values (e.g. fully filtered lists) are skipped.
            if value:
                config.set(entry, to_bytes(key), value)
    return config
def deliver(self, formatted_message, recipient, raw_fedmsg):
    """
    Deliver a message to the recipient.

    Args:
        formatted_message (str): The formatted message that is ready for
            delivery to the user. It has been formatted according to the
            user's preferences.
        recipient (dict): The recipient of the message.
        raw_fedmsg (dict): The original fedmsg that was used to produce
            the formatted message.
    """
    to_addr = recipient['email address']
    try:
        # TODO handle the mail server being down gracefully
        yield smtp.sendmail(
            self.host,
            to_bytes(self.from_address),
            [to_bytes(to_addr)],
            formatted_message.encode('utf-8'),
            port=self.port,
        )
        _log.info('Email successfully delivered to %s', to_addr)
    except smtp.SMTPClientError as e:
        _log.info('Failed to email %s: %s', to_addr, str(e))
        # 550 means the mailbox is bad; anything else is re-raised.
        if e.code != 550:
            raise
        self.handle_bad_email_address(recipient)
def validate_identity(self, user_name, password, visit_key): ''' Look up the identity represented by user_name and determine whether the password is correct. Must return either None if the credentials weren't valid or an object with the following properties: :user_name: original user name :user: a provider dependant object (TG_User or similar) :groups: a set of group IDs :permissions: a set of permission IDs ''' # pylint: disable-msg=R0201 # TG identity providers have this method so we can't get rid of it. try: user = JsonFasIdentity(visit_key, username=user_name, password=password) except FedoraServiceError as e: log.warning('Error logging in %(user)s: %(error)s' % { 'user': to_bytes(user_name), 'error': to_bytes(e)}) return None return user
def cmd(self, command, args, channel, **kwargs):
    """Answer the aww/sad/depressed/morn commands with a hot post from
    reddit's /r/aww, refreshing the cached listing every ten minutes.

    Optional args: ``<nick> [#channel]`` to address someone else or
    redirect the reply.
    """
    if command == 'aww' or command == 'sad' or command == 'depressed' \
            or command == 'morn':
        # Refresh the cached listing at most once per 600 seconds.
        if self.aww_updated_at is None \
                or (time() - self.aww_updated_at) > 600:
            url = 'https://reddit.com/r/aww/hot.json?limit=100'
            self.aww_list = json.loads(self.reddit_opener.open(url).read())
            if 'error' in self.aww_list:
                print(self.aww_list['error'])
                return [(0, channel, kwargs['from_nick'],
                         'I\'m so sorry, Reddit gave me an error. :(')]
            else:
                self.aww_updated_at = time()
        item = self.aww_list['data']['children'][
            randint(1, len(self.aww_list['data']['children']) - 1)]
        message = item['data']['url']
        message = self.redditfix_re.sub('&', message)
        nick = kwargs['from_nick']
        if args:
            args = args.split(' ')
            if len(args) >= 1 and len(args[0]) > 2:
                nick = args[0]
            if len(args) == 2 and len(args[1]) > 2:
                if args[1][0] == '#':
                    channel = args[1]
                else:
                    channel = '#' + args[1]
        if command == 'morn':
            message = choice(self.mornlist) + message
        try:
            return [(0, channel, to_bytes(to_unicode(nick)),
                     to_bytes(to_unicode(message)))]
        except Exception:
            # BUG FIX: `kwars` was a typo for `kwargs` -- the fallback
            # branch itself raised NameError.  Also narrowed the bare
            # `except:` and replaced `== None` with `is None` above.
            return [(0, channel, kwargs['from_nick'],
                     'Couldn\'t convert to unicode. :(')]
def characters(self, content):
    """SAX ``characters`` callback: route character data into the right
    buffer depending on the parser's current state.  States that do not
    collect text are explicit no-ops; an unknown state is an error.
    """
    if self.state == STATE_START:
        pass
    elif self.state == STATE_UNIT_SEARCH:
        pass
    elif self.state == STATE_PROCESSING_LOOP:
        pass
    elif self.state == STATE_READING_COMMENT:
        self.buffer.write(to_bytes(content))
    elif self.state == STATE_EXPECTING_FUNCTION:
        pass
    elif self.state == STATE_READING_FUNCTION_SIGNATURE:
        # The function name is captured separately AND kept in the main
        # buffer so the full signature stays intact.
        if self.function_sig_state == FUNCSIG_STATE_READING_FUNCTION_NAME:
            self.function_name_buffer.write(to_bytes(content))
        self.buffer.write(to_bytes(content))
    elif self.state == STATE_READING_TYPE_NAME:
        # Only record while the read_content flag is set.
        if self.read_content:
            self.buffer.write(to_bytes(content))
    else:
        raise Exception("Invalid state encountered: {0}".format(
            self.state))
def validate_password(self, user, user_name, password): ''' Check the supplied user_name and password against existing credentials. Note: user_name is not used here, but is required by external password validation schemes that might override this method. If you use SqlAlchemyIdentityProvider, but want to check the passwords against an external source (i.e. PAM, LDAP, Windows domain, etc), subclass SqlAlchemyIdentityProvider, and override this method. :arg user: User information. :arg user_name: Given username. Not used. :arg password: Given, plaintext password. :returns: True if the password matches the username. Otherwise False. Can return False for problems within the Account System as well. ''' # pylint: disable-msg=R0201,W0613 # :R0201: TG identity providers must instantiate this method. # crypt.crypt(stuff, '') == '' # Just kill any possibility of blanks. if not user.password: return False if not password: return False # pylint: disable-msg=W0613 # :W0613: TG identity providers have this method return to_bytes(user.password) == crypt.crypt(to_bytes(password), to_bytes(user.password))
def test_to_bytes_errors(self):
    """to_bytes honours the requested error-handling strategy."""
    # default errors='replace'
    replaced = converters.to_bytes(self.u_mixed, encoding='latin1')
    tools.eq_(replaced, self.latin1_mixed_replace)
    # errors='ignore' silently drops unencodable characters
    ignored = converters.to_bytes(self.u_mixed, encoding='latin',
                                  errors='ignore')
    tools.eq_(ignored, self.latin1_mixed_ignore)
    # errors='strict' propagates the UnicodeEncodeError
    tools.assert_raises(UnicodeEncodeError, converters.to_bytes,
                        *[self.u_mixed],
                        **{'errors': 'strict', 'encoding': 'latin1'})
def __save_ids(self, save): '''Save the cached ids file. :arg save: The dict of usernames to ids to save. ''' # Make sure the directory exists if not path.isdir(b_SESSION_DIR): try: os.mkdir(b_SESSION_DIR, 0o755) except OSError as e: self.log.warning('Unable to create %(dir)s: %(error)s' % { 'dir': b_SESSION_DIR, 'error': to_bytes(e) }) try: with open(b_SESSION_FILE, 'wb') as session_file: os.chmod(b_SESSION_FILE, stat.S_IRUSR | stat.S_IWUSR) pickle.dump(save, session_file) except Exception as e: # pylint: disable-msg=W0703 # If we can't save the file, issue a warning but go on. The # session just keeps you from having to type your password over # and over. self.log.warning('Unable to write to session file %(session)s:' ' %(error)s' % { 'session': b_SESSION_FILE, 'error': to_bytes(e) })
def enterClassID(self, ctx):
    """Handle a class identifier: emit the edge endpoint (and arrow
    style) when inside a relation, otherwise open a new class node in
    the DOT output and remember its number.
    """
    print("--------IDStart")  ##################
    if self.inRelation == True:
        # Emit the node number previously assigned to this class.
        self.output.write(to_bytes(self.label2class[ctx.getText()]))
        if self.secondRelationPart == True:
            # Choose the arrowhead/arrowtail for the relation kind.
            if self.inReal == True:
                self.output.write("[arrowhead=\"none\", style=\"dashed\"]\n")
            if self.inGen == True:
                self.output.write("[arrowhead=\"none\"]\n")
            if self.inAgg == True:
                self.output.write("[arrowhead=\"none\", arrowtail=\"odiamond\"]\n")
            if self.inComp == True:
                self.output.write("[arrowhead=\"vee\", arrowtail=\"diamond\"]\n")
            # BUG FIX: was `self.secondRelationPart == False`, a no-op
            # comparison; the flag must be reset with an assignment.
            self.secondRelationPart = False
        return
    if ctx.getText() in self.label2class.keys():
        return
    toPlace = ctx.getText()
    if toPlace.find(';') > -1:
        # A ';' marks an interface; prefix the guillemet stereotype.
        indx = toPlace.find(';')
        toPlace = " " + unichr(171) + "interface" + unichr(187) + "\\" + "n" + toPlace[indx + 1:]
    self.output.write(to_bytes(str(self.labeler) + "[label = \"{" + toPlace + "|"))
    self.label2class[ctx.getText()] = self.labeler  # maps the number to the classID for later recall
    self.labeler += 1
def lngettext(self, msgid1, msgid2, n):
    """Plural-aware gettext returning a byte str.

    Looks up (msgid1, plural(n)) in the catalog, falling back to the
    chained translation object and finally to the untranslated msgid
    chosen by n.  Always returns a byte str in the output charset (or
    the locale's preferred encoding), '' for non-string input.
    """
    # Pre-select the untranslated fallback text by plurality.
    if n == 1:
        tmsg = msgid1
    else:
        tmsg = msgid2
    # Non-string msgids cannot be looked up; return empty str.
    if not isinstance(msgid1, basestring):
        return ''
    msgid1 = to_unicode(msgid1, encoding=self.input_charset)
    try:
        #pylint:disable-msg=E1101
        tmsg = self._catalog[(msgid1, self.plural(n))]
    except KeyError:
        if self._fallback:
            try:
                tmsg = self._fallback.ngettext(msgid1, msgid2, n)
            except UnicodeError:
                # Ignore UnicodeErrors: We'll do our own encoding next
                pass
    # Make sure that we're returning a str
    if self._output_charset:
        return to_bytes(tmsg, encoding=self._output_charset,
                        nonstring='empty')
    return to_bytes(tmsg, encoding=locale.getpreferredencoding(),
                    nonstring='empty')
def byte_string_textual_width_fill(msg, fill, chop=None, left=True, prefix='',
        suffix='', encoding='utf-8', errors='replace'):
    '''Expand a byte :class:`str` to a specified :term:`textual width` or
    chop to same.

    :arg msg: byte :class:`str` encoded in :term:`UTF-8` that we want
        formatted
    :arg fill: pad :attr:`msg` until the :term:`textual width` is this long
    :kwarg chop: before doing anything else, chop the string to this length.
        Default: Don't chop the string at all
    :kwarg left: If :data:`True` (default) left justify the string and put
        the padding on the right.  If :data:`False`, pad on the left side.
    :kwarg prefix: Attach this byte :class:`str` before the field we're
        filling
    :kwarg suffix: Append this byte :class:`str` to the end of the field
        we're filling
    :rtype: byte :class:`str`
    :returns: :attr:`msg` formatted to fill the specified
        :term:`textual width`.  If no :attr:`chop` is specified, the string
        could exceed the fill length when completed.  If :attr:`prefix` or
        :attr:`suffix` are printable characters, the string could be longer
        than fill width.

    .. note:: :attr:`prefix` and :attr:`suffix` should be used for
        "invisible" characters like highlighting, color changing escape
        codes, etc.  The fill characters are appended outside of any
        :attr:`prefix` or :attr:`suffix` elements.  This allows you to only
        highlight :attr:`msg` inside of the field you're filling.

    .. seealso::
        :func:`~kitchen.text.display.textual_width_fill`
            For example usage.  This function has only two differences.

            1. it takes byte :class:`str` for :attr:`prefix` and
               :attr:`suffix` so you can pass in arbitrary sequences of
               bytes, not just unicode characters.
            2. it returns a byte :class:`str` instead of a :class:`unicode`
               string.
    '''
    prefix = to_bytes(prefix, encoding=encoding, errors=errors)
    suffix = to_bytes(suffix, encoding=encoding, errors=errors)
    if chop is not None:
        msg = textual_width_chop(msg, chop, encoding=encoding, errors=errors)
    # Width is measured in terminal cells, not bytes.
    width = textual_width(msg)
    msg = to_bytes(msg)
    if width >= fill:
        # Already wide enough: no padding, just wrap with prefix/suffix.
        if prefix or suffix:
            msg = ''.join([prefix, msg, suffix])
    else:
        extra = ' ' * (fill - width)
        # Padding goes outside prefix/suffix, on the chosen side.
        if left:
            msg = ''.join([prefix, msg, suffix, extra])
        else:
            msg = ''.join([extra, prefix, msg, suffix])
    return msg
def jenkins_ci_notification(repo, pagure_ci_token, username=None,
                            namespace=None):
    """
    Jenkins Build Notification
    --------------------------
    At the end of a build on Jenkins, this URL is used (if the project
    is rightly configured) to flag a pull-request with the result of
    the build.

    ::

        POST /api/0/ci/jenkins/<repo>/<token>/build-finished

    """
    project = pagure.lib._get_project(SESSION, repo, user=username,
                                      namespace=namespace,
                                      case=APP.config.get(
                                          'CASE_SENSITIVE', False))
    flask.g.repo_locked = True
    flask.g.repo = project
    if not project:
        raise pagure.exceptions.APIError(404,
                                         error_code=APIERROR.ENOPROJECT)
    # Constant-time comparison so the token can't be probed via timing.
    if not constant_time.bytes_eq(to_bytes(pagure_ci_token),
                                  to_bytes(project.ci_hook.pagure_ci_token)):
        raise pagure.exceptions.APIError(401,
                                         error_code=APIERROR.EINVALIDTOK)
    data = flask.request.get_json()
    if not data:
        APP.logger.debug("Bad Request: No JSON retrieved")
        raise pagure.exceptions.APIError(400,
                                         error_code=APIERROR.EINVALIDREQ)
    build_id = data.get('build', {}).get('number')
    if not build_id:
        APP.logger.debug("Bad Request: No build ID retrieved")
        raise pagure.exceptions.APIError(400,
                                         error_code=APIERROR.EINVALIDREQ)
    try:
        lib_ci.process_jenkins_build(
            SESSION, project, build_id,
            requestfolder=APP.config['REQUESTS_FOLDER'])
    except pagure.exceptions.NoCorrespondingPR as err:
        raise pagure.exceptions.APIError(400, error_code=APIERROR.ENOCODE,
                                         error=str(err))
    except pagure.exceptions.PagureException as err:
        APP.logger.error('Error processing jenkins notification',
                         exc_info=err)
        raise pagure.exceptions.APIError(400, error_code=APIERROR.ENOCODE,
                                         error=str(err))
    APP.logger.info('Successfully proccessed jenkins notification')
    # 204: acknowledged, no body.
    return ('', 204)
def _clean_environ(self, environ):
    '''Remove each of our ``clear_env`` keys from the supplied
    ``environ``, logging every deletion.'''
    log.debug('clean_environ(%s)' % to_bytes(self.clear_env))
    for key in self.clear_env:
        if key not in environ:
            continue
        log.debug('Deleting %(key)s from environ'
                  % {'key': to_bytes(key)})
        del environ[key]
def enterVmText(self, ctx):
    """Write this vmText node's text into the DOT label, turning ';'
    separators into literal "\\n" line breaks."""
    text = to_bytes(ctx.getText().replace(";", "\\" + "n"))
    if self.inMethod == True:
        # Method compartments are prefixed with the '|' separator.
        self.output.write("|" + text)
    else:
        self.output.write(text)
def _clean_environ(self, environ):
    ''' Delete the ``keys`` from the supplied ``environ``

    :arg environ: WSGI environ dict, scrubbed in place.
    '''
    log.debug('clean_environ(%s)' % to_bytes(self.clear_env))
    for key in self.clear_env:
        if key in environ:
            # Log each deletion for auditability.
            log.debug('Deleting %(key)s from environ'
                      % {'key': to_bytes(key)})
            del(environ[key])
def send_request(self, method, auth=False, verb='POST', **kwargs): """Make an HTTP request to a server method. The given method is called with any parameters set in req_params. If auth is True, then the request is made with an authenticated session cookie. :arg method: Method to call on the server. It's a url fragment that comes after the :attr:`base_url` set in :meth:`__init__`. :kwarg auth: If True perform auth to the server, else do not. :kwarg req_params: Extra parameters to send to the server. :kwarg file_params: dict of files where the key is the name of the file field used in the remote method and the value is the local path of the file to be uploaded. If you want to pass multiple files to a single file field, pass the paths as a list of paths. :kwarg verb: HTTP verb to use. GET and POST are currently supported. POST is the default. """ # Decide on the set of auth cookies to use method = absolute_url(self.base_url, method) self._authed_verb_dispatcher = { (False, 'POST'): self._session.post, (False, 'GET'): self._session.get, (True, 'POST'): self._authed_post, (True, 'GET'): self._authed_get } if 'timeout' not in kwargs: kwargs['timeout'] = self.timeout try: func = self._authed_verb_dispatcher[(auth, verb)] except KeyError: raise Exception('Unknown HTTP verb') try: output = func(method, **kwargs) except LoginRequiredError: raise AuthError() try: data = output.json() except ValueError as e: # The response wasn't JSON data raise ServerError( method, output.status_code, 'Error returned from' ' json module while processing %(url)s: %(err)s\n%(output)s' % { 'url': to_bytes(method), 'err': to_bytes(e), 'output': to_bytes(output.text), }) data = munchify(data) return data
def add_package(self, pkg, owner=None, description=None, branches=None,
                cc_list=None, comaintainers=None, groups=None):
    '''Add a package to the database.

    :arg pkg: Name of the package to edit
    :kwarg owner: If set, make this person the owner of both branches
    :kwarg description: If set, make this the description of both
        branches
    :kwarg branches: List of branches to operate on
    :kwarg cc_list: If set, list or tuple of usernames to watch the
        package.
    :kwarg comaintainers: If set, list or tuple of usernames to
        comaintain the package.
    :kwarg groups: If set, list or tuple of group names that can commit
        to the package.
    :raises AppError: If the server returns an error

    .. versionadded:: 0.3.13
    '''
    # See if we have the information to create it
    if not owner:
        raise AppError(name='AppError', message='We do not have '
                       'enough information to create package %(pkg)s. '
                       'Need version owner.' % {'pkg': to_bytes(pkg)})
    data = {'owner': owner, 'summary': description}
    # This call creates the package and an initial branch for
    # Fedora devel
    response = self.send_request('/acls/dispatcher/add_package/%s' % pkg,
                                 auth=True, req_params=data)
    if 'status' in response and not response['status']:
        raise AppError(name='PackageDBError', message=
                       'PackageDB returned an error creating %(pkg)s:'
                       ' %(msg)s' % {'pkg': to_bytes(pkg),
                                     'msg': to_bytes(response['message'])})
    # Collect the optional attributes for a follow-up edit call.
    if cc_list:
        data['ccList'] = json.dumps(cc_list)
    if comaintainers:
        data['comaintList'] = json.dumps(comaintainers)
    # Parse the groups information
    if groups:
        data['groups'] = json.dumps(groups)
    # Parse the Branch abbreviations into collections
    if branches:
        data['collections'] = []
        data['collections'] = branches
    # Ownership was already set by add_package above.
    del data['owner']
    if cc_list or comaintainers or groups or branches:
        response = self.send_request('/acls/dispatcher/'
                                     'edit_package/%s' % pkg, auth=True,
                                     req_params=data)
        if 'status' in response and not response['status']:
            raise AppError(name='PackageDBError',
                           message='Unable to save all information for'
                           ' %(pkg)s: %(msg)s' %
                           {'pkg': to_bytes(pkg),
                            'msg': to_bytes(response['message'])})
def edit_package( self, pkg, owner=None, description=None, branches=None, cc_list=None, comaintainers=None, groups=None ): """Edit a package. :arg pkg: Name of the package to edit :kwarg owner: If set, make this person the owner of both branches :kwarg description: If set, make this the description of both branches :kwarg branches: List of branches to operate on :kwarg cc_list: If set, list or tuple of usernames to watch the package. :kwarg comaintainers: If set, list or tuple of usernames to comaintain the package. :kwarg groups: If set, list or tuple of group names that can commit to the package. :raises AppError: If the server returns an error This method takes information about a package and either edits the package to reflect the changes to information. Note: This method will be going away in favor of methods that do smaller chunks of work: 1) A method to add a new branch 2) A method to edit an existing package 3) A method to edit an existing branch .. versionadded:: 0.3.13 """ # Change the branches, owners, or anything else that needs changing data = {} if owner: data["owner"] = owner if description: data["summary"] = description if cc_list: data["ccList"] = json.dumps(cc_list) if comaintainers: data["comaintList"] = json.dumps(comaintainers) # Parse the groups information if groups: data["groups"] = json.dumps(groups) # Parse the Branch abbreviations into collections if branches: data["collections"] = [] data["collections"] = branches # Request the changes response = self.send_request("/acls/dispatcher/edit_package/%s" % pkg, auth=True, req_params=data) if "status" in response and not response["status"]: raise AppError( name="PackageDBError", message=b_("Unable to save" " all information for %(pkg)s: %(msg)s") % {"pkg": to_bytes(pkg), "msg": to_bytes(response["message"])}, )
def flatten(x, name=''):
    # Recursively flatten nested dicts (and lists of dicts) into the
    # enclosing-scope `out` mapping, joining key path segments with '_'
    # and stripping the trailing separator at the leaf.
    # NOTE(review): to_bytes(i) stringifies the list index via its
    # nonstring handling -- presumably str(i) was intended; confirm.
    if type(x) is dict:
        for a in x:
            flatten(x[a], name + a + '_')
    elif type(x) is list and len(x) > 0 and type(x[0]) is dict:
        for i, a in enumerate(x):
            flatten(a, name + to_bytes(i) + '_')
    else:
        out[to_bytes(name[:-1])] = x
def enterVmText(self, ctx): print("----vmTextStart") ################## print(ctx.getText()) if self.inMethod == True: self.output.write("|" + to_bytes(ctx.getText().replace(";", "\\" + "n"))) return self.output.write(to_bytes(ctx.getText().replace(";", "\\" + "n")))
def test_to_bytes(self):
    '''Test to_bytes when the user gives good values'''
    cases = [
        # byte input passes through untouched, whatever the encoding
        ((self.utf8_japanese,), {'encoding': 'latin1'},
         self.utf8_japanese),
        # unicode input is encoded, defaulting to utf-8
        ((self.u_spanish,), {}, self.utf8_spanish),
        ((self.u_japanese,), {}, self.utf8_japanese),
        # explicit encodings are honoured
        ((self.u_spanish,), {'encoding': 'latin1'}, self.latin1_spanish),
        ((self.u_japanese,), {'encoding': 'euc_jp'},
         self.euc_jp_japanese),
    ]
    for args, kwargs, expected in cases:
        tools.eq_(converters.to_bytes(*args, **kwargs), expected)
def send_request(self, method, auth=False, verb='POST', **kwargs):
    """Make an HTTP request to a server method.

    The given method is called with any parameters set in req_params.
    If auth is True, then the request is made with an authenticated
    session cookie.

    :arg method: Method to call on the server.  It's a url fragment that
        comes after the :attr:`base_url` set in :meth:`__init__`.
    :kwarg auth: If True perform auth to the server, else do not.
    :kwarg req_params: Extra parameters to send to the server.
    :kwarg file_params: dict of files where the key is the name of the
        file field used in the remote method and the value is the local
        path of the file to be uploaded.  If you want to pass multiple
        files to a single file field, pass the paths as a list of paths.
    :kwarg verb: HTTP verb to use.  GET and POST are currently
        supported.  POST is the default.
    """
    method = absolute_url(self.base_url, method)
    # Map (auth, verb) onto the session call that implements it.
    self._authed_verb_dispatcher = {
        (False, 'POST'): self._session.post,
        (False, 'GET'): self._session.get,
        (True, 'POST'): self._authed_post,
        (True, 'GET'): self._authed_get,
    }
    kwargs.setdefault('timeout', self.timeout)
    try:
        func = self._authed_verb_dispatcher[(auth, verb)]
    except KeyError:
        raise Exception('Unknown HTTP verb')
    try:
        output = func(method, **kwargs)
    except LoginRequiredError:
        raise AuthError()
    try:
        data = output.json()
    except ValueError as e:
        # The response wasn't JSON data
        raise ServerError(
            method, output.status_code, 'Error returned from'
            ' json module while processing %(url)s: %(err)s\n%(output)s' % {
                'url': to_bytes(method),
                'err': to_bytes(e),
                'output': to_bytes(output.text),
            })
    return munchify(data)
def import_page(self, page, pages):
    """Create or update a Mezzanine RichTextPage from a WordPress page
    record.

    :arg page: mapping with at least post_title, post_content and
        post_modified keys.
    :arg pages: unused here; kept for interface compatibility with the
        caller.
    """
    title = to_unicode(page['post_title'])
    self.vprint("BEGIN Importing page '{0}'".format(to_bytes(title)), 1)
    mezz_page = self.get_or_create(RichTextPage, title=title)
    mezz_page.created = page['post_modified']
    mezz_page.updated = page['post_modified']
    mezz_page.content = to_unicode(page['post_content'])
    mezz_page.save()
    # FIX: the closing log message was missing the space before the
    # quoted title ("page'{0}'"), now matching the BEGIN line.
    self.vprint("END Importing page '{0}'".format(to_bytes(title)), 1)
def _fetch_json(self, url, params):
    """GET *url* with the given query parameters and return the decoded
    JSON reply, recording the request time for rate limiting."""
    self._check_rate_limit()
    # urllib.urlencode expects str objects, not unicode
    encoded = dict((to_bytes(key), to_bytes(val))
                   for key, val in params.items())
    request = urllib2.Request('%s?%s' % (url, urllib.urlencode(encoded)))
    request.add_header('Accept', 'application/json')
    response = urllib2.urlopen(request)
    data = json.loads(response.read())
    # Remember when we last hit the service.
    self.last_request_time = datetime.now()
    return data
def convert_latin_chars(self, strings):
    """ Check for latin similar characters mixed with Greek and convert
    them to Greek.

    Accepts a single string or a list; returns the same shape (a single
    converted string for single input, a list otherwise), or False on
    any unexpected error.
    """
    debug = False
    # Normalize to a list so one code path handles both input shapes.
    strings = [strings] if not isinstance(strings, list) else strings
    try:
        newstrings = []
        # Match a Latin letter/digit/'?' adjacent to a Greek letter.
        rgx = ur'(?P<a>[Α-Ωα-ω])?([a-z]|[A-Z]|\d|\?)(?(a).*|[Α-Ωα-ω])'
        latin = re.compile(rgx, re.U)
        for string in strings:
            string = to_unicode(string)
            mymatch = re.search(latin, string)
            if not mymatch:
                newstring = string
            else:
                # Lookalike Latin -> Greek substitution table.
                subs = {u'a': u'α',  # y
                        u'A': u'Α',  # n
                        u'd': u'δ',  # y
                        u'e': u'ε',  # y
                        u'E': u'Ε',
                        u'Z': u'Ζ',
                        u'H': u'Η',
                        u'i': u'ι',  # y
                        u'I': u'Ι',
                        u'k': u'κ',
                        u'K': u'Κ',  # y
                        u'v': u'ν',  # y
                        u'N': u'Ν',
                        u'o': u'ο',  # y
                        u'O': u'Ο',  # ΝΝΝ
                        u'p': u'ρ',  # y
                        u'P': u'Ρ',  # y
                        u't': u'τ',  # y
                        u'T': u'Τ',  # y
                        u'Y': u'Υ',
                        u'x': u'χ',
                        u'X': u'Χ',  # y
                        u'w': u'ω',  # y
                        u'?': u';'}
                if debug: print 'Latin character found in Greek string: '
                if debug: print mymatch.group(), 'in', to_bytes(string)
                newstring = multiple_replace(string, subs)
                if debug: print 'replaced with Greek characters:'
                if debug: print to_bytes(newstring)
            newstrings.append(newstring)
        # Unwrap single-element results back to a scalar.
        if len(newstrings) == 1:
            newstrings = newstrings[0]
        return newstrings
    except Exception:
        print traceback.format_exc(12)
        return False
def test_to_bytes_errors(self):
    """to_bytes honours the errors strategy: replace (default), ignore,
    and strict (raises)."""
    tools.eq_(converters.to_bytes(self.u_mixed, encoding='latin1'),
              self.latin1_mixed_replace)
    # NOTE(review): 'latin' is a codec alias of latin1 -- presumably
    # intentional, but inconsistent with the other calls.
    tools.eq_(
        converters.to_bytes(self.u_mixed, encoding='latin',
                            errors='ignore'), self.latin1_mixed_ignore)
    tools.assert_raises(UnicodeEncodeError, converters.to_bytes,
                        *[self.u_mixed], **{
                            'errors': 'strict',
                            'encoding': 'latin1'
                        })
def send_mail(to_addr, subject, text, from_addr=None):
    """Queue a plain-text email, or just log it when mail is disabled.

    :arg to_addr: recipient address
    :arg subject: message subject
    :arg text: plain-text body
    :kwarg from_addr: sender; defaults to the configured accounts_email
    """
    if from_addr is None:
        from_addr = config.get('accounts_email')
    message = turbomail.Message(from_addr, to_addr, subject)
    message.plain = text
    if not config.get('mail.on', False):
        # Mail is switched off: record what would have been sent.
        log.debug('Would have sent: %(subject)s'
                  % {'subject': to_bytes(subject)})
        log.debug('To: %(recipients)s' % {'recipients': to_bytes(to_addr)})
        log.debug('From: %(sender)s' % {'sender': to_bytes(from_addr)})
        return
    turbomail.enqueue(message)
def __call__(self, environ, start_response):
    '''
    This method is called for each request.  It looks for a
    user-supplied CSRF token in the GET/POST parameters, and compares
    it to the token attached to
    ``environ['repoze.who.identity']['_csrf_token']``.  If it does not
    match, or if a token is not provided, it will remove the user from
    the ``environ``, based on the ``clear_env`` setting.
    '''
    request = Request(environ)
    log.debug(
        b_('CSRFProtectionMiddleware(%(r_path)s)') %
        {'r_path': to_bytes(request.path)})
    # Token attached to the authenticated identity vs. the token the
    # client supplied (previously extracted into the environ).
    token = environ.get('repoze.who.identity', {}).get(self.csrf_token_id)
    csrf_token = environ.get(self.token_env)
    if token and csrf_token and token == csrf_token:
        log.debug(b_('User supplied CSRF token matches environ!'))
    else:
        if not environ.get(self.auth_state):
            # Mismatch (or missing token) outside of the login flow:
            # strip the identity so the request runs unauthenticated.
            log.debug(b_('Clearing identity'))
            self._clean_environ(environ)
            if 'repoze.who.identity' not in environ:
                environ['repoze.who.identity'] = Bunch()
            if 'repoze.who.logins' not in environ:
                # For compatibility with friendlyform
                environ['repoze.who.logins'] = 0
            if csrf_token:
                log.warning(
                    b_('Invalid CSRF token. User supplied'
                       ' (%(u_token)s) does not match what\'s in our'
                       ' environ (%(e_token)s)') % {
                        'u_token': to_bytes(csrf_token),
                        'e_token': to_bytes(token)
                    })
    response = request.get_response(self.application)
    if environ.get(self.auth_state):
        # Just logged in: propagate the fresh CSRF token onto the
        # redirect location so the client learns its token.
        log.debug(b_('CSRF_AUTH_STATE; rewriting headers'))
        token = environ.get('repoze.who.identity', {})\
            .get(self.csrf_token_id)
        loc = update_qs(response.location, {self.csrf_token_id: str(token)})
        response.location = loc
        log.debug(
            b_('response.location = %(r_loc)s') %
            {'r_loc': to_bytes(response.location)})
        environ[self.auth_state] = None
    return response(environ, start_response)
def test_to_bytes_nonstring_with_objects_that_have__unicode__and__str__(self):
    """Exercise to_bytes(nonstring=...) on objects mixing __unicode__
    and __str__.
    """
    # NOTE(review): __unicode__ is a Python-2-only protocol, so the whole
    # body appears to be guarded for py2 -- confirm this nesting against
    # the original (pre-collapse) file.
    if sys.version_info < (3, 0):
        # This object's _str__ returns a utf8 encoded object
        tools.eq_(converters.to_bytes(StrNoUnicode(), nonstring='simplerepr'),
                  self.utf8_spanish)
        # No __str__ method so this returns repr
        string = converters.to_bytes(UnicodeNoStr(), nonstring='simplerepr')
        self._check_repr_bytes(string, 'UnicodeNoStr')
        # This object's __str__ returns unicode which to_bytes converts to utf8
        tools.eq_(converters.to_bytes(StrReturnsUnicode(),
                                      nonstring='simplerepr'),
                  self.utf8_spanish)
        # Unless we explicitly ask for something different
        tools.eq_(converters.to_bytes(StrReturnsUnicode(),
                                      nonstring='simplerepr',
                                      encoding='latin1'),
                  self.latin1_spanish)
        # This object has no __str__ so it returns repr
        string = converters.to_bytes(UnicodeReturnsStr(),
                                     nonstring='simplerepr')
        self._check_repr_bytes(string, 'UnicodeReturnsStr')
        # This object's __str__ returns unicode which to_bytes converts to utf8
        tools.eq_(converters.to_bytes(UnicodeStrCrossed(),
                                      nonstring='simplerepr'),
                  self.utf8_spanish)
        # This object's __repr__ returns unicode which to_bytes converts to utf8
        tools.eq_(converters.to_bytes(ReprUnicode(), nonstring='simplerepr'),
                  u'ReprUnicode(El veloz murciélago saltó sobre el perro perezoso.)'.encode('utf8'))
        tools.eq_(converters.to_bytes(ReprUnicode(), nonstring='repr'),
                  u'ReprUnicode(El veloz murciélago saltó sobre el perro perezoso.)'.encode('utf8'))
def find_prefix(self, message, **kwargs):
    """Return the first configured prefix that *message* starts with.

    ``%%nick%%`` and ``%%prefix%%`` placeholders in each configured
    prefix are expanded before the comparison.

    :param message: object whose ``message`` attribute is tested
    :kwarg connection: connection object providing ``config.nick``
    :kwarg command_prefix: value substituted for ``%%prefix%%``
    :return: the expanded prefix as a byte string, or None if no
        configured prefix matches
    """
    connection = kwargs.get("connection", None)
    command_prefix = kwargs.get("command_prefix", "")
    # Hoisted out of the loop: the message text does not change.
    msg = to_bytes(message.message)
    for prefix in self.prefixes:
        # The placeholders are literal markers, so plain string
        # replacement is used instead of re.sub(): a nick containing
        # backslashes or regex metacharacters must not be interpreted
        # as a replacement pattern.
        findstring = prefix.replace("%%nick%%", connection.config.nick)
        findstring = findstring.replace("%%prefix%%", command_prefix)
        findstring = to_bytes(findstring)
        if msg.startswith(findstring):
            return findstring
    return None
def send_mail(to_addr, subject, text, from_addr=None):
    """Queue a plain-text email, or merely log it when mailing is disabled.

    :arg to_addr: recipient address
    :arg subject: subject line
    :arg text: plain-text body
    :kwarg from_addr: sender; falls back to the ``accounts_email`` config
    """
    if from_addr is None:
        from_addr = config.get('accounts_email')
    message = turbomail.Message(from_addr, to_addr, subject)
    message.plain = text
    if config.get('mail.on', False):
        turbomail.enqueue(message)
        return
    # Mail is switched off -- record what would have been delivered.
    for template, values in (
            ('Would have sent: %(subject)s', {'subject': to_bytes(subject)}),
            ('To: %(recipients)s', {'recipients': to_bytes(to_addr)}),
            ('From: %(sender)s', {'sender': to_bytes(from_addr)})):
        log.debug(template % values)
def test_exception_to_bytes_custom(self):
    """Custom converter lists must be used as-is and never mangled."""
    cases = (('euc_jpn', 'euc_jp', self.euc_jp_japanese),
             ('latin1_spanish', 'latin1', self.latin1_spanish))
    for exc_key, enc, expected in cases:
        # Bind enc as a default arg to avoid late-binding surprises.
        custom = [lambda e, enc=enc: converters.to_bytes(e, encoding=enc)]
        # The custom converter alone must be honoured...
        tools.ok_(converters.exception_to_bytes(self.exceptions[exc_key],
                                                converters=custom)
                  == expected)
        # ...and it must still win when the defaults are appended after it.
        custom.extend(converters.EXCEPTION_CONVERTERS)
        tools.ok_(converters.exception_to_bytes(self.exceptions[exc_key],
                                                converters=custom)
                  == expected)
def __call__(self, environ, start_response):
    '''
    This method is called for each request.  It looks for a
    user-supplied CSRF token in the GET/POST parameters, and compares
    it to the token attached to
    ``environ['repoze.who.identity']['_csrf_token']``.  If it does not
    match, or if a token is not provided, it will remove the user from
    the ``environ``, based on the ``clear_env`` setting.
    '''
    request = Request(environ)
    log.debug('CSRFProtectionMiddleware(%(r_path)s)' %
              {'r_path': to_bytes(request.path)})
    # Token attached to the authenticated identity vs. the token the
    # client supplied (previously extracted into the environ).
    token = environ.get('repoze.who.identity', {}).get(self.csrf_token_id)
    csrf_token = environ.get(self.token_env)
    if token and csrf_token and token == csrf_token:
        log.debug('User supplied CSRF token matches environ!')
    else:
        if not environ.get(self.auth_state):
            # Mismatch (or missing token) outside of the login flow:
            # strip the identity so the request runs unauthenticated.
            log.debug('Clearing identity')
            self._clean_environ(environ)
            if 'repoze.who.identity' not in environ:
                environ['repoze.who.identity'] = Bunch()
            if 'repoze.who.logins' not in environ:
                # For compatibility with friendlyform
                environ['repoze.who.logins'] = 0
            if csrf_token:
                log.warning('Invalid CSRF token. User supplied'
                            ' (%(u_token)s) does not match what\'s in our'
                            ' environ (%(e_token)s)' %
                            {'u_token': to_bytes(csrf_token),
                             'e_token': to_bytes(token)})
    response = request.get_response(self.application)
    if environ.get(self.auth_state):
        # Just logged in: propagate the fresh CSRF token onto the
        # redirect location so the client learns its token.
        log.debug('CSRF_AUTH_STATE; rewriting headers')
        token = environ.get('repoze.who.identity', {})\
            .get(self.csrf_token_id)
        loc = update_qs(
            response.location, {self.csrf_token_id: str(token)})
        response.location = loc
        log.debug('response.location = %(r_loc)s' %
                  {'r_loc': to_bytes(response.location)})
        environ[self.auth_state] = None
    return response(environ, start_response)
def _fetch_http(self, url, params, force_get=False):
    """
    Standard HTTP request handler for this class with gzip and cookie
    support. This was separated out of :py:func:`MediaWiki.call` to
    make :py:func:`MediaWiki.normalize_api_url` useful.

    .. note:: This function should not be used. Use
        :py:func:`MediaWiki.call` instead.

    :param url: URL to send POST request to
    :param params: dictionary of query string parameters
    :param force_get: force a GET request instead of POST
    """
    params['format'] = 'json'
    if sys.version_info[0] == 3:
        fixed = urllib.urlencode(tuple(params.items()))
    else:
        # urllib.urlencode (in Python 2) expects str objects, not
        # unicode.  Its output is pure ASCII (percent-encoded), so the
        # former redundant .encode('utf-8') round-trip was dropped.
        fixed = urllib.urlencode(
            tuple((to_bytes(k), to_bytes(v)) for k, v in params.items()))
    if force_get:
        request = urllib2.Request(url + '?' + fixed)
    else:
        if sys.version_info[0] == 3:
            # POST bodies must be bytes on Python 3
            fixed = bytearray(fixed, 'utf-8')
        request = urllib2.Request(url, fixed)
    if self._http_user is not None:
        auth_str = '%s:%s' % (self._http_user, self._http_password)
        if sys.version_info[0] == 3:
            auth_str = auth_str.encode('utf-8')
        # base64.b64encode never inserts newlines, unlike the deprecated
        # (removed in 3.9) encodestring(), whose bytes result also made
        # the old str-based .replace('\n', '') raise TypeError on py3.
        base64string = base64.b64encode(auth_str)
        if sys.version_info[0] == 3:
            # Header values must be text, not b'...' reprs.
            base64string = base64string.decode('ascii')
        request.add_header("Authorization", "Basic %s" % base64string)
    request.add_header('Accept-encoding', 'gzip')
    response = self._opener.open(request)
    if isinstance(self._cj, cookielib.FileCookieJar):
        # Persist cookies for file-backed jars.
        self._cj.save()
    if response.headers.get('Content-Encoding') == 'gzip':
        compressed = StringIO(response.read())
        gzipper = gzip.GzipFile(fileobj=compressed)
        data = gzipper.read()
    else:
        data = response.read()
    if sys.version_info[0] == 3:
        # Decode the body using the server-declared charset, UTF-8 default.
        encoding = response.info().get_content_charset() or "utf-8"
        data = data.decode(encoding)
    return data
def add_metadata(self, environ, identity):
    """Attach a CSRF token, derived from the auth session id, to the
    identity, and stash the client-supplied token in the environ.
    """
    request = Request(environ)
    log.debug(
        b_('CSRFMetadataProvider.add_metadata(%(r_path)s)') %
        {'r_path': to_bytes(request.path)})
    # Session id comes from the environ if present, else the cookie.
    session_id = environ.get(self.auth_session_id)
    if not session_id:
        session_id = request.cookies.get(self.session_cookie)
    log.debug(b_('session_id = %(s_id)r') %
              {'s_id': to_bytes(session_id)})
    if session_id and session_id != 'Set-Cookie:':
        environ[self.auth_session_id] = session_id
        # The CSRF token is the SHA1 hex digest of the session id.
        token = sha1(session_id).hexdigest()
        identity.update({self.csrf_token_id: token})
        log.debug(b_('Identity updated with CSRF token'))
        path = self.strip_script(environ, request.path)
        if path == self.login_handler:
            # Login request: flag the auth state and hand out our token.
            log.debug(b_('Setting CSRF_AUTH_STATE'))
            environ[self.auth_state] = True
            environ[self.token_env] = token
        else:
            environ[self.token_env] = self.extract_csrf_token(request)
        app = environ.get('repoze.who.application')
        if app:
            # This occurs during login in some application configurations
            if isinstance(app, HTTPFound) and environ.get(self.auth_state):
                log.debug(
                    b_('Got HTTPFound(302) from'
                       ' repoze.who.application'))
                # What possessed people to make this a string or
                # a function?
                location = app.location
                if hasattr(location, '__call__'):
                    location = location()
                # Append the CSRF token to the redirect query string.
                loc = update_qs(location, {self.csrf_token_id: str(token)})
                headers = app.headers.items()
                replace_header(headers, 'location', loc)
                app.headers = ResponseHeaders(headers)
                log.debug(
                    b_('Altered headers: %(headers)s') %
                    {'headers': to_bytes(app.headers)})
    else:
        log.warning(
            b_('Invalid session cookie %(s_id)r, not setting CSRF'
               ' token!') % {'s_id': to_bytes(session_id)})
def guess_type(filename, data):
    """
    Guess the type of a file based on its filename and data.

    Return value is a tuple (type, encoding) where type or encoding
    is None if it can't be guessed.

    :param filename: file name string
    :param data: file data string
    """
    mimetype, encoding = None, None
    if filename:
        mimetype, encoding = mimetypes.guess_type(filename)
    if not data:
        return mimetype, encoding
    if not mimetype:
        # A NUL byte in non-text content marks the data as an opaque blob.
        looks_binary = not isinstance(data, six.text_type) and b"\0" in data
        mimetype = "application/octet-stream" if looks_binary else "text/plain"
    if mimetype.startswith("text/") and not encoding:
        try:
            encoding = pagure.lib.encoding_utils.guess_encoding(
                ktc.to_bytes(data)
            )
        except pagure.exceptions.PagureException:  # pragma: no cover
            # We cannot decode the file, so bail but warn the admins
            _log.exception("File could not be decoded")
    return mimetype, encoding
def print_specs(specs): print Ut.headings("SPECIFICATIONS DATA", line=False) # PRINT SPECS for key, data in specs.items(): if key == "target" or key == "source": new_line = "\n" else: new_line = "" if type(data) == str or type(data) == int or type(data) == unicode: value = to_unicode(data) #.encode(encoding='utf-8') elif type(data) == float or type(data) == int: value = to_unicode(data) else: value = type(data) print "{}\t{:22}{}".format(new_line, key, "{}".format(": {}".format(to_bytes(value)))) if type(data) == dict: for detail, val in data.items(): print "\t\t{:18}: {}".format(detail, val) print ""
def _search_cmd_result(self, protocol, caller, source, result):
    """
    Receives the API response for search
    """
    # Users with the "loud" permission get replies in the channel;
    # everyone else gets a private reply.
    loud = self.commands.perm_handler.check("domainr.search.loud",
                                            caller, source, protocol)
    target = source if loud else caller
    try:
        if "results" in result:
            msgs = []
            for res in result["results"]:
                self.logger.trace(res)
                msg = u"%s%s - %s" % (res["domain"], res["path"],
                                      res["availability"])
                msgs.append(msg)
            self._msg(protocol, target, msgs)
        elif "message" in result:
            self.logger.error("Message from Domainr API:\r\n{}",
                              result["message"])
        else:
            self.logger.error("Unexpected response from API:\r\n{}",
                              to_bytes(result))
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        self.logger.exception("Please tell the developer about this error")
def write_clusters(verbs, top_feats):
    """writeclustering from kmeans.

    Writes two CSV files: ``out/verbs.csv`` (one row per verb, with its
    cluster label, distance and the cluster's top features) and
    ``out/feats.csv`` (one row per cluster).

    :param verbs: iterable of verb objects with ``code``, ``distance``,
        ``orig_text`` and ``sql_id`` attributes
    :param top_feats: mapping of cluster label to its top features
    """
    with open('out/verbs.csv', 'w+') as verbfile, \
            open('out/feats.csv', 'w+') as featfile:
        verbout = csv.DictWriter(verbfile, ['Id', 'Code', 'Distance',
                                            'Orig_Text', 'Top_N_Feats',
                                            'Cluster_Size', 'Verb_ID'])
        featout = csv.DictWriter(featfile, ['Label', 'Feats', 'Size'])
        verbout.writeheader()
        featout.writeheader()
        # Group verbs by their cluster label.
        clustering = defaultdict(list)
        for verb in verbs:
            clustering[verb.code].append(verb)
        for label in sorted(clustering):
            try:
                top_feats_com = '-'.join([str(ft) for ft in top_feats[label]])
            except UnicodeEncodeError:
                # BUG FIX: this previously fell through with top_feats_com
                # unset (NameError on the first label) or stale from the
                # previous cluster; use an explicit empty value instead.
                top_feats_com = ''
            cluster_size = len(clustering[label])
            featout.writerow({'Label': label, 'Feats': top_feats_com,
                              'Size': cluster_size})
            for i, verb in enumerate(clustering[label]):
                try:
                    verbout.writerow({'Id': i,
                                      'Code': verb.code,
                                      'Distance': verb.distance,
                                      'Orig_Text': to_bytes(verb.orig_text,
                                                            errors='ignore'),
                                      'Top_N_Feats': top_feats_com,
                                      'Cluster_Size': cluster_size,
                                      'Verb_ID': verb.sql_id})
                except UnicodeDecodeError:
                    # Skip rows whose text cannot be represented.
                    pass
def call_web_hooks(project, topic, msg):
    ''' Sends the web-hook notification. '''
    log.info(
        "Processing project: %s - topic: %s", project.fullname, topic)
    log.debug('msg: %s', msg)

    # Send web-hooks notification
    # NOTE(review): module-level counter; not protected by a lock, so the
    # sequence may skip/repeat under threads -- confirm single-threaded use.
    global _i
    _i += 1
    year = datetime.datetime.now().year
    if isinstance(topic, six.text_type):
        topic = to_bytes(topic, encoding='utf8', nonstring="passthru")
    # Wrap the payload in an envelope with a timestamp and unique id.
    msg = dict(
        topic=topic.decode('utf-8'),
        msg=msg,
        timestamp=int(time.time()),
        msg_id=str(year) + '-' + str(uuid.uuid4()),
        i=_i,
    )
    content = json.dumps(msg)
    # Sign the serialized payload with the project's secret token
    # (HMAC-SHA1) so receivers can verify authenticity.
    hashhex = hmac.new(
        str(project.hook_token), content, hashlib.sha1).hexdigest()
    headers = {'X-Pagure-Topic': topic, 'X-Pagure-Signature': hashhex}
    msg = json.dumps(msg)
    # A project may declare several hook URLs, one per line.
    for url in project.settings.get('Web-hooks').split('\n'):
        url = url.strip()
        log.info('Calling url %s' % url)
        try:
            req = requests.post(url, headers=headers, data={'payload': msg})
            if not req:
                # requests.Response is falsy for 4xx/5xx status codes.
                log.info('An error occured while querying: %s - '
                         'Error code: %s' % (url, req.status_code))
        except (requests.exceptions.RequestException, Exception) as err:
            # Best-effort delivery: log and continue with remaining URLs.
            log.info('An error occured while querying: %s - Error: %s' % (
                url, err))
def normalize(self, strings):
    """
    Primary normalization method that applies all of the other methods.
    This may be called with either a single string or a list of strings.
    It will return the same type as was supplied initially as the
    argument, except that any strings will be returned as unicode
    strings, regardless of their encoding or type when they were
    supplied.
    """
    # Flip to True for verbose tracing of each normalization stage.
    debug = False
    if debug: print 'starting normalize'
    strings = [strings] if not isinstance(strings, list) else strings
    # Stage 1: replace look-alike latin characters with Greek ones.
    strings = self.convert_latin_chars(strings)
    if debug: print 'about to normalize accents'
    if debug: print 'sending strings ============================\n'
    if debug:
        for s in strings:
            print type(s), to_bytes(s)
    # Stages 2 and 3: accent normalization, then whitespace cleanup.
    strings = self.normalize_accents(strings)
    strings = self.strip_extra_spaces(strings)
    # Unwrap single-element lists so the caller gets back the same
    # shape it passed in.
    if len(strings) == 1:
        strings = strings[0]
    return strings
def coerce_bytestr(cls, thing):
    """Coerce *thing* to a UTF-8 byte string via ``coerce_unicode``."""
    as_unicode = cls.coerce_unicode(thing)
    encoded = converters.to_bytes(as_unicode, "utf8")
    # Sanity check: to_bytes must hand back a native byte string.
    assert isinstance(encoded, str), (
        "something went wrong, should return str not %s" % type(encoded))
    return encoded
def utf8_width_chop(msg, chop=None):
    '''**Deprecated** Return a string chopped to a given :term:`textual width`

    Use :func:`~kitchen.text.display.textual_width_chop` and
    :func:`~kitchen.text.display.textual_width` instead::

        >>> msg = 'く ku ら ra と to み mi'
        >>> # Old way:
        >>> utf8_width_chop(msg, 5)
        (5, 'く ku')
        >>> # New way
        >>> from kitchen.text.converters import to_bytes
        >>> from kitchen.text.display import textual_width, textual_width_chop
        >>> (textual_width(msg), to_bytes(textual_width_chop(msg, 5)))
        (5, 'く ku')
    '''
    warnings.warn('kitchen.text.utf8.utf8_width_chop is deprecated. Use'
                  ' kitchen.text.display.textual_width_chop instead',
                  DeprecationWarning, stacklevel=2)
    # Idiom fix: compare to None with `is`, not `==` (PEP 8; `==` can be
    # hijacked by a custom __eq__ on msg-like arguments).
    if chop is None:
        # No chopping requested; report the full width unchanged.
        return textual_width(msg), msg
    # Remember whether the caller passed bytes so the chopped result can
    # be returned as the same type.
    as_bytes = not isunicodestring(msg)
    chopped_msg = textual_width_chop(msg, chop)
    if as_bytes:
        chopped_msg = to_bytes(chopped_msg)
    return textual_width(chopped_msg), chopped_msg
def _reencode_if_necessary(self, message, output_encoding):
    '''Return a byte string that's valid in a specific charset.

    :arg message: string to validate/re-encode; may be a byte string or
        unicode
    :arg output_encoding: charset the returned byte string must be
        valid in

    .. warning:: This method may mangle the message if the input
        encoding is not known or the message isn't representable in the
        chosen output encoding.
    '''
    valid = False
    msg = None
    try:
        valid = byte_string_valid_encoding(message, output_encoding)
    except TypeError:
        # input was unicode, so it needs to be encoded
        pass
    if valid:
        # Already a byte string valid in the target charset; return as-is.
        return message
    try:
        # Decode to unicode so we can re-encode to desired encoding
        msg = to_unicode(message, encoding=self.input_charset,
                         nonstring='strict')
    except TypeError:
        # Not a string; return an empty byte string
        return ''
    # Make sure that we're returning a str of the desired encoding
    return to_bytes(msg, encoding=output_encoding)