def refresh_access_token(self):
    params = {
        'refresh_token': self.auth_data['refresh_token'],
        'client_id': self.registry['google_oauth_id'],
        'client_secret': self.registry['google_oauth_secret'],
        'grant_type': 'refresh_token',
    }
    resp = requests.post(
        'https://accounts.google.com/o/oauth2/token', data=params)
    self.auth_data.update(resp.json())
    self.registry['google_auth_data'] = safe_text(
        json.dumps(self.auth_data))
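
# --- Illustrative sketch (not part of the original module) -----------------
# A minimal, standalone version of the refresh_token grant performed by
# refresh_access_token() above, assuming the same Google token endpoint.
# The client_id/client_secret/refresh_token arguments are hypothetical
# placeholders you would supply yourself.
def refresh_google_token_example(client_id, client_secret, refresh_token):
    import requests
    resp = requests.post(
        'https://accounts.google.com/o/oauth2/token',
        data={
            'refresh_token': refresh_token,
            'client_id': client_id,
            'client_secret': client_secret,
            'grant_type': 'refresh_token',
        })
    resp.raise_for_status()
    # The JSON response typically carries 'access_token' and 'expires_in'.
    return resp.json()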
def confirm_authorization(self):
    url = 'https://accounts.google.com/o/oauth2/token'
    data = {
        'code': self.req.form['code'],
        'client_id': self.registry['google_oauth_id'],
        'client_secret': self.registry['google_oauth_secret'],
        'redirect_uri': '%s/authorize-google' % self.site.absolute_url(),
        'grant_type': 'authorization_code',
    }
    resp = requests.post(url, data=data)
    # TODO: Check if resp.content is text or bytes in py3
    self.registry['google_auth_data'] = safe_text(resp.content)
    self.req.response.redirect(self.site.absolute_url())
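
# --- Illustrative sketch (not part of the original module) -----------------
# confirm_authorization() above handles the second half of the OAuth2
# authorization-code flow. A hedged sketch of the first half: building the
# URL that sends the user to Google's consent screen so Google can redirect
# back to '<site>/authorize-google?code=...'. The auth endpoint, the 'scope'
# argument and this helper's name are assumptions, not part of the original.
def build_google_consent_url_example(client_id, site_url, scope):
    try:
        from urllib.parse import urlencode  # Python 3
    except ImportError:
        from urllib import urlencode  # Python 2
    params = {
        'response_type': 'code',
        'client_id': client_id,
        'redirect_uri': '%s/authorize-google' % site_url,
        'scope': scope,
        'access_type': 'offline',  # ask Google to also issue a refresh_token
    }
    return 'https://accounts.google.com/o/oauth2/auth?' + urlencode(params)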
def PUT(self, REQUEST=None, RESPONSE=None):
    """DAV method to replace the file field with a new resource."""
    request = REQUEST if REQUEST is not None else self.REQUEST
    response = RESPONSE if RESPONSE is not None else request.response
    infile = request.get('BODYFILE', None)
    first_line = infile.readline()
    infile.seek(0)
    if not headerRE.match(safe_text(first_line)):
        self.dav__init(request, response)
        self.dav__simpleifhandler(request, response, refresh=1)
        filename = request['PATH_INFO'].split('/')[-1]
        self.file = NamedBlobFile(
            data=infile.read(), filename=six.text_type(filename))
        modified(self)
        return response
    else:
        return super(File, self).PUT(REQUEST=request, RESPONSE=response)
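
# --- Illustrative sketch (not part of the original module) -----------------
# PUT() above is invoked when a WebDAV client replaces the object's file
# content. A minimal sketch of such a client call using the `requests`
# library; the URL, credentials and this helper's name are hypothetical.
def webdav_put_example(url, username, password, path):
    import requests
    with open(path, 'rb') as f:
        resp = requests.put(url, data=f, auth=(username, password))
    # Raw bodies are stored via NamedBlobFile; bodies whose first line
    # matches headerRE are handed to the superclass PUT() instead.
    resp.raise_for_status()
    return resp.status_code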
def upload(self, file, portal, storage, status):
    """Add the redirections from the CSV file `file`.

    If anything goes wrong, do nothing.
    """
    # No file picked. There's gotta be a better way to handle this.
    if not file.filename:
        err = _(u"Please pick a file to upload.")
        status.addStatusMessage(err, type='error')
        self.form_errors['file'] = err
        return
    # Turn all kinds of newlines into LF ones. The csv module doesn't do
    # its own newline sniffing and requires either \n or \r.
    contents = safe_text(file.read()).splitlines()
    file = StringIO('\n'.join(contents))
    # Use first two lines as a representative sample for guessing format,
    # in case one is a bunch of headers.
    dialect = csv.Sniffer().sniff(file.readline() + file.readline())
    file.seek(0)
    # key is old path, value is tuple(new path, datetime, manual)
    successes = {}
    had_errors = False
    for i, fields in enumerate(csv.reader(file, dialect)):
        if len(fields) >= 2:
            redirection = fields[0]
            target = fields[1]
            now = None
            manual = True
            if len(fields) >= 3:
                dt = fields[2]
                if dt:
                    try:
                        now = DateTime(dt)
                    except DateTimeError:
                        logger.warning('Failed to parse as DateTime: %s', dt)
                        now = None
            if len(fields) >= 4:
                manual = fields[3].lower()
                # Compare first character with false, no, 0.
                if manual and manual[0] in 'fn0':
                    manual = False
                else:
                    manual = True
            abs_redirection, err = absolutize_path(redirection, is_source=True)
            abs_target, target_err = absolutize_path(target, is_source=False)
            if err and target_err:
                if (i == 0 and not redirection.startswith('/')
                        and not target.startswith('/')):
                    # First line is a header. Ignore this.
                    continue
                err = "%s %s" % (err, target_err)  # sloppy w.r.t. i18n
            elif target_err:
                err = target_err
            else:
                if abs_redirection == abs_target:
                    # TODO: detect indirect recursion
                    err = _(
                        u"Alternative urls that point to themselves will "
                        u"cause an endless cycle of redirects.")
        else:
            err = _(u"Each line must have 2 or more columns.")

        if not err:
            if not had_errors:  # else don't bother
                successes[abs_redirection] = (abs_target, now, manual)
        else:
            had_errors = True
            self.csv_errors.append(
                dict(
                    line_number=i + 1,
                    line=dialect.delimiter.join(fields),
                    message=err,
                ))

    if not had_errors:
        storage.update(successes)
        status.addStatusMessage(
            _(
                u"${count} alternative urls added.",
                mapping={'count': len(successes)},
            ),
            type='info',
        )
    else:
        self.csv_errors.insert(
            0,
            dict(
                line_number=0,
                line='',
                message=_(
                    u'msg_delimiter',
                    default=u"Delimiter detected: ${delimiter}",
                    mapping={'delimiter': dialect.delimiter},
                ),
            ),
        )
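
# --- Illustrative sketch (not part of the original module) -----------------
# The upload() variant above accepts rows of 2 to 4 columns:
#   old-path, new-path[, date[, manual-flag]]
# where the flag is treated as False when it starts with 'f', 'n' or '0'.
# A hedged sample of such a file together with the same Sniffer-based
# dialect detection; the paths and this helper's name are hypothetical.
def sniff_redirect_csv_example():
    import csv
    from io import StringIO
    sample = StringIO(
        u'old path,new path,date,manual\n'
        u'/old-page,/new-page,2021-01-31,true\n'
        u'/obsolete,/replacement,,false\n'
    )
    dialect = csv.Sniffer().sniff(sample.readline() + sample.readline())
    sample.seek(0)
    return [row for row in csv.reader(sample, dialect)]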
def upload(self, file, portal, storage, status):
    """Add the redirections from the CSV file `file`.

    If anything goes wrong, do nothing.
    """
    # No file picked. There's gotta be a better way to handle this.
    if not file.filename:
        err = _(u"Please pick a file to upload.")
        status.addStatusMessage(err, type='error')
        self.form_errors['file'] = err
        return
    # Turn all kinds of newlines into LF ones. The csv module doesn't do
    # its own newline sniffing and requires either \n or \r.
    contents = safe_text(file.read()).splitlines()
    file = StringIO('\n'.join(contents))
    # Use first two lines as a representative sample for guessing format,
    # in case one is a bunch of headers.
    dialect = csv.Sniffer().sniff(file.readline() + file.readline())
    file.seek(0)
    successes = []  # list of tuples: (abs_redirection, target)
    had_errors = False
    for i, fields in enumerate(csv.reader(file, dialect)):
        if len(fields) == 2:
            redirection, target = fields
            abs_redirection, err = absolutize_path(redirection, is_source=True)
            abs_target, target_err = absolutize_path(target, is_source=False)
            if err and target_err:
                err = "%s %s" % (err, target_err)  # sloppy w.r.t. i18n
            elif target_err:
                err = target_err
            else:
                if abs_redirection == abs_target:
                    # TODO: detect indirect recursion
                    err = _(
                        u"Alternative urls that point to themselves will "
                        u"cause an endless cycle of redirects.")
        else:
            err = _(u"Each line must have 2 columns.")

        if not err:
            if not had_errors:  # else don't bother
                successes.append((abs_redirection, abs_target))
        else:
            had_errors = True
            self.csv_errors.append(
                dict(
                    line_number=i + 1,
                    line=dialect.delimiter.join(fields),
                    message=err,
                ))

    if not had_errors:
        for abs_redirection, abs_target in successes:
            storage.add(abs_redirection, abs_target)
        status.addStatusMessage(
            _(
                u"${count} alternative urls added.",
                mapping={'count': len(successes)},
            ),
            type='info',
        )
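
# --- Illustrative sketch (not part of the original module) -----------------
# Both upload() variants rely on absolutize_path() returning an
# (absolute_path, error_message) pair. A simplified, hypothetical stand-in
# illustrating that contract only; the real helper also resolves the portal
# root and validates the paths, which is not reproduced here.
def absolutize_path_example(path, is_source=True):
    if not path:
        return path, u"You have to enter a path."
    if not path.startswith('/'):
        return path, u"Path must start with a slash."
    # A real implementation would prefix the portal path here and perform
    # further checks, e.g. on the target when is_source is False.
    return path, None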