def issue_2factor_code(self, username):
    """Generate a fresh two-factor code for *username*, cache it, and return it.

    The code (with its issue time) is stored under the user's 2FA cache
    key so a later request can validate what the user types back.
    """
    code = get_random_string(8).upper()
    # Remember the code and when it was issued so it can be checked later.
    cache.set(self.get_2factor_code_key(username), {
        'timestamp': time.time(),
        'code': code,
    })
    return code
def get_tmp_field_id(self):
    """Return the id of the temporary upload field for this request.

    If the request carries a JSON ``<name>.action`` payload asking to
    replace an existing tmp field, reuse that field's id; otherwise
    mint a fresh random one.
    """
    action = self.request.get("%s.action" % self.name, None)
    try:
        action = json.loads(action)
        if action.get('replace'):
            return action['tmp_field_id']
    except (ValueError, TypeError, KeyError, AttributeError):
        # ValueError/TypeError: missing or malformed JSON payload.
        # KeyError: 'replace' requested but no 'tmp_field_id' supplied.
        # AttributeError: payload decoded to a non-dict (e.g. a list).
        # In every case fall through and issue a new tmp field id.
        pass
    return 'tmp_' + utils.get_random_string()
def _create_slot(self):
    """Create two dummy tile annotations and register them on this slot."""
    annotations = IAnnotations(self.portal)
    tile_refs = PersistentList()
    for _ in range(2):
        new_id = utils.get_random_string(30)
        # store the tile's own data under its annotation key
        annotations['%s.%s' % (TILE_ANNOTATIONS_KEY_PREFIX, new_id)] = \
            PersistentDict({'foo': 'bar'})
        tile_refs.append({'id': new_id})
    # register the created tiles on the slot annotation
    slot_key = '%s.%s' % (TILE_ANNOTATIONS_KEY_PREFIX, self.slot_id)
    annotations[slot_key] = PersistentDict({'tiles': tile_refs})
def issue_country_exception(self, user, country):
    """Record a pending country-login exception request for *user*.

    Captures request metadata plus a random confirmation code, caches
    it (not yet granted) under the user's exception key, and returns
    the recorded data.
    """
    request = self.request
    # capture information about the request
    entry = {
        'referrer': request.get_header('REFERER'),
        'user_agent': request.get_header('USER_AGENT'),
        'ip': get_ip(request),
        'username': user.getUserName(),
        'userid': user.getId(),
        'country': country,
        'timestamp': time.time(),
        'code': get_random_string(50),
        'granted': False,
    }
    # valid for 12 hours
    cache_key = self.get_country_exception_cache_key(user.getId())
    cache.set(cache_key, entry, 12 * 60 * 60)
    return entry
def chunk_upload(self):
    """Handle one chunk of a chunked file upload.

    Reads chunk metadata from the request form, streams the chunk's
    bytes to a temp file, and on the final chunk either creates new
    file content or — when a 'content' id was supplied — updates
    existing content.  Upload progress state is kept in the cache
    between requests under a path-namespaced key.

    Returns a JSON string (progress or failure) or, on successful
    completion, the result of ``dump_object_data``.
    """
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    # 'content' is the id of existing content to update (None => create new)
    existing_id = self.request.form.get('content', None)
    field_name = self.request.form.get('field', None)
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")
    # cache keys are namespaced by the context's physical path
    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk

    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    fi = open(info['tmp_file'], mode)
    while True:
        # stream the uploaded chunk to disk in 128 KiB pieces (2 << 16)
        data = self.request.form['file'].read(2 << 16)
        if not data:
            break
        fi.write(data)
    fi.close()

    if chunk == total_chunks:
        # finish upload
        dup = False
        if not existing_id:
            try:
                obj = self.create_file_content(info)
            except duplicates.DuplicateException as ex:
                # an equivalent object already exists; report it instead
                obj = ex.obj
                dup = True
        else:
            try:
                info['existing_id'] = existing_id
                info['field_name'] = field_name
                obj, success, msg = self.update_file_content(info)
                if not success:
                    # NOTE(review): this second call's result is ignored —
                    # looks like a one-shot retry or a leftover; confirm intent.
                    self.update_file_content(info)
                    self._clean_tmp(info)
                    return json.dumps({
                        'success': False,
                        'id': _id,
                        'reason': msg
                    })
            except Exception:
                logger.warning(
                    'Failed to update content.',
                    exc_info=True)
                self._clean_tmp(info)
                return json.dumps({
                    'success': False,
                    'id': _id
                })
        if not info.get('field_name', '').startswith('tmp_'):
            # tmp files need to stick around and be managed later...
            self._clean_tmp(info)
            cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    else:
        # save progress, then verify the cache write actually landed
        # before reporting success to the client.
        # NOTE(review): if cache.get() keeps raising this loops forever —
        # presumably acceptable for this cache backend; confirm.
        cache.set(cache_key_prefix + _id, info)
        check_put = None
        while check_put is None:
            try:
                check_put = cache.get(cache_key_prefix + _id)
            except Exception:
                cache.set(cache_key_prefix + _id, info)
        return json.dumps({
            'success': True,
            'id': _id
        })
def chunk_upload(self):
    """Handle one chunk of a chunked file upload.

    Reads chunk metadata from the request form, streams the chunk's
    bytes to a temp file, and on the final chunk creates the file
    content and cleans up.  Upload progress state is kept in the cache
    between requests under a path-namespaced key.

    Returns a JSON progress string or, on completion, the result of
    ``dump_object_data``.
    """
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")
    # cache keys are namespaced by the context's physical path
    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk

    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    # context manager: the handle is closed even if read()/write() raises
    # (the original leaked the descriptor on error)
    with open(info['tmp_file'], mode) as fi:
        while True:
            # stream the uploaded chunk to disk in 128 KiB pieces
            data = self.request.form['file'].read(2 << 16)
            if not data:
                break
            fi.write(data)

    if chunk == total_chunks:
        # finish upload
        dup = False
        try:
            obj = self.create_file_content(info)
        except duplicates.DuplicateException as ex:
            # an equivalent object already exists; report it instead
            obj = ex.obj
            dup = True
        # os.path.dirname instead of manual '/'-splitting of the path
        tmp_dir = os.path.dirname(info['tmp_file'])
        shutil.rmtree(tmp_dir)
        cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    else:
        # save progress, then verify the cache write actually landed
        # before reporting success to the client
        cache.set(cache_key_prefix + _id, info)
        check_put = None
        while check_put is None:
            try:
                check_put = cache.get(cache_key_prefix + _id)
            # was a bare `except:` — that also swallowed SystemExit and
            # KeyboardInterrupt; Exception is the widest we should catch
            except Exception:
                cache.set(cache_key_prefix + _id, info)
        return json.dumps({'success': True, 'id': _id})
def test_random_functions(self):
    """Both random-string helpers honour the requested length."""
    for make_random in (utils.get_random_string, utils.make_random_key):
        self.assertEqual(len(make_random(15)), 15)