def post(self):
    '''make a new paste'''
    if 'paste' not in request.form:
        return json.dumps({'error': 'paste required'})
    # Default to an anonymous paste; the original omitted this initialization,
    # which made the paste.user assignment below a NameError when no api_key
    # was supplied.
    user = None
    if 'api_key' in request.form:
        user = database.ApiKey.objects(key=request.form['api_key']).first()
        if user:
            user = user.user
        else:
            return json.dumps({'error': 'invalid api_key'})
    paste = database.Paste()
    # Deduplicate the paste name.
    paste.name = random_string()
    while database.Paste.objects(name=paste.name).first():
        paste.name = random_string()
    paste.paste = request.form['paste']
    paste.language = request.form['language'] if 'language' in request.form else None
    # TODO: autodetect language here
    paste.user = user
    paste.digest = sha1(paste.paste.encode('utf-8')).hexdigest()
    paste.time = arrow.utcnow().datetime
    if 'expiration' in request.form:
        # Expiration needs to be a time in seconds, > 0.
        try:
            seconds = int(request.form['expiration'])
            if seconds < 0:
                return json.dumps({'error': 'cannot expire in the past'})
            if seconds > 0:
                paste.expire = arrow.utcnow().replace(seconds=+seconds)
        except ValueError:
            return json.dumps({'error': 'invalid expiration format, should be number of seconds'})
    paste.save()
    # The domain is optional and not validated. If you feel like using one of
    # the alternatives (vomitb.in, not-pasteb.in), set the domain before
    # sending the paste.
    return json.dumps({'success': 1,
                       'url': 'https://{domain}/{name}'.format(
                           domain=request.form['domain'] if 'domain' in request.form else 'zifb.in',
                           name=paste.name)})
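# None of the snippets in this collection define random_string() itself, and
# its signature varies between projects (fixed length, min/max range, a size=
# keyword). A minimal sketch of one common variant these callers assume:
import random
import string

def random_string(length=8):
    """Return a random alphanumeric string of the given length."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))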
def save_file(post_file, extension=None, subdir=None, dir_to_save=None):
    """
    Saves a file to a directory.

    * file must be base64 encoded stream.
    - Ensures no file clashes.
    - Returns the filename.
    """
    if extension is None:
        extension = ".jpg"
    if dir_to_save is None:
        from radar import app
        dir_to_save = app.config["upload_dir"]
    if subdir is None:
        subdir = ""
    working_dir = os.path.join(dir_to_save, subdir)
    file_data = base64.b64decode(post_file)
    # Pick a random name, retrying until it does not clash with an existing file.
    file_name = random_string() + extension
    absolute_write_path = os.path.join(working_dir, file_name)
    while os.path.exists(absolute_write_path):
        file_name = random_string() + extension
        absolute_write_path = os.path.join(working_dir, file_name)
    with open(absolute_write_path, 'wb+') as f:
        f.write(file_data)
    return file_name
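# The exists-then-open loop above (and in temp_save_file further down) has a
# time-of-check/time-of-use race: another request could create the same name
# between os.path.exists() and open(). A sketch of a race-free variant using
# the standard library, assuming the exact shape of the random name does not
# matter to the caller:
import os
import tempfile

def save_file_atomic(file_data, working_dir, extension=".jpg"):
    # mkstemp creates the file with O_EXCL, so the name is guaranteed unique.
    fd, path = tempfile.mkstemp(suffix=extension, dir=working_dir)
    with os.fdopen(fd, 'wb') as f:
        f.write(file_data)
    return os.path.basename(path)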
def get_a_random_table():
    type_array = ["TEXT", "INTEGER", "REAL"]
    nr_columns = random.randint(1, 20)
    column_types = [type_array[random.randint(0, len(type_array) - 1)]
                    for _ in range(nr_columns)]
    table = QasinoTable(util.random_string(4, 20))
    table.set_column_names([util.random_string(1, 40) for _ in range(nr_columns)])
    table.set_column_types(column_types)
    for row_index in range(random.randint(1, 300)):
        row = []
        for column_index in range(nr_columns):
            if column_types[column_index] == "TEXT":
                row.append(util.random_string(1, 50))
            elif column_types[column_index] == "REAL":
                row.append(random.randint(0, 3483839392) / 100.0)
            else:
                row.append(random.randint(0, 3483839392))
        table.add_row(row)
    return table
def main():
    if current_user.is_authenticated():
        form = PasteForm(request.form)
    else:
        form = PasteFormNoAuth(request.form)
    if form.validate_on_submit():
        times = {
            '0': None,
            '1': {'minutes': +15},
            '2': {'minutes': +30},
            '3': {'hours': +1},
            '4': {'hours': +6},
            '5': {'hours': +12},
            '6': {'days': +1},
            '7': {'weeks': +1},
            '8': {'months': +1},
        }
        paste = database.Paste()
        paste.paste = form.text.data
        paste.digest = sha1(paste.paste.encode('utf-8')).hexdigest()
        if current_user.is_authenticated():
            paste.user = current_user.to_dbref()
        # Create a name and make sure it doesn't exist.
        paste.name = random_string()
        collision_check = database.Paste.objects(name__exact=paste.name).first()
        while collision_check is not None:
            paste.name = random_string()
            collision_check = database.Paste.objects(name__exact=paste.name).first()
        if form.language.data is not None:
            paste.language = form.language.data
        else:
            try:
                paste.language = guess_lexer(form.text.data).name
            except:
                paste.language = 'text'
        paste.time = datetime.utcnow()
        if times.get(form.expiration.data) is not None:
            paste.expire = arrow.utcnow().replace(**times.get(form.expiration.data)).datetime
        if times.get(form.expiration.data) is None and not current_user.is_authenticated():
            # Anonymous pastes default to a one-week expiration. The original
            # called arrow.utcnow without parentheses and looked up times with
            # the integer 7 (the keys are strings), both of which would raise.
            paste.expire = arrow.utcnow().replace(**times['7']).datetime
        paste.save()
        return redirect('/{id}'.format(id=paste.name))
    return render_template('new_paste.html', form=form)
def temp_save_file(post_file, extension, encoded, working_dir):
    file_data = get_data_from_post_file(post_file, encoded)
    # Pick a random name, retrying until it does not clash with an existing file.
    file_name = random_string() + extension
    absolute_write_path = os.path.join(working_dir, file_name)
    while os.path.exists(absolute_write_path):
        file_name = random_string() + extension
        absolute_write_path = os.path.join(working_dir, file_name)
    with open(absolute_write_path, 'wb+') as f:
        f.write(file_data)
    return absolute_write_path, file_name
def paste():
    paste = None
    language = None
    user = None
    expiration = None
    domain = 'https://zifb.in/'
    try:
        data = json.loads(request.data)
    except ValueError:
        return json.dumps({'error': 'invalid json'}), 400
    # Get the paste body.
    if 'paste' not in data:
        return json.dumps({'error': 'paste not found'}), 400
    paste = data.get('paste')
    # Get the language.
    if 'language' in data:
        language = data.get('language')
    # Get the API key / user. (Note: an invalid api_key makes .first() return
    # None here, so the .user access raises AttributeError.)
    if 'api_key' in data:
        user = database.ApiKey.objects(key=data.get('api_key')).first().user
    # Get the expiration.
    if 'expiration' in data:
        s = data.get('expiration')
        try:
            s = int(s)
        except ValueError:
            return json.dumps({'error': 'invalid expiration format, should be number of seconds'})
        if s is None or s == 0:
            expiration = None
        else:
            expiration = arrow.utcnow().replace(seconds=+s).datetime
    # Anonymous pastes without an explicit expiration expire after one hour.
    if not user and not expiration:
        expiration = arrow.utcnow().replace(hours=+1).datetime
    # Get the domain.
    if 'domain' in data:
        domain = 'https://{0}/'.format(data.get('domain'))
    paste = database.Paste(name='testing', paste=paste,
                           digest=sha1(paste.encode('utf-8')).hexdigest(),
                           time=arrow.utcnow().datetime, expire=expiration,
                           user=user, language=language)
    # Replace the placeholder name with a unique random one.
    paste.name = random_string()
    while database.Paste.objects(name=paste.name).first():
        paste.name = random_string()
    paste.save()
    return json.dumps({'paste': '{0}{1}'.format(domain, paste.name),
                       'expires': arrow.get(paste.expire).format('YYYY/MM/DD hh:mm')})
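# A sketch of how a client might call the JSON handler above. The endpoint
# URL is hypothetical; the field names match what the handler reads, and the
# raw-JSON body matches its use of json.loads(request.data):
import json
import requests

response = requests.post('https://zifb.in/api/paste',
                         data=json.dumps({
                             'paste': 'hello world',
                             'language': 'text',
                             'expiration': 3600,  # seconds; 0 omits an explicit expiration
                         }))
print(response.json())  # e.g. {'paste': 'https://zifb.in/<name>', 'expires': ...}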
def handle_build(self):
    # Arch.
    if self.args.arch:
        arches = self.args.arch.split()

    (package,) = self.args.package

    self.server.create_scratch_build({})
    return

    # Temporary folder for the source package.
    tmpdir = "/tmp/pakfire-%s" % util.random_string()
    try:
        os.makedirs(tmpdir)
        pakfire.dist(package, resultdir=[tmpdir,])
        for file in os.listdir(tmpdir):
            file = os.path.join(tmpdir, file)
            print file
    finally:
        if os.path.exists(tmpdir):
            util.rm(tmpdir)
def srp_fleet_add_view(request):
    logger.debug("srp_fleet_add_view called by user %s" % request.user)
    completed = False
    completed_srp_code = ""
    if request.method == 'POST':
        form = SrpFleetMainForm(request.POST)
        logger.debug("Request type POST contains form valid: %s" % form.is_valid())
        if form.is_valid():
            authinfo = AuthServicesInfoManager.get_auth_service_info(request.user)
            character = EveManager.get_character_by_id(authinfo.main_char_id)
            srp_fleet_main = SrpFleetMain()
            srp_fleet_main.fleet_name = form.cleaned_data['fleet_name']
            srp_fleet_main.fleet_doctrine = form.cleaned_data['fleet_doctrine']
            srp_fleet_main.fleet_time = form.cleaned_data['fleet_time']
            srp_fleet_main.fleet_srp_code = random_string(8)
            srp_fleet_main.fleet_commander = character
            srp_fleet_main.save()
            completed = True
            completed_srp_code = srp_fleet_main.fleet_srp_code
            logger.info("Created SRP Fleet %s by user %s" % (srp_fleet_main.fleet_name, request.user))
    else:
        logger.debug("Returning blank SrpFleetMainForm")
        form = SrpFleetMainForm()
    render_items = {'form': form, "completed": completed,
                    "completed_srp_code": completed_srp_code}
    return render_to_response('registered/srpfleetadd.html', render_items,
                              context_instance=RequestContext(request))
def srp_fleet_add_view(request):
    completed = False
    completed_srp_code = ""
    if request.method == 'POST':
        form = SrpFleetMainForm(request.POST)
        if form.is_valid():
            authinfo = AuthServicesInfoManager.get_auth_service_info(request.user)
            character = EveManager.get_character_by_id(authinfo.main_char_id)
            srp_fleet_main = SrpFleetMain()
            srp_fleet_main.fleet_name = form.cleaned_data['fleet_name']
            srp_fleet_main.fleet_doctrine = form.cleaned_data['fleet_doctrine']
            srp_fleet_main.fleet_time = form.cleaned_data['fleet_time']
            srp_fleet_main.fleet_srp_code = random_string(8)
            srp_fleet_main.fleet_commander = character
            srp_fleet_main.save()
            completed = True
            completed_srp_code = srp_fleet_main.fleet_srp_code
    else:
        form = SrpFleetMainForm()
    render_items = {'form': form, "completed": completed,
                    "completed_srp_code": completed_srp_code}
    return render_to_response('registered/srpfleetadd.html', render_items,
                              context_instance=RequestContext(request))
def save_to_s3(unique_identifier, post_file, tmp_folder, encoded=None):
    """
    Saves file to Amazon S3 with a unique identifier. (Can be a user id)
    """
    conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    key = S3_KEY_NAME + "_" + str(unique_identifier) + "_" + random_string() \
        + "_" + str(int(time.time()))
    # Check if the bucket exists; if not, create it.
    if not conn.check_bucket_exists(S3_BUCKET_NAME).status == 200:
        conn.create_located_bucket(S3_BUCKET_NAME, S3_LOCATION)
    # Resize the file.
    absolute_write_path, file_name = temp_save_file(post_file, ".jpg", encoded, tmp_folder)
    resize_img(absolute_write_path)
    # Upload the original file.
    orig_file = open(absolute_write_path, "r")
    obj = S3Object(orig_file.read())
    conn.put(S3_BUCKET_NAME, key + ".jpg", obj)
    # Upload the resized file.
    resized_filename = os.path.splitext(file_name)[0] + "_optimized.jpg"
    resized_file = open(os.path.join(tmp_folder, resized_filename), "r")
    obj = S3Object(resized_file.read())
    conn.put(S3_BUCKET_NAME, key + "_optimized.jpg", obj)
    # Remove the temp files.
    os.remove(absolute_write_path)
    os.remove(os.path.join(tmp_folder, resized_filename))
    return key
def assign_short_string(self):
    assert self.short_string is None, "Tried to assign short_string twice"
    while not self.short_string:
        # 1 / (31^6) = 1e-9 is the probability two strings collide
        # in an empty space
        astr = util.random_string(6)
        if not Visit.get_by_short_string(astr):
            self.short_string = astr
def assign_short_string(self):
    assert self.short_string is None, "Tried to assign short_string twice"
    while not self.short_string:
        # 1 / (31^6) = 1e-9 is the probability two strings collide
        # in an empty space
        # TODO(dan): This should be in a transaction
        astr = util.random_string(6)
        if not Patient.get_by_short_string(astr):
            self.short_string = astr
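# The 1 / (31^6) figure in the comments above checks out: with a 31-character
# alphabet and 6 positions there are 31**6 possible strings, so the chance
# that a fresh random string equals one given existing string is:
assert 31 ** 6 == 887503681   # about 8.9e8 possibilities
print(1.0 / 31 ** 6)          # about 1.1e-9, i.e. the quoted ~1e-9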
def _send_file(self):
    files = QFileDialog.getOpenFileNames(
        self.widget,
        "Select one or more files to send",
        QDir.homePath(),
        "Any file (*.*)"
    )
    ftinfo = cyemussa.CyFileTransferInfo()
    ftinfo.sender = self.app.me.yahoo_id
    ftinfo.receiver = self.cybuddy.yahoo_id
    ftinfo.host = ym.ft_host
    ftinfo.transfer_id = util.string_to_base64(
        util.string_to_md5(util.random_string(25))
    )
    transfer_task = FileTransferTask()
    transfer_task.transfer_info = ftinfo
    files_absolute = []
    sizes = []
    for path in files:
        f = cyemussa.CyFile()
        f.filename = QFile(path).fileName()
        f.filesize = QFile(path).size()
        transfer_task.files.append(f)
        files_absolute.append(QFileInfo(f.filename).fileName())
        sizes.append(f.filesize)
    self.transfer_tasks[transfer_task.transfer_info.transfer_id] = transfer_task
    # Build a thumbnail: a scaled preview for a single image, otherwise the
    # platform file icon.
    thumbnail = None
    if len(files) == 1 and QImageReader.imageFormat(files[0]):
        icon = util.scalePixmapAspectFill(QPixmap(files[0]), QSize(32, 32))
        icon_base64 = util.pixmap_to_base64(icon, 'JPG')
        thumbnail = icon_base64
    else:
        icon = QFileIconProvider().icon(QFileInfo(files[0]))
        icon_base64 = util.pixmap_to_base64(icon.pixmap(QSize(32, 32)))
    self._javascript(
        'file_out',
        ftinfo.receiver,
        json.dumps(files_absolute),
        json.dumps(sizes),
        ftinfo.transfer_id,
        icon_base64
    )
    ym.send_files(
        self.cybuddy.yahoo_id,
        transfer_task.transfer_info.transfer_id,
        files,
        thumbnail
    )
def __init__(self, distro_name=None, *args, **kwargs):
    self.distro_name = distro_name
    kwargs.update({
        "path": os.path.join(BUILD_ROOT, util.random_string()),
    })
    Pakfire.__init__(self, *args, **kwargs)

    # Let's see what is our host distribution.
    self.host_distro = distro.Distribution()
def srp_fleet_enable(request, fleet_id):
    logger.debug("srp_fleet_enable called by user %s for fleet id %s" % (request.user, fleet_id))
    if SrpFleetMain.objects.filter(id=fleet_id).exists():
        srpfleetmain = SrpFleetMain.objects.get(id=fleet_id)
        srpfleetmain.fleet_srp_code = random_string(8)
        srpfleetmain.save()
        logger.info("SRP Fleet %s enabled by user %s" % (srpfleetmain.fleet_name, request.user))
    else:
        logger.error(
            "Unable to enable SRP fleet id %s for user %s - fleet matching id not found." %
            (fleet_id, request.user)
        )
    return HttpResponseRedirect("/srp")
def run_exec(self):
    log.debug(_("Executing scriptlet..."))

    # Check if the interpreter does exist and is executable.
    if self.interpreter:
        interpreter = "%s/%s" % (self.pakfire.path, self.interpreter)
        if not os.path.exists(interpreter):
            raise ActionError, _("Cannot run scriptlet because no interpreter is available: %s" \
                % self.interpreter)
        if not os.access(interpreter, os.X_OK):
            raise ActionError, _("Cannot run scriptlet because the interpreter is not executable: %s" \
                % self.interpreter)

    # Create a name for the temporary script file.
    script_file_chroot = os.path.join("/", LOCAL_TMP_PATH,
        "scriptlet_%s" % util.random_string(10))
    script_file = os.path.join(self.pakfire.path, script_file_chroot[1:])
    assert script_file.startswith(self.pakfire.path)

    # Create the script directory, if it does not exist.
    script_dir = os.path.dirname(script_file)
    if not os.path.exists(script_dir):
        os.makedirs(script_dir)

    # Write the scriptlet to a file so that we can execute it.
    try:
        f = open(script_file, "wb")
        f.write(self.scriptlet)
        f.close()

        # The file is only accessible by root. (The mode must be an octal
        # literal; the original passed the decimal 700, which is mode 01274.)
        os.chmod(script_file, 0700)
    except:
        # Remove the file if an error occurs.
        try:
            os.unlink(script_file)
        except OSError:
            pass

        # XXX catch errors and return a beautiful message to the user
        raise

    # Generate the script command.
    command = [script_file_chroot] + self.args

    try:
        self.execute(command)
    except ShellEnvironmentError, e:
        raise ActionError, _("The scriptlet returned an error:\n%s" % e)
def test_filter_expression_and_select_count(test_table):
    p = random_string()
    test_table.put_item(Item={'p': p, 'c': 'hi', 'x': 'dog', 'y': 'cat'})
    test_table.put_item(Item={'p': p, 'c': 'yo', 'x': 'mouse', 'y': 'horse'})
    (prefilter_count, postfilter_count, pages, got_items) = full_query_and_counts(
        test_table,
        KeyConditionExpression='p=:p',
        FilterExpression='x=:x',
        Select='COUNT',
        ExpressionAttributeValues={':p': p, ':x': 'mouse'})
    # Exactly one item matches the filter on x. But because of Select=COUNT,
    # we shouldn't get an item back - just the count.
    assert postfilter_count == 1
    assert prefilter_count == 2
    assert got_items == []
def test_query_attributes_to_get(dynamodb, test_table):
    p = random_string()
    items = [{'p': p, 'c': str(i), 'a': str(i*10), 'b': str(i*100)} for i in range(10)]
    with test_table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    for wanted in [['a'],            # only non-key attributes
                   ['c', 'a'],       # a key attribute (sort key) and non-key
                   ['p', 'c'],       # entire key
                   ['nonexistent']   # none of the items have this attribute!
                  ]:
        got_items = full_query(test_table,
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            AttributesToGet=wanted)
        expected_items = [{k: x[k] for k in wanted if k in x} for x in items]
        assert multiset(expected_items) == multiset(got_items)
def test_number_magnitude_not_allowed(test_table_s):
    p = random_string()
    for num in [Decimal("1e126"), Decimal("11e125")]:
        with pytest.raises(ClientError, match='ValidationException.*overflow'):
            test_table_s.update_item(Key={'p': p},
                UpdateExpression='SET a = :val',
                ExpressionAttributeValues={':val': num})
    for num in [Decimal("1e-131"), Decimal("0.9e-130")]:
        print(num)
        with pytest.raises(ClientError, match='ValidationException.*underflow'):
            test_table_s.update_item(Key={'p': p},
                UpdateExpression='SET a = :val',
                ExpressionAttributeValues={':val': num})
def test_projection_expression_path_nesting_levels(test_table_s):
    p = random_string()
    # 32 nesting levels (including the top-level attribute) work.
    test_table_s.get_item(Key={'p': p}, ConsistentRead=True,
        ProjectionExpression='a' + ('.b' * 31))
    # 33 nesting levels do not. DynamoDB gives an error: "Invalid
    # ProjectionExpression: The document path has too many nesting levels;
    # nesting levels: 33".
    with pytest.raises(ClientError, match='ValidationException.*nesting levels'):
        test_table_s.get_item(Key={'p': p}, ConsistentRead=True,
            ProjectionExpression='a' + ('.b' * 32))
def test_select(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(4096)

    r.set(key, val)
    assert r.get(key) == val

    logger.debug('Switch to database 1')
    assert r.execute_command('SELECT 1') == 'OK'
    assert r.get(key) == None

    logger.debug('Switch back to default database 0')
    assert r.execute_command('SELECT 0') == 'OK'
    assert r.get(key) == val

    r.delete(key)
    assert r.get(key) == None

    logger.debug('Try to switch to invalid database 16')
    try:
        r.execute_command('SELECT 16')
        raise Exception('Expect that `SELECT 16` does not work')
    except redis.exceptions.ResponseError as ex:
        assert str(ex) == 'DB index is out of range'
def test_gsi_projection_keys_only(dynamodb):
    table = create_test_table(dynamodb,
        KeySchema=[
            {'AttributeName': 'p', 'KeyType': 'HASH'}
        ],
        AttributeDefinitions=[
            {'AttributeName': 'p', 'AttributeType': 'S'},
            {'AttributeName': 'x', 'AttributeType': 'S'},
        ],
        GlobalSecondaryIndexes=[
            {
                'IndexName': 'hello',
                'KeySchema': [
                    {'AttributeName': 'x', 'KeyType': 'HASH'},
                ],
                'Projection': {'ProjectionType': 'KEYS_ONLY'}
            }
        ])
    items = [{'p': random_string(), 'x': random_string(), 'y': random_string()}
             for i in range(10)]
    with table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    wanted = ['p', 'x']
    expected_items = [{k: x[k] for k in wanted if k in x} for x in items]
    assert_index_scan(table, 'hello', expected_items)
    table.delete()
def test_batch_write_invalid_operation(test_table_s):
    # Test a key attribute with the wrong type:
    p1 = random_string()
    p2 = random_string()
    items = [{'p': p1}, {'p': 3}, {'p': p2}]
    with pytest.raises(ClientError, match='ValidationException'):
        with test_table_s.batch_writer() as batch:
            for item in items:
                batch.put_item(item)
    # (The response key is 'Item'; the original asserted on lowercase 'item',
    # which made these checks pass vacuously.)
    for p in [p1, p2]:
        assert not 'Item' in test_table_s.get_item(Key={'p': p}, ConsistentRead=True)
    # Test a missing key attribute:
    p1 = random_string()
    p2 = random_string()
    items = [{'p': p1}, {'x': 'whatever'}, {'p': p2}]
    with pytest.raises(ClientError, match='ValidationException'):
        with test_table_s.batch_writer() as batch:
            for item in items:
                batch.put_item(item)
    for p in [p1, p2]:
        assert not 'Item' in test_table_s.get_item(Key={'p': p}, ConsistentRead=True)
def test_insert_null_key(cql, table1):
    s = random_string()
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(f"INSERT INTO {table1} (p,c) VALUES ('{s}', null)")
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(f"INSERT INTO {table1} (p,c) VALUES (null, '{s}')")
    # Try the same thing with a prepared statement, where a "None" stands for
    # a null. Note that this is completely different from UNSET_VALUE - only
    # with the latter should the insertion be ignored.
    stmt = cql.prepare(f"INSERT INTO {table1} (p,c) VALUES (?, ?)")
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(stmt, [s, None])
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(stmt, [None, s])
def test_delete_item_returnvalues(test_table_s):
    # By default, the previous value of an item is not returned:
    p = random_string()
    test_table_s.put_item(Item={'p': p, 'a': 'hi'})
    ret = test_table_s.delete_item(Key={'p': p})
    assert not 'Attributes' in ret
    # Using ReturnValues=NONE is the same:
    p = random_string()
    test_table_s.put_item(Item={'p': p, 'a': 'hi'})
    ret = test_table_s.delete_item(Key={'p': p}, ReturnValues='NONE')
    assert not 'Attributes' in ret
    # With ReturnValues=ALL_OLD, the old value of the item is returned
    # in an "Attributes" attribute:
    p = random_string()
    test_table_s.put_item(Item={'p': p, 'a': 'hi'})
    ret = test_table_s.delete_item(Key={'p': p}, ReturnValues='ALL_OLD')
    assert ret['Attributes'] == {'p': p, 'a': 'hi'}
    # If the item does not previously exist, "Attributes" is not returned
    # at all:
    p = random_string()
    ret = test_table_s.delete_item(Key={'p': p}, ReturnValues='ALL_OLD')
    assert not 'Attributes' in ret
    # The other ReturnValues options - UPDATED_OLD, ALL_NEW, UPDATED_NEW -
    # are supported by other operations, but not by DeleteItem:
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': p}, ReturnValues='UPDATED_OLD')
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': p}, ReturnValues='ALL_NEW')
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': p}, ReturnValues='UPDATED_NEW')
    # Also, obviously, a non-supported setting like "DOG" results in an error:
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': p}, ReturnValues='DOG')
    # The ReturnValues value is case sensitive, so while "NONE" is supported
    # (and tested above), "none" isn't:
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': p}, ReturnValues='none')
def test_batch_put_item_replace(test_table_s, test_table):
    p = random_string()
    with test_table_s.batch_writer() as batch:
        batch.put_item(Item={'p': p, 'a': 'hi'})
    assert test_table_s.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {'p': p, 'a': 'hi'}
    with test_table_s.batch_writer() as batch:
        batch.put_item(Item={'p': p, 'b': 'hello'})
    assert test_table_s.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {'p': p, 'b': 'hello'}
    c = random_string()
    with test_table.batch_writer() as batch:
        batch.put_item(Item={'p': p, 'c': c, 'a': 'hi'})
    assert test_table.get_item(Key={'p': p, 'c': c}, ConsistentRead=True)['Item'] == {'p': p, 'c': c, 'a': 'hi'}
    with test_table.batch_writer() as batch:
        batch.put_item(Item={'p': p, 'c': c, 'b': 'hello'})
    assert test_table.get_item(Key={'p': p, 'c': c}, ConsistentRead=True)['Item'] == {'p': p, 'c': c, 'b': 'hello'}
def test_batch_get_item_duplicate(test_table, test_table_s):
    p = random_string()
    with pytest.raises(ClientError, match='ValidationException.*duplicates'):
        test_table_s.meta.client.batch_get_item(RequestItems={
            test_table_s.name: {'Keys': [{'p': p}, {'p': p}]}})
    c = random_string()
    with pytest.raises(ClientError, match='ValidationException.*duplicates'):
        test_table.meta.client.batch_get_item(RequestItems={
            test_table.name: {'Keys': [{'p': p, 'c': c}, {'p': p, 'c': c}]}})
    # Not a duplicate:
    c2 = random_string()
    test_table.meta.client.batch_get_item(RequestItems={
        test_table.name: {'Keys': [{'p': p, 'c': c}, {'p': p, 'c': c2}]}})
def test_ttl_expiration_range(dynamodb):
    duration = 1200 if is_aws(dynamodb) else 10
    with new_test_table(dynamodb,
        KeySchema=[{'AttributeName': 'p', 'KeyType': 'HASH'},
                   {'AttributeName': 'c', 'KeyType': 'RANGE'}],
        AttributeDefinitions=[{'AttributeName': 'p', 'AttributeType': 'S'},
                              {'AttributeName': 'c', 'AttributeType': 'N'}]) as table:
        ttl_spec = {'AttributeName': 'c', 'Enabled': True}
        table.meta.client.update_time_to_live(TableName=table.name,
            TimeToLiveSpecification=ttl_spec)
        # c1 is in the past, and should be expired. c2 is in the distant
        # future and should not be expired.
        p = random_string()
        c1 = int(time.time()) - 60
        c2 = int(time.time()) + 3600
        table.put_item(Item={'p': p, 'c': c1})
        table.put_item(Item={'p': p, 'c': c2})
        start_time = time.time()
        def check_expired():
            return not 'Item' in table.get_item(Key={'p': p, 'c': c1}) \
                and 'Item' in table.get_item(Key={'p': p, 'c': c2})
        while time.time() < start_time + duration:
            print(f"--- {int(time.time()-start_time)} seconds")
            if 'Item' in table.get_item(Key={'p': p, 'c': c1}):
                print("c1 alive")
            if 'Item' in table.get_item(Key={'p': p, 'c': c2}):
                print("c2 alive")
            if check_expired():
                break
            time.sleep(duration / 15)
        # After the delay, c2 should be alive, c1 should not.
        assert check_expired()
def test_lsi_and_gsi(test_table_lsi_gsi):
    desc = test_table_lsi_gsi.meta.client.describe_table(
        TableName=test_table_lsi_gsi.name)
    assert 'Table' in desc
    assert 'LocalSecondaryIndexes' in desc['Table']
    assert 'GlobalSecondaryIndexes' in desc['Table']
    lsis = desc['Table']['LocalSecondaryIndexes']
    gsis = desc['Table']['GlobalSecondaryIndexes']
    assert sorted([lsi['IndexName'] for lsi in lsis]) == ['hello_l1']
    assert sorted([gsi['IndexName'] for gsi in gsis]) == ['hello_g1']
    items = [{'p': random_string(), 'c': random_string(), 'x1': random_string()}
             for i in range(17)]
    p1, c1, x1 = items[0]['p'], items[0]['c'], items[0]['x1']
    with test_table_lsi_gsi.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    for index in ['hello_g1', 'hello_l1']:
        expected_items = [i for i in items if i['p'] == p1 and i['x1'] == x1]
        retrying_assert_index_query(test_table_lsi_gsi, index, expected_items,
            KeyConditions={
                'p': {'AttributeValueList': [p1], 'ComparisonOperator': 'EQ'},
                'x1': {'AttributeValueList': [x1], 'ComparisonOperator': 'EQ'}
            })
def test_streams_last_result(test_table_ss_keys_only, dynamodbstreams):
    table, arn = test_table_ss_keys_only
    iterators = latest_iterators(dynamodbstreams, arn)
    # Do an UpdateItem operation that is expected to leave one event in the
    # stream.
    table.update_item(Key={'p': random_string(), 'c': random_string()},
        UpdateExpression='SET x = :val1',
        ExpressionAttributeValues={':val1': 5})
    # Eventually (we may need to retry this for a while), *one* of the
    # stream shards will return one event:
    timeout = time.time() + 15
    while time.time() < timeout:
        for iter in iterators:
            response = dynamodbstreams.get_records(ShardIterator=iter)
            if 'Records' in response and response['Records'] != []:
                # Found the shard with the data! Test that it only has
                # one event and that if we try to read again, we don't
                # get more data (this was issue #6942).
                assert len(response['Records']) == 1
                assert 'NextShardIterator' in response
                response = dynamodbstreams.get_records(
                    ShardIterator=response['NextShardIterator'])
                assert response['Records'] == []
                return
        time.sleep(0.5)
    pytest.fail("timed out")
def test_empty_update_delete(test_table):
    p = random_string()
    c = random_string()
    test_table.update_item(Key={'p': p, 'c': c},
        AttributeUpdates={'hello': {'Value': 'world', 'Action': 'PUT'}})
    test_table.update_item(Key={'p': p, 'c': c},
        AttributeUpdates={'hello': {'Action': 'DELETE'}})
    item = test_table.get_item(Key={'p': p, 'c': c}, ConsistentRead=True)['Item']
    assert item == {'p': p, 'c': c}
def test_batch_get_item_large(test_table_sn):
    p = random_string()
    long_content = random_string(100) * 500
    count = 30
    with test_table_sn.batch_writer() as batch:
        for i in range(count):
            batch.put_item(Item={'p': p, 'c': i, 'content': long_content})
    # long_content is 49 KB, 30 such items is about 1.5 MB, well below the
    # BatchGetItem response limit, so the following BatchGetItem call should
    # be able to return all items we just wrote in one response - and in fact
    # does so in Alternator and thus exercises its handling of large
    # responses. Strangely, in DynamoDB, even though we're below the limit,
    # it often returns only partial results (with the unread keys in
    # UnprocessedKeys), so for reliable success of this test we need a loop:
    responses = []
    to_read = {test_table_sn.name: {
        'Keys': [{'p': p, 'c': c} for c in range(count)],
        'ConsistentRead': True}}
    while to_read:
        reply = test_table_sn.meta.client.batch_get_item(RequestItems=to_read)
        assert 'UnprocessedKeys' in reply
        to_read = reply['UnprocessedKeys']
        assert 'Responses' in reply
        assert test_table_sn.name in reply['Responses']
        responses.extend(reply['Responses'][test_table_sn.name])
    assert multiset(responses) == multiset(
        [{'p': p, 'c': i, 'content': long_content} for i in range(count)])
def request_manifest(self):
    try:
        producer = KafkaProducer(bootstrap_servers=self.servers,
                                 client_id=random_string())
        # Spawn a consumer on a freshly named topic to receive the reply.
        response_topic = random_string()
        t = ManifestConsumer(servers=self.servers, response_topic=response_topic)
        t.start()
        self.request["response_topic"] = response_topic
        msg = {"request": self.request}
        self.logger.info("send message: {0}".format(msg))
        producer.send(self.request["kafka_topic"], json.dumps(msg).encode('utf-8'))
        producer.flush()
        producer.close()
        t.join()
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_tb(exc_traceback, limit=20, file=sys.stdout)
        self.logger.error(str(exc_type))
        self.logger.error(str(exc_value))
def test_update_item_non_existent(test_table_s):
    # An update that puts an attribute on a non-existent item creates it:
    p = random_string()
    test_table_s.update_item(Key={'p': p},
        AttributeUpdates={'a': {'Value': 3, 'Action': 'PUT'}})
    assert test_table_s.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {'p': p, 'a': 3}
    # An update that does *nothing* on a non-existent item still creates it:
    p = random_string()
    test_table_s.update_item(Key={'p': p}, AttributeUpdates={})
    assert test_table_s.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {'p': p}
    # HOWEVER, an update that only deletes an attribute on a non-existent
    # item does NOT create it (issue #5862 was about Alternator wrongly
    # creating an empty item in this case).
    p = random_string()
    test_table_s.update_item(Key={'p': p},
        AttributeUpdates={'a': {'Action': 'DELETE'}})
    assert not 'Item' in test_table_s.get_item(Key={'p': p}, ConsistentRead=True)
    # Test the same thing - that an attribute-deleting update does not
    # create a non-existing item - but now with the update expression syntax:
    p = random_string()
    test_table_s.update_item(Key={'p': p}, UpdateExpression='REMOVE a')
    assert not 'Item' in test_table_s.get_item(Key={'p': p}, ConsistentRead=True)
def test_batch_get_item_projection_expression(test_table):
    items = [{'p': random_string(), 'c': random_string(),
              'val1': random_string(), 'val2': random_string()} for i in range(10)]
    with test_table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    keys = [{k: x[k] for k in ('p', 'c')} for x in items]
    for wanted in [['p'], ['p', 'c'], ['val1'], ['p', 'val2']]:
        reply = test_table.meta.client.batch_get_item(RequestItems={
            test_table.name: {
                'Keys': keys,
                'ProjectionExpression': ",".join(wanted),
                'ConsistentRead': True
            }})
        got_items = reply['Responses'][test_table.name]
        expected_items = [{k: item[k] for k in wanted if k in item} for item in items]
        assert multiset(got_items) == multiset(expected_items)
def test_batch_get_item_projection_expression_path(test_table_s):
    items = [{'p': random_string(),
              'a': {'b': random_string(), 'x': 'hi'},
              'c': random_string()} for i in range(3)]
    with test_table_s.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    got_items = test_table_s.meta.client.batch_get_item(RequestItems={
        test_table_s.name: {
            'Keys': [{'p': item['p']} for item in items],
            'ProjectionExpression': 'a.b',
            'ConsistentRead': True
        }})['Responses'][test_table_s.name]
    expected_items = [{'a': {'b': item['a']['b']}} for item in items]
    assert multiset(got_items) == multiset(expected_items)
def __init__(self, owner, syst):
    self.id = next_id()
    self.owner = owner
    self.referer = syst
    self.name = random_string()
    self.planets = dict()  # rule: pla1.id < pla2.id <=> pla1.position < pla2.position
    self.culture = 0
    self.merchs = dict()
    self.policy = I('tax')
    self.constructions = dict()
    self.poc = 0
    self.ore = 0
    owner.add_asset(self)
    for m in F('goods'):
        self.merchs[m] = 0
def test_query_limit_paging(test_table_sn):
    numbers = [Decimal(i) for i in range(20)]
    # Insert these numbers, in random order, into one partition:
    p = random_string()
    items = [{'p': p, 'c': num} for num in random.sample(numbers, len(numbers))]
    with test_table_sn.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Verify that full_query() returns all these numbers, in sorted order.
    # full_query() will do a query with the given limit, and resume it again
    # and again until the last page.
    for limit in [1, 2, 3, 7, 10, 17, 100, 10000]:
        got_items = full_query(test_table_sn,
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Limit=limit)
        got_sort_keys = [x['c'] for x in got_items]
        assert got_sort_keys == numbers
def test_query_limit(test_table_sn):
    numbers = [Decimal(i) for i in range(10)]
    # Insert these numbers, in random order, into one partition:
    p = random_string()
    items = [{'p': p, 'c': num} for num in random.sample(numbers, len(numbers))]
    with test_table_sn.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Verify that we get back the numbers in their sorted order.
    # First, no Limit, so we should get all numbers (we have few of them, so
    # it all fits in the default 1MB limitation):
    got_items = test_table_sn.query(ConsistentRead=True,
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})['Items']
    got_sort_keys = [x['c'] for x in got_items]
    assert got_sort_keys == numbers
    # Now try a few different Limit values, and verify that the query
    # returns exactly the first Limit sorted numbers.
    for limit in [1, 2, 3, 7, 10, 17, 100, 10000]:
        got_items = test_table_sn.query(ConsistentRead=True,
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Limit=limit)['Items']
        assert len(got_items) == min(limit, len(numbers))
        got_sort_keys = [x['c'] for x in got_items]
        assert got_sort_keys == numbers[0:limit]
    # Unfortunately, the boto3 library forbids a Limit of 0 on its own,
    # before even sending a request, so we can't test how the server responds.
    with pytest.raises(ParamValidationError):
        test_table_sn.query(ConsistentRead=True,
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Limit=0)
def test_begins_with_more_str(dynamodb, test_table_ss):
    p = random_string()
    items = [{'p': p, 'c': sort_key, 'str': 'a'}
             for sort_key in [u'ÿÿÿ', u'cÿbÿ', u'cÿbÿÿabg']]
    with test_table_ss.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    got_items = full_query(test_table_ss, KeyConditions={
        'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'},
        'c': {'AttributeValueList': [u'ÿÿ'], 'ComparisonOperator': 'BEGINS_WITH'}})
    assert sorted([d['c'] for d in got_items]) == sorted(
        [d['c'] for d in items if d['c'].startswith(u'ÿÿ')])
    got_items = full_query(test_table_ss, KeyConditions={
        'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'},
        'c': {'AttributeValueList': [u'cÿbÿ'], 'ComparisonOperator': 'BEGINS_WITH'}})
    assert sorted([d['c'] for d in got_items]) == sorted(
        [d['c'] for d in items if d['c'].startswith(u'cÿbÿ')])
def test_query_reverse_paging(test_table_sn):
    numbers = [Decimal(i) for i in range(20)]
    # Insert these numbers, in random order, into one partition:
    p = random_string()
    items = [{'p': p, 'c': num} for num in random.sample(numbers, len(numbers))]
    with test_table_sn.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    reversed_numbers = list(reversed(numbers))
    # Verify that with ScanIndexForward=False, full_query() returns all
    # these numbers in reversed sorted order - getting pages of Limit items
    # at a time and resuming the query.
    for limit in [1, 2, 3, 7, 10, 17, 100, 10000]:
        got_items = full_query(test_table_sn,
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            ScanIndexForward=False, Limit=limit)
        got_sort_keys = [x['c'] for x in got_items]
        assert got_sort_keys == reversed_numbers
def test_update_expression_plus_overflow(test_table_s):
    p = random_string()
    with pytest.raises(ClientError, match='ValidationException.*overflow'):
        test_table_s.update_item(Key={'p': p},
            UpdateExpression='SET b = :val1 + :val2',
            ExpressionAttributeValues={':val1': Decimal("9e125"), ':val2': Decimal("9e125")})
    with pytest.raises(ClientError, match='ValidationException.*overflow'):
        test_table_s.update_item(Key={'p': p},
            UpdateExpression='SET b = :val1 - :val2',
            ExpressionAttributeValues={':val1': Decimal("9e125"), ':val2': Decimal("-9e125")})
def route_add_answer(question_id):
    global update_views
    update_views = False
    if request.method == 'POST':
        random_file_name = util.random_string()
        message = util.make_compat_display(request.form['message'])
        # Escape single quotes for the SQL layer.
        message = message.replace("'", "''")
        file = request.files['file']
        filename = secure_filename(file.filename)
        if file and data_manager.allowed_file(file.filename):
            extension = filename[-4:]
            filename = str(random_file_name) + extension
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        data_manager.add_answer(question_id, message, filename)
        return redirect(url_for('route_question', question_id=question_id))
    return render_template('add_answer.html', question_id=question_id)
def test_query_select(test_table_sn):
    numbers = [Decimal(i) for i in range(10)]
    # Insert these numbers, in random order, into one partition:
    p = random_string()
    items = [{'p': p, 'c': num, 'x': num} for num in random.sample(numbers, len(numbers))]
    with test_table_sn.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Verify that we get back the numbers in their sorted order. By default,
    # query returns all attributes:
    got_items = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})['Items']
    got_sort_keys = [x['c'] for x in got_items]
    assert got_sort_keys == numbers
    got_x_attributes = [x['x'] for x in got_items]
    assert got_x_attributes == numbers
    # Select=ALL_ATTRIBUTES does exactly the same as the default - return
    # all attributes:
    got_items = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
        Select='ALL_ATTRIBUTES')['Items']
    got_sort_keys = [x['c'] for x in got_items]
    assert got_sort_keys == numbers
    got_x_attributes = [x['x'] for x in got_items]
    assert got_x_attributes == numbers
    # Select=ALL_PROJECTED_ATTRIBUTES is not allowed on a base table (it
    # is just for indexes, when IndexName is specified):
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_sn.query(
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Select='ALL_PROJECTED_ATTRIBUTES')
    # Select=SPECIFIC_ATTRIBUTES requires that either AttributesToGet or
    # ProjectionExpression be given, but then adds nothing beyond them:
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_sn.query(
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Select='SPECIFIC_ATTRIBUTES')
    got_items = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
        Select='SPECIFIC_ATTRIBUTES', AttributesToGet=['x'])['Items']
    expected_items = [{'x': i} for i in numbers]
    assert got_items == expected_items
    got_items = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
        Select='SPECIFIC_ATTRIBUTES', ProjectionExpression='x')['Items']
    assert got_items == expected_items
    # Select=COUNT just returns a count - not any items:
    got = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
        Select='COUNT')
    assert got['Count'] == len(numbers)
    assert not 'Items' in got
    # Check that we also get a count without Select=COUNT - but in that case
    # we get the items as well:
    got = test_table_sn.query(
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    assert got['Count'] == len(numbers)
    assert 'Items' in got
    # Select with some unknown string generates a validation exception:
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_sn.query(
            KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}},
            Select='UNKNOWN')
def run(self):
    try:
        topic = self.config["kafka_topic"]
        client_id = random_string()
        consumer = KafkaConsumer(topic, bootstrap_servers=self.config["servers"],
                                 client_id=client_id)
        self.logger.info("running data service: {0}:{1}".format(client_id, topic))
        tasks = []
        max_tasks = int(self.config["max_response_tasks"])
        for message in consumer:
            try:
                json_msg = json.loads(message.value)
                request = json_msg["request"]
                self.logger.info("received request: {0}".format(request))
                self.validate_request(request)
                t = DataResponse(dbconfig=self.config['database'],
                                 servers=self.config["servers"],
                                 request=request,
                                 data_store=self.config['data_store'])
                # Cap the number of concurrent response tasks; when the pool
                # is full, wait for the oldest task to finish first.
                if len(tasks) < max_tasks:
                    tasks.append(t)
                else:
                    oldest = tasks.pop(0)
                    oldest.join()
                    tasks.append(t)
                assert len(tasks) <= max_tasks
                t.start()
            except Exception as e:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_tb(exc_traceback, limit=20, file=sys.stdout)
                self.logger.error(str(e))
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_tb(exc_traceback, limit=20, file=sys.stdout)
        self.logger.error(str(exc_type))
        self.logger.error(str(exc_value))
def test_query_sort_order_bytes(test_table_sb):
    # Insert a lot of random items in one new partition.
    # We arbitrarily use random_bytes of a fixed length.
    p = random_string()
    items = [{'p': p, 'c': random_bytes(10)} for i in range(128)]
    with test_table_sb.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    got_items = full_query(test_table_sb,
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    assert len(items) == len(got_items)
    sort_keys = [x['c'] for x in items]
    got_sort_keys = [x['c'] for x in got_items]
    # Boto3's "Binary" objects are sorted as if bytes are signed integers.
    # This isn't the order that DynamoDB itself uses (byte 0 should be first,
    # not byte -128). Sorting the byte array ".value" works.
    assert sorted(got_sort_keys, key=lambda x: x.value) == got_sort_keys
    assert sorted(sort_keys) == got_sort_keys
def test_query_sort_order_string(test_table):
    # Insert a lot of random items in one new partition.
    # str(i) has a non-obvious sort order (e.g., "100" comes before "2"),
    # so it makes a nice test.
    p = random_string()
    items = [{'p': p, 'c': str(i)} for i in range(128)]
    with test_table.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    got_items = full_query(test_table,
        KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    assert len(items) == len(got_items)
    # Extract just the sort key ("c") from the items.
    sort_keys = [x['c'] for x in items]
    got_sort_keys = [x['c'] for x in got_items]
    # Verify that got_sort_keys are already sorted (in string order).
    assert sorted(got_sort_keys) == got_sort_keys
    # Verify that got_sort_keys are a sorted version of the expected sort_keys.
    assert sorted(sort_keys) == got_sort_keys
def __write_temp_xauth_file(self, wildcard_addr):
    if self.xauth_cookie is None:
        self.xauth_cookie = util.random_string(16)

    xauth_name = "MIT-MAGIC-COOKIE-1"
    xauth_data = self.xauth_cookie

    if wildcard_addr:
        family = 0xffff  # FamilyWild
        display_addr = ""
    else:
        family = 0x0100  # FamilyLocal
        display_addr = socket.gethostname()

    display_num_str = self.display_number
    display_num_len = len(display_num_str)
    display_addr_len = len(display_addr)
    xauth_name_len = len(xauth_name)
    xauth_data_len = len(xauth_data)

    # Serialize the Xauthority record: family, then length-prefixed address,
    # display number, auth name, and auth data.
    pack_format = ">hh%dsh%dsh%dsh%d" % (display_addr_len, display_num_len,
                                         xauth_name_len, xauth_data_len)
    blob = struct.pack(pack_format,
                       family,
                       display_addr_len, display_addr,
                       display_num_len, display_num_str,
                       xauth_name_len, xauth_name,
                       xauth_data_len) + xauth_data

    (fd, temp_xauth_file) = tempfile.mkstemp(prefix=".xauth-")
    os.write(fd, blob)
    os.close(fd)
    dprint("Wrote temporary xauth file to %s" % temp_xauth_file)
    return temp_xauth_file
def edit_distance(a, b, m, n):
    # Naive recursive edit distance between a[:m] and b[:n]. The function
    # header and base cases were cut off in the snippet; they are restored
    # here in the standard way, consistent with the DP version below.
    if m == 0:
        return n
    if n == 0:
        return m
    if a[m-1] == b[n-1]:
        return edit_distance(a, b, m-1, n-1)
    return 1 + min(edit_distance(a, b, m, n-1),
                   edit_distance(a, b, m-1, n-1),
                   edit_distance(a, b, m-1, n))

def edit_distance_dynamic_programming(a, b, m, n):
    p = [[0 for j in range(n+1)] for i in range(m+1)]
    for i in xrange(m+1):
        for j in xrange(n+1):
            if i == 0:
                p[i][j] = j
            elif j == 0:
                p[i][j] = i
            elif a[i-1] == b[j-1]:
                p[i][j] = p[i-1][j-1]
            else:
                p[i][j] = 1 + min(p[i][j-1], p[i-1][j], p[i-1][j-1])
    return p[m][n]

if __name__ == '__main__':
    # Cross-check the recursive and DP implementations on random inputs.
    for x in xrange(1000):
        a = random_string(random.randint(5, 20))
        b = random_string(random.randint(5, 20))
        e1 = edit_distance(a, b, len(a), len(b))
        e2 = edit_distance_dynamic_programming(a, b, len(a), len(b))
        if e1 != e2:
            print e1, e2, a, b
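# A third, memoized variant of the same recurrence, shown for comparison: it
# produces the same answers as the two implementations above while avoiding
# the exponential blowup of the naive recursion. This is a sketch, written
# version-neutrally since the file above uses Python 2 idioms:
def edit_distance_memo(a, b, m, n, memo=None):
    if memo is None:
        memo = {}
    if (m, n) in memo:
        return memo[(m, n)]
    if m == 0:
        result = n
    elif n == 0:
        result = m
    elif a[m-1] == b[n-1]:
        result = edit_distance_memo(a, b, m-1, n-1, memo)
    else:
        result = 1 + min(edit_distance_memo(a, b, m, n-1, memo),
                         edit_distance_memo(a, b, m-1, n-1, memo),
                         edit_distance_memo(a, b, m-1, n, memo))
    memo[(m, n)] = result
    return result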
def random_cipher(text):
    # Prepend a random-length random prefix before encrypting.
    text = util.random_string(random.randint(42, 1024)) + text
    return MersenneCipher().encrypt(text, random.randint(0, 65535))
def install(self, requires, interactive=True, logger=None, signatures_mode=None, **kwargs):
    # Initialize this pakfire instance.
    self.initialize()

    if not logger:
        logger = logging.getLogger("pakfire")

    # Pointer to temporary repository.
    repo = None

    # Sort out what we got...
    download_packages = []
    local_packages = []
    relations = []

    for req in requires:
        if isinstance(req, packages.Package):
            relations.append(req)
            continue

        # This looks like a file.
        elif req.endswith(".%s" % PACKAGE_EXTENSION) and os.path.exists(req) and os.path.isfile(req):
            local_packages.append(req)
            continue

        # Remote files.
        elif req.startswith("http://") or req.startswith("https://") or req.startswith("ftp://"):
            download_packages.append(req)
            continue

        # We treat the rest as relations. The solver will return any errors.
        relations.append(req)

    # Redefine requires, which will be the list that will be passed to the
    # solver.
    requires = relations

    try:
        # If we have got files to install, we need to create a temporary
        # repository called 'localinstall'.
        # XXX FIX TMP PATH
        if local_packages or download_packages:
            repo = repository.RepositoryDir(self, "localinstall",
                _("Local install repository"),
                os.path.join(LOCAL_TMP_PATH, "repo_%s" % util.random_string()))

            # Register the repository.
            self.repos.add_repo(repo)

            # Download packages.
            for download_package in download_packages:
                repo.download_package(download_package)

            # Add all packages to the repository index.
            repo.add_packages(local_packages)

            # Add all packages to the requires.
            requires += repo

        # Do the solving.
        request = self.pool.create_request(install=requires)
        solver = self.pool.solve(request, logger=logger, interactive=interactive, **kwargs)

        # Create the transaction.
        t = transaction.Transaction.from_solver(self, solver)
        t.dump(logger=logger)

        # Ask if the user acknowledges the transaction.
        if interactive and not t.cli_yesno():
            return

        # Run the transaction.
        t.run(logger=logger, signatures_mode=signatures_mode)
    finally:
        if repo:
            # Remove the temporary repository we created earlier.
            repo.remove()
            self.repos.rem_repo(repo)
def test_random_string(self):
    self.assertEquals(len(util.random_string(10)), 10)
    self.assertEquals(len(util.random_string(100)), 100)
    self.assertEquals(len(util.random_string(1000)), 1000)
def new_api_key():
    key = random_string(size=32)
    api_key = database.ApiKey(user=current_user.to_dbref(), key=key)
    api_key.save()
    return redirect(url_for('settings'))
def auto_populate(self):
    self.field_1 = random_string(16)
    self.field_2 = random_integer()
def __init__(self):
    self.key = util.random_string(16)
    # Note: the IV is simply reused from the key.
    self.iv = self.key
def test_random_string(self):
    self.assertEquals(6, len(util.random_string(6)))
def auto_populate(self):
    self.field_1 = random_string(32)
    self.field_2 = random_string(32)
    self.field_3 = random_string(32)
    self.field_4 = random_string(32)
    self.data = random_string(64)
def setUp(self):
    self.map = self.client.get_map(random_string())
def __init__(self, block_size=16):
    self.key = random_string(block_size)
    self.block_size = block_size
    self.line = None
def __init__(self):
    self.key = util.random_string(20)
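# A note on the snippets above that derive secrets from random_string() (API
# keys, cipher keys, SRP codes): Python's random module is not
# cryptographically secure. Where the string guards a secret, a CSPRNG-backed
# variant is preferable - a sketch using the Python 3 standard library:
import secrets
import string

def secure_random_string(length=16):
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))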