def generateBaseDRQ(self, domain):
    """Generator for Basic DNS Range Queries (randomly generated query sets).

    Queries are unique inside their respective sets, but may appear more
    than once across different query blocks.
    @param domain: Domain for which a DNS Range Query should be generated
    @return: List of Sets, in order, each set representing a query block
    """
    if not DB.isValidTarget(domain):
        Error.printErrorAndExit(domain + " is not a valid target")
    target_pattern_len = DB.getPatternLengthForHost(domain)
    target_pattern = DB.getPatternForHost(domain)
    # Fetch the dummy hosts before the target is stripped from its pattern,
    # so the dummy count is based on the full pattern size.
    dummies = DB.getRandomHosts((Config.RQSIZE - 1) * len(target_pattern))
    target_pattern.remove(domain)
    # First block holds the target itself; each remaining subquery gets its
    # own block, in pattern order.
    blocks = [set([domain])]
    for subquery in target_pattern:
        blocks.append(set([subquery]))
    # Round-robin the dummies over all blocks so they are spread as evenly
    # as possible (zip terminates once the dummy list is exhausted).
    for slot, dummy in zip(cycle(range(target_pattern_len)), dummies):
        blocks[slot].add(dummy)
    return blocks
def attack(self, block):
    """Attack a given Range Query with a distinguishable first block.

    This function can only be used under specific circumstances, which is
    why it is not the default function. To use it, change the Dictionary of
    the getAttackerFor-function in DRQPatternAttack.py to point to
    DFBPatternBRQ instead of DFBPatternPRQ, but be aware that it will not
    always work on small data sets.
    @param block: Tuple of (first block, remaining range query), both sets
    @return: List of possible results
    """
    first_block, remainder = block
    # Fold the first block into the remainder (mutates the caller's set,
    # matching the original contract).
    n_guess = float(len(first_block))
    remainder.update(first_block)
    total = len(remainder)
    # Upper bound on the pattern length, padded because duplicates across
    # blocks can cause a miscalculation of up to floor(real_len/real_N).
    # ceil() guards border cases where the real M would yield x while the
    # detected M yields x-1 — rare, but handled.
    max_len = math.ceil(total / n_guess)
    max_len += 2 * math.ceil(max_len / n_guess)
    min_len = math.floor(total / (n_guess + 1))
    candidates = []
    for candidate in first_block:
        if not DB.isValidTarget(candidate):
            continue
        if not (min_len <= DB.getPatternLengthForHost(candidate) <= max_len):
            continue
        # Pattern must be fully contained in the combined range query.
        if DB.getPatternForHost(candidate) <= remainder:
            candidates.append(candidate)
    return candidates
def parse():
    """Parses the INFILE.

    INFILE is expected to have a format of:
    target.tld:query1.tld,query2.tld,query3.tld,...
    No parameters or return values, all info is read from the config and
    written to the database.
    @bug: Leading www. in domain name may cause issues if the www. is
          omitted in the pattern
    """
    if not Config.QUIET:
        print("Beginning parsing of pattern file...")
    with open(Config.INFILE, 'r') as fobj:
        LC = sum(1 for line in fobj)  # get line count of file (for progress bar)
    stat = Progress.Bar(LC, "=")  # get progress bar instance
    # Fix: the second pass previously used a bare open() whose handle was
    # never closed; a with-block guarantees the file is released.
    with open(Config.INFILE, 'r') as fobj:
        for line in fobj:
            line = line.strip()  # Remove trailing newlines
            target = line[:line.find(":")]  # Find the target
            if target.startswith("www."):  # remove leading www. of target
                target = target[4:]
            queries = line[line.find(":")+1:].split(",")  # Find the queries
            pattern = set()  # Add target and queries...
            pattern.add(target)
            for element in queries:
                if (element.find(":") > 0):
                    element = element[:element.find(":")]  # Remove Port information, if any
                if element.startswith("www."):
                    element = element[4:]  # Remove leading www., if any
                pattern.add(element)  # Add to current pattern
            DB.addTarget(target, pattern)  # Actually add the information to the DB
            stat.tick()  # notify progress bar
    # Fix: 'print "Done"' was Python-2-only statement syntax, inconsistent
    # with the print() calls above; the call form behaves identically on 2 and 3.
    if not Config.QUIET:
        print("Done")
def attack(self, rq):
    """Attack a given Range Query using the assumption from the class description.

    @param rq: A Range Query, as returned by generate.DRQ
    @return: list of possible results
    """
    hits = []
    for query in rq:  # every query in the range is a potential pattern head
        if not DB.isValidTarget(query):
            continue
        # A candidate matches when its full pattern is contained in the
        # range query, i.e. the intersection covers the whole pattern.
        overlap = rq & DB.getPatternForHost(query)
        if len(overlap) == DB.getPatternLengthForHost(query):
            hits.append(query)
    return hits
def attack(self, block):
    """Attack a given Range Query with a distinguishable first block.

    @param block: Tuple of (first block, remaining range query), both sets
    @return: List of possible results
    """
    first_block, remainder = block
    # Merge the first block into the remainder (mutates the caller's set,
    # as the original did) so subset checks see the complete query.
    remainder.update(first_block)
    # Candidates are first-block entries that start a known pattern fully
    # contained in the merged range query.
    return [candidate for candidate in first_block
            if DB.isValidTarget(candidate)
            and DB.getPatternForHost(candidate) <= remainder]
def on_batch_start(self, batch_id):
    """Queue one fetch work item per DB item at the start of a batch."""
    works = []
    for item in DB.get_items():
        works.append({
            "fetcherName": item["fetcherName"],
            "url": "https://" + item["host"] + "/dp/" + item["itemId"],
        })
    print(works)  # debug trace of the queued work items
    self.add_works(works)
def getUserAlias(id):
    """Build an operator alias: first initial + everything after the first space.

    Raises ValueError if the stored name contains no space (same as the
    original .index-based lookup).
    """
    full_name = DB.getOperatorByID(id)
    first, last = full_name.split(" ", 1)
    return first[0] + last
def runningJobs():
    """List running jobs; on POST with a JobID, jump to that job's update page."""
    page = "WORK IN PROCESS"
    jobs = [{
        'jobID': row[0],
        'job': row[1],
        'workorder': row[2],
        'cell': row[3],
        'status': row[4],
        'weight': row[5],
        'operator': row[7],
        'timestamp': row[8],
    } for row in DB.getRunningJobsList()]
    if request.method == 'POST':
        if request.form['JobID']:
            return redirect(url_for('updateJob', jobID=request.form['JobID']))
        # Missing JobID: fall back to re-rendering the list.
        return render_template('runningjobs.html', jobs=jobs, page=page)
    return render_template('runningjobs.html', jobs=jobs, page=page)
def set_channel_language(id, language):
    """Persist the preferred language for a channel (merge write, no overwrite)."""
    doc = DB.collection("channels").document(str(id))
    doc.set({"language": language}, merge=True)
def set_combat_state(id, combat):
    """Persist the in-combat flag for a channel (merge write, no overwrite)."""
    doc = DB.collection("channels").document(str(id))
    doc.set({"combat": {"in_combat": combat}}, merge=True)
class Project(object):
    """This is the primary class to setup and run PopGen projects."""

    def __init__(self, config_loc):
        # Path to the YAML configuration file describing the project.
        self.config_loc = config_loc

    def load_project(self):
        """Load configuration, project metadata, and input data, in that order."""
        self._load_config()
        self._populate_project_properties()
        self._load_data()

    def _load_config(self):
        # TODO: validating config file for YAML
        # TODO: validating YAML config file for field types
        # TODO: validating YAML for consistency across fields/config elements
        print(os.getcwd())  # debug aid: shows where a relative config path resolves from
        # Fix: the file handle was previously opened and never closed; a
        # with-block guarantees it is released even if parsing fails.
        with open(self.config_loc, "r") as config_f:
            # NOTE(review): yaml.load without an explicit Loader can construct
            # arbitrary Python objects; prefer yaml.safe_load unless the
            # config is fully trusted.
            config_dict = yaml.load(config_f)
        self._config = Config(config_dict)
        self.column_names_config = self._config.project.inputs.column_names
        self.entities = self._config.project.inputs.entities
        self.housing_entities = self._config.project.inputs.housing_entities
        self.person_entities = self._config.project.inputs.person_entities

    def _populate_project_properties(self):
        # Cache name/location from the parsed config; location is absolutized.
        self.name = self._config.project.name
        self.location = os.path.abspath(self._config.project.location)

    def _load_data(self):
        # Build the database layer from the config and pull the inputs in.
        self.db = DB(self._config)
        self.db.load_data()

    def run_scenarios(self):
        """Run every scenario listed in the project configuration."""
        scenarios_config = self._config.project.scenario
        for scenario_config in scenarios_config:
            print ("Running Scenario: %s" % scenario_config.description)
            scenario_obj = Scenario(self.location, self.entities,
                                    self.housing_entities, self.person_entities,
                                    self.column_names_config, scenario_config,
                                    self.db)
            scenario_obj.run_scenario()
def __init__(self, bot, auto=False):
    """Bind the bot and its event loop, open the main DB, and optionally
    kick off async initialization.

    @param bot: the owning bot instance (provides .loop)
    @param auto: when True, schedule self.initialize() on the loop
    """
    self.bot = bot
    self.loop = bot.loop
    self.db = DB(name="main", loop=bot.loop)
    # Lazily-populated state; filled in by initialize().
    self.players = None
    self.events = None
    self.party = None
    self.static_party = None
    if auto:
        self.loop.create_task(self.initialize())
def get_channel_language(id):
    """Return the stored language for a channel; default to 'en' and persist it."""
    doc = DB.collection("channels").document(str(id))
    snapshot = doc.get()
    if snapshot.exists:
        return snapshot.to_dict()["language"]
    # No document yet: write the default so subsequent reads succeed.
    doc.set({"language": "en"}, merge=True)
    return "en"
def add_combatant(id, name, init):
    """Record a combatant (name + initiative score) on the channel's combat doc."""
    payload = {"combat": {"combatants": {"name": name, "iscore": init}}}
    DB.collection("channels").document(str(id)).set(payload, merge=True)
def fixDatabaseDate():
    """Normalize the stored date format for job-record date fields 1 through 3.

    For each field, every record's date is reformatted via
    formatDateForDatabase and written back; None dates are formatted from a
    blank placeholder. Progress is echoed per record, as before.
    """
    # Fix: the original repeated the identical loop body verbatim for
    # fields 1, 2 and 3; a single loop removes the triplication. Also uses
    # 'is None' instead of '== None'.
    for field in (1, 2, 3):
        for record in DB.reformateDate_jobRecord(field):
            record_id = record[0]
            raw_date = record[1]
            date = formatDateForDatabase(" " if raw_date is None else raw_date)
            print(DB.updateDateField(record_id, date, field))
            print("success")
def viewComplete(jobID):
    """Show a completed job's record; on POST, persist edited notes and return
    to the completed-jobs list.

    @param jobID: primary key of the job record to display
    """
    page = "JOB RECORD"
    oplist = DB.getActiveOperators()
    # NOTE(review): typeList/workcellList are fetched but not passed to the
    # template; kept to preserve existing DB access, confirm before removing.
    typeList = DB.getJobType()
    workcellList = DB.getWorkCells()
    result = DB.setUpdateForm(jobID)
    endDate = result[0][15][0:10]  # keep only the date portion of the timestamp
    job = {
        'job ID': result[0][0],
        'job name': result[0][1],
        'work order': result[0][2],
        'cell': result[0][3],
        'cell ID': result[0][4],
        'status': result[0][5],
        'status ID': result[0][6],
        'type': result[0][7],
        'type ID': result[0][8],
        'weight': result[0][9],
        'activity ID': result[0][10],
        'operator': result[0][11],
        'operator ID': result[0][12],
        'last op': result[0][13],
        'notes': result[0][14],
        'last activity': endDate
    }
    if request.method == 'POST':
        # Fix: removed unused now/time/date locals that were computed here
        # but never referenced.
        dbNotes = job['notes']
        pageNotes = request.form['notes']
        if (util.textHasChanged(dbNotes, pageNotes)):
            # Edits made from this read-only view are stamped as system changes.
            addNote = util.appendTimeStamp("SYS", pageNotes)
        else:
            addNote = dbNotes
        DB.updateJobNotes({'jobID': job['job ID'], 'notes': addNote})
        return redirect(url_for('completedJobs'))
    # Fix: the trailing duplicate render_template after the if/else was
    # unreachable (both branches returned) and has been removed.
    return render_template('job_record.html', job=job, oplist=oplist, page=page)
def attack(self, blocklist):
    """Attack a given range query with fully distinguishable blocks.

    @param blocklist: A list of sets, each set representing a block, the
                      main target in the first block.
    @return: List of possible results
    """
    results = []
    num_blocks = len(blocklist)
    tail = blocklist[1:]  # every block after the first (loop-invariant)
    for candidate in blocklist[0]:  # target must live in the first block
        if not DB.isValidTarget(candidate):
            continue
        if DB.getPatternLengthForHost(candidate) != num_blocks:
            continue
        # Count, per remaining block, how many of the candidate's pattern
        # members it contains; the candidate survives only if no block is
        # left empty of pattern members.
        hits = [0] * (num_blocks - 1)
        for query in DB.getPatternForHost(candidate):
            if query == candidate:
                continue
            for idx, blk in enumerate(tail):
                if query in blk:
                    hits[idx] += 1
        if 0 not in hits:
            results.append(candidate)
    return results
def get_combat_state(id):
    """Return whether the channel is in combat, defaulting to False.

    If the document is missing, or the combat/in_combat field is unset, the
    default (False) is persisted and returned.
    """
    channel_ref = DB.collection("channels").document(str(id))
    channel = channel_ref.get()
    if channel.exists:
        # Fix: chained .get() calls avoid a KeyError when the document
        # exists but the "combat" sub-document was never created.
        state = channel.to_dict().get("combat", {}).get("in_combat")
        if state is not None:
            return state
    # Fix: the original fell off the end (returning None implicitly) when
    # the document existed but in_combat was unset; now the default is
    # written and False is returned in every fallback case.
    channel_ref.set({"combat": {"in_combat": False}}, merge=True)
    return False
def runNightReport():
    """Write the nightly chem-op report as a timestamped CSV to the share drive.

    Pulls rows from DB.getReport(), prepends a heading row, and writes one
    CSV file named with the current date and time.
    """
    now = datetime.now()
    time = now.strftime('%I-%M-%p')
    date = now.strftime('%m-%d-%Y')
    path = "S:\\EVERYONE\\SBishop\\Job Tracker Admin\\Reports"
    reportHeadings = ['Name', 'Job', 'Work Order', 'Work Cell', 'Job Type',
                      'Status', 'weight', 'Notes']
    reportTemplate = [reportHeadings]
    # Columns 0-6 plus column 9 (notes) of each raw row make up the report.
    for each in DB.getReport():
        reportTemplate.append([each[0], each[1], each[2], each[3], each[4],
                               each[5], each[6], each[9]])
    filename = "\\NightlyChemOpReport-" + str(date) + "-" + str(time) + ".csv"
    # Fix: manual open/close leaked the handle if writerows raised; the
    # with-block guarantees the file is closed.
    with open(path + filename, "w", newline="") as outfile:
        csv.writer(outfile).writerows(reportTemplate)
def completedJobs(query_type='all'):
    """List completed jobs for the given filter; on POST with a JobID, open
    that job's record view.

    @param query_type: filter keyword passed through to the DB query
    """
    page = util.setPageMessage(query_type)
    rows = DB.getCompletedJobsList(query_type)
    jobCount = len(rows)
    jobs = [{
        'jobID': row[0],
        'job': row[1],
        'workorder': row[2],
        'cell': row[3],
        'status': row[4],
        'weight': row[5],
        'operator': row[7],
        'timestamp': row[8],
    } for row in rows]
    if request.method == 'POST' and request.form['JobID']:
        return redirect(url_for('viewComplete', jobID=request.form['JobID']))
    # GET, or POST without a JobID: re-render the list.
    return render_template('completed_jobs.html', jobs=jobs, page=page,
                           jobCount=jobCount)
# Iterate through the json directory
# Each file will have a file format as "schema_table"
# Thus, a file named "myschema_table1" will generate a table called "table1" in the "myschema" schema
# Similarly, a file named "myschema_table2" will generate a table called "table2" in the "myschema" schema
# This method allows different files to import into different tables
for filename in os.listdir(Config.jsonDir):
    if not filename.endswith(".json"):
        continue
    # Strip everything from the first dot onward (the extension).
    fn = re.sub(r"\..*", "", filename)
    # Fix: split only on the FIRST underscore so table names may themselves
    # contain underscores (e.g. "myschema_my_table" -> schema "myschema",
    # table "my_table"); fn.split("_")[1] silently truncated such names.
    schema_name, table_name = fn.split("_", 1)
    # Opening the file
    with open("{0}/{1}".format(Config.jsonDir, filename)) as f:
        data = json.load(f)  # Load the json as an object
    data = DB.getData(data)  # Flatten the data and get data types
    # Create the table based off of the data structure in "data"
    DB.createTable(data, schema_name, table_name)
    # Create the CSV and prepare for import using the COPY command
    DB.createCSV(data, schema_name, table_name)
    # Take the csv file and ingest into the database
    PostGre.ingestCSV(schema_name, table_name)

# Sample query in code
myResults = PostGre.query("Select * from myschema.table2")
print(myResults)
def index():
    """Render the dashboard: running-job count, completion totals, per-period
    job-type counts, and today's per-cell counts for the bar chart."""
    page = "THE DASHBOARD"
    now = datetime.now()
    today = now.strftime('%A %B %d, %Y')
    runningJobs = DB.RunningJobsCount()
    dayCount = DB.getJobsCompleteToday()
    weekCount = DB.getJobsCompleteThisWeek()
    monthCount = DB.getJobsCompleteThisMonth()

    def _typeCounts(first_index):
        # Each period occupies three consecutive DB.getJobTypeCount slots
        # (day: 0-2, week: 3-5, month: 6-8, year: 9-11).
        return [DB.getJobTypeCount(first_index + offset)[0] for offset in range(3)]

    # Fix: the original copy-pasted the same three-slot gathering block four
    # times through a redundant typeCountSet intermediate.
    typeCountDay = _typeCounts(0)
    typeCountWeek = _typeCounts(3)
    typeCountMonth = _typeCounts(6)
    typeCountYear = _typeCounts(9)

    # Bar Chart=========================================
    CellOBJ = {
        "Large Hock": 0,
        "Pilot Hock": 0,
        "2 Gal Ross": 0,
        "10 Gal Ross": 0,
        "40 Gal Ross": 0,
        "100 Gal Ross": 0,
        "Mezz Tank": 0,
        "Activator": 0,
        "1/2 Gal Ross": 0,
    }
    for cell in DB.getTodaysCellCount():
        CellOBJ[cell[0]] = cell[1]
        print(CellOBJ[cell[0]])  # debug trace; consider removing in production
    cellData = [
        CellOBJ["Pilot Hock"], CellOBJ["Large Hock"], CellOBJ["1/2 Gal Ross"],
        CellOBJ["2 Gal Ross"], CellOBJ["10 Gal Ross"], CellOBJ["40 Gal Ross"],
        CellOBJ["100 Gal Ross"], CellOBJ["Mezz Tank"], CellOBJ["Activator"],
    ]
    # Bar Chart =========================================
    output = {
        'runningJobs': runningJobs,
        'dayCount': dayCount,
        'weekCount': weekCount,
        'monthCount': monthCount,
    }
    return render_template('index.html', output=output, page=page, today=today,
                           cellData=json.dumps(cellData),
                           typeCountDay=json.dumps(typeCountDay),
                           typeCountWeek=json.dumps(typeCountWeek),
                           typeCountMonth=json.dumps(typeCountMonth),
                           typeCountYear=json.dumps(typeCountYear))
def _load_data(self):
    """Instantiate the database layer from the config and load the input data."""
    database = DB(self._config)
    database.load_data()
    self.db = database
def newJobform():
    """Render the new-job form; on a valid POST, create the job record and
    show the success page."""
    page = 'START A NEW JOB'
    oplist = DB.getActiveOperators()
    typeList = DB.getJobType()
    workcellList = DB.getWorkCells()
    if request.method == 'POST':
        required = ('operator', 'job_name', 'work_order', 'work_cell',
                    'job_type', 'job_weight', 'total_operations')
        # List comprehension (not a generator) so every field is accessed,
        # matching the original's unconditional per-field checks.
        errors = any([not request.form[field] for field in required])
        # Checkboxes are absent from the form data when unticked, so a
        # missing key simply means "0". Fix: catch KeyError specifically
        # (werkzeug's BadRequestKeyError subclasses it) instead of a bare
        # except that would also hide real bugs.
        try:
            ck_ipt = request.form['in_process_testing']
        except KeyError:
            ck_ipt = '0'
        try:
            ck_pre = request.form['predjustments']
        except KeyError:
            ck_pre = '0'
        if not errors:
            now = datetime.now()
            time = now.strftime('%I:%M %p')
            date = now.strftime('%Y-%m-%d')
            alias = util.getUserAlias(request.form['operator'])
            notes = request.form['notes']
            addNote = util.appendTimeStamp(alias, notes, 'Job Started')
            newRecord = {
                'operator': request.form['operator'],
                'job': request.form['job_name'].upper(),
                'workOrder': request.form['work_order'],
                'workCell': request.form['work_cell'],
                'jobType': request.form['job_type'],
                'jobWeight': request.form['job_weight'],
                # NOTE(review): 'totalOperatiions' is misspelled but kept as-is
                # because DB.startJob presumably reads this exact key; confirm
                # downstream usage before renaming.
                'totalOperatiions': request.form['total_operations'],
                'inProcessTesting': ck_ipt,
                'preAdjustments': ck_pre,
                'notes': addNote,
                'jobStatus': 1,   # 'In Progress' starting condition (job status table)
                'startTime': time,
                'startDate': date,
                'lastOperation': 10,  # jobs start at operation 10
                'Activity': 4         # 4 == 'starting' in the Activity_Action table
            }
            DB.startJob(newRecord)
            page = "JOB ADDED"
            newRecord['operator'] = DB.getOperatorByID(newRecord['operator'])
            return render_template('new_job_success.html', newRecord=newRecord,
                                   page=page)
        # Validation failed: re-render the form.
        return render_template('new_job.html', oplist=oplist, typeList=typeList,
                               workcellList=workcellList, page=page)
    return render_template('new_job.html', oplist=oplist, typeList=typeList,
                           workcellList=workcellList, page=page)
def updateJob(jobID):
    """Render the update form for a running job; on a valid POST, log the
    activity and update the job record.

    @param jobID: primary key of the job record to update
    """
    page = "UPDATE WORK IN PROCESS"
    oplist = DB.getActiveOperators()
    # NOTE(review): typeList/workcellList are fetched but never passed to a
    # template below — confirm whether they are still needed.
    typeList = DB.getJobType()
    workcellList = DB.getWorkCells()
    result = DB.setUpdateForm(jobID)
    # Flatten the first (only) result row into a named lookup for templates.
    job = {
        'job ID': result[0][0],
        'job name': result[0][1],
        'work order': result[0][2],
        'cell': result[0][3],
        'cell ID': result[0][4],
        'status': result[0][5],
        'status ID': result[0][6],
        'type': result[0][7],
        'type ID': result[0][8],
        'weight': result[0][9],
        'activity ID': result[0][10],
        'operator': result[0][11],
        'operator ID': result[0][12],
        'last op': result[0][13],
        'notes': result[0][14],
        'last activity': result[0][15],
    }
    operationValues = util.getAvailableOperationValues(job['last op'])  # #to be passed to the next operation combo box
    # NOTE(review): this initial value is dead — it is unconditionally
    # reassigned inside the POST branch and unused on GET.
    errors = True
    if request.method == 'POST':
        errors = False
        if not request.form['action']:
            errors = True
        if not request.form['operator']:
            errors = True
        if not request.form['next-operation']:
            errors = True
        if not errors:
            now = datetime.now()
            time = now.strftime('%I:%M %p')
            date = now.strftime('%Y-%m-%d')
            dbNotes = job['notes']
            pageNotes = request.form['notes']
            OperatorID = request.form['operator']
            nextOperation = request.form['next-operation']
            action = request.form['action']
            status = action
            # Action 7 keeps the job in the 'In Progress' status.
            if action == "7":
                status = 1
            # Action 2 == 'Job Completed': force the final operation number.
            if action == "2":
                nextOperation = DB.getFinalOperationByID(job['job ID'])
            # NOTE(review): these three assignments appear to be dead code —
            # addNote is always reassigned by the Note Handling branch below.
            # Confirm util.appendTimeStamp has no side effects before removing.
            alias = util.getUserAlias(OperatorID)
            notes = pageNotes
            addNote = util.appendTimeStamp(alias, notes, 'Job Completed')
            # Note Handling
            if (util.textHasChanged(dbNotes, pageNotes)):
                # Notes were edited: stamp with the operator's alias, adding
                # the completion marker when the action closes the job.
                alias = util.getUserAlias(OperatorID)
                notes = pageNotes
                if action == "2":
                    addNote = util.appendTimeStamp(alias, notes, 'Job Completed')
                else:
                    addNote = util.appendTimeStamp(alias, notes)
            else:
                if action == "2":
                    # Unchanged notes still get a completion stamp on close.
                    alias = util.getUserAlias(OperatorID)
                    addNote = util.appendTimeStamp(alias, dbNotes, 'Job Completed')
                else:
                    addNote = dbNotes
            details = {
                'jobID': job['job ID'],
                'status': status,
                'activity': action,
                'notes': addNote,
                'time': time,
                'date': date,
                'operation': nextOperation,
                'operator': request.form['operator']
            }
            DB.logActivity(details)
            # NOTE(review): clearing time/date here has no effect on the
            # 'details' dict already built above — if the intent was to blank
            # the timestamps for non-complete/non-5 actions, this looks like
            # a bug; confirm against DB.updateJobRecord's expectations.
            if not (request.form['action'] == '2') and not (request.form['action'] == '5'):
                time = ''
                date = ''
            DB.updateJobRecord(details)
            # Enrich the details with display values for the success page.
            details['job'] = job['job name']
            details['work order'] = job['work order']
            details['operator'] = DB.getOperatorByID(details['operator'])
            details['status'] = DB.getStatusByID(details['status'])
            page = "UPDATE SUCCESS"
            return render_template('update_job_success.html', details=details, page=page)
            #return redirect(url_for('index'))
        else:
            return render_template('update_job.html', job=job, oplist=oplist, page=page, operationValues=operationValues)
    return render_template('update_job.html', job=job, oplist=oplist, page=page, operationValues=operationValues)
# NOTE(review): this chunk appears to be an interior fragment of a larger
# function — the 'month' gathering stops after slot 7 and the 'year' slots
# are never filled, so the remainder presumably lives outside this view.
now = datetime.now()
time = now.strftime('%I:%M %p')
date = now.strftime('%m-%d-%Y')
# date = (now.strftime('%A %B %d, %Y'))
# Per-period job-type counters; each period holds three type slots which
# map to consecutive DB.getJobTypeCount indices (day 0-2, week 3-5, ...).
typeCountSet = {
    'day': [0, 0, 0],
    'week': [0, 0, 0],
    'month': [0, 0, 0],
    'year': [0, 0, 0]
}
typeCountDay = []
typeCountSet['day'][0] = DB.getJobTypeCount(0)[0]
typeCountSet['day'][1] = DB.getJobTypeCount(1)[0]
typeCountSet['day'][2] = DB.getJobTypeCount(2)[0]
for each in typeCountSet['day']:
    typeCountDay.append(each)
typeCountWeek = []
typeCountSet['week'][0] = DB.getJobTypeCount(3)[0]
typeCountSet['week'][1] = DB.getJobTypeCount(4)[0]
typeCountSet['week'][2] = DB.getJobTypeCount(5)[0]
for each in typeCountSet['week']:
    typeCountWeek.append(each)
typeCountMonth = []
typeCountSet['month'][0] = DB.getJobTypeCount(6)[0]
typeCountSet['month'][1] = DB.getJobTypeCount(7)[0]
#!/usr/bin/env python import os import sys import shutil import config from data import DB from backend import anubis from backend import cwsandbox from backend.errors import SafetyCheckError, TimeLeftError, FatalError if config.path: sys.path = sys.path + config.path db = DB() class ThrottleError(Exception): def __init__(*args, **kargs): Exeption.__init__(*args, **kargs) def process_new(): # Get list of files that need submitting submit_list = os.listdir(config.new) # Submit and retrieve every file for file in submit_list: path = os.path.join(config.new, file) # Add file to database try: fid = db.add_file(file) except Exception, e: print('Error adding file %s to database'%file) continue
def on_save_result(self, price, batch_id, work):
    """Stamp the price record with the current date and time, then persist it."""
    stamp = datetime.now()
    price['date'] = stamp.strftime('%m/%d/%Y')
    price['time'] = stamp.strftime('%H:%M:%S')
    DB.save_price(price)
import os

import flask

import data.update
import data.entities
from data import DB

DEBUG = os.environ.get('DEBUG') == 'on'
PORT = int(os.environ.get('PORT', '5000'))

app = flask.Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DATABASE_URL']
DB.init_app(app)
with app.app_context():
    DB.create_all()


@app.route('/')
def hello_world():
    """Render the index page with baby-monitoring and all profile messages."""
    return flask.render_template('index.html', **{
        'baby_messages': data.entities.ProfileMessage.query.filter_by(profile='baby_monitoring'),
        'all_messages': data.entities.ProfileMessage.query.all(),
    })


@app.route('/update', methods=['POST'])
def update():
    """Accept a JSON payload, apply it, and trigger the test upload."""
    data.update.update_json(flask.request.json)
    data.update.test_upload()
    # Fix: a Flask view must return a response object or string; returning
    # None (as before) makes Flask raise a 500 on every POST to /update.
    return ''
def generateBaseDRQ(self, domain):
    """Generator for Pattern-Based DNS Range Queries (trying to fill the query blocks with patterns)
    Queries are unique inside their respective sets, but may appear more than once across different query blocks.
    @param domain: Domain for which a DNS Range Query should be generated
    @return: List of Sets, in order, each set representing a query block
    """
    # NOTE(review): this code relies on Python 2 integer division
    # (pattern_length/2 feeds range()); under Python 3 it would raise
    # TypeError — confirm the target interpreter before porting.
    if not DB.isValidTarget(domain):
        Error.printErrorAndExit(domain + " is not a valid target")
    pattern_length = len(DB.PATTERNS[domain])
    block = [set()]
    # Number of OTHER hosts sharing the target's pattern length.
    num_of_available_patterns = DB.getNumberOfHostsWithPatternLength(pattern_length) - 1
    if num_of_available_patterns >= Config.RQSIZE:
        # Easy case: enough same-length patterns exist to fill the range
        # query entirely with full-length dummy patterns.
        hosts = set([domain])
        hosts.update(set(DB.getRandomHostsByPatternLengthB(pattern_length, Config.RQSIZE-1, hosts)))
        pattern_copy = {}
        for host in hosts:
            pattern_copy[host] = DB.getPatternForHost(host)
            pattern_copy[host].remove(host)
            block[0].add(host)
        # One block per remaining pattern position; each host contributes
        # one (arbitrary, via set.pop) query per block.
        for i in range(1, pattern_length, 1):
            block.append(set())
            for host in pattern_copy:
                block[i].add(pattern_copy[host].pop())
    else:
        # Hard case: not enough same-length patterns; pad with pairs of
        # shorter patterns whose lengths sum to pattern_length.
        num_of_needed_patterns = Config.RQSIZE - (num_of_available_patterns+1)
        padding = []
        for i in range(num_of_needed_patterns):
            # Find patterns whose lengths sum to pattern_length (if any exist that have not been chosen yet)
            pad1_len = pad2_len = -1
            for pad1_len, pad2_len in zip(range(1, pattern_length/2+1, 1), range(pattern_length-1, pattern_length/2-1, -1)):
                # This is a construct that generates numbers that sum to pattern_length. It is used instead of truly random
                # numbers because it will not get stuck when no more patterns are available.
                if ((DB.getNumberOfHostsWithPatternLengthB(pad1_len, block[0]) > 0) and \
                    (DB.getNumberOfHostsWithPatternLength(pad2_len) > 0)):
                    break
                elif pad1_len == pattern_length/2:
                    # No patterns of the correct length have been found, abort
                    pad1_len = -1
            if (pad1_len == -1):
                # Break out of loop as no further patterns can be found.
                break
            # The following few lines get the dummy patterns from the database and saves them to the list of dummy-patterns
            pad1_host = DB.getRandomHostsByPatternLengthB(pad1_len, 1, block[0])[0]
            pad1_pattern = DB.getPatternForHost(pad1_host)
            pad1_pattern.remove(pad1_host)
            block[0].add(pad1_host)
            # padding[i] holds the concatenated pad1 + pad2 pattern; index 0
            # (the pad1 host) lands in block[0], the rest are distributed
            # positionally below.
            padding.append([pad1_host])
            for host in pad1_pattern:
                padding[i].append(host)
            pad2_host = DB.getRandomHostsByPatternLength(pad2_len, 1)[0]
            pad2_pattern = DB.getPatternForHost(pad2_host)
            pad2_pattern.remove(pad2_host)
            padding[i].append(pad2_host)
            for host in pad2_pattern:
                padding[i].append(host)
        # We now have as many dummy patterns as we will get. Start distributing them.
        pattern_copy = {}
        block[0].add(domain)
        pattern_copy[domain] = DB.getPatternForHost(domain)
        pattern_copy[domain].remove(domain)
        for element in DB.getRandomHostsByPatternLengthB(pattern_length, num_of_available_patterns, block[0]):
            # Get all patterns with the correct length and add them to the range query
            pattern_copy[element] = DB.getPatternForHost(element)
            pattern_copy[element].remove(element)
            block[0].add(element)
        for i in range(1, pattern_length, 1):
            # Distribute the remaining patterns (those whose lengths sum to the correct length)
            block.append(set())
            for host in pattern_copy:
                block[i].add(pattern_copy[host].pop())
            for pattern in padding:
                block[i].add(pattern[i])
    return block