def run(self, times=10):
    for run_number in xrange(times):
        log("Starting algorithm {}, run #{}".format(
            self.algorithm, run_number))  # was self.run (the bound method), a bug
        self.algorithm.run()
        alg_run_stats = self.algorithm.get_stats()
        self.run_to_stats[run_number] = alg_run_stats
def scan(self):
    modem_data = {}
    # Wrap around the split index if necessary (mod would also work)
    if self.split_index >= len(FREQUENCY_SPLIT):
        self.split_index = 0
    freq_split = FREQUENCY_SPLIT[self.split_index]
    utils.log("Reading modem scan data on arfcn range: ({}, {})".format(
        freq_split[0], freq_split[1]))
    # Grab the data blob from the modem
    modem_data['data_blob'] = self.run_at_command('surv_channel_range',
                                                  freq_split[0], freq_split[1])
    # Now record the last split that was used
    modem_data['freq_low'] = freq_split[0]
    modem_data['freq_high'] = freq_split[1]
    # Update to the next split
    self.split_index += 1
    utils.log("Done reading modem scan data.")
    return modem_data
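# A minimal, self-contained sketch of the round-robin split behaviour above,
# assuming FREQUENCY_SPLIT is a list of (low_arfcn, high_arfcn) tuples; the
# values below are made up for illustration, not taken from the project.
FREQUENCY_SPLIT = [(0, 124), (128, 251), (512, 885)]

def next_split(split_index):
    '''Return the current split and the advanced index, wrapping at the end.'''
    if split_index >= len(FREQUENCY_SPLIT):
        split_index = 0
    freq_split = FREQUENCY_SPLIT[split_index]
    return freq_split, split_index + 1

index = 0
for _ in range(5):
    split, index = next_split(index)
    print(split)  # cycles (0, 124), (128, 251), (512, 885), (0, 124), ...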
def selection(self):
    travel_length_tuples_list = [(travel, travel.length)
                                 for travel in self.population]
    # sort ascending by length; do not shadow the builtin `tuple`
    travel_length_tuples_list.sort(key=lambda pair: pair[1])
    sorted_population_length = float(len(travel_length_tuples_list))
    survivors_count = int(self.survives * sorted_population_length)
    self.population = [
        travel for travel, _ in travel_length_tuples_list[:survivors_count]
    ]
    log("Ended selection, best {} elems survived!".format(
        len(self.population)))
def __disconnect(session_id: int, address: tuple) -> bytes:
    """
    Closes session
    :param session_id: id of session to close
    :param address: address of the client
    :return: answer for the client
    """
    answer = Datagram(Status.OK, Mode.DISCONNECT, session_id)
    utils.log('removed session: ' + str(session_id) + ' : ' + str(address))
    return answer.get_bytes()
def mutation(self):
    counter = 0
    new_population = []  # fixed misspelled identifier `new_populaion`
    for travel in self.population:
        if random_ratio() <= self.mutation_probability:
            new_travel = self._swap_random_points(travel)
            new_population.append(new_travel)
            counter += 1
        else:
            new_population.append(travel)
    self.population = new_population
    log("Ended mutation! {} elements were mutated.".format(counter))
def __operation(self, session_id: int, operation: int, num_a: float,
                num_b: float) -> bytes:
    """
    Makes requested calculations
    :param session_id: id of the session requesting the operation
    :param operation: id of requested operation
    :param num_a: number a
    :param num_b: number b
    :return: answer for the client
    """
    utils.log('received call for ' + Operation.name_from_code(operation) +
              ' from session: ' + str(session_id))
    answer = Datagram(Status.OK, Mode.OPERATION, session_id, operation,
                      num_a, num_b)
    answer.result_id = self.next_result_id
    result: float
    try:
        if operation == Operation.POWER:
            result = num_a**num_b
        elif operation == Operation.LOG:
            # log base num_a of num_b via the change-of-base identity
            result = log(num_b) / log(num_a)
        elif operation == Operation.GEO_MEAN:
            if num_a * num_b < 0:
                return self.__error(5, Mode.OPERATION, session_id, operation)
            result = sqrt(num_a * num_b)
        elif operation == Operation.BIN_COE:
            if num_b > num_a or num_a < 0 or num_b < 0:
                return self.__error(5, Mode.OPERATION, session_id, operation)
            result = factorial(num_a) / (factorial(num_a - num_b) *
                                         factorial(num_b))
        else:
            # unknown operation code: answer with a generic error instead of
            # reading the unbound `result` below
            return self.__error(Error.INTERNAL_SERVER_ERROR, Mode.OPERATION,
                                session_id, operation)
    except OverflowError:
        return self.__error(Error.MAX_VALUE_EXCEEDED, Mode.OPERATION,
                            session_id, operation)
    if result == float('inf'):
        return self.__error(Error.MAX_VALUE_EXCEEDED, Mode.OPERATION,
                            session_id, operation)
    self.results_storage[session_id][self.next_result_id] = \
        (operation, num_a, num_b, session_id, result, self.next_result_id)
    self.next_result_id += 1
    answer.result = result
    return answer.get_bytes()
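# The LOG branch above computes log base num_a of num_b via the change-of-base
# identity log_a(b) = ln(b) / ln(a), and GEO_MEAN guards against a negative
# product before taking sqrt. A standalone check of both identities:
from math import log, sqrt

assert abs(log(8) / log(2) - 3.0) < 1e-9  # log_2(8) == 3
assert sqrt(4 * 9) == 6.0                 # geometric mean of 4 and 9
# A negative product (e.g. 4 * -9) has no real geometric mean, which is why
# the server answers with an error instead of calling sqrt().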
def save_comment(request):
    post_id = int(request.POST['post'])
    log(str(post_id))
    log(str(request.POST['parent']))
    try:
        post = BlogsPost.objects.get(pk=post_id)
    except BlogsPost.DoesNotExist:
        raise Http404
    # log(post)
    comment = ''
    if request.method == "POST":
        form = BlogsPostCommentForm(request.POST)
        log(str(form))
        if form.is_valid():
            comment = form.save()
            log(comment)
    data = {
        'status': 'success',
        'message': '',
        'result': comment,
    }
    return data
def stop(self) -> None:
    """
    Stops the server

    Sets ON flag to FALSE, which stops listening for new connections.
    Then safely closes all open sessions.
    """
    self.on = False
    utils.log('stopping listening...')
    for session in self.sessions:
        # turn session off
        self.sessions[session] = False
        # wait for client to confirm disconnection
        session.join()
    utils.log('all sessions closed')
def category(request, id, slug=None, country=None, city=None, page=None):
    id = int(id)
    data = {}
    log('city ' + str(city))
    try:
        data['category'] = category = CatalogCategory.objects.get(pk=id)
    except CatalogCategory.DoesNotExist:
        raise Http404
    if category.slug() != slug:
        return redirect('catalog-category', id=id, slug=category.slug(),
                        permanent=True)
    if country:
        country = int(country)
        try:
            data['country'] = country = Country.objects.get(pk=country)
        except Country.DoesNotExist:
            raise Http404
        if city:
            city = int(city)
            try:
                data['city'] = city = City.objects.get(pk=city)
            except City.DoesNotExist:
                raise Http404
        data['cities'] = City.objects.filter(country=country)
    data['countries'] = get_countries(category)
    data['categories'] = get_categories()
    data['posts'] = get_posts(category=category, country=country, city=city,
                              page=page)
    return render(request, 'catalog/category.html', data)
def run(self):
    self.authorize = ftpserver.DummyAuthorizer()
    self.authorize.add_anonymous(os.path.abspath(self.path))
    ftpserver.log = logger
    ftpserver.logline = logger
    ftpserver.logerror = logger
    self.handler = ftpserver.FTPHandler
    self.handler.authorizer = self.authorize
    self.ftp = ftpserver.FTPServer(('', self.port), self.handler)
    log('FTP launched on port ' + str(self.port) + '.')
    self.ftp.serve_forever()
def __connect(self, address: tuple) -> (bytes, int):
    """
    Establish new session
    :param address: client address
    :return: (answer to the client, given session_id)
    """
    # get id
    self.next_id_lock.acquire()
    given_id = self.next_id
    self.next_id += 1
    self.next_id_lock.release()
    # prepare answer
    answer = Datagram(Status.OK, Mode.CONNECT, given_id)
    utils.log('new session: ' + str(given_id) + ' : ' + str(address[0]))
    return answer.get_bytes(), given_id
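# The lock around next_id above makes session-id allocation atomic across
# handler threads. A standalone sketch of the same pattern; Counter here is
# illustrative, not project code:
import itertools
import threading

class Counter(object):
    def __init__(self):
        self._ids = itertools.count()
        self._lock = threading.Lock()

    def next_id(self):
        with self._lock:  # serialise allocation, as next_id_lock does above
            return next(self._ids)

counter = Counter()
print(counter.next_id(), counter.next_id())  # 0 1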
def __query_by_session_id(self, session_id: int, given_session_id: int,
                          connection: socket) -> bytes:
    """
    Gets all results of session
    :param session_id: id of session to look for
    :param given_session_id: id of session requesting query
    :param connection: connection socket
    :return: answer for the client
    """
    utils.log('querying by session_id: ' + str(session_id) + ' for ' +
              str(given_session_id))
    if session_id != given_session_id:
        return self.__error(Error.UNAUTHORISED, Mode.QUERY_BY_SESSION_ID,
                            session_id)
    if session_id not in self.results_storage:
        return self.__error(Error.NOT_EXISTING_DATA, Mode.QUERY_BY_SESSION_ID)
    if not self.results_storage[session_id]:
        return self.__error(Error.NOT_EXISTING_DATA, Mode.QUERY_BY_SESSION_ID)
    results = self.results_storage[session_id]
    answer: List[Datagram] = list()
    for result_id, result in results.items():
        answer.append(
            Datagram(Status.OK, Mode.QUERY_BY_SESSION_ID, session_id,
                     operation=result[0], a=result[1], b=result[2],
                     result=result[4], result_id=result_id, last=False))
    # send all except the last result to the client
    for datagram in answer[:-1]:
        connection.sendall(datagram.get_bytes())
    # mark the last result and return it to be sent by the caller
    answer[-1].last = True
    return answer[-1].get_bytes()
def initialize(self):
    self.start_time = datetime.now()
    log("Initializing...")
    log("Loading cities from: {} ...".format(self.dataset_filename))
    self.cities = load_from_file(self.dataset_filename)
    log("Growing population...")
    while len(self.population) < self.population_size:
        new_travel = generate_travel(self.cities)
        self.population.append(new_travel)
    log("Ended initialization!")
def __error(code: int, mode: int = Mode.ERROR, session_id: int = 0,
            operation: int = 0) -> bytes:
    """
    Returns error answer
    :param code: error code
    :param mode: mode in which error occurred
    :param session_id: session id in which error occurred
    :param operation: operation in which error occurred
    :return: error answer for the client
    """
    utils.log(
        Error.name_from_code(code) + ' on session: ' + str(session_id) +
        ' mode: ' + Mode.name_from_code(mode), True)
    error = Datagram(Status.ERROR, mode, session_id, operation, a=code)
    return error.get_bytes()
def main():
    mdb = mongo_db.Database("SensorDB", "Scan")
    pdb = postgres_db.Database(postgres_config.database,
                               postgres_config.username,
                               postgres_config.password,
                               postgres_config.hostname,
                               postgres_config.port)
    # Purging tables
    # Uncomment this if you want to delete all old data
    # in the tables (the tables must already be defined)
    #utils.log("Purging tables...")
    #pdb.purge_tables()

    # Go ahead and initialize the tables. No harm if they are
    # already initialized.
    utils.log("Initializing tables...")
    pdb.init_tables()
    # Grab the uuids that exist
    uuids = pdb.get_uuids()
    # Read in each of the scan objects
    scan_uuid_lst = []
    i = 0
    for (full_scan, uuid, version) in mdb.get_scans(uuids=uuids):
        i = i + 1
        if i % 100 == 0:
            pprint("Scan number: {:d}".format(i))
        # Actually append the scan to the full scan list
        scan_uuid_lst.append((
            full_scan,
            uuid,
            version,
        ))
        if len(scan_uuid_lst) >= INSERT_NUM:
            pdb.insert_scans(scan_uuid_lst)
            scan_uuid_lst = []
    # flush any remaining scans that did not fill a full batch
    if len(scan_uuid_lst) > 0:
        pdb.insert_scans(scan_uuid_lst)
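# The INSERT_NUM buffering above amortises round-trips to Postgres. The same
# pattern in isolation; chunk() is a hypothetical helper, not project code:
def chunk(iterable, size):
    '''Yield lists of at most `size` items from any iterable.'''
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) >= size:
            yield batch
            batch = []
    if batch:  # flush the partial final batch, like the trailing insert above
        yield batch

print(list(chunk(range(7), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]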
def _find_tests(self):
    cwd = os.getcwd()
    os.chdir(self.path)
    script_cmds = self.find_tests_cmd.split("\n")
    for cmd in script_cmds:
        r = os.system("%s >> output.tmp 2>> output.tmp" % cmd)
        if r != 0:
            print "Error while running tests"
            break
    try:
        # open() raises on failure instead of returning a falsy object,
        # so the old `if not f` check could never fire
        f = open("output.tmp", "r")
    except IOError:
        log("Cannot open the output file")
        os.chdir(cwd)
        return []
    lines = f.readlines()
    f.close()
    os.system("rm output.tmp")
    os.chdir(cwd)
    return [l.rstrip("\n") for l in lines]
def evaluate(self):
    log("Evaluating {}. generation...".format(self.generation))
    population_to_evaluation = {
        travel: travel.length
        for travel in self.population
    }
    min_distance = min(population_to_evaluation.values())
    max_distance = max(population_to_evaluation.values())
    avg_distance = (sum(population_to_evaluation.values()) /
                    len(population_to_evaluation))
    time = (datetime.now() - self.start_time).total_seconds()
    self.stats.update({
        self.generation: {
            "min": min_distance,
            "max": max_distance,
            "avg": avg_distance,
            "time": time
        }
    })
    log("Ended evaluation!")
def __send_datagram(self, datagram: Datagram) -> List[Datagram]:
    """
    Sends data to the server
    :param datagram: data to send
    :return: list of answers
    """
    self.socket.sendall(datagram.get_bytes())
    answer = list()
    last = False
    # receive data until the last flag is set to true
    while last is False:
        # get data
        answer_bin = self.socket.recv(DATAGRAM_SIZE)
        try:
            # decode data
            answer_data = Datagram.from_bytes(answer_bin)
        except (bitstring.ReadError, ValueError, TypeError) as e:
            # if data was unreadable
            utils.log('error reading datagram: ' + str(e), True)
            print('error reading datagram')
        else:
            # check last flag
            if answer_data.last:
                last = True
            # report received errors
            if answer_data.status == Status.ERROR:
                print('error on server: ' +
                      Mode.name_from_code(answer_data.mode) + ' - ' +
                      Error.name_from_code(answer_data.a))
            elif answer_data.status == Status.REFUSED:
                print('server refused to ' +
                      Mode.name_from_code(answer_data.mode) + ' reason: ' +
                      Error.name_from_code(answer_data.a))
            # add received data to answers
            answer.append(answer_data)
    return answer
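# The receive loop above implements simple message framing: the server streams
# datagrams and marks the final one with last=True. The same idea reduced to
# plain objects; Msg is a stand-in for Datagram, not the project's class:
class Msg(object):
    def __init__(self, payload, last=False):
        self.payload = payload
        self.last = last

def read_until_last(stream):
    '''Collect messages until one carries the last flag.'''
    answers = []
    for msg in stream:
        answers.append(msg)
        if msg.last:
            break
    return answers

msgs = [Msg(1), Msg(2), Msg(3, last=True), Msg(4)]
print([m.payload for m in read_until_last(msgs)])  # [1, 2, 3] -- 4 is never read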
def __is_alive(self) -> None:
    """ Checks if server is still available """
    while self.connected:
        self.connected_lock.acquire()
        datagram = Datagram(Status.NEW, Mode.IS_ALIVE, self.session_id)
        try:
            answer: Datagram = self.__send_datagram(datagram)[0]
        except (ConnectionAbortedError, ConnectionResetError):
            utils.log('server went down')
            self.connected = False
        else:
            # only inspect the answer if sending actually succeeded;
            # previously `answer` could be read while unbound
            if answer.status != Status.OK:
                utils.log('server rejected session')
                self.connected = False
        if not self.connected:
            print('press ENTER to exit')
        self.connected_lock.release()
        time.sleep(1)
def listen(self) -> None:
    """ Listens for incoming connections """
    # create socket for handling connections
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # set socket timeout [makes it possible to safely break listening]
    s.settimeout(1)
    # bind socket to the server address
    s.bind((self.host, self.port))
    # set maximal waiting queue
    s.listen(5)
    utils.log('listening on port ' + str(self.port))
    # listen until the user turns the server off
    while self.on:
        try:
            # accept connection
            connection, address = s.accept()
            utils.log('connected by ' + str(address))
            # create handler for the new connection [see Handler definition below]
            handler = Handler(name="handler_for_" + str(address),
                              server=self,
                              connection=connection,
                              address=address)
            # add new connection to sessions storage
            self.sessions[handler] = True
            # handle new session
            handler.start()
        except socket.timeout:
            pass
    utils.log('listening stopped')
def scan(database, gps_scanner, gsm_scanner):
    '''This runs one iteration of a scan.

    Currently, one scan iteration is a gps scan followed by a gsm scan
    followed by a gps scan. Then it inserts this data into the database.

    Args:
        database (Database): The database connection object to do insertions
        gps_scanner (GpsScanner): This is an object that manages the gps
            connection.
        gsm_scanner (GsmScanner): This is the object that manages the gsm
            connection.
    '''
    # Get data from GPS and modem
    utils.log("Collecting GPS and modem data...")
    gps_before = Gps_Scan(gps_scanner.scan())
    utils.log_gps_time(gps_before.get_time(), gps_before.get_mode())
    # Grab the gsm scan data and then parse it into a Scan object
    raw_gsm_data = gsm_scanner.scan()
    # Create a parser and parse the blob to make a Gsm_Scan
    parser = Telit_Modem_Parser()
    gsm_scan = parser.parse_scan(raw_gsm_data['data_blob'])
    # Now add the frequency range to the gsm_scan obj
    gsm_scan.set_freq_range(raw_gsm_data['freq_low'],
                            raw_gsm_data['freq_high'])
    gps_after = Gps_Scan(gps_scanner.scan())
    # Now gather the data into a scan object
    scan = Scan(gsm_scan, gps_before, gps_after, utils.get_sensor_name())
    utils.log("Done collecting GPS and modem data.")
    # Actually insert the data point
    database.insert_sensor_point(scan)
def __query_by_result_id(self, session_id: int, given_session_id: int,
                         result_id: int) -> bytes:
    """
    Gets one result
    :param session_id: id of session to look for
    :param given_session_id: id of session requesting query
    :param result_id: id of result to look for
    :return: answer for the client
    """
    utils.log('querying by result id: ' + str(result_id) + ' for ' +
              str(given_session_id))
    # error answers consistently report Mode.QUERY_BY_RESULT_ID; the old code
    # reported the session-id query mode here, apparently a copy-paste slip
    if session_id != given_session_id:
        return self.__error(Error.UNAUTHORISED, Mode.QUERY_BY_RESULT_ID,
                            session_id)
    if session_id not in self.results_storage:
        return self.__error(Error.NOT_EXISTING_DATA, Mode.QUERY_BY_RESULT_ID)
    session_results = self.results_storage[session_id]
    if result_id not in session_results:
        return self.__error(Error.NOT_EXISTING_DATA, Mode.QUERY_BY_RESULT_ID,
                            session_id)
    result = session_results[result_id]
    answer = Datagram(
        Status.OK,
        Mode.QUERY_BY_RESULT_ID,
        session_id,
        operation=result[0],
        a=result[1],
        b=result[2],
        result=result[4],
        result_id=result_id,
    )
    return answer.get_bytes()
def run(self): log("Started run!") self.best_travel = None self.iteration = 1 self.start_time = datetime.now() self.cities = load_from_file(self.dataset_filename) for travel_permutation in itertools.permutations( range(0, len(self.cities))): if self._seconds_since() > self.timeout: time = self._seconds_since() self.stats.update({ self.iteration: { "min": self.best_travel.length, "avg": self.best_travel.length, "max": self.best_travel.length, "time": time } }) break else: travel = Travel(travel_permutation, self.cities) if self.best_travel: if self.best_travel.length > travel.length: self.best_travel = travel time = self._seconds_since() self.stats.update({ self.iteration: { "min": travel.length, "avg": travel.length, "max": travel.length, "time": time } }) log("Found new best at {} iteration!".format( self.iteration)) log("Iteration {} stats: {}".format( self.iteration, self.stats.get(self.iteration))) self.iteration += 1 else: self.best_travel = travel log("Ended run! Took {0:.1f} seconds.".format(self._seconds_since()))
def run(self): log("Started run!") self._init_variables() self.initialize() self.evaluate() while self.generation <= self.max_generations and not self._is_timeout( ): self.selection() self.crossover() self.mutation() self.evaluate() log("Gen. {}. stats: {}".format(self.generation, self.stats.get(self.generation))) self.generation += 1 log("Ended run! Took {0:.1f} seconds.".format( (datetime.now() - self.start_time).total_seconds()))
def __connect(self) -> None:
    """ Connects to the server """
    self.connected_lock.acquire()
    utils.log('connecting to : ' + self.host + ':' + str(self.port))
    # connect to the server
    self.socket.connect((self.host, self.port))
    # get session id
    datagram = Datagram(Status.NEW, Mode.CONNECT)
    answer = self.__send_datagram(datagram)[0]
    self.session_id = answer.session_id
    if answer.status == Status.OK:
        utils.log('connected to : ' + self.host + ':' + str(self.port))
        self.connected = True
    else:
        utils.log(self.host + ':' + str(self.port) + ' refused to connect')
        self.connected = False
    self.connected_lock.release()
def __disconnect(self) -> None:
    """ Disconnects from the server """
    self.connected_lock.acquire()
    utils.log('disconnecting from : ' + self.host + ':' + str(self.port))
    # send disconnect request
    datagram = Datagram(Status.NEW, Mode.DISCONNECT, self.session_id)
    answer = self.__send_datagram(datagram)[0]
    if answer.status == Status.OK:
        utils.log('disconnected from : ' + self.host + ':' + str(self.port))
        # close connection
        self.socket.close()
        self.connected = False
    else:
        utils.log(
            'cannot disconnect from : ' + self.host + ':' + str(self.port) +
            ' error code: ' + str(answer.a), True)
    self.connected_lock.release()
def insert_mongo_points(self, mongo_dicts):
    # If the connection has a timeout then just keep trying.
    # If the database is down there is no point in collecting
    # data anyway.
    insertion_successful = False
    while not insertion_successful:
        try:
            # Finally insert the points and set a bool to leave the loop
            utils.log("Trying to write to the DB...")
            self.collection.insert_many(mongo_dicts)
            insertion_successful = True
            utils.log("Done writing to DB.")
        except Exception as e:
            exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
            traceback.print_exception(exceptionType, exceptionValue,
                                      exceptionTraceback, file=sys.stdout)
            utils.log("Error writing to DB: {}".format(e))
            time.sleep(DB_INSERT_TIMEOUT)
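# The loop above retries forever with a fixed DB_INSERT_TIMEOUT pause. If
# outages can be long, capped exponential backoff is a common variant; a
# generic sketch, where retry_call and its arguments are illustrative, not
# project code:
import time

def retry_call(func, base_delay=1.0, max_delay=60.0):
    '''Call func() until it succeeds, doubling the pause up to max_delay.'''
    delay = base_delay
    while True:
        try:
            return func()
        except Exception as e:
            print("retrying after error: {}".format(e))
            time.sleep(delay)
            delay = min(delay * 2, max_delay)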
from serwer.IRepository import IRepository
from parsers.base_parser import BaseParser
from common.utils import log
from copy import deepcopy
import common.dbutils as db
import os
import re

# Load plugins that work with different types of repositories
lista = '/' + '/'.join(os.path.abspath(__file__).split('/')[1:-1]) + '/plugins/'
log('Loading repository plugins from ' + lista)
for filename in os.listdir(lista):
    if re.match(r'rep_.+\.py$', filename):
        log('* ' + filename)
        __import__('serwer.plugins.' + filename[:-3])


# Class used to communicate with the system which stores data
# about repositories defined by the user
class RepoManager:
    repTypes = IRepository.__subclasses__()
    repList = []

    def convertRepository(self, toConvert):
        for rep in RepoManager.repTypes:
            if rep().typ == toConvert.typ:
                converted = rep()
                converted.assign(toConvert.name, toConvert.url,
                                 toConvert.comment, toConvert.typ)
                if toConvert.Auth:
                    converted.setAuth(toConvert.login, toConvert.password)
def start(self):
    if self.svndownload:
        log("Downloading svn...")
        self.svn.download(self.repository, self.path)
        log("Svn downloaded.")
    else:
        log("Skipping svn download.")
    self._compile()
    self.jobs = self._find_tests()
    self.workers = []
    tmpworker = None
    self.workersLock.acquire()
    self.serverCond.acquire()
    with open(self.workersFile) as f:
        for line in f:
            tmpworker = serverworker.serverworker(line, self.workerPort, self)
            if tmpworker.init():
                self.workers.append(tmpworker)
                log("Worker " + line[:-1])
                tmpworker.start()
    self.workersDone = len(self.workers)
    self.workersLock.release()
    if len(self.workers) == 0:
        log("No workers available for testing.")
        # release the condition lock before returning early
        self.serverCond.release()
        return
    self.ftp.start()
    self.serverCond.wait()
    self.serverCond.release()
    log("Tests finished.")
    self.ftp.close()
    self.close()
def run(self):
    # Send ping
    self.data = pakiet()
    self.data.typ = pakiet.PING
    self.data.msg = 'ping'
    self.buffer.send(self.data)
    # Recv ping
    self.data = self.buffer.read()
    if self.data.typ != pakiet.PING or self.data.msg != 'pong':
        # send an error
        print 'Invalid ping response!'
        self.close()
        return
    # have the worker download the application under test
    self.data = pakiet()
    self.data.typ = pakiet.FTPDOWNLOAD
    self.data.port = 2222  # TODO: fix this!
    self.data.msg = self.socket.getsockname()[0]
    self.buffer.send(self.data)
    self.data = self.buffer.read()
    if self.data.typ != pakiet.FTPDOWNLOAD or self.data.msg != 'ok':
        print 'Worker did not download over FTP ', self.data.msg
        self.close()
        return
    self.ftpDownloaded = True
    log('Worker ' + self.ip[:-1] + ' downloaded over FTP')
    self.data = pakiet()
    self.data.typ = pakiet.BUILD
    self.data.msg = 'ant compile'
    self.buffer.send(self.data)
    self.data = self.buffer.read()
    if not self.data or self.data.typ != pakiet.BUILD:
        log('Error during build')
        #self.close()
        #return
    rep = RepoManager()
    while True:
        self.server.workersLock.acquire()
        if len(self.server.jobs) > 0:
            job_file = self.server.jobs[0]
            job = self.server.run_test_cmd.replace('$$', job_file)
            self.server.jobs = self.server.jobs[1:]
        else:
            self.server.workersLock.release()
            self.server.serverCond.acquire()
            self.server.serverCond.notify()
            self.server.serverCond.release()
            break
        self.server.workersLock.release()
        try:
            results = self._test(job_file, job)
        except ValueError:
            print 'Error while parsing test output'
            continue
        rep = RepoManager()
        rep.addResult(self.server.taskName, results)
        print 'TEST RESULTS'
        print 'COUNT: %d FAILURES: %d, ERRORS: %d LOG:' % (
            results.tests_count, results.failures, results.errors)
        print '/-------------\\'
        print results.log
        print '\\-------------/'
    # end of work
    self.data = pakiet()
    self.data.typ = pakiet.EXIT
    self.buffer.send(self.data)
    self.close()
def command_install(self):
    script_common.command_install(self)
    # Fetch & install toolset..
    utils.log("Install toolset: %s" % (self.toolset))
    if self.toolset:
        self.command_install_toolset(self.toolset)
def handle_incoming_connection(self, connection: socket, address: tuple,
                               handler: Thread) -> None:
    """
    Handles session
    :param connection: socket with established connection
    :param address: address of client
    :param handler: handler object
    """
    # create variable for storing id
    session_id = 0
    # handle requests
    while self.sessions[handler]:
        try:
            # receive data
            data = connection.recv(DATAGRAM_SIZE)
            answer = None  # will hold the encoded reply (bytes)
            # noinspection PyBroadException
            try:
                # decode data
                datagram = Datagram.from_bytes(data)
                # utils.log('received: ' + str(datagram))
                if datagram.mode == Mode.CONNECT:
                    answer, session_id = self.__connect(address)
                    self.results_storage[session_id] = {}
                elif datagram.session_id == session_id:
                    if datagram.mode == Mode.IS_ALIVE:
                        answer = self.__is_alive(datagram.session_id, handler)
                    elif datagram.mode == Mode.DISCONNECT:
                        answer = self.__disconnect(datagram.session_id,
                                                   address)
                        self.sessions[handler] = False
                    elif datagram.mode == Mode.OPERATION:
                        answer = self.__operation(datagram.session_id,
                                                  datagram.operation,
                                                  datagram.a, datagram.b)
                    elif datagram.mode == Mode.QUERY_BY_SESSION_ID:
                        answer = self.__query_by_session_id(
                            session_id, datagram.session_id, connection)
                    elif datagram.mode == Mode.QUERY_BY_RESULT_ID:
                        answer = self.__query_by_result_id(
                            session_id, datagram.session_id,
                            datagram.result_id)
                else:
                    # if authorization didn't succeed
                    answer = self.__error(Error.UNAUTHORISED)
            except (bitstring.ReadError, ValueError, TypeError) as e:
                # if data was unreadable
                utils.log("datagram exception: " + str(e), True)
                answer = self.__error(Error.CANNOT_READ_DATAGRAM, Mode.ERROR,
                                      session_id)
            except Exception as e:
                # if any other exception occurred
                utils.log("exception: " + str(e), True)
                answer = self.__error(Error.INTERNAL_SERVER_ERROR, Mode.ERROR,
                                      session_id)
            finally:
                # send answer to the client
                connection.sendall(answer)
        except (ConnectionAbortedError, ConnectionResetError):
            # if session was closed unsafely
            utils.log('breaking listening for session: ' + str(session_id))
            self.sessions[handler] = False
    # after closing the session, safely close the connection
    connection.close()
    utils.log('session closed: ' + str(session_id))
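# The mode dispatch above is a chain of elif branches; a dict-based dispatch
# table is an equivalent structure that keeps adding new modes cheap. A
# minimal sketch with plain functions standing in for the handlers; the mode
# codes below are made up for illustration:
def on_connect(datagram):
    return 'connect'

def on_disconnect(datagram):
    return 'disconnect'

HANDLERS = {1: on_connect, 2: on_disconnect}

def dispatch(mode, datagram, default=lambda d: 'error'):
    return HANDLERS.get(mode, default)(datagram)

print(dispatch(1, None))   # 'connect'
print(dispatch(99, None))  # 'error' -- unknown mode falls through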
def log(self, logStr):
    utils.log("{0} {1}".format(self.logStrPrefix, logStr))
import sys

from data.csv_reader import BatchReader
from data.db import DB
from common.utils import log

BATCH_SIZE = 10000

# read command line args
csvpath = sys.argv[1]
dbpath = sys.argv[2]

# read csv file and insert into db
with DB(dbpath) as prices_db:
    prices_db.reset()
    log('[import] opening file..')
    with BatchReader(csvpath, batchSize=BATCH_SIZE) as reader:
        total = 0
        log('[import] reading in {0} batches.. '.format(BATCH_SIZE))
        for batch in reader:
            total += len(batch)
            # write to db when we have a big enough batch
            prices_db.insert_many(batch)
    log('[import] imported {0} rows'.format(total))
def close(self):
    log('Closing FTP')
    self.ftp.close_all()
if __name__ == "__main__":
    if len(sys.argv) != 2:
        utils.log("Usage: ./survey <modem_tty>")
        sys.exit(-1)
    utils.log("#########################")
    utils.log("Beginning cellular survey.")
    utils.log("#########################")
    modem_tty = sys.argv[1]
    scan_loop(modem_tty)
def insert_scans(self, scan_uuids):
    '''This parses each of the scans and inserts them into the db'''
    utils.log("Inserting Scans into the database...")
    (gsm_measurement_tmp_id, bcch_measurement_tmp_id) = self.get_tmp_ids()
    cur = self.con.cursor()

    def null_if_none(value):
        '''Map missing values to the "NULL" placeholder used in the queries'''
        return value if value is not None else "NULL"

    for (scan, uuid, version) in scan_uuids:
        # Create a gsm_scan_row
        gsm_scan = scan.get_gsm()
        freq_range = gsm_scan.get_freq_range()
        # freq_low, freq_high, error, jammed
        gsm_scan_row = (freq_range[0], freq_range[1],
                        gsm_scan.get_error(), gsm_scan.get_jammed(),)
        # Create the before and after gps_scan_row
        gps_before = scan.get_gps_before()
        gps_after = scan.get_gps_after()

        def generate_gps_tuple(gps_data):
            '''Use this function to generate the gps tuples from the data dict'''
            return (
                gps_data.get('mode', None),
                gps_data.get('time', None),
                gps_data.get('ept', None),
                gps_data.get('lat', None),
                gps_data.get('lon', None),
                gps_data.get('alt', None),
                gps_data.get('epx', None),
                gps_data.get('epy', None),
                gps_data.get('epv', None),
                gps_data.get('track', None),
                gps_data.get('speed', None),
                gps_data.get('climb', None),
                gps_data.get('epd', None),
                gps_data.get('eps', None),
                gps_data.get('edc', None),
            )

        gps_before_row = generate_gps_tuple(gps_before.get_gps_data())
        gps_after_row = generate_gps_tuple(gps_after.get_gps_data())
        # Now insert the gsm_scan, gps_before_scan, and gps_after_scan
        cur.execute(GSM_SCAN_INSERT, gsm_scan_row)
        gsm_scan_id = cur.fetchone()[0]
        cur.execute(GPS_SCAN_INSERT, gps_before_row)
        gps_before_id = cur.fetchone()[0]
        cur.execute(GPS_SCAN_INSERT, gps_after_row)
        gps_after_id = cur.fetchone()[0]
        # Using the ids we just collected, insert the scan
        scan_row = (gsm_scan_id, gps_before_id, gps_after_id,
                    scan.get_sensor_name(), uuid, version,
                    scan.get_high_quality(),)
        cur.execute(SCAN_INSERT, scan_row)
        # This is to enable ids on bulk insertion of bcch_measurements and
        # gsm_measurements
        gsm_measurement_rows = []
        # This is a bit of a hack, but we are inserting
        # {'rowlist': bcch_measurement_rowlist,
        #  'gsm_measurement_tmp_id': gsm_measurement_tmp_id}
        # so that we know what to reference
        bcch_measurement_rowlist_dicts = []
        # Finally we need to store the arfcn and channel lists
        arfcn_list_rowlist_dicts = []
        channel_list_rowlist_dicts = []
        # Used as a placeholder for the foreign key
        tmp_id = 0
        # We need to know the range of gsm_measurement_ids to query for the
        # inserted points later
        gsm_measurement_tmp_id_low = gsm_measurement_tmp_id
        bcch_measurement_tmp_id_low = bcch_measurement_tmp_id
        for meas in gsm_scan.measurement_cursor():
            gsm_measurement_row = (gsm_scan_id, meas.get_arfcn(),
                                   meas.get_rx_lev(), gsm_measurement_tmp_id)
            # Update the gsm_measurement_row
            gsm_measurement_rows.append(gsm_measurement_row)
            # If it is a bcch_measurement then insert those fields
            if isinstance(meas, Bcch_Measurement):
                (arfcns, num_arfcn) = meas.get_arfcn_lst()
                (channels, num_channels) = meas.get_channel_lst()
                bcch_data = meas.get_data()
                # Sadly this must be a list because tuples are immutable;
                # thus it has the name bcch_measurement_rowlist. The field
                # order must match BCCH_MEASUREMENT_INSERT.
                bcch_measurement_rowlist = (
                    [tmp_id] +
                    [null_if_none(bcch_data[key]) for key in
                     ('bsic', 'ber', 'mcc', 'mnc', 'lac', 'cell_id',
                      'cell_status')] +
                    [num_arfcn, num_channels] +
                    [null_if_none(bcch_data[key]) for key in
                     ('pbcch', 'nom', 'rac', 'spgc', 'pat', 'nco', 't3168',
                      't3192', 'drxmax', 'ctrl_ack', 'bscvmax', 'alpha',
                      'pc_meas_ch', 'mstxpwr', 'rxaccmin', 'croffset',
                      'penaltyt', 't3212', 'crh')] +
                    [bcch_measurement_tmp_id])
                # We want to store the bcch_measurement_rowlist alongside the
                # gsm_measurement_tmp_id so it is easy to look up
                bcch_measurement_rowlist_dicts.append(
                    {'rowlist': bcch_measurement_rowlist,
                     'gsm_measurement_tmp_id': gsm_measurement_tmp_id})
                # Finally insert the arfcns and channels
                for arfcn in arfcns:
                    arfcn_list_rowlist = [tmp_id, arfcn]
                    arfcn_list_rowlist_dicts.append(
                        {'rowlist': arfcn_list_rowlist,
                         'bcch_measurement_tmp_id': bcch_measurement_tmp_id})
                for channel in channels:
                    channel_list_rowlist = [tmp_id, channel]
                    channel_list_rowlist_dicts.append(
                        {'rowlist': channel_list_rowlist,
                         'bcch_measurement_tmp_id': bcch_measurement_tmp_id})
                # Now update the bcch_measurement_tmp_id
                bcch_measurement_tmp_id += 1
            # Now update the gsm_measurement_tmp_id
            gsm_measurement_tmp_id += 1
        # Finally do the bulk inserts if there were measurements
        if len(gsm_measurement_rows) > 0:
            # First do an insert with the gsm_measurements
            gsm_measurement_query = GSM_MEASUREMENT_INSERT
            gsm_measurement_query += ','.join(
                [str(g) for g in gsm_measurement_rows])
            gsm_measurement_query += ";"
            cur.execute(gsm_measurement_query)
            # Only if there is a bcch_measurement do we need to insert
            # into the bcch_measurement table
            if len(bcch_measurement_rowlist_dicts) > 0:
                # Grab the id associated with each gsm_measurement.
                # Note that the gsm_measurement_tmp_id is one greater than
                # the value we are searching for.
                # Note: index 0: id and index 1: tmp_id
                cur.execute(
                    '''SELECT GM.id, GM.tmp_id
                       FROM Gsm_Measurement GM
                       WHERE GM.tmp_id >= %s and GM.tmp_id < %s
                    ''', (gsm_measurement_tmp_id_low, gsm_measurement_tmp_id,))
                # This is a dict with {tmp_id: real_id}
                gsm_measurement_id_lookup = {}
                for ids in cur.fetchall():
                    gsm_measurement_id_lookup[ids[1]] = ids[0]
                # Now actually update the bcch_measurement foreign-key rows
                bcch_measurement_rows = []
                for bcch_meas_dict in bcch_measurement_rowlist_dicts:
                    # Grab the real id
                    tmp_id = bcch_meas_dict['gsm_measurement_tmp_id']
                    real_id = gsm_measurement_id_lookup[tmp_id]
                    # Now grab the row and update the first entry to the real id
                    bcch_meas_rowlist = bcch_meas_dict['rowlist']
                    bcch_meas_rowlist[0] = real_id
                    bcch_measurement_rows.append(tuple(bcch_meas_rowlist))
                bcch_measurement_query = BCCH_MEASUREMENT_INSERT
                bcch_measurement_query += ','.join(
                    [str(b) for b in bcch_measurement_rows])
                bcch_measurement_query += ";"
                # We need to replace "NULL" with NULL in the query string so
                # we don't insert the string "NULL" instead of an actual NULL
                nullified_bcch_measurement_query = \
                    bcch_measurement_query.replace("'NULL'", "NULL")
                cur.execute(nullified_bcch_measurement_query)
                # We need to build our index lookup for the bcch measurements
                if len(arfcn_list_rowlist_dicts) > 0 or \
                        len(channel_list_rowlist_dicts) > 0:
                    cur.execute(
                        '''SELECT BM.id, BM.tmp_id
                           FROM Bcch_Measurement BM
                           WHERE BM.tmp_id >= %s and BM.tmp_id < %s
                        ''', (bcch_measurement_tmp_id_low,
                              bcch_measurement_tmp_id,))
                    bcch_measurement_id_lookup = {}
                    for ids in cur.fetchall():
                        bcch_measurement_id_lookup[ids[1]] = ids[0]
                    if len(arfcn_list_rowlist_dicts) > 0:
                        arfcn_list_rows = []
                        for arfcn_list_dict in arfcn_list_rowlist_dicts:
                            tmp_id = arfcn_list_dict['bcch_measurement_tmp_id']
                            real_id = bcch_measurement_id_lookup[tmp_id]
                            arfcn_list_rowlist = arfcn_list_dict['rowlist']
                            arfcn_list_rowlist[0] = real_id
                            arfcn_list_rows.append(tuple(arfcn_list_rowlist))
                        arfcn_list_query = ARFCN_LIST_INSERT
                        arfcn_list_query += ','.join(
                            [str(a) for a in arfcn_list_rows])
                        arfcn_list_query += ';'
                        cur.execute(arfcn_list_query)
                    if len(channel_list_rowlist_dicts) > 0:
                        channel_list_rows = []
                        for channel_list_dict in channel_list_rowlist_dicts:
                            tmp_id = channel_list_dict['bcch_measurement_tmp_id']
                            real_id = bcch_measurement_id_lookup[tmp_id]
                            channel_list_rowlist = channel_list_dict['rowlist']
                            channel_list_rowlist[0] = real_id
                            channel_list_rows.append(
                                tuple(channel_list_rowlist))
                        channel_list_query = CHANNEL_LIST_INSERT
                        channel_list_query += ','.join(
                            [str(c) for c in channel_list_rows])
                        channel_list_query += ';'
                        cur.execute(channel_list_query)
        # We want to commit the transaction on every scan that we insert.
        # Each scan should be inserted as an atomic operation.
        self.con.commit()
    utils.log("Done inserting Scans...")
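# insert_scans() wires up foreign keys by tagging each bulk-inserted row with
# a client-side tmp_id, querying the generated ids back by tmp_id range, and
# then rewriting child rows to use the real ids. The same round-trip shrunk to
# a dictionary; there is no database here, and the ids are illustrative:
parent_rows = [('a', 100), ('b', 101)]  # (payload, tmp_id)
generated = {100: 7, 101: 8}            # tmp_id -> real id, as if SELECTed back
child_rows = [[100, 'x'], [101, 'y']]   # first column references parent tmp_id

for row in child_rows:
    row[0] = generated[row[0]]          # swap tmp_id for the real foreign key
print(child_rows)                       # [[7, 'x'], [8, 'y']]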
def scan_loop(modem_tty):
    '''This endlessly loops taking gps and gsm scans and writing them to a db

    This function never terminates until the program stops or there is an
    error.
    '''
    # This will create tables if needed
    (database, gps_scanner, gsm_scanner) = initialize(modem_tty)
    i = 0
    while True:
        i = i + 1
        # Perform the scan
        utils.log("Begin Scan: {:d}".format(i))
        try:
            scan(database, gps_scanner, gsm_scanner)
        except Exception as e:
            utils.log("Exception in Scan...")
            utils.log(str(e))
            # Grab the exception and then print it.
            exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
            traceback.print_exception(exceptionType, exceptionValue,
                                      exceptionTraceback, file=sys.stdout)
            # It is good to close the modem so that it works right the next
            # time the program is run.
            utils.log("Closing modem...")
            gsm_scanner.close()
            utils.log("Closed modem.")
            utils.log("End Scan: {:d}".format(i))
            sys.exit(-1)
        # Sleep between scans
        time.sleep(SCAN_PAUSE)
def close(self): log("Koniec.")