def charex_all(onepass=False, force=False, dryrun=False, debug=False):
    """
    Retrieve any record in the database, which doesn't have calculated
    characteristics by this charex.py yet, and pass them to charex()

    onepass: scan the table once and return (otherwise loop forever)
    force:   recalculate even records that already have characteristics
    dryrun:  calculate but do not write results back to the database
    debug:   pass the debug flag down to charex()
    """
    global config
    from lib.fileio import connect_database
    from lib.config import BeaconConfigParser
    import errno
    import time

    config = BeaconConfigParser()

    conn = connect_database()
    while True:
        c = conn.cursor()

        # If specified 'force', even the record has characteristics parameters,
        # fetch any records for update.
        if force:
            cond = ''
        else:
            cond = 'WHERE char1_max_sn IS NULL'

        c.execute('''SELECT datetime, offset_ms, bfo_offset_hz
            FROM received
            %s
            ORDER BY datetime''' % (cond))

        for row in c.fetchall():
            try:
                sigdata, samplerate = read_sigdata(datetime_sec=row[0])
                paramset = charex(
                    sigdata, samplerate, row[1], row[2], debug=debug)
                if not dryrun:
                    paramset.updatedb(conn, row[0])
            except IOError as err:
                # Compare errno instead of the strerror text: the message
                # string is locale-dependent and fragile to match against.
                if err.errno == errno.ENOENT:
                    if debug:
                        pass    # eprint('Signal file not found.  Skipped')
                else:
                    raise

        if onepass:
            break
        else:
            # For continuous passes, 'force fetch' is NOT required
            force = False
            # To let rest database, wait for a short time period
            time.sleep(0.5)

    conn.close()
def bayes_all(onepass=False, limit=1000, force=False, debug=False):
    """
    Repeatedly scan the 'received' table for records which already have
    signal characteristics but no Bayesian Inference result yet, run
    bayes() on each, and store the resulting probability back into the
    record.

    onepass: return as soon as a scan finds no rows to process
    limit:   maximum number of rows fetched per scan
    force:   recalculate even records that already have bayes1_prob
    debug:   pass the debug flag down to bayes()
    """
    from lib.fileio import connect_database
    import time

    bi = BayesInference()
    conn = connect_database()

    while True:
        cur = conn.cursor()

        cond = 'WHERE char1_max_sn IS NOT NULL'
        # If specified 'force', even the record has characteristics parameters,
        # fetch any records for update.
        if not force:
            cond += '\nAND bayes1_prob IS NULL'

        # XXX For testing purpose
        # cond += '\nAND datetime >= 1509580799'

        cur.execute('''SELECT datetime, freq_khz, char1_max_sn,
            char1_best_pos_hz, char1_total_ct, char1_bg_pos_hz
            FROM received
            %s
            ORDER BY datetime
            LIMIT %d''' % (cond, limit))

        n_rows = 0
        for (rec_time, rec_freq_khz, max_sn, best_pos_hz, total_ct,
                bg_pos_hz) in cur.fetchall():
            pprob = bayes(bi, rec_time, rec_freq_khz, max_sn, best_pos_hz,
                          total_ct, bg_pos_hz, debug=debug)
            n_rows += 1
            cur.execute('''UPDATE received SET bayes1_prob = ?
                WHERE datetime = ?''', (pprob, rec_time))
        conn.commit()

        if onepass and n_rows == 0:
            break

        # For continuous passes, 'force fetch' is NOT required
        force = False
        # To let rest database, wait for a short time period
        time.sleep(0.5)

    conn.close()
def register_db(datestr, timestr, mhz, ignore_err=False, debug=False): """ Register record information to database and return True. Return false if a duplicate record was found. """ from datetime import datetime from lib.config import BeaconConfigParser from lib.fileio import connect_database from lib.ibp import mhz_to_freq_khz from sqlite3 import IntegrityError # Convert datestr and timestr to seconds from epoch datetime_utc = datetime.strptime(datestr + ' ' + timestr, '%Y%m%d %H%M%S') seconds_from_epoch = int( (datetime_utc - datetime.utcfromtimestamp(0)).total_seconds()) if debug: print "seconds_from_epoch:", seconds_from_epoch if seconds_from_epoch % 10 != 0: raise Exception('seconds_from_epoch is not multiple of 10 seconds') # Obtain parameters from configuration config = BeaconConfigParser() conn = connect_database() c = conn.cursor() err_occurred = False try: c.execute( '''INSERT INTO received(datetime, offset_ms, freq_khz, bfo_offset_hz, recorder) VALUES(?,?,?,?,?)''', (seconds_from_epoch, config.getint('Migration', 'offset_ms'), mhz_to_freq_khz(mhz), config.getint('Migration', 'bfo_offset_hz'), config.get('Migration', 'recorder'))) conn.commit() except IntegrityError as err: if ignore_err and \ err[0] == 'UNIQUE constraint failed: received.datetime': err_occurred = True else: raise conn.close() return not err_occurred
def dumpdb(debug=False): import time conn = connect_database() c = conn.cursor() c.execute('''SELECT datetime, offset_ms, freq_khz, bfo_offset_hz, recorder FROM received ORDER BY datetime''') print 'UTC date time offset_ms freq_khz bfo_offset_hz recorder' print '-' * 79 for row in c.fetchall(): print '%s %9d %8d %13d %s' % (time.strftime( '%Y-%m-%d %H:%M:%S', time.gmtime( row[0])), row[1], row[2], row[3], row[4]) conn.close()
def init_db(destroy='no', preserve=False, debug=False):
    """
    Initialize the database: optionally remove the existing database file,
    then (re)create the 'received' table from SCHEMA_RECEIVED.

    destroy:  must be the literal string 'yes' as a safety interlock;
              anything else raises Exception
    preserve: if True, keep the existing database file instead of
              deleting it first
    """
    from lib.config import BeaconConfigParser
    import errno
    import os

    # Safety interlock: refuse to run unless explicitly confirmed
    if destroy != 'yes':
        raise Exception('Not accepted by "yes"')

    if not preserve:
        try:
            os.remove(BeaconConfigParser().getpath('Common', 'database'))
        except OSError as err:
            # A missing file is fine (nothing to delete); compare errno
            # rather than the locale-dependent strerror text.
            if err.errno != errno.ENOENT:
                raise

    conn = connect_database()
    c = conn.cursor()
    c.execute(SCHEMA_RECEIVED)
    conn.commit()
    conn.close()
    eprint('Database is initialized and set up.')
def register_db(datetime_sec): """ Register record information to database and return True. Return false if a duplicate record was found. """ from lib.fileio import connect_database conn = connect_database() c = conn.cursor() print 'register_db: %d' % (datetime_sec) c.execute( '''INSERT INTO received(datetime, offset_ms, freq_khz, bfo_offset_hz, recorder) VALUES(?,?,?,?,?)''', (datetime_sec, OFFSET_MS, datetime_sec_to_freq_khz(datetime_sec), config.getint('SignalRecorder', 'bfo_offset_hz'), config.get('SignalRecorder', 'recorder'))) conn.commit() conn.close()
def biashist_mig_all(ignore_err=False, debug=False): from lib.config import BeaconConfigParser from lib.fileio import connect_database from fnmatch import fnmatch import os dbdir = BeaconConfigParser().getpath('Migration', 'dbdir') recorder = BeaconConfigParser().get('Migration', 'recorder') offset_ms = BeaconConfigParser().getint('Migration', 'offset_ms') bfo_offset_hz = \ BeaconConfigParser().getint('Migration', 'bfo_offset_hz') conn = connect_database() for file in sorted(os.listdir(dbdir)): if fnmatch(file, 'ibprec_*.log'): if debug: print "Migrating", file biashist_mig_band(conn, recorder, offset_ms, bfo_offset_hz, os.path.join(dbdir, file), ignore_err=ignore_err) conn.close()
def cleanup(debug=False): """ Search signal files and remove it if it's old """ from datetime import datetime from lib.config import BeaconConfigParser from lib.fileio import connect_database import os import re config = BeaconConfigParser() files_path = config.getpath('Signal', 'dir') timelimit_sec = eval(config.get('Cleaner', 'timelimit_sec')) if debug: print 'timelimit_sec = %d' % (timelimit_sec) conn = connect_database() c = conn.cursor() for date_dir in os.listdir(files_path): if not re.match(r'[0-9]{8}$', date_dir): continue # Now found a date directory date_dir_path = os.path.join(files_path, date_dir) for file in os.listdir(date_dir_path): m = re.match(r'([0-9]{6})\.wav$', file) if not m: continue time_str = m.group(1) datetime_sec = int(( datetime.strptime(date_dir + ' ' + time_str, '%Y%m%d %H%M%S') - datetime.utcfromtimestamp(0)).total_seconds()) # print date_dir, time_str, datetime_sec c.execute( '''SELECT datetime FROM received WHERE datetime == ? AND char1_max_sn IS NOT NULL''', (datetime_sec, )) row = c.fetchone() # If the signal files hasn't have characteristics in database, # it must be skipped. if row is None: continue # If the file is too old, now we can remove the signal file # print row sec_diff = int( (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()) - datetime_sec # print sec_diff if sec_diff > timelimit_sec: rm_filename = os.path.join(files_path, date_dir, file) if debug: print 'Removing file %s' % (rm_filename) os.remove(rm_filename) # If the date directory is empty, remove the directory if os.listdir(date_dir_path) == []: if debug: print 'Removing directory %s' % (date_dir_path) os.rmdir(date_dir_path) return
def gen_graph(datestr, outfile_name, format=None, debug=False):
    """
    Generate graph
    outfile_name can be StringIO. In the case, format must be 'PNG' or 'GIF'
    if outfile_name is string (or file name), format will be ignored

    datestr: UTC day to plot, in '%Y%m%d' form; rows for that whole day
    (inclusive, in 10-second steps) are read from the database.
    """
    from datetime import datetime, timedelta
    from lib.fileio import connect_database
    from lib.ibp import freq_khz_to_mhz, get_slot
    import re

    # Canvas size: 96 columns wide (presumably 15-minute slots per day)
    # and 18 * 5 cells tall.  lborder/rborder/tborder/bborder/cwidth/
    # cheight/sskip are module-level layout constants not visible here.
    im = gd.image((lborder + rborder + cwidth * 96 - 1, \
        tborder + bborder + 18 * (cheight * 5 + sskip) - sskip))
    colidx = {}
    iminit(im, colidx)

    conn = connect_database()
    c = conn.cursor()

    # Convert a naive datetime to seconds from the UNIX epoch
    def datetime_to_sec(t):
        return int((t - datetime.utcfromtimestamp(0)).total_seconds())

    # Day range: [00:00:00, 23:59:50] — the last 10-second record slot
    timefrom = datetime.strptime(datestr, '%Y%m%d')
    timeto = timefrom + timedelta(days=1) - timedelta(seconds=10)

    if debug:
        print timefrom, timeto
        print datetime_to_sec(timefrom), datetime_to_sec(timeto)

    c.execute(
        '''SELECT datetime, freq_khz, char1_max_sn, char1_best_pos_hz,
            bayes1_prob
            FROM received
            WHERE datetime >= ?
            AND datetime <= ?
            AND bayes1_prob IS NOT NULL''',
        (datetime_to_sec(timefrom), datetime_to_sec(timeto)))

    for row in c.fetchall():
        if debug:
            print row
        # Column index: 15-minute slot within the day (Python 2 integer
        # division)
        tindex = (row[0] % (3600 * 24)) / (15 * 60)
        # Band row index, highest band on top; KeyError on any other band
        bindex = {14100: 4, 18110: 3, 21150: 2, 24930: 1, 28200: 0}[row[1]]
        band = freq_khz_to_mhz(row[1])
        # Transmitting-station slot within the band schedule
        sindex = get_slot(row[0], band)
        sn = row[2]
        # Frequency bias normalized by band (Hz per MHz)
        bias = float(row[3]) / band
        pp = row[4]
        # Bayesian probability >= 0.5 counts as a reception
        found = (pp >= 0.5)
        if found:
            # print "found", pp
            imputmark(im, tindex, bindex, sindex,
                colidx[getindex(sn, pp, bias)])
        else:
            # 'nosig' color is a module-level constant — TODO confirm
            imputmark(im, tindex, bindex, sindex, nosig)

    if type(outfile_name) is not str:
        # File-like destination (e.g. StringIO): 'format' selects the
        # encoder, and the buffer is rewound for the caller to read
        import StringIO
        if format == 'PNG':
            writer = im.writePng
        elif format == 'GIF':
            writer = im.writeGif
        else:
            raise Exception('Unknown output file format')
        fimg = outfile_name
        writer(fimg)
        fimg.seek(0)
    else:
        # File name destination: encoder chosen by extension,
        # case-insensitively; 'format' argument is ignored
        if re.search('\.png$', outfile_name, flags=re.IGNORECASE):
            writer = im.writePng
        elif re.search('\.gif$', outfile_name, flags=re.IGNORECASE):
            writer = im.writeGif
        else:
            raise Exception('Unknown output file format')
        fimg = open(outfile_name, "wb")
        writer(fimg)
        fimg.close()
def biashist(datetime_sec, freq_khz):
    """
    Return statistical information about the beacon transmitting station.
    By specifying received time (seconds from UNIX epoch) and received
    freq. in kHz, check database, and return average frequency bias
    (expected exact frequency in Hz) and standard deviation.
    """
    from lib.ibp import Station
    from lib.fileio import connect_database

    # Cache the Station().identify_station bound method on the function
    # object so the Station instance is constructed only once per process
    if not hasattr(biashist, 'identify'):
        biashist.identify = Station().identify_station

    # Identify transmitting station by time and band
    timeslot_in_sched, effective_time_sec, station = \
        biashist.identify(datetime_sec, freq_khz)
    # print '<<<', timeslot_in_sched, effective_time_sec, station

    # valid_sec is some days before the datetime_sec
    # Required not to obtain database records which are too old
    valid_sec = datetime_sec - VALID_THRU
    # print datetime_sec, valid_sec

    conn = connect_database()
    try:
        c = conn.cursor()

        # The following conditions came from Monitor-1's genhist() in
        # bin/extfeatures and also bayes/biashist
        c.execute('''SELECT datetime, freq_khz, char1_max_sn,
            char1_best_pos_hz, char1_total_ct, char1_bg_pos_hz
            FROM received
            WHERE datetime < ?
            AND datetime >= ?
            AND (char1_best_pos_hz - char1_bg_pos_hz)
                * (char1_best_pos_hz - char1_bg_pos_hz) > 4
            AND char1_total_ct >= 3
            ORDER BY datetime''', (datetime_sec, valid_sec))

        # Search candidates and calculate statistics
        stat = BiasHistStatistics()
        for row in c.fetchall():
            candidate_datetime = row[0]
            candidate_freq_khz = row[1]
            candidate_station = \
                biashist.identify(candidate_datetime, candidate_freq_khz)
            # print '???', row, candidate_station

            # Filter stations.  Listening station and stations on the
            # database must have the same transmitting time slot and
            # station name (or transmitter).
            if timeslot_in_sched != candidate_station[0]:
                continue    # different slot
            if effective_time_sec != candidate_station[1]:
                continue    # different transmitter

            # Now found a true candidate
            passed_sec = datetime_sec - candidate_datetime
            # print '!!!', passed_sec, row, candidate_station
            sn = row[2]
            bias = row[3]
            ct = row[4]
            stat.add_params(passed_sec, freq_khz, candidate_freq_khz,
                            sn, bias, ct)
    finally:
        # Fix: the original version leaked the database connection
        conn.close()

    # print stat.hist
    return stat.result()