def main(argv=sys.argv): if len(argv) != 5: print """ OBJ to evaluate top-k accuracy Usage %s k predict som Arguments k : int, estimate top-1 to top-K accuracy predict: file, generated by gap_predict som : file, som file. des : file, descriptors, same as input for gap_predict Attention 1. reports are based on SOMs involving only one atom: - considering all types of SOMs - exclude SOM type `6`(O-conjugation) """ % argv[0] sys.exit(1) des = load_des(argv[4]) #key=mol_name, value=[(atom,type),...] k = int(argv[1]) predict = load_predict( argv[2]) #key=name, value=[[site,y],...] which has been sorted actual_all, actual_no6 = load_som(argv[3], des) #key=name, value=[site1,site2,...] print "===report considering all types of SOMs except those with more than one atoms===" report(actual_all, predict, k) print "\n===report excluding SOM type 6 (O-conjugation) and more than one atoms===" report(actual_no6, predict, k)
def measure(self, time_ms):
    """Count for `time_ms` milliseconds and return the counter values.

    Leaves the counters stopped with the time register cleared.
    """
    report("Counters.measure(%s)" % repr(time_ms))
    self.stop()            # make sure nothing is already counting
    self.setTime(time_ms)  # program the counting window
    started = time()
    self.start()
    while self.isBusy():   # poll until the window has elapsed
        sleep(0.1)
    finished = time()
    # Report the wall-clock time the measurement actually took.
    elapsed_ms = int((finished - started) * 1000)
    report("Counters.measure(%s) took %d ms" % (repr(time_ms), elapsed_ms))
    counts = self.getCounts()  # capture values before switching off
    self.stop()
    self.setTime(0)
    return counts
def __init__(self, link, aSwitch, aBusy, aTime, aValues):
    """Bind the control/status registers backing a bank of counters.

    aSwitch/aBusy are toggle addresses, aTime a float address, and
    aValues a sequence of dword addresses, one per counter.
    """
    report(
        "Counters(aSwitch=%s, aBusy=%s, aTime=%s, aValues=%s)"
        % (repr(aSwitch), repr(aBusy), repr(aTime), repr(aValues))
    )
    self.rSwitch = ToggleRegister(link, aSwitch)
    self.rBusy = ToggleRegister(link, aBusy)
    self.rTime = FloatRegister(link, aTime)
    # One value register per configured counter address.
    self.rValues = [DwordRegister(link, addr) for addr in aValues]
def write(self, value):
    # Split `value` into eachWidth-bit parts and write one part per
    # backing register, least-significant part to the first offset
    # (matching the assembly order used by read()).
    nvalue = long(value)
    for offset in self.offsets:
        part = nvalue & self.mask
        nvalue >>= self.eachWidth
        self.link.write(offset, part)
    # All bits must have been consumed, i.e. `value` fits in the
    # configured registers.  NOTE(review): stripped under -O.
    assert nvalue == 0
    # Read back for verification; a mismatch is reported, not raised,
    # and the read-back value is what the caller receives.
    nvalue = self.read()
    if nvalue != value:
        report('addresses [%s], wrote %ld, returned %ld' %
               (', '.join(map(str, self.offsets)), value, nvalue))
    return nvalue
def report(self):
    """Persist collected scan results to OUTPUT_DIR and build the report.

    Writes ip_all.json, cdn_domain.json and internal_domain.json,
    appends takeover findings to domain_takeover.txt, then delegates
    to tools.report for the final report file.
    """
    # Fix: the original passed `open(...)` directly to json.dump, leaking
    # the handles; context managers flush and close deterministically.
    with open(os.path.join(config.OUTPUT_DIR, "ip_all.json"), "w") as f:
        json.dump(self.ip_all, f)
    with open(os.path.join(config.OUTPUT_DIR, "cdn_domain.json"), "w") as f:
        json.dump(list(self.cdn_domain), f)
    with open(os.path.join(config.OUTPUT_DIR, "internal_domain.json"), "w") as f:
        json.dump(list(self.internal_domain), f)
    with open(os.path.join(config.OUTPUT_DIR, 'domain_takeover.txt'), 'a') as f:
        f.write('\n'.join(self.takeover_domain).strip())
    tools.report(self.ip_all, outname=config.REPORT_FILENAME)
def write(self, value):
    """Write a 32-bit value as two 16-bit registers, then verify it.

    Returns the value read back after the write; a mismatch is
    reported, not raised.
    """
    # Pack into bytes as 1 x 32 bit big-endian integer.
    data = struct.pack('>I', value)
    # Fix: a big-endian '>HH' unpack yields the HIGH word first, but the
    # original bound it to `lo`.  Renamed so the names match the values;
    # behavior is unchanged: the high word still goes to offset+0 and
    # the low word to offset+1.
    # NOTE(review): high-word-first is the opposite of the multi-register
    # read() convention elsewhere in this file — confirm against the
    # device protocol.
    hi, lo = struct.unpack('>HH', data)
    self.link.write(self.offset + 0, hi)
    self.link.write(self.offset + 1, lo)
    # Verify written value via read-back.
    nvalue = self.read()
    if nvalue != value:
        report('address %ld dword, wrote %ld, returned %ld' %
               (self.offset, value, nvalue))
    return nvalue
def read(self):
    # Reassemble a wide value from eachWidth-bit registers; the first
    # offset holds the least-significant part (mirrors write()).
    value = 0L
    shift = 0L
    for offset in self.offsets:
        part = self.link.read(offset)
        if (part & self.mask) != part:
            # Hardware returned more bits than one register should hold;
            # reported but not raised — the extra bits still propagate.
            report('address %ld has value %ld too large for %ld bits' %
                   (offset, part, self.eachWidth))
        part <<= shift
        shift += self.eachWidth
        value |= part
    return value
def write(self, offset, value):
    """Write `value` to one holding register and return the echoed value.

    A mismatch between the echoed and requested value is reported,
    not raised.
    """
    # The device addresses its registers in a 40000-based space.
    offset -= 40000
    assert offset is not None
    assert offset >= 0
    assert value is not None
    # Build and execute the single-register write request.
    request = WriteSingleRegisterRequest(offset, value)
    response = self.conn.execute(request)
    assert response is not None
    assert response.value is not None
    echoed = response.value
    if echoed != value:
        report('address %d, wrote %d, returned %d' % (offset, value, echoed))
    return echoed
def main(argv=sys.argv):
    """Command-line entry point: evaluate top-1..top-K SOM prediction accuracy.

    Required options: -k int, --pred file, --som file, --des file.
    Optional: --delta float (range filter for descriptor X values).
    """
    if len(argv) < 9:
        print """
OBJ
  to evaluate top-k accuracy
Usage
  %s [options]
[options]
  -k int: estimate top-1 to top-K accuracy
  --pred file: the one generated by gap_predict
  --som file: each line should be `name\\tatom1\\tatom2...`
  --des file: descriptors, same as input for gap_predict
  --delta float: if given, samples whose X is out of range (<-delta or >1+delta) will not be considered.
Attention
  1. reports are based on SOMs involving only one atom:
     - considering all types of SOMs
     - exclude SOM type `6`(O-conjugation)
""" % argv[0]
        sys.exit(1)
    # All four of -k/--pred/--som/--des must be supplied (asserted below).
    k = None
    pred_file = None
    som_file = None
    des_file = None
    delta = None
    options, args = getopt(argv[1:], 'k:', ['pred=', 'som=', 'des=', 'delta='])
    for opt, val in options:
        if opt == '-k':
            k = int(val)
        elif opt == '--pred':
            pred_file = val
        elif opt == '--som':
            som_file = val
        elif opt == '--des':
            des_file = val
        elif opt == '--delta':
            delta = float(val)
            # Negative tolerances make no sense.
            assert delta >= 0
        else:
            print "Error: invalid option", opt
            sys.exit(1)
    # No bare positional arguments are accepted.
    assert len(args) == 0
    assert k is not None and pred_file is not None and som_file is not None and des_file is not None
    predict = load_predict(pred_file)  # key=name, value=[[site,y],...] which has been sorted
    print "totally %d samples being predicted with atom contributions" % len(predict.keys())
    # Descriptors restricted to predicted molecules, optionally delta-filtered.
    des = load_des(des_file, predict.keys(), delta)  # key=mol_name, value=[(atom,type),...]
    print "totally %d samples being loaded from %s" % (len(des.keys()), des_file)
    actual_all, actual_no6 = load_som(som_file, des)  # key=name, value=[site1,site2,...]
    print "len(actual_all.keys()):", len(actual_all.keys())
    print "len(actual_no6.keys()):", len(actual_no6.keys())
    print "===report considering all types of SOMs except those with more than one atoms==="
    report(actual_all, predict, k)
    print "\n===report excluding SOM type 6 (O-conjugation) and more than one atoms==="
    report(actual_no6, predict, k)
# Collect non-CDN domains and group them by resolved IP.
# (`cur`, `conn`, `ip_all`, `cdn_domain`, `internal_domain` are defined
# above this chunk.)
cur.execute("SELECT * FROM domains WHERE cdn=0")
rows = cur.fetchall()
for row in rows:
    domain, ip, cname, cdn, internal = row
    if internal:
        internal_domain.add(domain)
        continue
    # First sighting of this IP: create its aggregate record.
    if not ip_all.get(ip, None):
        ip_all[ip] = {'domain': [], 'ports': [], 'service': []}
    if domain not in ip_all[ip]['domain']:
        ip_all[ip]['domain'].append(domain)
conn.close()
# Attach open-port scan results to each IP.
conn = helper.get_ports_conn()
cur = conn.cursor()
cur.execute("SELECT * FROM open")
rows = cur.fetchall()
for row in rows:
    ip, port, service, comment = row
    # NOTE(review): raises KeyError if a scanned IP never appeared in the
    # domains table — confirm the port scanner only targets known IPs.
    ip_all[ip]['ports'].append(port)
    ip_all[ip]['service'].append(service)
conn.close()
# Persist the aggregates and build the final report.
# NOTE(review): these file handles are never explicitly closed;
# consider `with open(...)`.
json.dump(ip_all, open(os.path.join(config.OUTPUT_DIR, "ip_all.json"), "w"))
json.dump(list(cdn_domain), open(os.path.join(config.OUTPUT_DIR, "cdn_domain.json"), "w"))
json.dump(list(internal_domain), open(os.path.join(config.OUTPUT_DIR, "internal_domain.json"), "w"))
tools.report(ip_all, outname=config.REPORT_FILENAME)
def start(self):
    """Switch the counters on by setting the switch register."""
    report("Counters.start()")
    self.rSwitch.write(True)
def __init__(self, link, aValue):
    """Bind the float register holding this analog output's value."""
    report("Analog(aValue=%s)" % repr(aValue))
    self.rValue = FloatRegister(link, aValue)
def setTime(self, time_ms):
    """Program the counting window, truncating `time_ms` to whole ms."""
    report("Counters.setTime(%s)" % repr(time_ms))
    # Truncate to an integer millisecond count before storing as float.
    whole_ms = int(time_ms)
    self.rTime.write(float(whole_ms))
def disable(self):
    """Open the relay by clearing its switch register."""
    report("Relay.disable()")
    self.rSwitch.write(False)
def stop(self):
    """Switch the counters off by clearing the switch register."""
    report("Counters.stop()")
    self.rSwitch.write(False)
def enable(self):
    """Close the relay by setting its switch register."""
    report("Relay.enable()")
    self.rSwitch.write(True)
def __init__(self, link, aSwitch):
    """Bind the toggle register that switches this relay."""
    report("Relay(aSwitch=%s)" % repr(aSwitch))
    self.rSwitch = ToggleRegister(link, aSwitch)
def isBusy(self):
    """Return the busy flag: truthy while a counting window is running."""
    # Fix (naming only, behavior identical): the original bound the value
    # read from rBusy to a local called `done`, but that value is both
    # reported and returned as the isBusy() result — it is a busy
    # indicator, not a completion flag.
    busy = self.rBusy.read()
    report("Counters.isBusy() -> %s" % repr(busy))
    return busy
import tools


def parse():
    """Return the year given as argv[1], defaulting to 2018.

    Falls back when no argument is supplied or it is not an integer.
    """
    try:
        year = int(sys.argv[1])
    except (IndexError, ValueError):
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only missing/non-integer argv is expected.
        year = 2018
    return year


@tools.chrono
def read(name):
    """Load data/<name>.csv (space-separated, Latin-1) as a DataFrame."""
    return pd.read_csv(f"data/{name}.csv", sep=" ", encoding="ISO-8859-1")


def main():
    """Print summary statistics for the selected year's consumption data."""
    year = parse()
    df = read(f"consommation_{year}")
    print(df.describe()['Consommation'])


if __name__ == "__main__":
    try:
        main()
    finally:
        # Always emit timing statistics, even when main() raised.
        times = tools.Clock.report()
        message = "{key:<20} (x{value[0]:<5}): {value[1]:.3f} s"
        tools.report("Clock", times, message)
def set(self, value):
    """Write `value` (coerced to float) to the analog output register."""
    report("Analog.set(%s)" % repr(value))
    self.rValue.write(float(value))
    return g  # NOTE(review): tail of a function whose def lies above this chunk.


if __name__ == '__main__':
    freeze_support()
    pool = Pool(processes=qtd_process)
    # Evolve the scheduling population in parallel.
    population = Population(table, spots, candidates, list(chromosomes),
                            pool=pool, max_cs_size=maxi, workers=qtd_process)
    population = iterate(population)
    # Write one schedule file per resulting chromosome into a timestamped folder.
    result_folder = f"Result date: {datetime.now().strftime('%Y-%m-%d__%H_%M_%S')}"
    os.mkdir(result_folder)
    os.chdir(result_folder)
    for i, cs in enumerate(population.chromosomes):
        spot_descr = []
        descr = ''
        for x in raw_table[1:]:
            # Rows starting a new block carry `...!description`; the last
            # seen description is carried forward to subsequent spots.
            # NOTE(review): indentation reconstructed — confirm append is
            # per-row, not only on '!' rows.
            if '!' in x[0]:
                descr = x[0].split('!')[1]
            spot_descr.append(descr)
        with open(f'Schedule {i + 1} (Fitness:{cs.fitness:.2f}).txt', 'w') as output:
            tools.report(cs, table, spots, candidates, f=output,
                         spot_descr=spot_descr)
def getCounts(self):
    """Read and return the current value of every counter register."""
    counts = [register.read() for register in self.rValues]
    report("Counters.getCounts() -> %s" % repr(counts))
    return counts