def genSim(verifyMethod, dut_cl, data, *args, **kwargs):
    """ Generates a Simulation Object """
    # Instantiate the DUT; wrap it in traceSignals only when kwargs['trace']
    # is truthy so a VCD waveform trace is produced on demand.
    dut = traceSignals(processor, *dut_cl.args, **dut_cl.kwargs) if kwargs.get(
        'trace', False) else processor(*dut_cl.args, **dut_cl.kwargs)

    # Free-running clock generator; half-period from kwargs['clkfreq'] (default 1).
    @always(delay(kwargs.get('clkfreq', 1)))
    def clkGen():
        #time.sleep(0.05)
        dut_cl.clk.next = not dut_cl.clk

    @instance
    def stimulus():
        # Hold reset low for three clock periods, release it, then hand
        # control to the caller-supplied verification coroutine.
        dut_cl.reset.next = False
        yield delay(3 * kwargs.get('clkfreq', 1))
        dut_cl.reset.next = True
        yield verifyMethod(dut_cl, dut)
        raise StopSimulation

    # Memory-mapped peripherals wired to the DUT's bus signals; the ROM is
    # preloaded with the caller-provided program `data`.
    rom = pseudorom(dut_cl.romrden, dut_cl.memoryaddr, dut_cl.memoryin,
                    mem=data)
    ram = pseudoram(dut_cl.clk, dut_cl.ramwren, dut_cl.ramrden,
                    dut_cl.memoryaddr, dut_cl.memoryout, dut_cl.memoryin)
    ff = fifo(dut_cl.clk, dut_cl.fData, dut_cl.fre, dut_cl.fwe,
              dut_cl.fempty, dut_cl.ffull, dut_cl.fQ)
    return Simulation(dut, clkGen, stimulus, rom, ram, ff, *args)
def slot_test_Classifiers_on_testset(self):
    """Train the classifiers and evaluate them on the test set."""
    # Lazily create the processor on first use (attribute starts out as False).
    if self.processor == False:
        self.processor = processor.processor(self.config)
    # Persist current state first so the processor can load it from disk.
    self.slot_save()
    self.processor.load_data(self.current_dataset.id)
    self.processor.train_and_save_Classifiers()
    self.processor.test_classifiers_on_testset()
def slot_display_intensity(self):
    """Toggle the draw widget between the intensity image and the camera image."""
    if self.display_mode == 'intensities':
        # Already showing intensities -> fall back to the normal image.
        self.display_mode = 'image'
        self.draw_widget.set_image(self.scans_database.get_path() + '/' +
                                   self.current_dataset.image_filename)
    else:
        if self.processor == False:
            self.processor = processor.processor(self.config)
        # Reset any cached ground-plane fit before reprocessing.
        self.current_dataset.ground_plane_normal = ''
        self.current_dataset.ground_plane_three_points = ''
        self.slot_save()
        self.processor.load_data(self.current_dataset.id)
        self.processor.process_intensities()
        intensity_file = self.processor.save_intensity_image(self.current_dataset.id)
        self.display_mode = 'intensities'
        self.draw_widget.set_image(intensity_file)
def save_processor(self, master, name, speed):
    """Create a processor from the dialog fields, register it, show it in
    the tree view, and close the dialog.

    master: the Tk toplevel to destroy when done.
    name, speed: Tk variables (read via .get()).
    """
    procid = self.generate_processor_id()
    proc = processor(procid, name.get(), speed.get())
    Processors.add_processor(proc)
    # Fix (idiom): the original repeated the
    # Processors.processors[Processors.get_len()-1] lookup three times;
    # hoist it into one local reference to the just-added entry.
    last = Processors.processors[Processors.get_len() - 1]
    self.tree.insert("", Processors.get_len() - 1,
                     values=(last.id, last.Name, last.Speed))
    master.destroy()
def slot_generate_save_features(self):
    """Generate features for the current dataset and persist them."""
    if self.processor == False:
        self.processor = processor.processor(self.config)
    # The processor reads from disk, so save the current state first.
    self.slot_save()
    self.processor.load_data(self.current_dataset.id)
    self.processor.generate_save_features()
def main():
    """Load the Tetris ROM and boot the emulated Game Boy."""
    # Fix: the ROM is raw binary, so open it in 'rb' -- text mode would
    # corrupt it on platforms that translate line endings. The 'with'
    # statement also closes the handle the original leaked, and the name
    # no longer shadows the builtin `file`.
    with open('Tetris.gb', 'rb') as rom_file:
        tetris = rom_file.read()
    gb_memory = processor.memory(0xFFFF, tetris)
    gb_screen = screen.screen(gb_memory)
    gb_processor = processor.processor(tetris, gb_screen, gb_memory)
    gb_screen.start()
    gb_processor.power_on()
def testing_city_finder():
    """Run the city-finder workflow over a sample transcript and compile its report."""
    sample = os.path.join(os.path.dirname(__file__), 'sample_transcripts',
                          'out-example-2021-02-01-hansard-plenary.txt')
    city_finder = processor()
    workflow = city_finder.build_workflow(sample)
    report = get(workflow, 'output')
    with open('./tests/test_report.json', 'r') as json_file:
        expected_report = json.load(json_file)
    compiled_report = report.compile()
    # NOTE(review): expected_report and compiled_report are built but never
    # compared here -- confirm the assertion happens elsewhere.
def slot_display_masks(self):
    """Toggle between the placement-mask view and the clutter-mask view."""
    if self.processor == False:
        self.processor = processor.processor(self.config)
    self.processor.load_data(self.current_dataset.id)
    # First click shows the placement mask; the next shows the clutter mask.
    show_placement = self.display_mode != 'labels'
    if show_placement:
        self.processor.process_masks(self.display_3d_type)
        self.display_mode = 'labels'
        mask_file = self.processor.save_masks_image(self.display_3d_type)
    else:
        self.processor.process_masks(self.display_3d_type, True)
        self.display_mode = 'image'
        mask_file = self.processor.save_masks_image(self.display_3d_type, True)
    # Load the picture saved above into the widget.
    self.draw_widget.set_image(mask_file)
def output(self, code, inp_range_start, inp_range_end, loopback_mode=False):
    """Try every phase permutation in [inp_range_start, inp_range_end] and
    return (best thruster output, permutation that produced it).

    loopback_mode re-feeds the chain's output back in until the processor
    returns None (feedback-loop mode). Returns (0, None) when no
    permutation produces a positive output.
    """
    amp_inputs = permutations(range(inp_range_start, inp_range_end + 1))
    thruster_out_max = 0
    # Fix: combo_out was unbound (NameError at return) when no permutation
    # beat an output of 0; default it explicitly.
    combo_out = None
    for combos in amp_inputs:
        amp_input = 0
        amp_out = 0  # fix: defined even if the permutation is empty
        for phase in combos:
            amp_out = processor.processor(code, amp_input, phase)
            amp_input = amp_out
        # `combos` guard avoids an unbound `phase` on an empty permutation.
        while loopback_mode and combos:
            amp_out_loop = processor.processor(code, amp_input, phase)
            if amp_out_loop is None:
                break
            amp_input = amp_out_loop
            amp_out = amp_out_loop
        if amp_out > thruster_out_max:
            thruster_out_max = amp_out
            combo_out = combos
    return thruster_out_max, combo_out
def slot_take_artag_image(self): if False == self.scanner: self.scanner = scanner.scanner(self.config) if False == self.processor: self.processor = processor.processor(self.config) img = self.scanner.take_artag_image() self.current_dataset.image_artag_filename = self.scanner.save_artag_image(self.current_dataset.id) self.slot_save() #save for consistency with files if self.processor.read_artag(img).any(): print "SUCCESS in reading ARTag" else: print "FAILURE in reading ARTag - try again!"
def slot_display_labels(self):
    """Toggle between the label image and the plain camera image."""
    if self.display_mode == 'labels':
        # Already showing labels -> revert to the normal image.
        self.draw_widget.set_image(self.scans_database.get_path() + '/' +
                                   self.current_dataset.image_filename)
        self.display_mode = 'image'
    else:
        if self.processor == False:
            self.processor = processor.processor(self.config)
        self.processor.load_data(self.current_dataset.id)
        self.processor.process_labels(self.display_3d_type)
        label_img = self.processor.save_labels_image(self.display_3d_type)
        self.draw_widget.set_image(label_img)
        self.display_mode = 'labels'
def slot_display_features(self):
    """Toggle between the feature (intensity) image and the camera image."""
    if self.display_mode == 'features':
        # Currently showing features -> revert to the normal image.
        self.display_mode = 'image'
        self.draw_widget.set_image(self.scans_database.get_path() + '/' +
                                   self.current_dataset.image_filename)
    else:
        if self.processor == False:
            self.processor = processor.processor(self.config)
        self.processor.load_data(self.current_dataset.id)
        self.processor.process_intensities()
        feature_img = self.processor.save_intensity_image(self.current_dataset.id)
        self.display_mode = 'features'
        self.draw_widget.set_image(feature_img)
def slot_take_artag_image(self): if False == self.scanner: self.scanner = scanner.scanner(self.config) if False == self.processor: self.processor = processor.processor(self.config) img = self.scanner.take_artag_image() self.current_dataset.image_artag_filename = self.scanner.save_artag_image( self.current_dataset.id) self.slot_save() #save for consistency with files if self.processor.read_artag(img).any(): print "SUCCESS in reading ARTag" else: print "FAILURE in reading ARTag - try again!"
def slot_display_features(self):
    """Show the feature image, or switch back to the camera image if already shown."""
    if self.display_mode != 'features':
        if self.processor == False:
            self.processor = processor.processor(self.config)
        self.processor.load_data(self.current_dataset.id)
        self.processor.process_intensities()
        img_path = self.processor.save_intensity_image(self.current_dataset.id)
        self.display_mode = 'features'
        self.draw_widget.set_image(img_path)
        return
    # Toggle back to the plain image.
    self.display_mode = 'image'
    self.draw_widget.set_image(self.scans_database.get_path() + '/' +
                               self.current_dataset.image_filename)
def slot_display_masks(self):
    """Alternate between the placement-mask view and the clutter-mask view."""
    if self.processor == False:
        self.processor = processor.processor(self.config)
    self.processor.load_data(self.current_dataset.id)
    if self.display_mode != 'labels':
        # Placement mask first.
        self.processor.process_masks(self.display_3d_type)
        self.display_mode = 'labels'
        out_file = self.processor.save_masks_image(self.display_3d_type)
    else:
        # Then the clutter mask (second argument True).
        self.processor.process_masks(self.display_3d_type, True)
        self.display_mode = 'image'
        out_file = self.processor.save_masks_image(self.display_3d_type, True)
    # Load the picture saved above into the widget.
    self.draw_widget.set_image(out_file)
def slot_display_3d(self, spheres=False): print 'Inside slot_display_3d' if False == self.processor: self.processor = processor.processor(self.config) #save data first so the processor can load it: print 'Before slot_save' self.slot_save() print 'Before load_data' self.processor.load_data(self.current_dataset.id) #self.processor.create_polygon_images() print 'Before process_raw_data' self.processor.process_raw_data() #pc.save_mapped_image(name) print 'Before display_3d' self.processor.display_3d(self.display_3d_type, spheres) print 'After display_3d'
def slot_display_3d(self, spheres = False): print 'Inside slot_display_3d' if False == self.processor: self.processor = processor.processor(self.config) #save data first so the processor can load it: print 'Before slot_save' self.slot_save() print 'Before load_data' self.processor.load_data(self.current_dataset.id) #self.processor.create_polygon_images() print 'Before process_raw_data' self.processor.process_raw_data() #pc.save_mapped_image(name) print 'Before display_3d' self.processor.display_3d(self.display_3d_type, spheres) print 'After display_3d'
def slot_display_intensity(self):
    """Switch the draw widget to the intensity image, or back to the camera image."""
    if self.display_mode == 'intensities':
        # Toggle back to the plain image.
        self.display_mode = 'image'
        self.draw_widget.set_image(self.scans_database.get_path() + '/' +
                                   self.current_dataset.image_filename)
        return
    if self.processor == False:
        self.processor = processor.processor(self.config)
    # Clear any cached ground-plane fit before recomputing intensities.
    self.current_dataset.ground_plane_normal = ''
    self.current_dataset.ground_plane_three_points = ''
    self.slot_save()
    self.processor.load_data(self.current_dataset.id)
    self.processor.process_intensities()
    img_file = self.processor.save_intensity_image(self.current_dataset.id)
    self.display_mode = 'intensities'
    self.draw_widget.set_image(img_file)
def slot_take_scan(self): #save database, let scanner add dataset, reload it then self.slot_save() if False == self.scanner: self.scanner = scanner.scanner(self.config) if False == self.processor: self.processor = processor.processor(self.config) name = ut.formatted_time() self.scanner.capture_and_save(name) #self.processor.load_raw_data(name) #self.processor.load_metadata(name) #self.processor.process_raw_data() #self.processor.save_mapped_image(name) #self.processor.display_all_data() print 'scan ' + name + ' taken' self.scans_database.load(self.path,'database.pkl') #proceed to new scan: while True == self.slot_next_dataset(): pass
def testPlanePointcloud():
    """Fit a ground plane to a scanned point cloud with RANSAC and display the inliers in 3D."""
    import processor
    import configuration
    cfg = configuration.configuration(TEST_FOLDER)
    #sc = scanner.scanner(cfg)
    pc = processor.processor(cfg)
    #pc.load_data('2009Oct30_162400')
    pc.load_data('2009Nov04_141226')
    pc.process_raw_data()
    debug = False
    model = PlaneLeastSquaresModel(debug)
    # Points arranged one per row for ransac (pts3d_bound is column-per-point).
    data = np.asarray(pc.pts3d_bound).T
    # run RANSAC algorithm
    # NOTE(review): positional args (3, 1000, 0.02, 300) presumably are
    # min-samples, iterations, inlier threshold, min-inliers -- confirm
    # against the ransac implementation.
    ransac_fit, ransac_data = ransac(data,model, 3, 1000, 0.02, 300, # misc. parameters
                                     debug=debug,return_all=True)
    print ransac_fit
    print ransac_data
    print 'len inlier',len(ransac_data['inliers']),'shape pts',np.shape(pc.pts3d_bound)
    # Keep only the RANSAC inlier columns of the bounded point cloud.
    pc.pts3d_bound = pc.pts3d_bound[:,ransac_data['inliers']]
    pc.display_3d('height')
def slot_take_scan(self): #save database, let scanner add dataset, reload it then self.slot_save() if False == self.scanner: self.scanner = scanner.scanner(self.config) if False == self.processor: self.processor = processor.processor(self.config) name = ut.formatted_time() self.scanner.capture_and_save(name) #self.processor.load_raw_data(name) #self.processor.load_metadata(name) #self.processor.process_raw_data() #self.processor.save_mapped_image(name) #self.processor.display_all_data() print 'scan ' + name + ' taken' self.scans_database.load(self.path, 'database.pkl') #proceed to new scan: while True == self.slot_next_dataset(): pass
valid=testex.runExtract() if valid=="ok": testex.getData() paths.extend(testex.getPaths()) else: pass soft=test.getSoftware() raw=test.getRaw() respack=[filename,users,paths,soft,raw] else: pass #An error in the parsing process else: pass all.append(respack) proc=processor.processor(all) userlist=proc.sort_users() softlist=proc.sort_software() pathlist=proc.sort_paths() try: save = writehtml(userlist,softlist,pathlist,all,outhtml,dir) except: print "Error creating the file" print "\n[+] List of users found:" print "--------------------" for x in userlist: print x print "\n[+] List of software found:" print "-----------------------" for x in softlist: print x
from processor import processor

# Run the processor when executed as a script (no-op on import).
if __name__ == '__main__':
    processor()
def slot_load_Classifiers(self):
    """Load previously saved classifiers, creating the processor on demand."""
    if self.processor == False:
        self.processor = processor.processor(self.config)
    self.processor.load_Classifiers()
#!/usr/bin/env python
# Train the network on a labelled corpus:
#   argv[1] = saved network state file, argv[2] = training input.
import processor as proc
import net as net
import sys

data_source = proc.processor("tims", "tlbl")
network = net.net(data_source)
network.load_state(sys.argv[1])
network.train(sys.argv[2])
network.save_state(sys.argv[1])
argparser.add_argument("--start_epoch", type=int, default=80, help='epoch') argparser.add_argument("--conv", type=int, default=1, help='') argparser.add_argument("--model", type=int, default=1, help='') argparser.add_argument("--clayer", type=int, default=1, help='') argparser.add_argument("--rank", type=int, default=10, help='') argparser.add_argument("--head", type=int, default=4, help='') args = argparser.parse_args() print args assert os.path.isfile(args.train) assert os.path.isfile(args.test) assert args.save and args.pre_emb train_processor = processor(args.train) test_processor = processor(args.test) print "Load data..." train_cors = train_processor.loadSrc() test_cors = test_processor.loadSrc() print 'Constructing word and character list...' word_lis = words_load(train_cors, test_cors) char_lis = chars_load(word_lis) char_lis.append('<unk>') rel_lis = rels_load(train_cors, test_cors) rel_lis.append('<unk>') print 'Find ' + str(len(word_lis)) + ' unique words!' print 'Find ' + str(len(char_lis)) + ' unique chars!' print 'Find ' + str(len(rel_lis)) + ' unique dep relations!'
print "[x] Error in the parsing process" # A error in the parsing process if filetype == "docx" or filetype == "pdf": res = test.getTexts() if res == "ok": email = test.getEmails() for x in email: emails.append(x) else: failedfiles(filename + ":" + str(res)) else: print "pass" else: pass print "processing" proc = processor.processor(all) userlist = proc.sort_users() softlist = proc.sort_software() pathlist = proc.sort_paths() try: html = htmlExport.htmlExport(userlist, softlist, pathlist, all, outhtml, dir, failedfiles, word, emails) save = html.writehtml() except Exception, e: print e print "Error creating the file" print "\n[+] List of users found:" print "--------------------------" for x in userlist: print x
#!/usr/bin/python import sys import math from processor import processor from cache import Cache file1 = open('log_p1', 'w') file2 = open('log_p2', 'w') #Processors 1 and 2 P1 = processor() P2 = processor() #Cache L1 to P1, L2 to P2 and L3 shared b_size = 16 L_size = 8*1024 L3_size = 64*1024 L1 = Cache(8) L2 = Cache(8) L3 = Cache(64) index = int(math.log(float(L_size/b_size),2)) indexL3 = int(math.log(float(L3_size/b_size),2)) mask = 0 maskL3 = 0 for i in range(index): mask = 1 + mask*2 for x in range(indexL3):
def show_image(window_name, img, wait=False): hg.cvStartWindowThread() RESIZABLE = 0 hg.cvNamedWindow(window_name, RESIZABLE) hg.cvShowImage(window_name, img) if wait: print 'show_image: press any key to continue..' cv.highgui.cvWaitKey() #------------------------------------ ###CHANGE THIS TO THE DIRECTORY WITH RESULTS FROM CODY: ### change 'codyRobot' to 'dummyScanner' or code tries to find camera drivers, etc. cfg = configuration.configuration(DATA_LOCATION, ROBOT) #'dummyScanner' pc = processor.processor(cfg) pc.features_k_nearest_neighbors = None # Name of dataset will allow loading of saved image / scan. pc.scan_dataset = create_default_scan_dataset( DATASET_ID, z_above_floor)#1.32 ''' PC.LOAD_RAW_DATA() Browses for content with the following names: data/UNIQUE_ID_image.png data/UNIQUE_ID_laserscans.pkl self.image_angle is set or zeroed Sets in pc as: self.img, self.laserscans, sets (or zeros) self.image_angle
def slot_display_global_stats(self):
    """Display statistics across the whole database (global=True)."""
    if self.processor == False:
        self.processor = processor.processor(self.config)
    self.processor.load_data(self.current_dataset.id)
    self.processor.display_stats(True)
def __init__(self, start_items):
    """Build a scheduler seeded with start_items and wire up its pipeline stages."""
    super(scheduler, self).__init__(start_items)
    # One instance of each pipeline stage: fetch -> process -> store.
    self.fetcher = fetcher()
    self.processor = processor()
    self.storer = storer()
#!/usr/bin/python import sys import math from processor import processor from cache import Cache file1 = open('log_p1', 'w') file2 = open('log_p2', 'w') #Processors 1 and 2 P1 = processor() P2 = processor() #Cache L1 to P1, L2 to P2 and L3 shared b_size = 16 L_size = 8 * 1024 L3_size = 64 * 1024 L1 = Cache(8) L2 = Cache(8) L3 = Cache(64) index = int(math.log(float(L_size / b_size), 2)) indexL3 = int(math.log(float(L3_size / b_size), 2)) mask = 0 maskL3 = 0 for i in range(index): mask = 1 + mask * 2 for x in range(indexL3):
def loopback(self, code, amp_input, phase=None):
    """Run one feedback-loop step of the amplifier chain.

    Fixes two defects in the original:
    - it referenced an undefined name `phase` (guaranteed NameError), and
    - it passed arguments as (phase, amp_input, code), the reverse of the
      (code, amp_input, phase) order used by the sibling `output` method.
    `phase` is now an explicit, backward-compatible keyword parameter.
    """
    return processor.processor(code, amp_input, phase)
def doprocess(argv): filelimit = 50 word = "local" localanalysis = "no" failedfiles = [] emails = [] if len(sys.argv) < 3: usage() try: opts, args = getopt.getopt(argv, "l:d:f:h:n:t:o:") except getopt.GetoptError: usage() for opt, arg in opts: if opt == '-d': word = arg elif opt == '-t': filetypes = [] if arg.count(",") != 0: filetypes = arg.split(",") else: filetypes.append(arg) print filetypes elif opt == '-l': limit = int(arg) elif opt == '-h': localanalysis = arg elif opt == '-n': filelimit = int(arg) elif opt == '-o': dir = arg elif opt == '-f': outhtml = arg if os.path.exists(dir): pass else: os.mkdir(dir) if localanalysis == "no": print "\n[-] Starting online search..." for filetype in filetypes: print "\n[-] Searching for " + filetype + " files, with a limit of " + str( limit) search = googlesearch.search_google(word, limit, start, filetype) search.process_files() files = search.get_files() print "Results: " + str(len(files)) + " files found" print "Starting to download " + str(filelimit) + " of them:" print "----------------------------------------\n" counter = 1 for x in files: if counter <= filelimit: print "[" + str(counter) + "/" + str(filelimit) + "] " + x getfile = downloader.downloader(x, dir) getfile.down() filename = getfile.name() if filename != "": if filetype == "pdf": test = metadataPDF.metapdf(dir + "/" + filename, password) elif filetype == "doc" or filetype == "ppt" or filetype == "xls": test = metadataMSOffice.metaMs2k(dir + "/" + filename) if os.name == "posix": testex = metadataExtractor.metaExtractor( dir + "/" + filename) elif filetype == "docx" or filetype == "pptx" or filetype == "xlsx": test = metadataMSOfficeXML.metaInfoMS(dir + "/" + filename) res = test.getData() if res == "ok": raw = test.getRaw() users = test.getUsers() paths = test.getPaths() soft = test.getSoftware() email = [] if filetype == "pdf" or filetype == "docx": res = test.getTexts() if res == "ok": email = test.getEmails() for em in email: emails.append(em) else: 
email = [] failedfiles.append(x + ":" + str(res)) respack = [x, users, paths, soft, raw, email] all.append(respack) else: failedfiles.append(x + ":" + str(res)) print "\t [x] Error in the parsing process" #A error in the parsing process else: pass counter += 1 else: print "[-] Starting local analysis in directory " + dir dirList = os.listdir(dir) print dirList for filename in dirList: if filename != "": filetype = str(filename.split(".")[-1]) if filetype == "pdf": test = metadataPDF.metapdf(dir + "/" + filename, password) elif filetype == "doc" or filetype == "ppt" or filetype == "xls": print "doc" test = metadataMSOffice.metaMs2k(dir + "/" + filename) if os.name == "posix": testex = metadataExtractor.metaExtractor(dir + "/" + filename) elif filetype == "docx" or filetype == "pptx" or filetype == "xlsx": test = metadataMSOfficeXML.metaInfoMS(dir + "/" + filename) res = test.getData() if res == "ok": raw = test.getRaw() users = test.getUsers() paths = test.getPaths() soft = test.getSoftware() if (filetype == "doc" or filetype == "xls" or filetype == "ppt") and os.name == "posix": testex.runExtract() testex.getData() paths.extend(testex.getPaths()) respack = [filename, users, paths, soft, raw, email] all.append(respack) else: failedfiles.append(filename + ":" + str(res)) print "[x] Error in the parsing process" # A error in the parsing process if filetype == "docx" or filetype == "pdf": res = test.getTexts() if res == "ok": email = test.getEmails() for x in email: emails.append(x) else: failedfiles(filename + ":" + str(res)) else: print "pass" else: pass print "processing" proc = processor.processor(all) userlist = proc.sort_users() softlist = proc.sort_software() pathlist = proc.sort_paths() try: html = htmlExport.htmlExport(userlist, softlist, pathlist, all, outhtml, dir, failedfiles, word, emails) save = html.writehtml() except Exception, e: print e print "Error creating the file"
def main(args): INTERACTIVE = False if '-i' in args[0]: args = args[1:] INTERACTIVE = True elif '-q' in args[0]: args = args[1:] # ============================================================================================ # INIT STUFF # ============================================================================================ pid_0 = 'p0' pid_1 = 'p1' pid_2 = 'p2' pid_3 = 'p3' # get the traces for all processors p0_trace = parse(str(args[0]), pid_0) p1_trace = parse(str(args[1]), pid_1) p2_trace = parse(str(args[2]), pid_2) p3_trace = parse(str(args[3]), pid_3) # init all the processors with pids ps = {} ps[pid_0] = processor(pid_0) ps[pid_1] = processor(pid_1) ps[pid_2] = processor(pid_2) ps[pid_3] = processor(pid_3) # sort the traces by timestamps then by pid all_traces = p0_trace + p1_trace + p2_trace + p3_trace all_traces.sort(key=lambda t: (t[1], t[0])) # each cycle has the form (pid, timestamp, read/write(1/0), tag (int), index(int), offset(int)) # ============================================================================================ # BUS STUFF # ============================================================================================ for cycle in all_traces: # for debugging if INTERACTIVE: input('Enter for next cycle') ## STEP ONE: GET PROCESSOR STATES AND CHANGE EXEC PROCESSOR # make it easier to read c_pid, _, io, c_tag, c_index, c_offset = cycle # get the states of all the processors shared = False get_from_pid = None get_from_state = None states = get_states(ps, c_index, c_tag) for pid in states: state = states[pid] if state != 'i' and pid != c_pid: get_from_pid, get_from_state = high_priority( get_from_pid, get_from_state, pid, state) shared = True # have the processor say what action it needs done bus_action = ps[c_pid].execute(io, c_tag, c_index, c_offset) ## STEP 2: if shared someone has the data and reading ## always update our current state first ps[c_pid].change_state_rw(io, c_index, shared, c_tag) if bus_action is None: continue if 
shared: c2c_transfers[get_from_pid][c_pid] += 1 # update all of the other processors for pid in ps: if pid != c_pid: ps[pid].change_state_bus(bus_action, c_index, c_tag) # ============================================================================================ # END BUS STUFF # ============================================================================================ # PRINT STATS print('Cache to Cache transfers') for pid in ps: print('{} '.format(pid), end='') for p2 in c2c_transfers[pid]: print('\t<{}-{}>: {}'.format(pid, p2, c2c_transfers[pid][p2]), end='') print('') print('\nInvalidation count') for pid in ps: print('{} \tm: {} \to: {} \te: {} \ts: {}'.format( pid, ps[pid].invalids[0], ps[pid].invalids[1], ps[pid].invalids[2], ps[pid].invalids[3])) print('\nDirty writeback count') for pid in ps: print('{} \t{}'.format(pid, ps[pid].dirty_wbs)) print('\nProcessor states count') for pid in ps: states = ps[pid].count_states() print('{} \tm: {} \to: {} \te: {} \ts: {} \ti: {}'.format( pid, states[0], states[1], states[2], states[3], states[4]))
def doprocess(argv): filelimit = 50 word = "local" localanalysis = "no" failedfiles = [] emails = [] if len(sys.argv) < 3: usage() try: opts, args = getopt.getopt(argv, "l:d:f:h:n:t:o:") except getopt.GetoptError: usage() for opt, arg in opts: if opt == '-d': word = arg elif opt == '-t': filetypes = [] if arg.count(",") != 0: filetypes = arg.split(",") else: filetypes.append(arg) print filetypes elif opt == '-l': limit = int(arg) elif opt == '-h': localanalysis = arg elif opt == '-n': filelimit = int(arg) elif opt == '-o': dir = arg elif opt == '-f': outhtml = arg if os.path.exists(dir): pass else: os.mkdir(dir) if localanalysis == "no": print "\n[-] Starting online search..." for filetype in filetypes: print "\n[-] Searching for "+ filetype + " files, with a limit of " + str(limit) search = googlesearch.search_google(word, limit, start, filetype) search.process_files() files = search.get_files() print "Results: " + str(len(files)) + " files found" print "Starting to download " + str(filelimit) + " of them:" print "----------------------------------------\n" counter = 1 for x in files: if counter <= filelimit: print "[" + str(counter) + "/" + str(filelimit) + "] " + x getfile = downloader.downloader(x, dir) getfile.down() filename = getfile.name() if filename != "": if filetype == "pdf": test = metadataPDF.metapdf(dir + "/" + filename, password) elif filetype == "doc" or filetype == "ppt" or filetype == "xls": test = metadataMSOffice.metaMs2k(dir + "/" + filename) if os.name == "posix": testex = metadataExtractor.metaExtractor(dir + "/" + filename) elif filetype == "docx" or filetype == "pptx" or filetype == "xlsx": test = metadataMSOfficeXML.metaInfoMS(dir + "/" + filename) res = test.getData() if res == "ok": raw = test.getRaw() users = test.getUsers() paths = test.getPaths() soft = test.getSoftware() email = [] if filetype == "pdf" or filetype == "docx": res = test.getTexts() if res == "ok": email = test.getEmails() for em in email: emails.append(em) else: email 
= [] failedfiles.append(x + ":" + str(res)) respack=[x, users, paths, soft, raw, email] all.append(respack) else: failedfiles.append(x + ":" + str(res)) print "\t [x] Error in the parsing process" #A error in the parsing process else: pass counter += 1 else: print "[-] Starting local analysis in directory " + dir dirList = os.listdir(dir) print dirList for filename in dirList: if filename != "": filetype = str(filename.split(".")[-1]) if filetype == "pdf": test = metadataPDF.metapdf(dir + "/" + filename, password) elif filetype == "doc" or filetype == "ppt" or filetype == "xls": print "doc" test = metadataMSOffice.metaMs2k(dir + "/" + filename) if os.name == "posix": testex = metadataExtractor.metaExtractor(dir + "/" + filename) elif filetype == "docx" or filetype == "pptx" or filetype == "xlsx": test = metadataMSOfficeXML.metaInfoMS(dir + "/" + filename) res = test.getData() if res == "ok": raw = test.getRaw() users = test.getUsers() paths = test.getPaths() soft = test.getSoftware() if (filetype == "doc" or filetype == "xls" or filetype == "ppt") and os.name=="posix": testex.runExtract() testex.getData() paths.extend(testex.getPaths()) respack = [filename, users, paths, soft, raw, email] all.append(respack) else: failedfiles.append(filename + ":" + str(res)) print "[x] Error in the parsing process" # A error in the parsing process if filetype == "docx" or filetype == "pdf": res = test.getTexts() if res == "ok": email = test.getEmails() for x in email: emails.append(x) else: failedfiles(filename + ":" + str(res)) else: print "pass" else: pass print "processing" proc = processor.processor(all) userlist = proc.sort_users() softlist = proc.sort_software() pathlist = proc.sort_paths() try: html = htmlExport.htmlExport(userlist, softlist, pathlist, all, outhtml, dir, failedfiles, word, emails) save = html.writehtml() except Exception, e: print e print "Error creating the file"
from processor import processor from cursorColorama import * import random with open("day15.txt") as file: code = file.read() code = code.split(",") for i, val in enumerate(code): code[i] = int(val) prc = processor(code) prc.pad(1000) print(prc) graphics = ["D", "░", "█", "O"] " 0 1 2 3 " "droid, empty, wall, oxygen" grid = [] size = [40, 42] loc = [int(size[1] / 2), int(size[0] / 2)] starting_loc = loc.copy() temp = [" "] * size[1] for i in range(size[0]): grid.append(temp.copy()) def draw(loc, char): #print("drawing",char,"at",loc) x = loc[0] y = loc[1]
import os
import time
from slackclient import SlackClient
from processor import processor

# starterbot's ID as an environment variable
BOT_ID = os.environ.get("BOT_ID")

# constants
AT_BOT = "<@" + BOT_ID + ">"
EXAMPLE_COMMAND = "do"
slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN'))
# NOTE(review): this rebinds the module-level name `processor` from the
# imported class to an instance, shadowing the import above.
processor = processor()


def handle_command(command, channel):
    """
        Receives commands directed at the bot and determines if they
        are valid commands. If so, then acts on the commands. If not,
        returns back what it needs for clarification.
    """
    response = "Not sure what you mean. Use the *" + EXAMPLE_COMMAND + \
               "* command with numbers, delimited by spaces."
    if command.startswith(EXAMPLE_COMMAND):
        response = "Sure...write some more code then I can do that!"
    # NOTE(review): `response` is computed above but never sent -- the posted
    # text comes from processCommand(command), which is not defined in this
    # chunk. Confirm which of the two is intended.
    slack_client.api_call("chat.postMessage", channel=channel,
                          text=processCommand(command), as_user=True)