def main():
    """Start NewsGrabber: logger, IRC bot, uploader and service-runner daemons."""
    settings.init()
    # The logger thread starts first so every later step can log.
    settings.logger = log.Log(settings.log_file_name)
    settings.logger.daemon = True
    settings.logger.start()
    settings.logger.log('Starting NewsGrabber')
    # Make sure every working directory exists before workers touch them.
    tools.create_dir(settings.dir_new_urllists)
    tools.create_dir(settings.dir_old_urllists)
    tools.create_dir(settings.dir_donefiles)
    tools.create_dir(settings.dir_ready)
    tools.create_dir(settings.dir_last_upload)
    # Missing rsync target files are logged as errors but do not abort startup.
    if not os.path.isfile('rsync_targets'):
        settings.logger.log("Please add one or more rsync targets to file 'rsync_targets'", 'ERROR')
    if not os.path.isfile('rsync_targets_discovery'):
        settings.logger.log("Please add one or more discovery rsync targets to file 'rsync_targets_discovery'", 'ERROR')
    settings.irc_bot = irc.IRC()
    settings.irc_bot.daemon = True
    settings.irc_bot.start()
    settings.upload = upload.Upload()
    settings.upload.daemon = True
    settings.upload.start()
    settings.run_services = service.RunServices()
    settings.run_services.daemon = True
    settings.run_services.start()
    # Keep the main thread alive; daemon threads die with it once
    # settings.running is flipped off elsewhere.
    while settings.running:
        time.sleep(1)
def start():
    """Initialise the KVObjectsManager singletons and announce this host.

    Raises RuntimeError if called more than once per process.
    """
    with KVObjectsManager.__lock:
        # this lock is not really needed here, since the start() method
        # should only be called one time per process.
        if KVObjectsManager._initialized:
            raise RuntimeError("KVObjectsManager already running")
        KVObjectsManager._initialized = True
        settings.init()
        # Wire up the messaging helpers around the manager singleton.
        KVObjectsManager._publisher = Publisher(KVObjectsManager)
        KVObjectsManager._subscriber = Subscriber(KVObjectsManager)
        KVObjectsManager._sender = ObjectSender(KVObjectsManager)
        KVObjectsManager._event_processor = EventProcessor()
        KVObjectsManager._ttl_processor = TTLProcessor()
        # Publish an "origin" object carrying this machine's hostname so
        # peers learn about us.
        origin_obj = KVObject(collection="origin")
        import socket
        origin_obj.hostname = socket.gethostname()
        origin_obj.notify()
def __init__(self):
    """Initialise empty state, load settings, then configs and areas."""
    # Paths are empty until configuration is read — presumably filled in by
    # __initialize_configs; confirm.
    self.database_dir = ''
    self.storage_dir = ''
    self.shelves = []
    settings.init()
    self.__initialize_configs()
    self.__initialize_areas()
def main():
    '''Start web application'''
    settings.init()
    application = App()
    application.listen(options.port)  # pylint: disable=E1101
    # Blocks until the IOLoop is stopped.
    tornado.ioloop.IOLoop.instance().start()
def update():
    """
    A good way to check all things - load and sign again.

    Reads the local identity XML from disk, re-signs it, saves it back,
    and prints the serialized result.
    """
    dhnio.init()
    settings.init()
    src = dhnio.ReadTextFile(settings.LocalIdentityFilename())
    misc.setLocalIdentity(identity(xmlsrc=src))
    misc.getLocalIdentity().sign()
    misc.saveLocalIdentity()
    # Parenthesized print is valid under both Python 2 and Python 3
    # (the original bare print statement was Python-2-only).
    print(misc.getLocalIdentity().serialize())
def main():
    """Accept clients forever, spawning one handler thread per connection.

    Also starts background threads that poll the Arduino sensors over
    serial and the cameras.
    """
    print_lock = Lock()
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    Server.setupSocket(print_lock, server_addr, server_socket)
    rasp_serial = serial.Serial(
        port='/dev/ttyACM0',  # Linux
        # port='COM8',  # Windows
        baudrate=9600,
    )
    settings.init()
    threads = []
    update_sensors_thread = Thread(
        target=Server.update_sensors, args=(print_lock, rasp_serial))
    threads.append(update_sensors_thread)
    update_sensors_thread.start()
    update_cameras_thread = Thread(target=Server.update_camera)
    threads.append(update_cameras_thread)
    update_cameras_thread.start()
    # NOTE: should_continue is never set to False anywhere, so the join /
    # shutdown code below is currently unreachable; kept for a future
    # graceful-shutdown mechanism.
    should_continue = True
    while should_continue:
        with print_lock:
            print("Waiting for new client...")
        client, addr = server_socket.accept()
        thread = Thread(target=Server.run, args=(print_lock, client))
        threads.append(thread)
        thread.start()
    with print_lock:
        print("Joining threads.")
    for thread in threads:
        thread.join()
    with print_lock:
        print("All threads closed.")
def main(argv):
    """Parse CLI options and run the genetic algorithm per crossover method.

    Options:
      -h/--help               print usage and exit
      -f/--file FILE          input file handed to settings.init()
      -i/--max-iterations N   evolution iteration cap (default 1000)
      -p/--population N       population size (default 500)
      -t/--type a,b,...       comma-separated crossover method names
      -s/--stop               stop early once best fitness <= 0.001
    """
    filename = ''
    crossover_methods = []
    max_iterations = 1000
    pop_size = 500
    stop = False
    try:
        # BUGFIX: long options that take a value need a trailing '=' or
        # getopt never captures the argument (int('') would then crash);
        # the '--stop' long option was also misspelled as 's'.
        opts, args = getopt.getopt(argv, 'hf:i:p:t:s', [
            'help', 'file=', 'max-iterations=', 'population=', 'type=', 'stop'
        ])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ('-f', '--file'):
            filename = arg
            settings.init(filename)
        elif opt in ('-i', '--max-iterations'):
            max_iterations = int(arg)
        elif opt in ('-p', '--population'):
            pop_size = int(arg)
        elif opt in ('-t', '--type'):
            crossover_methods = arg.split(',')
        elif opt in ('-s', '--stop'):
            stop = True
    start_time = time.time()
    for method in crossover_methods:
        P = Population(pop_size)
        for i in range(max_iterations):
            try:
                P.evolve(method)
            except KeyError:
                # Unknown crossover method name.
                usage()
                sys.exit()
            update_progress(i * 100 / max_iterations, P)
            if stop and P.get_best_fitness() <= 0.001:
                break
        update_progress(100, P)
    print('')
    print('processing time: {}'.format(time.time() - start_time))
    print('')
    # NOTE: raises NameError if no -t/--type methods were given (P unbound);
    # preserved from the original control flow.
    P.result()
def setUp(self):
    """Create autospec mocks for the ticket/identity services and seed settings."""
    self.tickets = mock.create_autospec(GestioTiquets)
    self.tickets.consulta_tiquet_dades.return_value = {
        "solicitant": "usuari.real",
        "emailSolicitant": "*****@*****.**"
    }
    self.identitat = mock.create_autospec(GestioIdentitat)
    # UID resolution fails: simulates an unknown / external user.
    self.identitat.obtenir_uid.return_value = None
    settings.init()
    settings.set("regex_reply", "(.*)")  # a regex that always matches something
    settings.set("regex_privat", "X")  # a regex that never matches anything
    settings.set("usuari_extern", "usuari.extern")
def main():
    """Start the grabber: logger, working dirs, IRC bot, uploader, grabber."""
    settings.init()
    settings.logger = log.Log(settings.log_file_name)
    settings.logger.daemon = True
    settings.logger.start()
    settings.logger.log(
        'Starting grabber {name}'.format(name=settings.irc_nick))
    tools.create_dir(settings.dir_ready)
    tools.create_dir(settings.dir_new_lists)
    tools.create_dir(settings.dir_old_lists)
    if not os.path.isfile(settings.target_main):
        # BUGFIX: the original used format(**locals()), but 'settings' is a
        # module-level name, not a local, so the raise itself crashed with
        # KeyError('settings') instead of showing this message.
        raise Exception(
            "Please add a rsync target to file '{name}'.".format(
                name=settings.target_main))
    settings.irc_bot = irc.IRC()
    settings.irc_bot.daemon = True
    settings.irc_bot.start()
    # Give the IRC bot time to connect before uploader/grabber start.
    time.sleep(30)
    settings.upload = upload.Upload()
    settings.upload.daemon = True
    settings.upload.start()
    settings.grab = grab.Grab()
    settings.grab.daemon = True
    settings.grab.start()
    while settings.running:
        # NOTE: thread-liveness checks (logger/irc/upload/grab isAlive with
        # IRC notifications) used to live here but are disabled.
        time.sleep(1)
def run(width=300, height=300):
    """Standard tkinter animation framework: set up canvas, events and timer.

    Delegates to module-level redrawAll / mousePressed / keyPressed /
    timerFired handlers; blocks in mainloop until the window is closed.
    """
    def redrawAllWrapper(canvas, data):
        canvas.delete('ALL')
        # White background rectangle cleared before every redraw.
        canvas.create_rectangle(0, 0, data.width, data.height,
                                fill='white', width=0)
        redrawAll(canvas, data)
        canvas.update()

    def mousePressedWrapper(event, canvas, data):
        mousePressed(event, data)
        redrawAllWrapper(canvas, data)

    def keyPressedWrapper(event, canvas, data):
        keyPressed(event, data)
        redrawAllWrapper(canvas, data)

    def timerFiredWrapper(canvas, data):
        timerFired(data)
        redrawAllWrapper(canvas, data)
        # pause, then call timerFired again
        canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)

    # Set up data and call init
    class Struct(object):
        pass
    data = Struct()
    data.width = width
    data.height = height
    data.timerDelay = 100  # milliseconds
    settings.init(data)
    # create the root and the canvas
    root = tk.Tk()
    canvas = tk.Canvas(root, width=data.width, height=data.height)
    canvas.pack()
    # set up events
    root.bind("<Button-1>",
              lambda event: mousePressedWrapper(event, canvas, data))
    root.bind("<Key>", lambda event: keyPressedWrapper(event, canvas, data))
    timerFiredWrapper(canvas, data)
    # and launch the app
    root.mainloop()  # blocks until window is closed
    print("bye!")
def keyPressed(event):
    """Key handler: 'r' resets, 'p' toggles pause, Return restarts, space
    swaps the user ball color with the saved one. Redraws afterwards."""
    if event.char == 'r':
        settings.init(canvas)
    # Read the (possibly freshly reset) game state once.
    d = canvas.data
    game_running = (d.start == False and d.gameOver == False and
                    d.winner == False)
    if event.char == 'p' and game_running:
        d.isPaused = not d.isPaused
    if event.keysym == 'Return':
        d.start = False
    if event.keysym == "space" and game_running and d.isPaused == False:
        d.userBallColor, d.savedUserBallColor = (d.savedUserBallColor,
                                                 d.userBallColor)
    redrawAll()
def createDefaultConfigFile():
    """Write the default settings to the config file and verify the YAML
    round-trips intact; on success, publish it as settings.loaded."""
    settings.init()
    defaults = settings.default
    with io.open(config_file_path, "w", encoding='utf8') as fh:
        yaml.dump(defaults, fh, default_flow_style=False, allow_unicode=True)
    with io.open(config_file_path, "r") as fh:
        reread = yaml.load(fh, Loader=yaml.FullLoader)
    if defaults != reread:
        print("Warning: there was an error while creating config file")
        return False
    print("File created successfully, adding to settings")
    settings.loaded = reread
    return defaults
def main(argv):
    """Parse options and generate comparison figures for crypto algorithms.

    Usage:
      algs_comparator.py [-w weight] [-c] [-m] [-k] <path_to_data> <algorithm_list>
    """
    try:
        opts, args = getopt.getopt(argv, 'hw:cmk',
                                   ['help', 'weight=', 'cipher', 'md', 'ke'])
    except getopt.GetoptError:
        # BUGFIX: message typo — read "does not exit".
        print('One of the options does not exist.\nUse: "comparator.py -h" for help')
        sys.exit(2)
    if not args and not opts:
        # BUGFIX: message typo — read "where given".
        print('No inputs were given')
        sys.exit(2)
    if len(args) > 2:
        print('Too many arguments')
        sys.exit(2)
    weight = 2   # default filter weight
    algs = []    # algorithm families to include in the figures
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            print('algs_comparator.py [-w <filter_weight>] [-c] [-m] [-k] <path_to_data> <algorithm_list>')
            print('algs_comparator.py [--weight=<filter_weight>] [--cipher] [--md] [--ke] <path_to_data> <algorithm_list>')
            sys.exit(0)
        elif opt in ('-w', '--weight'):
            weight = float(arg)
        elif opt in ('-c', '--cipher'):
            algs.append('cipher')
        elif opt in ('-m', '--md'):
            algs.append('md')
        elif opt in ('-k', '--ke'):
            algs.append('ke')
        else:
            print(f'Option "{opt}" does not exist')
            sys.exit(2)
    os.system('clear')
    settings.init()
    # Each sub-directory of the data directory is one cipher suite.
    suites = [f.name for f in os.scandir('../docs/' + args[0]) if f.is_dir()]
    make_figs(args[0], args[1], suites, weight=weight, alg_set=algs)
def main(argv):
    """Parse CLI options and run the genetic algorithm per crossover method.

    Options:
      -h/--help               print usage and exit
      -f/--file FILE          input file handed to settings.init()
      -i/--max-iterations N   evolution iteration cap (default 1000)
      -p/--population N       population size (default 500)
      -t/--type a,b,...       comma-separated crossover method names
      -s/--stop               stop early once best fitness <= 0.001
    """
    filename = ''
    crossover_methods = []
    max_iterations = 1000
    pop_size = 500
    stop = False
    try:
        # BUGFIX: long options that take a value need a trailing '=' or
        # getopt never captures the argument (int('') would then crash);
        # the '--stop' long option was also misspelled as 's'.
        opts, args = getopt.getopt(
            argv, 'hf:i:p:t:s',
            ['help', 'file=', 'max-iterations=', 'population=', 'type=', 'stop'])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ('-f', '--file'):
            filename = arg
            settings.init(filename)
        elif opt in ('-i', '--max-iterations'):
            max_iterations = int(arg)
        elif opt in ('-p', '--population'):
            pop_size = int(arg)
        elif opt in ('-t', '--type'):
            crossover_methods = arg.split(',')
        elif opt in ('-s', '--stop'):
            stop = True
    start_time = time.time()
    for method in crossover_methods:
        P = Population(pop_size)
        for i in range(max_iterations):
            try:
                P.evolve(method)
            except KeyError:
                # Unknown crossover method name.
                usage()
                sys.exit()
            update_progress(i * 100 / max_iterations, P)
            if stop and P.get_best_fitness() <= 0.001:
                break
        update_progress(100, P)
    print('')
    print('processing time: {}'.format(time.time() - start_time))
    print('')
    # NOTE: raises NameError if no -t/--type methods were given (P unbound);
    # preserved from the original control flow.
    P.result()
def main():
    """Start the grabber: logger, working dirs, IRC bot, uploader, grabber."""
    settings.init()
    settings.logger = log.Log(settings.log_file_name)
    settings.logger.daemon = True
    settings.logger.start()
    settings.logger.log('Starting grabber {name}'.format(
        name=settings.irc_nick))
    tools.create_dir(settings.dir_ready)
    tools.create_dir(settings.dir_new_lists)
    tools.create_dir(settings.dir_old_lists)
    # Refuse to start without an rsync target configured.
    if not os.path.isfile(settings.target_main):
        raise Exception("Please add a rsync target to file '{name}'.".format(
            name=settings.target_main))
    settings.irc_bot = irc.IRC()
    settings.irc_bot.daemon = True
    settings.irc_bot.start()
    settings.upload = upload.Upload()
    settings.upload.daemon = True
    settings.upload.start()
    settings.grab = grab.Grab()
    settings.grab.daemon = True
    settings.grab.start()
    while settings.running:
        # NOTE: thread-liveness checks (logger/irc/upload/grab isAlive with
        # IRC notifications) used to live here but are disabled.
        time.sleep(1)
def main():
    """Run the Quotebot Slack RTM loop until the process is killed."""
    settings.init()
    parse_config()
    slack_client = SlackClient(settings.SECRETS['slack_bot_token'])
    # Metrics endpoint on :8000 — presumably prometheus_client; confirm.
    start_http_server(8000)
    if slack_client.rtm_connect():
        print("Quotebot is connected and running.")
        data.prefetch_quote_ids()  # populate the list of quote ids from the DB
        # infinite loop to continuously consume slack data from rtm api
        while True:
            command, channel = parse_slack_input(slack_client.rtm_read())
            if command and channel:
                response = handle_command(command)
                write_response(response, channel, slack_client)
            time.sleep(READ_WEBSOCKET_DELAY)
    else:
        print("Connection failed. Invalid Slack token or bot ID?")
def main():
    """Walk each sentence of every corpus, translate tree tags and emit
    one raw file and one tree file per sentence, then the list files."""
    settings.init()
    iniciaOcorrencias()
    for corpus in raiz.findall('base:corpus', ns):
        for sentenca in corpus.findall('base:sentence', ns):
            # '/' is flattened since the id becomes part of a filename.
            parse_id = sentenca.find('base:id', ns).text.replace('/', '-')
            raw = sentenca.find('base:raw', ns)
            tree = sentenca.find('base:tree', ns)
            tree_text = tree.text
            # Tally tag occurrences before translation.
            verificaOcorrencias(occList, tree_text)
            tree_text = translator.traduzirTags(tree_text)
            createTreeFile('raw', parse_id, raw.text)
            createTreeFile('tree', parse_id, tree_text)
    createListFiles()
def main():
    """Show the settings popup, then run the acoustic-stand GUI with a
    matplotlib animation refreshing at settings.REFRESH_TIME seconds."""
    initial_popup = gui.Popup_settings()
    initial_popup.mainloop()  # blocks until the popup window is closed
    settings.init()
    app = AcousticStand()
    # interval is in milliseconds, REFRESH_TIME in seconds.
    ani = FuncAnimation(settings.fig, animate, init_func=init,
                        interval=settings.REFRESH_TIME * 1000, blit=True)
    ani.blit = True
    # TODO make gui independent on refresh time (another process/thread
    # handles gui response)
    app.mainloop()
def dex(d=None):
    """Launch the PyExplore data-exploration GUI over the given dataframes.

    Args:
        d: either a dict whose DataFrame values are registered, or a single
           DataFrame (registered under its caller-side variable name).
           Defaults to an empty dict.
    """
    # BUGFIX: avoid a mutable default argument; None stands in for {} and
    # preserves the original behavior for zero-argument calls.
    if d is None:
        d = {}
    # Check if 'dataframes' attribute exists and initialize if necessary
    if not hasattr(settings, 'dataframes'):
        settings.init()
    # Handle different types of data argument
    if isinstance(d, dict):
        settings.dataframes = {
            k: v
            for k, v in d.items() if isinstance(v, pd.core.frame.DataFrame)
        }
    elif isinstance(d, pd.core.frame.DataFrame):
        settings.dataframes[retrieve_name(d)] = d
    # Run data explorer
    app = explore()  # <-- data
    app.master.title('PyExplore - Python Data Exploration Tool - version 0.1')
    app.mainloop()
def main():
    """Publish a test notification to the home-security SNS topic."""
    # args
    config_filename = sys.argv[1]  # 0 based
    settings.init(config_filename)
    sns = settings.aws_session.client('sns')
    # Send message to the SNS topic (the original comment said SQS, but this
    # is an SNS publish).
    response = sns.publish(
        TopicArn='arn:aws:sns:us-east-1:333408648190:home_security_event',
        Message='test #13 event: backdoor \nhttps://photos.app.goo.gl/p8yPhr5m6v7H4Fr96',
        Subject='test_notification',
        MessageStructure='string'
    )
    print(f"SNS Publish Response: {response['MessageId']}")
def analyze_commit(commit, antlr_file_list, repo, commit_index):
    """Analyze every ANTLR file present in a commit and collect counts.

    Arguments:
        commit {[object]} -- commit object holding commit information
        antlr_file_list {[list]} -- all antlr filenames in the HEAD revision
        repo {[repository_object/Git Python]} -- repository loaded via GitPython
        commit_index {[int]} -- commit id of the respective commit

    Returns:
        [object] -- commit_data object; None implicitly if an exception
        was caught below.
    """
    try:
        commit_data = Commit(str(commit.hexsha), str(commit.authored_datetime),
                             commit_index)  # _initializing the commit object
        for file_path_name in antlr_file_list:
            # _getting blob data recursively
            file_content = get_blob_recursively(
                str(commit.tree.hexsha), file_path_name, repo)
            if file_content is not None:
                enter_cnt = exit_cnt = visit_cnt = 0
                settings.init()  # _resets all the antlr count details
                # Populates the settings.* counters as a side effect.
                get_complexity_with_content(file_content)
                is_antlr_file = settings.is_antlr_file
                enter_cnt = settings.enter_cnt
                exit_cnt = settings.exit_cnt
                visit_cnt = settings.visit_cnt
                if is_antlr_file is True:
                    # _creating File object and appending to the commit data list
                    commit_data.add_changed_files(
                        File(file_path_name, is_antlr_file, enter_cnt,
                             exit_cnt, visit_cnt))
        return commit_data
    except ValueError as ve:
        print("Value error: " + str(ve))
    except Exception as e:
        # Broad catch: errors are reported but swallowed; caller receives None.
        print("Unexpected error: " + str(e))
def checkDb(db=DBNAME):
    '''Check if the encrypted db exists; create a fresh db if missing,
    otherwise decrypt and connect to it.

    Side effects: sets the module globals cursor, conn, key, dbname and
    dbname_crypted, and removes the encrypted copy after decryption.
    '''
    settings.init()
    global dbname_crypted
    dbname_crypted = db + '.crypt'
    # BUGFIX(naming): the flag was called dbExists but was True when the
    # encrypted db did NOT exist — renamed so the branches read correctly.
    db_missing = not os.path.exists(dbname_crypted)
    global cursor
    global conn
    global key
    global dbname
    dbname = db
    key = settings.getKey()
    if db_missing:
        conn = sqlite3.connect(db)
        print("Database does not exist, creating...")
        create_logs = '''CREATE TABLE "Logs" ( `ID` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, `LogName` TEXT, `Protocol` TEXT, `Month` TEXT, `Day` INTEGER, `Time` TEXT, `SrcIp` TEXT, `SrcCountry` TEXT, `SrcCity` TEXT, `SrcSpecific` TEXT, `DstIp` TEXT, `DstCountry` TEXT, `DstCity` TEXT, `DstSpecific` TEXT, `Notes` TEXT )'''
        create_script = '''CREATE TABLE "Script" ( `ID` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, `Data` TEXT, `Script` TEXT, `Ip` TEXT, `Country` TEXT, `City` TEXT, `Specific` TEXT, `RangeScanned` TEXT, `PortsOpen` TEXT )'''
        cursor = conn.cursor()
        cursor.execute(create_logs)
        cursor.execute(create_script)
        print("Database created")
        print("Table 'Logs' has been generated to insert the information from processed logs")
        print("Table 'Script' has been generated to insert the information gathered from the scripts")
    else:
        print("Database already exists")
        print("Decrypting....")
        key = settings.getKey()
        AES.decrypt_file(key, dbname_crypted, db, 16)
        print("Connecting....\n")
        conn = sqlite3.connect(db)
        cursor = conn.cursor()
        # os.remove is portable and synchronous, unlike spawning 'rm' via
        # Popen (which also raced with later use of the file).
        os.remove(dbname_crypted)
def __init__(self, master):
    """Build the Compression Tool main window (525x355, centered)."""
    ttk.Frame.__init__(self, master)
    settings.init()
    mixer.init()  # audio playback backend for self.sound entries
    width, height, x, y = settings.center(master, 'master', 525, 355)
    master.geometry('%dx%d+%d+%d' % (width, height, x, y))
    self.master.configure(background='#333333')
    self.master.title("Compression Tool")
    img = tk.Image("photo", file=settings.ICON)
    # NOTE(review): uses the global 'root' rather than 'master' here —
    # presumably they refer to the same toplevel widget; confirm.
    self.tk.call('wm', 'iconphoto', root._w, img)
    # Per-row state keyed by tree item id.
    self.browse_files = {}
    self.sound = {}
    self.item_id = {}
    # causes the full width of the window to be used
    self.columnconfigure(2, weight=1)
    self.columnconfigure(1, weight=1)
    self.make_UI()
def c_future_wealth(fut_period = 1, coh = 1, exo = True):
    """Build an interpolated consumption function over rebate sizes.

    Solves the consumption model once per future-rebate size (0..1 in 11
    steps), evaluating consumption at cash-on-hand `coh`, then evaluates
    the last solved model at coh plus each current rebate; the 21 values
    are linearly interpolated over [0, 2].

    Args:
        fut_period: periods before t_eval at which consumption is read.
        coh: cash-on-hand at which the consumption function is evaluated.
        exo: use baseline (exogenous) params, else the natural borrowing
             constraint parameterization.
    """
    c_list = []
    rebate_fut_vals = np.linspace(0, 1, num=11)
    rebate_curr_vals = rebate_fut_vals[1:]
    for rebate_fut in rebate_fut_vals:
        settings.rebate_size = rebate_fut
        # NOTE(review): assumes settings.init() reads rebate_size rather
        # than resetting it — confirm.
        settings.init()
        if exo:
            IndShockExample = Model.IndShockConsumerType(**baseline_params)
        else:
            IndShockExample = Model.IndShockConsumerType(
                **init_natural_borrowing_constraint)
        IndShockExample.solve()
        IndShockExample.unpack_cFunc()
        IndShockExample.timeFwd()
        c_list = np.append(c_list,
                           IndShockExample.cFunc[t_eval - fut_period](coh))
    # NOTE(review): this loop reuses the LAST solved model from above —
    # confirm that is intended rather than re-solving per current rebate.
    for rebate_cur in rebate_curr_vals:
        c_list = np.append(
            c_list,
            IndShockExample.cFunc[t_eval - fut_period](coh + rebate_cur))
    c_func = LinearInterp(np.linspace(0, 2, num=21), np.array(c_list))
    return (c_func)
def init_mce(self): self.timeinterval = 1 #deleting old gui_data_test files for i in range(len(os.listdir('tempfiles'))): tempfilename = 'tempfiles/gui_data_test%s.nc' % (i) tempfile = os.path.exists(tempfilename) if tempfile: delete_file = ['rm ' + tempfilename] subprocess.Popen(delete_file,shell=True) #setting all variables self.observer = '' self.datamode = '' self.readoutcard = '' self.framenumber = '' self.frameperfile = 374 self.totaltimeinterval = 120 self.currentchannel = 1 self.oldch = 1 st.init()
def metrics():
    """Prompt for a date range and assemble the analytics report dict.

    Also publishes `report` and `total_visits` as module globals for the
    helper getters called below.
    """
    settings.init()
    global report, total_visits
    start_date = str(input("Enter starting day of report: "))
    end_date = str(input("Enter ending day of report: "))
    report = get_report("general", start_date, end_date)
    total_visits = get_week_total_visits()
    # Renamed local (was 'metrics') so it no longer shadows this function.
    result = {
        "start_date": start_date,
        "total_visits": total_visits,
        "avg_visit_time": get_avg_visit_time(),
        "bounce_rate": get_bounce_rate(),
        "visited_urls_visits": get_visited_urls_number_of_visits(
            settings.get_pages_urls()),
        "referrer_report": get_referrers_repartition(
            get_report("referrers")),
    }
    return result
def main():
    """CLI entry: 'receive' starts a receiver node, 'send' an HTTP sender.

    send mode expects: send <port> <target> <payload> as argv[2..4].
    """
    if sys.argv.count('receive'):
        _bootstrap()
        receive()
        reactor.run()
    elif sys.argv.count('send'):
        _bootstrap()
        start_http_server(int(sys.argv[2]))
        send(sys.argv[3], sys.argv[4])
        reactor.run()
    else:
        usage()


def _bootstrap():
    """Shared startup sequence, previously duplicated in both branches."""
    # log.startLogging(sys.stdout)  # uncomment for twisted logging
    settings.init()
    settings.update_proxy_settings()
    contacts.init()
    init()
def main():
    """Smoke-test the S3 helpers: list a key, upload a file to it, report."""
    # args
    config_filename = sys.argv[1]  # 0 based
    settings.init(config_filename)
    # try not sending it an active session
    # ses = settings.aws_session.client('s3')
    bucket = settings.aws_s3_public_image
    s3_key = '20200619/15920694963-3-3.jpg'
    file_name = 'faces/15920694963-3-3.jpg'
    print(f'BEFORE present in {bucket} / {s3_key}')
    object_list = s3_util.get_object_list_from_s3(bucket, s3_key)
    for obj in object_list:
        print(obj)
    print(f'PUTTING file {file_name} -> {bucket} {s3_key}')
    response = s3_util.upload_file(file_name, bucket, s3_key)
    print(f'AFTER - response: {response}')
def main():
    """Run named-entity disambiguation on one article and report accuracy
    against the links found in the article's wikitext."""
    parser = setup_parser()
    args = parser.parse_args()
    settings.init(language=args.language, replaying=args.replay,
                  verbose=args.verbose)
    wikitext = get_wikitext(args.title)
    wiki_dict = find_wikitext_links(wikitext)
    entities = get_entities(args.infile)
    disambiguations = ned(entities)
    accuracy, marked = calculate_accuracy(wiki_dict, disambiguations)
    print()
    print('Disambiguations')
    print('===============')
    print()
    for row in marked:
        # row = (mention, disambiguation, wikipedia link, 'Yes'/'No')
        s = 'Mention: {0} Disambiguation: {1} Wikipedia link: {2} Correct: {3}'.format(
            row[0], row[1], row[2], row[3])
        # Correct rows are logged at warn, wrong ones at error, so mistakes
        # stand out in the log.
        if row[3] == 'Yes':
            settings.logger.warn(s)
        else:
            settings.logger.error(s)
    print()
    settings.logger.info('Accuracy on article "{0}": {1}%'.format(
        args.title, accuracy))
def main():
    """Compute the Spearman rank correlation between the ground-truth and
    baseline MeSH-term scores over the top-K shared terms."""
    settings.init()
    dict_ground_truth = loadGroundTruthInDictionary()
    print("dict_ground_truth: ", len(dict_ground_truth))
    dict_baseline = loadBaseLineResultInDictionary(baseLineName)
    print("dict_baseline: ", len(dict_baseline))
    count = 1
    groundTruthValues_List1 = []
    baselineValues_List2 = []
    ground_truth_path = (settings.dict['FILES_PATH'] + os.sep +
                         "groundtruths" + os.sep + groundTruthfileName)
    # BUGFIX: the ground-truth file was opened inline and never closed;
    # the unused enumerate counter is also gone.
    with open(ground_truth_path) as gt_file:
        for line in gt_file:
            # Lines are tab-separated; the first field is the MeSH term.
            meshTerm = line.split("\t")[0].lower()
            if meshTerm in dict_baseline:
                baselineKeyScore = dict_baseline.get(meshTerm)
                groundTruthKeyScore = dict_ground_truth.get(meshTerm)
                print(
                    str(count) + " " + str(groundTruthKeyScore) + " " +
                    str(baselineKeyScore) + " " + meshTerm)
                groundTruthValues_List1.append(groundTruthKeyScore)
                baselineValues_List2.append(baselineKeyScore)
                count = count + 1
            # Stop once topKSpearman matched terms have been collected.
            if count > topKSpearman:
                break
    print("groundTruthValues_List1: ", len(groundTruthValues_List1))
    print("baselineValues_List2: ", len(baselineValues_List2))
    spearmanCoefficient = scipy.stats.mstats.spearmanr(
        np.asarray(groundTruthValues_List1), np.asarray(baselineValues_List2))
    print(spearmanCoefficient)
def run():
    """Create the 750x750 game canvas, wire input handlers, start the loop."""
    global canvas
    root = Tk()
    canvasWidth = 750
    canvasHeight = 750
    canvas = Canvas(root, width=canvasWidth, height=canvasHeight,
                    highlightthickness=0)
    canvas["bg"] = "dark khaki"
    canvas.pack()

    # Anonymous attribute bag for all per-game state.
    class Struct:
        pass
    canvas.data = Struct()
    canvas.data.canvasWidth = canvasWidth
    canvas.data.canvasHeight = canvasHeight
    settings.init(canvas)
    root.bind("<Button-1>", mousePressed)
    root.bind("<Key>", keyPressed)
    timerFired()
    root.mainloop()  # blocks until the window is closed
def runPdfChartsScript(filename):
    '''Run function for the 'Script' table on the default database'''
    settings.init()
    sqlite.checkDb(db='output/LPD.db')
    # One (column, output image, chart title) entry per chart, in render order.
    charts = [
        ('Ip', 'aux/images/pdfChartScript1.png', 'Ips'),
        ('Country', 'aux/images/pdfChartScript2.png', 'Countries'),
        ('City', 'aux/images/pdfChartScript3.png', 'Cities'),
        ('Specific', 'aux/images/pdfChartScript4.png', 'Region'),
        ('PortsOpen', 'aux/images/pdfChartScript5.png', 'Ports Open'),
    ]
    for column, image, title in charts:
        run('Script', column, image, title)
    writePdf(0, filename, images=[image for _, image, _ in charts])
    sqlite.closeDb()
def main():
    """Sweep the dissipation rate Gamma and append the averaged Fig. 1
    observable for each value to ../results/Fig_1.txt (CSV-ish lines)."""
    settings.init()
    out_path = "../results/Fig_1.txt"
    N_it = 10    # number of Gamma values swept
    N_it2 = 10   # graph realisations averaged per Gamma value
    for mm in range(N_it):
        print(mm / N_it)
        settings.Gamma = 0.1 * float(mm + 1) / float(N_it)
        a_val = 0.
        compteur = 0
        for nn in range(N_it2):
            print(float(nn / N_it2))
            Graph_MIS = Graph_and_vector_space.Graph()
            aaa = Graph_MIS.Divide_non_connected_subgraphs()
            for k in aaa:
                # Only connected sub-graphs with more than 10 vertices
                # contribute to the average.
                if len(k) > 10.:
                    subgraph = Graph_MIS.igraph_representation.subgraph(k)
                    (H, indices) = \
                        Graph_and_vector_space.generate_Hilbert_space(subgraph)
                    psi = np.zeros(len(H), dtype=complex)
                    psi[0] = 1. + 0. * 1j  # start in the first basis state
                    settings.Gamma = 0.1 * float(mm + 1) / float(N_it)
                    (H_1, H_2, H_diss) = \
                        Graph_and_vector_space.generate_Hamiltonians(H, indices)
                    (a, b) = Fig_1_paper(H, psi, H_1, H_2, H_diss, indices)
                    a_val += a
                    compteur += 1
        # BUGFIX: manual open/close replaced by 'with' so the handle cannot
        # leak; the no-op ''.join(str(x)) wrappers are gone.
        # NOTE: divides by zero if no subgraph ever exceeded 10 vertices.
        with open(out_path, "a+") as f:
            u1 = a_val / float(compteur)
            Gamm = 0.1 * float(mm) / float(N_it)
            f.write(str(u1) + "," + str(Gamm) + "," + "\n")
def main():
    """StreamCables entry point: load reader/writer plugins, then poll the
    reader and fan changes out to the writers until Ctrl-C."""
    settings.init()
    args = arg.parse()
    logger.init(args.loglevel)
    print("StreamCables 0.1")
    config = settings.config
    reader_name = config["main"]["reader"]
    writer_names = config["main"]["writers"]
    refresh_rate = config["main"]["refresh-rate"]
    # Plugin entry points are looked up as '<module>.register'.
    rs = [reader_name + ".register"]
    reader = plugins(rs)[0]
    ws = []
    for name in writer_names:
        ws.append(name + ".register")
    writers = plugins(ws)
    logger.logging.info("-------START----------")
    last_hash = ""
    try:
        while True:
            info = reader()
            # Only notify writers when the payload hash changed.
            if last_hash != info["hash"]:
                for writer in writers:
                    writer(info)
                last_hash = info["hash"]
            # Console spinner while waiting out the refresh interval
            # (refresh_rate is in seconds; ticks every 0.5 s).
            for i in range(refresh_rate * 2):
                print("/-\|"[i % 4], end="\b", flush=True)
                time.sleep(0.5)
    except KeyboardInterrupt:
        print("")
        pass
    print("Bye!")
def run(config_file, args): print('Neuroevolutionary program started!') # Just in case, close all opened connections vrep.simxFinish(-1) settings.init() settings.CLIENT_ID = vrep.simxStart('127.0.0.1', settings.PORT_NUM, True, True, 5000, 5) # Connect to V-REP if settings.CLIENT_ID == -1: print('Failed connecting to remote API server') print('Program ended') return settings.N_GENERATIONS = args.n_gen settings.RUNTIME = args.time settings.DEBUG = False # Load configuration. config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat.DefaultSpeciesSet, neat.DefaultStagnation, config_file) restored_population = neat.Checkpointer.restore_checkpoint(args.checkpoint) # this will keep running the evolution from the previous checkpoint # restored_population.run(eval_genomes, settings.N_GENERATIONS) pop = restored_population.population species = restored_population.species gen = restored_population.generation p = neat.population.Population(config, (pop, species, 0)) # Add a stdout reporter to show progress in the terminal. stats = neat.StatisticsReporter() p.add_reporter(neat.StdOutReporter(True)) p.add_reporter(stats) # Run for up to N_GENERATIONS generations. winner = p.run(eval_genomes, settings.N_GENERATIONS)
def main(argv=None):
    """Robot demo entry: start comm, draw window and behaviors; clean up on
    window close."""
    # instantiate COMM object
    comm = RobotComm(1, -50)  # maxRobot = 1, minRSSI = -50
    if comm.start():
        print 'Communication starts'
    else:
        print 'Error: communication'
        return
    # instantiate Robot
    robotList = comm.get_robotList()
    # instantiate global variables gFrame and gBehavior
    settings.init()
    settings.gFrame = tk.Tk()
    settings.gFrame.geometry('600x500')
    gRobotDraw = draw.RobotDraw(settings.gFrame, tk)
    # create behaviors
    settings.gBehaviors[0] = scanning.Behavior("scanning", robotList, 4.0,
                                               gRobotDraw.get_queue())
    settings.gBehaviors[1] = collision.Behavior("collision", robotList, -50)
    gRobotDraw.start()
    settings.gFrame.mainloop()  # blocks until the window is closed
    # NOTE(review): behavior_threads is not defined in this function —
    # presumably a module-level list populated elsewhere; confirm.
    for behavior in behavior_threads:
        print "joining... ", behavior.getName()
        behavior.join()
        print behavior.getName(), "joined!"
    for robot in robotList:
        robot.reset()
    comm.stop()
    comm.join()
    print("terminated!")
def main():
    """RadioCast CTRL client: connect to a server and execute one command.

    Plain commands print a full playlist; 'nowplaying' prints the current
    item from the gaming, movies and various playlists.
    """
    parser = argparse.ArgumentParser(description='RadioCast CTRL client')
    parser.add_argument('ip', help='IP address of the RadioCast server')
    parser.add_argument(
        'command',
        help='Either: gaming, movies, various, downloads, nowplaying')
    args = parser.parse_args()
    settings.init()
    socket.connect("tcp://%s:%s" % (args.ip, "4443"))
    # A single playlist command: fetch and dump that list.
    if args.command in ('gaming', 'movies', 'various', 'downloads'):
        print_list(playlist(args.command).list)
    if args.command == 'nowplaying':
        for label in ('GAMING', 'MOVIES', 'VARIOUS'):
            print("Now playing %s:\n" % label)
            print(playlist(label.lower()).list[0].pod_to_str())
def main():
    """Drive the face-recognition pipeline: detect, optionally feed new
    data, train, then recognise."""
    settings.init()
    flag = True
    count.main(0, 'detector')
    print("Starting Face Detection Technique")
    # Cosmetic progress bar only.
    for i in trange(0, 10):
        time.sleep(0.01)
    print(" If Data Already Feeded.(Press Ctrl Z)")
    while (flag):
        count.main(1, 'face_dataset_creater')
        try:
            print(
                "Press Enter To Skip To Recognition Part or Any Key To Feed New Data :"
            )
            # Empty input ('' is falsy) ends the feeding loop.
            flag = input()
        except:
            # Bare except: EOF / interrupt also ends the loop.
            flag = False
    count.main(2, 'trainner')
    count.main(3, 'recogniser')
    count.main(4, 'detector')
    print("Completed")
    print(settings.myList)
async def main():
    """Boot the node, then run the connector and server concurrently."""
    # initialise globals
    s.init()
    # try and boot
    s.HOST, s.PORT, s.SERVERS, s.ROOT = boot()
    # Mark every known peer as not yet connected. NOTE(review): assumes
    # s.SERVERS maps connection -> dict-like address record; confirm.
    for connection, addr in s.SERVERS.items():
        addr['connected'] = False
    s.FILES = loadFs()
    # if boot() failed to provide an address, exit
    if None in [s.HOST, s.PORT]:
        print('Error in Boot. Reconfigure')
        sys.exit(0)
    # run outbound connections and the inbound server side by side
    await asyncio.gather(
        send_connect(),
        server(),
    )
    return
def main():
    """Init the DHN transport layer, go online, then send or receive
    according to CLI options; blocks in the twisted reactor."""
    global _BaseDir
    global _Debug
    options, args = parseCommandLine()
    _BaseDir = options.basedir
    _Debug = options.debug
    dhnio.init()
    dhnio.SetDebug(18)
    settings.init()
    import identitycache
    identitycache.init()
    reactor.addSystemEventTrigger('before', 'shutdown', shutdown_final)
    # from transport_control import _InitDone
    # _InitDone = True

    def initDone(state, options, args):
        # Callback fired with the connection state once init() finishes.
        if state != 'online':
            print 'state is %s, exit' % state
            reactor.stop()
            return
        if options.send:
            # (disabled experiment: randomized repeated-send loop was here)
            send(args[0], args[1])
            return
        if options.receive:
            print 'state is %s, receiving...' % state
            return
        print 'ONLINE !!!'

    init().addCallback(initDone, options, args)
    # reactor.callLater(20, A, 'shutdown')
    reactor.run()
#!/usr/bin/python3 from eve import Eve from eve_docs import eve_docs from flask_bootstrap import Bootstrap from utils.auth import APIAuth from settings import init # start eve app = Eve(auth=APIAuth()) init(app) # eve_docs addon Bootstrap(app) app.register_blueprint(eve_docs, url_prefix='/docs') if __name__ == '__main__': # run app.run(host='0.0.0.0', port=80)
def main():
    """Log in to the Pokemon Go API (PTC or Google) and print the player's
    profile.

    Credentials/options come from the CONFIG json file and/or the command
    line; command-line values take precedence over config values.
    """
    settings.init()
    parser = argparse.ArgumentParser()
    # If config file exists, load variables from json
    load = {}
    if os.path.isfile(CONFIG):
        with open(CONFIG) as data:
            load.update(json.load(data))
    # An option is only required on the command line when the config file
    # did not already supply it.
    required = lambda x: x not in load
    parser.add_argument("-a", "--auth_service", help="Auth Service",
                        required=required("auth_service"))
    parser.add_argument("-u", "--username", help="Username",
                        required=required("username"))
    parser.add_argument("-p", "--password", help="Password",
                        required=required("password"))
    parser.add_argument("-l", "--location", help="Location",
                        required=required("location"))
    parser.add_argument("-d", "--debug", help="Debug Mode", action='store_true')
    parser.add_argument("-s", "--client_secret", help="PTC Client Secret")
    # FIX: removed `parser.set_defaults(DEBUG=True)` -- it only created an
    # unrelated `args.DEBUG` attribute that nothing reads (the real flag is
    # `args.debug` from -d).
    args = parser.parse_args()
    # Passed in arguments should trump config values; fill the gaps from `load`.
    for key in args.__dict__:
        # FIX: compare against None with `is None`, not `== None`.
        if key in load and args.__dict__[key] is None:
            args.__dict__[key] = load[key]
    if args.auth_service not in ['ptc', 'google']:
        print('[!] Invalid Auth service specified')
        return
    if args.debug:
        settings.debug = True
        print('[!] DEBUG mode on')
    if args.client_secret is not None:
        global PTC_CLIENT_SECRET
        PTC_CLIENT_SECRET = args.client_secret
    set_location(args.location)
    if args.auth_service == 'ptc':
        access_token = login_ptc(args.username, args.password)
    else:
        access_token = login_google(args.username, args.password)
    if access_token is None:
        print('[-] Wrong username/password')
        return
    print('[+] RPC Session Token: {} ...'.format(access_token[:25]))
    api_endpoint = get_api_endpoint(args.auth_service, access_token)
    if api_endpoint is None:
        print('[-] RPC server offline')
        return
    print('[+] Received API endpoint: {}'.format(api_endpoint))
    profile = get_profile(args.auth_service, api_endpoint, access_token)
    if profile is not None:
        print('[+] Login successful')
        profile = profile.payload[0].profile
        print('[+] Username: {}'.format(profile.username))
        # creation_time is in milliseconds since the epoch.
        creation_time = datetime.fromtimestamp(int(profile.creation_time)/1000)
        print('[+] You are playing Pokemon Go since: {}'.format(
            creation_time.strftime('%Y-%m-%d %H:%M:%S'),
        ))
        print('[+] Poke Storage: {}'.format(profile.poke_storage))
        print('[+] Item Storage: {}'.format(profile.item_storage))
        for curr in profile.currency:
            print('[+] {}: {}'.format(curr.type, curr.amount))
    else:
        print('[-] Ooops...')
def main():
    """Build the cocos2d game: window, collision manager, layers, the player
    tank, one enemy tank and brick walls, then start the director loop."""
    # Initialize the window.
    settings.init()
    director.director.init(width=settings.dimensions['x'], height=settings.dimensions['y'], do_not_scale=True, resizable=True)
    settings.collision_manager = cm.CollisionManagerBruteForce()
    # Create a layer and add a sprite to it.
    settings.layers['game'] = Game()
    settings.layers['bullets'] = BatchNode()
    settings.layers['walls'] = BatchNode()
    settings.layers['enemies'] = BatchNode()
    # The player's tank, steered by the user input handlers.
    Tank1 = ETank()
    Tank1.do(UserTankMovingHandlers())
    settings.collision_manager.add(Tank1)
    settings.layers['game'].add(Tank1)
    settings.layers['game'].add(Tank1.getGunSprite())
    settings.objects['players'].append(Tank1)
    settings.layers['game'].add(settings.layers['bullets'])
    settings.layers['game'].add(settings.layers['walls'])
    settings.layers['game'].add(settings.layers['enemies'])
    # One AI-driven enemy tank.
    enemy = KVTank()
    enemy.position = (800, 800)
    enemy.do(EnemyTankMovingHandlers())
    settings.collision_manager.add(enemy)
    settings.objects['enemies'].append(enemy)
    settings.layers['enemies'].add(enemy)
    settings.layers['enemies'].add(enemy.getGunSprite())
    # Two horizontal runs of brick wall at y == 500 (32 px per brick).
    for i in range(20):
        wall = BrickWall()
        wall.update_position(i*32, 500)
        settings.collision_manager.add(wall)
        settings.objects['walls'].append(wall)
        settings.layers['walls'].add(wall)
    for i in range(30):
        wall = BrickWall()
        wall.update_position(i*32 + 680, 500)
        settings.collision_manager.add(wall)
        settings.objects['walls'].append(wall)
        settings.layers['walls'].add(wall)
    # Decorative 100x50 grid of small desert tiles; collision registration
    # is intentionally disabled below.
    for i in range(100):
        for j in range(50):
            wall = sprite.Sprite('sprites/walls/adesert_cracks_5x5.jpg')
            wall.position = (i*5 + 200, 800 + j*5)
            wall.scale = 1
            wall.cshape = cm.AARectShape(
                wall.position,
                wall.width // 2,
                wall.height // 2
            )
            #settings.collision_manager.add(wall)
            #settings.objects['walls'].append(wall)
            #settings.layers['walls'].add(wall)
    #animation = pyglet.image.load_animation('sprites/effects/nuke-ani.gif')
    #anim = sprite.Sprite(animation)
    #anim.position = (500, 500)
    #settings.layers['game'].add(anim)
    # Create a scene and set its initial layer.
    main_scene = scene.Scene(settings.layers['game'])
    # Attach a KeyStateHandler to the keyboard object.
    settings.keyboard = key.KeyStateHandler()
    director.director.window.push_handlers(settings.keyboard)
    # Play the scene in the window.
    director.director.run(main_scene)
appendhash = args.appendhash # whether to dump section-segment mapping secseg_mapping = args.secmapping # number of distinct cell types (same branching and compartments) # each cell has random type [0:ntype] ntype = int((nring * ncell - 1) / ncell_per_type + 1) from ring import * from neuron import h from commonutils import * import settings # initialize global variables settings.init(usegap, nring) # note that if branching is small and variation of nbranch and ncompart # is small then not all types may have distinct topologies # CoreNEURON will print number of distinct topologies. h.Random().Random123_globalindex(args.gran) h.load_file('stdgui.hoc') pc = h.ParallelContext() # set number of threads pc.nthread(args.nt, 1)
def main():
    """Entry point (Python 2): start Bluetooth robot comms, build the two
    Tkinter windows, spawn per-robot update threads and the world-drawing
    thread, then run the Tk main loop until the window closes."""
    global gRobotList, gQuit
    global gCanvas, frame, gHamsterBox
    global monitor_thread, dispatch_thread
    global gBeepQueue, gWheelQueue, drawQueue
    global vWorld
    global joystick
    comm = RobotComm(gMaxRobotNum)
    comm.start()
    print 'Bluetooth started'
    gRobotList = comm.robotList
    monitor_thread = False
    dispatch_thread= False
    # create UI: two separate Tkinter windows
    # 1. frame = course track display
    # 2. gFrame = localization scanning display
    frame = tk.Tk()
    canvas_width = 700 # half width
    canvas_height = 380 # half height
    gCanvas = tk.Canvas(frame, bg="white", width=canvas_width*2, height=canvas_height*2)
    draw_track()
    settings.init()
    settings.gFrame = tk.Tk()
    settings.gFrame.geometry('600x500')
    gRobotDraw = draw.RobotDraw(settings.gFrame, tk)
    # create scanning behavior
    settings.gBehaviors[0] = scanning.Behavior("scanning", gRobotList, 4.0, gRobotDraw.get_queue())
    gRobotDraw.start()
    # create 2 virtual robot data objects
    vrobot = []
    joystick = []
    keyBindings = []
    for robot_i in range(gMaxRobotNum):
        vrobot.append ( virtual_robot() )
        pi4 = 3.1415 / 4
        # robot starting positions
        vrobot[robot_i].set_robot_a_pos(pi4*2, -520 + robot_i * 40, +340 - robot_i * 80)
        # keyboard input (per-robot key sets)
        if robot_i == 0:
            keyBindings = ['w','s','a','d','x']
        elif robot_i == 1:
            keyBindings = ['i','k','j','l',',']
        joystick.append( Joystick(comm, frame, gCanvas, vrobot[robot_i], robot_i, keyBindings) )
        # Canvas items for the robot body and its sensors.
        poly_points = [0,0,0,0,0,0,0,0]
        joystick[robot_i].vrobot.poly_id = gCanvas.create_polygon(poly_points, fill='blue') #robot
        joystick[robot_i].vrobot.prox_l_id = gCanvas.create_line(0,0,0,0, fill="red") #prox sensors ---- here
        joystick[robot_i].vrobot.prox_r_id = gCanvas.create_line(0,0,0,0, fill="red")
        joystick[robot_i].vrobot.floor_l_id = gCanvas.create_oval(0,0,0,0, outline="white", fill="white") #floor sensors
        joystick[robot_i].vrobot.floor_r_id = gCanvas.create_oval(0,0,0,0, outline="white", fill="white")
        time.sleep(1)
        # Background thread that keeps this virtual robot in sync.
        update_vrobot_thread = threading.Thread(target=joystick[robot_i].update_virtual_robot)
        update_vrobot_thread.daemon = True
        update_vrobot_thread.start()
    # virtual world UI
    drawQueue = Queue.Queue(0)
    vWorld = virtual_world(drawQueue, joystick[0], vrobot[0], gCanvas, canvas_width, canvas_height)
    landmark = [-500, 220, -460, 180]
    vWorld.add_obstacle(landmark)
    draw_world_thread = threading.Thread(target=draw_virtual_world, args=(vWorld,))
    draw_world_thread.daemon = True
    draw_world_thread.start()
    gui = VirtualWorldGui(vWorld, frame)
    gui.drawGrid()
    gui.drawMap()
    gCanvas.after(200, gui.updateCanvas, drawQueue)
    frame.mainloop()
    # Tk loop ended: flag shutdown, reset robots, and stop Bluetooth comms.
    gQuit = True
    for robot in gRobotList:
        robot.reset()
    time.sleep(1.0)
    comm.stop()
    comm.join()
    print 'Terminated'
def main():
    """UDP transport test tool (Python 2, Twisted): runs STUN discovery and
    then, depending on sys.argv[1] ('listen'/'connect'/'send'/'sendip'/
    'receive'), listens, hole-punches, or transfers a file to/from a peer
    identified by a DataHaven identity URL."""
    sys.path.append('..')
    def _go_stun(port):
        # Kick off STUN discovery on the given local UDP port.
        print '+++++ LISTEN UDP ON PORT', port, 'AND RUN STUN DISCOVERY'
        stun.stunExternalIP(close_listener=False, internal_port=port, verbose=False).addBoth(_stuned)
    def _stuned(ip):
        # STUN finished: either just listen/connect, or publish our external
        # UDP address in the local identity and send it to the id servers.
        if stun.getUDPClient() is None:
            print 'UDP CLIENT IS NONE - EXIT'
            reactor.stop()
            return
        print '+++++ EXTERNAL UDP ADDRESS IS', stun.getUDPClient().externalAddress
        if sys.argv[1] == 'listen':
            print '+++++ START LISTENING'
            return
        if sys.argv[1] == 'connect':
            print '+++++ CONNECTING TO REMOTE MACHINE'
            _try2connect()
            return
        lid = misc.getLocalIdentity()
        udp_contact = 'udp://'+stun.getUDPClient().externalAddress[0]+':'+str(stun.getUDPClient().externalAddress[1])
        lid.setProtoContact('udp', udp_contact)
        lid.sign()
        misc.setLocalIdentity(lid)
        misc.saveLocalIdentity()
        print '+++++ UPDATE IDENTITY', str(lid.contacts)
        _send_servers().addBoth(_id_sent)
    def _start_sending_ip():
        # 'sendip' mode: init the transport on the STUN'ed client and start.
        init(stun.getUDPClient())
        A().debug = True
        reactor.callLater(1, Start)
    def _start_session():
        # transport_udp_session.SetStateChangedCallbackFunc(_state_changed)
        # Open a UDP session straight to an ip:port from the command line.
        address = (sys.argv[2], int(sys.argv[3]))
        sess = transport_udp_session.open_session(address)
        filename = sys.argv[4]
        loop_delay = None if len(sys.argv)<6 else int(sys.argv[5])
        transport_udp_session._StateChangedCallbackFunc = lambda index, old, new: _state_changed(index, address[0], new, filename, loop_delay)
        sess.automat('init', None)
    # NOTE(review): in the flattened original this call sits between two
    # defs, i.e. at main() body level, so the session is scheduled
    # unconditionally -- looks like leftover test wiring; confirm intent.
    reactor.callLater(2, _start_session)
    def _state_changed(index, ip, newstate, filename, loop_delay):
        # Session state machine callback: once CONNECTED, send the file
        # (optionally in a repeating loop every `loop_delay` seconds).
        print '+++++ STATE CHANGED [%s]' % newstate
        sess = automat.objects().get(index)
        if newstate == 'CONNECTED':
            transport_udp_session.SetStateChangedCallbackFunc(None)
            if loop_delay:
                reactor.callLater(2, LoopingCall(send, filename, sess.remote_address[0], sess.remote_address[1]).start, loop_delay, True)
            else:
                reactor.callLater(2, send, filename, sess.remote_address[0], sess.remote_address[1])
    def _send_servers():
        # Upload our serialized local identity to every identity server
        # listed in its sources; returns a DeferredList over all uploads.
        import tmpfile, misc, nameurl, settings, transport_tcp
        sendfile, sendfilename = tmpfile.make("propagate")
        os.close(sendfile)
        LocalIdentity = misc.getLocalIdentity()
        dhnio.WriteFile(sendfilename, LocalIdentity.serialize())
        dlist = []
        for idurl in LocalIdentity.sources:
            # sources for out identity are servers we need to send to
            protocol, host, port, filename = nameurl.UrlParse(idurl)
            port = settings.IdentityServerPort()
            d = Deferred()
            transport_tcp.sendsingle(sendfilename, host, port, do_status_report=False, result_defer=d, description='Identity')
            dlist.append(d)
        dl = DeferredList(dlist, consumeErrors=True)
        # NOTE(review): prints only the last (host, port) of the loop.
        print '+++++ IDENTITY SENT TO %s:%s' % (host, port)
        return dl
    def _try2connect():
        # UDP hole punching: keep poking the peer address (read from a file)
        # with a small variable-size payload, once per second.
        remote_addr = dhnio.ReadTextFile(sys.argv[3]).split(' ')
        remote_addr = (remote_addr[0], int(remote_addr[1]))
        t = int(str(int(time.time()))[-1]) + 1
        data = '0' * t
        stun.getUDPClient().transport.write(data, remote_addr)
        print 'sent %d bytes to %s' % (len(data), str(remote_addr))
        reactor.callLater(1, _try2connect)
    def _id_sent(x):
        # Identity published; dispatch on the requested mode.
        print '+++++ ID UPDATED ON THE SERVER', x
        if sys.argv[1] == 'send':
            _start_sending()
        elif sys.argv[1] == 'sendip':
            _start_sending_ip()
        elif sys.argv[1] == 'receive':
            _start_receiving()
    def _start_receiving():
        # Resolve the peer's identity URL, then wait for its data.
        idurl = sys.argv[2]
        if not idurl.startswith('http://'):
            idurl = 'http://'+settings.IdentityServerName()+'/'+idurl+'.xml'
        print '+++++ START RECEIVING FROM', idurl
        _request_remote_id(idurl).addBoth(_receive_from_remote_peer, idurl)
    def _receive_from_remote_peer(x, idurl):
        init(stun.getUDPClient())
        A().debug = True
        contacts.addCorrespondent(idurl)
        reactor.callLater(1, Start)
    def _start_sending():
        # Resolve the peer's identity URL, then send sys.argv[3] to it.
        idurl = sys.argv[2]
        if not idurl.startswith('http://'):
            idurl = 'http://'+settings.IdentityServerName()+'/'+idurl+'.xml'
        print '+++++ START SENDING TO', idurl
        # if len(sys.argv) == 6:
        #     send(sys.argv[5], sys.argv[3], int(sys.argv[4]))
        # elif len(sys.argv) == 4:
        _request_remote_id(idurl).addBoth(_send_to_remote_peer, idurl, sys.argv[3], None if len(sys.argv)<5 else int(sys.argv[4]))
    def _request_remote_id(idurl):
        print '+++++ REQUEST ID FROM SERVER', idurl
        return identitycache.immediatelyCaching(idurl)
    def _send_to_remote_peer(x, idurl, filename, loop_delay):
        # Peer identity cached: start the transport and arrange for the file
        # to be sent once the session reaches CONNECTED (see _state_changed).
        print '+++++ PREPARE SENDING TO', idurl
        init(stun.getUDPClient())
        A().debug = True
        contacts.addCorrespondent(idurl)
        reactor.callLater(1, Start)
        ident = identitycache.FromCache(idurl)
        if ident is None:
            print '+++++ REMOTE IDENTITY IS NONE'
            reactor.stop()
            return
        x, udphost, udpport, x = ident.getProtoParts('udp')
        transport_udp_session.SetStateChangedCallbackFunc(lambda index, old, new: _state_changed(index, udphost, new, filename, loop_delay))
    def _send_file(idurl, filename):
        # Direct one-shot send to the peer's published udp contact.
        ident = identitycache.FromCache(idurl)
        if ident is None:
            print '+++++ REMOTE IDENTITY IS NONE'
            reactor.stop()
        x, udphost, udpport, x = ident.getProtoParts('udp')
        print '+++++ SENDING TO', udphost, udpport
        send(filename, udphost, udpport)
    def _test_cancel():
        # Cancel every in-flight transfer, then re-check every 11 seconds.
        import transport_control as tc
        for t in tc.current_transfers():
            cancel(t.transfer_id)
        reactor.callLater(11, _test_cancel)
    # reactor.callLater(10, _test_cancel)
    # --- actual startup sequence ---
    dhnio.SetDebug(14)
    dhnio.LifeBegins()
    settings.init()
    misc.loadLocalIdentity()
    # contacts.init()
    # contacts.addCorrespondent(idurl)
    identitycache.init()
    # Hard-coded LAN overrides for two well-known test identities.
    identitycache.SetLocalIPs({'http://identity.datahaven.net/veselin.xml': '192.168.1.3', 'http://identity.datahaven.net/veselin-ubuntu-1024.xml': '192.168.1.100'})
    port = int(settings.getUDPPort())
    if sys.argv[1] in ['listen', 'connect']:
        port = int(sys.argv[2])
    _go_stun(port)
# Tail of the logging setup: attach the console handler to the root logger.
# NOTE(review): `console` and `formatter` are defined earlier in the file,
# outside this excerpt -- these two statements likely belong to that function.
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)

if __name__ == "__main__":
    configuration = set_global_config()
    set_logging_from_configuration(configuration)
    logger = logging.getLogger(__name__)
    parser = argparse.ArgumentParser(description='Providence Monitor Framework')
    parser.add_argument('--tests','-t', action='store_true')
    parser.add_argument('--mode', help="specify production for production mode, or anything otherwise")
    parser.add_argument('--p4change', help="specify the p4 change number to debug")
    args = parser.parse_args()
    settings.init(args.mode, args.p4change)
    #-- Basic Credentials setup
    credentials_file = configuration.get('credentials_file')
    credential_key = os.environ.get('CREDENTIAL_KEY')
    if credential_key is None:
        # Fall back to an interactive prompt when the key is not in the env.
        credential_key = getpass.getpass('Credential Key:')
    credential_manager = CredentialManager(credentials_file, credential_key)
    config.credential_manager = credential_manager
    ## -- test just resets the db everytime --
    from models import Base
    from db import engine
    if not settings.in_production():
        Base.metadata.drop_all(engine)
        # NOTE(review): indentation reconstructed -- create_all may have been
        # unconditional in the original (i.e. also run in production); confirm.
        Base.metadata.create_all(engine)
# Backtesting driver setup (excerpt).
# NOTE(review): kept byte-for-byte on one line -- the triple-quoted block
# opened at "'''def generatePastPatterns" does not close within this excerpt,
# so re-wrapping here would alter that (string) literal.
from matplotlib import pyplot from functools import reduce from graphingAndGUI import graphEverythingBacktesting from findSimilar import findSimilar from findSimilar import findSimilarLive from decision import decide from decision import decideLive from profitLoss import profitLoss from backtester import backtester from backtester import liveTester startTime = time.time() settings.init('settings/1.1.txt') #pastPatterns = [] #currentPatternIndex = -1000 #length = 15 #currentPattern = settings.historicData[currentPatternIndex : currentPatternIndex + length] #print('Current Pattern : ' + str(currentPattern)) #historicData = historicData '''def generatePastPatterns(patternLength, historicData): currentPoint = 0 endPoint = len(historicData) - patternLength - futureDistance pastPatterns = [] while currentPoint < endPoint: #print('Current Point ' +str(currentPoint)) x = 0
def main(argv):
    """Upload an image to imgur (anonymously) and text the resulting link.

    argv: getopt-style arguments: -p <img_path> -n <number> -m <message>
    (-h prints usage). The phone number defaults to settings.num.
    """
    app_root = os.path.split(os.path.abspath(__file__))[0]
    settings.init()
    # Assess command line args
    num = settings.num
    if num is None:
        num = ''
    path = ''
    msg = ''
    usage = 'Usage:\n' \
            'python main.py -p <img_path> -n <number> -m <message>\n' \
            'or\n' \
            'alias -p <img_path> -n <number> -m <message>'
    try:
        opts, args = getopt.getopt(argv, "hp:n:m:", ["path=", "num=", "msg="])
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print(usage)
            sys.exit()
        elif opt in ("-n", "--num"):
            num = arg
        elif opt in ("-m", "--msg"):
            msg = arg
        elif opt in ("-p", "--path"):
            path = arg
    # Load imgur config file
    config = None
    config_path = os.path.join(app_root, "config.json")
    try:
        fd = open(config_path, 'r')
    except IOError:
        # FIX: narrowed from a bare except -- only a missing/unreadable
        # config file is expected here.
        print("config file [config.json] not found.")
        sys.exit(1)
    try:
        config = json.loads(fd.read())
    except ValueError:
        # FIX: narrowed from a bare except -- json raises ValueError on
        # malformed input.
        print("invalid json in config file.")
        sys.exit(1)
    finally:
        # FIX: the config file handle was never closed.
        fd.close()
    # Upload image from given path anonymously to imgur and obtain link
    mfactory = factory(config)
    imgur = mfactory.build_api()
    img_link = ""
    # FIX: `path is not ""` compared object identity, not equality.
    if path is not None and path != "":
        imgur_req = mfactory.build_request_upload_from_path(path)
        try:
            res = imgur.retrieve(imgur_req)
            img_link = res['link']
        except expired:
            print("Expired access token")
    # Draft and send text message via the textbelt HTTP endpoint.
    number = str('number=%s' % (num))
    message = str('message=%s %s' % (img_link, msg))
    cmd = ['curl', 'http://textbelt.com/text', '-d', number, '-d', message]
    print("Sending Message: [" + img_link + " " + msg + "] to " + num)
    call(cmd)
depth = 7 # measure process time t0 = time.clock() stop = minutes = seconds = 0 nearness = 0.0 smooth = 0.0 merge = 0.4 gradient = 0.1 edge = 0.0 opencell = 0.5 snake = 0.0 # settings.init( nearness, smooth, merge, gradient, edge, opencell, snake ) settings.init(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6], sys.argv[7]) # b = bc.BoardController() b = window() b.window.update_view(b.board) def emptyTiles(board): count = 0 for cell in board: if cell == 0: count += 1 return count def logic():
@contextmanager
def timeit_context(name):
    """Context manager that prints wall-clock start/end timestamps and the
    elapsed whole seconds of the wrapped block, labelled with `name`."""
    began = time.time()
    print("Crawler started at %s" % time.strftime("%H:%M:%S"))
    yield
    # Report completion and how long the block took, in whole seconds.
    finished = time.time()
    print("Crawler ended at %s" % time.strftime("%H:%M:%S"))
    print('[{}] finished in {} seconds'.format(name, int(finished - began)))

if __name__ == "__main__":
    # Console logging plus pretty-printed json for pickled objects.
    logging.basicConfig(level=logging.WARNING, format=' %(asctime)s - %(levelname)s- %(message)s')
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', indent=2)
    repair = settings.init()
    with timeit_context('crawler'):
        # Either repair previously crawled entities or run a fresh crawl.
        if repair:
            fixing_entities()
        else:
            main()
    settings.destroy()
def __init__(self, config_file=None):
    """Read FTP credentials from the settings; the connection itself stays
    closed until it is explicitly opened.

    config_file: optional path handed straight to settings.init().
    """
    settings.init(config_file)
    # Pull each credential out of the 'ftp' section of the settings.
    for attr in ('host', 'username', 'password'):
        setattr(self, attr, settings.get_setting('ftp').get(attr))
    # Established lazily by the connect step.
    self.connection = None
def main(path):
    """Main packet-forwarding loop; never returns.

    Mode 0 means Layer 3/4: packets from fd 0 / fd 3 are run through the
    tcpip filters and, when accepted, written to the opposite endpoint.
    Mode 1 means Layer 7: TCP streams are additionally tracked and matching
    payloads are handed to an external controller over a socket (the socket
    path comes from the 'Socket' config section).

    path: configuration path forwarded to s.init().
    """
    s.init(path)
    # Set IN/OUT for further operations.
    stdin = os.fdopen(0, 'rb')
    stdout = os.fdopen(1, 'wb')
    alt_stdin = os.fdopen(int(os.getenv("ALTERNATE_STDIN")), 'rb')
    alt_stdout = os.fdopen(int(os.getenv("ALTERNATE_STDOUT")), 'wb')
    # Give TCPConnection the output information
    TCPConnection.stdout = stdout
    TCPConnection.alt_stdout = alt_stdout
    # All calls to inout_map except 0 or 3 give 'special', which means we got
    # socket data.
    #global inout_map
    inout_map = {0: alt_stdout, 3: stdout}
    inout_map = defaultdict(lambda: 'special', inout_map)
    # Register inputs in poller for handling the channels.
    global poller
    poller = select.poll()
    poller.register(stdin, select.POLLIN | select.POLLHUP)
    poller.register(alt_stdin, select.POLLIN | select.POLLHUP)
    # give the TCPConnection class access to the poller and the socket info
    Connection.poller = poller
    if s.mode == 1:
        Connection.sockinfo = (s.config.get('Socket', 'path'))
    # Start the sending routine.
    t1 = threading.Thread(target=sendingRoutine)
    t1.daemon = True
    t1.start()
    # Start the receiving routine.
    t2 = threading.Thread(target=receivingRoutine, args=(poller,))
    t2.daemon = True
    t2.start()
    # Start of main routine: consume (fd, data) pairs queued by the receiver.
    while 1:
        filedescriptor, readdata = s.receivingQueue.get()
        if s.mode == 0:
            out = inout_map[filedescriptor]
            if filedescriptor == 0:
                filterpacket, status = tcpipfilter_out(readdata)
            elif filedescriptor == 3:
                filterpacket, status = tcpipfilter_in(readdata)
            else:
                raise ValueError("Unkown filedescriptor for filter.")
            # A truthy status means the filter accepted the packet.
            if status:
                out.write(filterpacket)
                out.flush()
            s.logfile.flush()
        elif s.mode == 1:
            out = inout_map[filedescriptor]
            if filedescriptor == 0 or filedescriptor == 3:
                ethpacket = decode(readdata)
                ptype = l34protocolfilter(ethpacket)
                if (filedescriptor == 0) and (ptype != Protocol.Other):
                    # Test for filterhits.
                    if l34filter(ethpacket, ptype):
                        # If we have a match on layer34,
                        # proceed with layer 4 management
                        status, con = l4manage(ethpacket, filedescriptor, ptype)
                        # Send l7 data to external controler via socket.
                        if status == 2:
                            send(Connection.SOCK, con, getdata(ethpacket), 0)
                            # con.sock.send(getdata(ethpacket))
                        # # Forward data to target.
                        # elif status == 1:
                        #     send(Connection.TAIL, None,
                        #          readdata, 0)
                        # Drop packet
                        else:
                            pass
                    # No filterhit occured.
                    else:
                        # Send data to the other end of the wire.
                        send(Connection.TAIL, None, readdata, 0)
                # ptype == Protocol.Other
                elif (filedescriptor == 0) and (ptype == Protocol.Other):
                    # Send data to the other end of the wire.
                    send(Connection.TAIL, None, readdata, 0)
                # filedescriptor == 3
                elif (filedescriptor == 3) and (ptype == Protocol.TCP):
                    # Test for filterhits.
                    if l34filter(ethpacket, ptype, 1):
                        # Only TCP management is needed here, UDP is not
                        # monitored on the inward way.
                        status, con = l4TCPmanage(ethpacket, filedescriptor)
                        # This is the way back,
                        # here we do not send to sock but need to
                        # make sure the correct seq and ack numbers
                        # are set in the packet we foreward.
                        if status == 2:
                            packet = con.makevalid(ethpacket)
                            packet = encode(packet)
                            send(Connection.HEAD, con, packet)
                        # elif status == 1:
                        #     send(Connection.HEAD, con, packet, 0)
                        # Drop packet
                        else:
                            pass
                    # No filterhit occured.
                    else:
                        # Send data to the machine at the head
                        # of the wire.
                        send(Connection.HEAD, None, readdata, 0)
                # filedescriptor == 3 and
                # ptype == (Protocol.UDP or Protocol.Other)
                # UDP normally should not be faulty on the answers.
                # TODO: maybe UDP causes problems on the inward direction
                else:
                    # Send data to the machine at the head
                    # of the wire.
                    send(Connection.HEAD, None, readdata, 0)
            # Socket data.
            else:
                if s.verbosity:
                    s.logfile.write("Socketdata processed!\n")
                con = Connection.resolvefd(filedescriptor)
                l7manage(readdata, con)
        else:
            raise ValueError("Unknown mode.")
        s.logfile.flush()
def __init__(self):
    """Build the application with settings supplied by settings.init()."""
    # No routes wired up yet.
    # url(r'/', MainHandler),
    routes = []
    config = settings.init()
    super().__init__(routes, **config)
# VM-creation helper module (excerpt).
# NOTE(review): kept byte-for-byte on one line -- the libvirt domain-XML
# string in create_xml() is built with backslash-newline continuations and
# runs past the end of this excerpt, so re-wrapping would corrupt the literal.
# NOTE(review): the generated XML embeds `driver` unquoted
# ("<driver name=" + driver + " ...") -- that attribute looks malformed;
# confirm against the libvirt domain XML schema.
import libvirt import main_mini import sys import os import settings import subprocess from uuid import uuid4 settings.init() # Call only once def create_xml(hypervisor,vm_name,ram,uuid,cpu,arch_type,driver,source_path): xml = "<domain type='" + hypervisor + \ "'><name>" + vm_name + "</name> \ <uuid>" + uuid + "</uuid> \ <memory>" + ram + "</memory> \ <vcpu>" + cpu + "</vcpu> \ <os> \ <type arch='" + arch_type + "' machine='pc'>hvm</type> \ <boot dev='hd'/> \ </os> \ <features> \ <acpi/> \ <apic/> \ <pae/> \ </features> \ <clock offset='utc'/> \ <on_poweroff>destroy</on_poweroff> \ <on_reboot>restart</on_reboot> \ <on_crash>restart</on_crash> \ <devices> \ <disk type='file' device='disk'> \ <driver name=" + driver + " type='raw'/> \
# Pastebin web app (excerpt): syntax-highlighting helper tail, recent-paste
# query, static file route and the start of the paste form.
# NOTE(review): truncated at both ends -- the leading `lexer=None` fragment
# belongs to a highlighting function whose `def` line precedes this excerpt
# (it references `acode` and ends in `return highlight(...)`), and the body
# of class PasteForm continues beyond it; kept byte-for-byte.
lexer=None if lexer is None: lexer=guess_lexer(acode) formatter=HtmlFormatter(linenos=True) return highlight(acode, lexer, formatter) def last_n(n): s1=Session() q = s1.query(Paste).order_by((Paste.publishdate.desc())).limit(n) return q last_five= lambda : last_n(5) settings.init() #DON'T TOUCH #pygments languages_list=[(l[1][0], l[0]) for l in get_all_lexers()] #tuples of long/short names languages_list.sort(key = lambda el: el[0]) languages_list.insert(0,('YOU GUESS', '')) @route('/public/<filepath:path>') def server_static(filepath): return static_file(filepath, root=settings.staticdir) class PasteForm(Form): author = TextField('author',[validators.Length(max=10)])
def setUp(self):
    """Build a MailTicket fixture from a minimal two-header mail message."""
    settings.init()
    raw_message = (
        "From: [email protected]\n"
        "Date: Tue, 28 Sep 2016 10:24:09 +0200 (CEST)\n\n"
    )
    # The ticket parses its content from a file-like object.
    self.mail = MailTicket(StringIO(raw_message))