def main():
    """Poll URL_TO_FETCH every REPEAT_AFTER seconds and send an email via
    smtp when the target HTML element is absent (i.e. jobs look available).

    Runs forever; reads the sender password interactively at startup.
    """
    today = getCurrentTime()
    print("------------------------------"
          "------------------------------\n"
          "Début du processus - %s" % (today.strftime("%d-%m-%Y, %H:%M:%S")))
    password = getpass.getpass("Inscrire le mot de passe Marcel: ")
    REPEAT_AFTER = 43200  # 12 hours
    # REPEAT_AFTER = 60  # 60 sec
    now = time.time()
    repeatAfter = now + REPEAT_AFTER
    print("\n...\n")
    while 1:
        if time.time() >= repeatAfter:
            print("LETS PING - %s" %
                  (getCurrentTime().strftime("%d-%m-%Y, %H:%M:%S")))
            responseTextHtml = scraper.pingWebsite(URL_TO_FETCH)
            if scraper.isHtmlElementPresent(responseTextHtml, HTML_ID_TO_TARGET):
                print("Il n'y a pas d'emploi disponible pour l'instant.\n"
                      "Le processus sera relancé dans %s secondes." % REPEAT_AFTER)
                # debug
                # NOTE(review): this emails even when no job is available —
                # looks like a debug leftover; confirm before removing.
                smtp.sendEmail(password)
            else:
                print("Il y a des emplois disponibles!")
                smtp.sendEmail(password)
            repeatAfter = time.time() + REPEAT_AFTER  # repeat again in X seconds
        # Fix: the original loop spun on time.time() with no pause, pinning a
        # CPU core; a 1-second sleep keeps scheduling accuracy ample for a
        # 12-hour period.
        time.sleep(1)
def buildJournalEntry(entry, ignoreURL):
    """Build one journal line from *entry*: prefix it with the configured
    indent level and the current time (as a TODO item when the TODO command
    marker is present), then rewrite any contained YouTube/plain URL into
    the journal's embed/bookmark markup. Returns the finished line."""
    timestamp = utils.getCurrentTime()
    if TODOCommand in entry:
        journalEntry = (config.defaultIndentLevel + " TODO " + timestamp +
                        " " + entry.replace(TODOCommand, ''))
    else:
        journalEntry = config.defaultIndentLevel + " " + timestamp + " " + entry
    if not ignoreURL:
        print(entry)
        url = utils.containsYTURL(entry)
        print(url)
        if url:
            #title = getWebPageTitle(url)
            journalEntry = journalEntry.replace(url, '{{youtube ' + url + '}}')
        else:
            url = utils.containsURL(entry)
            if url:
                title = utils.getWebPageTitle(url)
                markup = ('#' + config.BookmarkTag + ' [' + title + '](' +
                          url + ')')
                journalEntry = journalEntry.replace(url, markup)
    print(journalEntry)
    return journalEntry
def lpr_ai4thai(origin_file, out_file, crop_file):
    """Send *crop_file* to the AI4Thai LPR endpoint and index the recognized
    plate (plus the three file paths) into Elasticsearch.

    Returns the raw HTTP response object in all cases.
    """
    url = params.get('AI4Thai', 'lpr_url')
    payload = {'crop': '1', 'rotate': '1'}
    headers = {
        'Apikey': getAPIKey(),
    }
    # Fix: open the image in a context manager — the original leaked the
    # file descriptor on every call.
    with open(crop_file, 'rb') as image:
        files = {'image': image}
        response = requests.post(url, files=files, data=payload, headers=headers)
    try:
        body = response.json()  # parse once instead of re-parsing per access
        print("AI4Thai LPR = " + body[0]["lpr"])
        data_dict = {
            "time": utils.getCurrentTime(),
            "lpr": body[0]["lpr"],
            "origin_file": origin_file,
            "out_file": out_file,
            "crop_file": crop_file,
        }
        #print(json.dumps(data_dict))
        es = db.connect()
        result = db.insert(es, json.dumps(data_dict), indexName="lpr")
        print("Elastic : Successful = {}\n-----------".format(
            result["_shards"]["successful"]))
    except Exception as e:
        # On failure the API responds with a dict carrying a "message" field
        # — presumably; TODO confirm against the AI4Thai error schema.
        print('LPR error: {}'.format(str(response.json()["message"])))
    return response
def __init__(self, x, y, z):
    """Entity placed at (x, y, z), made of gold ore, travelling along +x."""
    self.blockType = block.GOLD_ORE.id
    self.pos = Vec3(x, y, z)   # world position
    self.dir = Vec3(1, 0, 0)   # direction of travel
    # Movement throttle: timestamp of the last move and delay between moves.
    self.moveTime = getCurrentTime()
    self.timeUntilMove = 1000
def __init__(self, x, y, z):
    """Entity at (x, y, z) built from gold ore, initially moving along +x."""
    self.pos = Vec3(x, y, z)
    self.dir = Vec3(1, 0, 0)
    self.blockType = block.GOLD_ORE.id
    # Throttle state: when we last moved, and the ms delay between moves.
    self.moveTime = getCurrentTime()
    self.timeUntilMove = 1000
def __init__(self):
    """Per-player game state: score plus attack cooldown bookkeeping."""
    self.player = mc.player   # handle to the connected Minecraft player
    self.score = 0            # targets hit so far
    self.attackBlocks = []    # projectiles currently in flight
    # Cooldown: last attack timestamp and delay between attacks.
    self.attackTime = getCurrentTime()
    self.timeUntilAttack = 1000
def __init__(self):
    """Player wrapper tracking score, projectiles and attack cooldown."""
    self.player = mc.player
    self.score = 0
    # Attack cooldown state (timestamp + delay), then live projectiles.
    self.attackTime = getCurrentTime()
    self.timeUntilAttack = 1000
    self.attackBlocks = []
def do_run(self):
    """Execute every notification task registered for the current time and
    send each task's result to the configured recipient via the bot."""
    # Fix: the local was named `time`, shadowing the stdlib `time` module.
    current_time = utils.getCurrentTime()
    #print(current_time)
    # Dict keys are unique, so a direct membership test is equivalent to the
    # original scan over all keys (and avoids the O(n) loop).
    if current_time in self._regularTimeNotifications:
        for task in self._regularTimeNotifications[current_time]:
            bot.sendMessage(config.recepient, task())
            print("[%s] \'%s\' task executed" % (self._name, task.__name__))
def __init__(self, x, y, z):
    """Target at (x, y, z) made of wood, moving one block at a time along +x."""
    # Position kept as separate scalar coordinates.
    self.x, self.y, self.z = x, y, z
    self.dir = Vec3(1, 0, 0)
    self.speed = 1
    self.blockType = block.WOOD.id
    # Used for setting a delay in target move speed
    self.moveTime = getCurrentTime()
    self.timeUntilMove = 1000
def __init__(self, x, y, z):
    """Wooden target spawned at (x, y, z), drifting in the +x direction."""
    self.x = x
    self.y = y
    self.z = z
    self.speed = 1
    self.dir = Vec3(1, 0, 0)
    self.blockType = block.WOOD.id
    # Used for setting a delay in target move speed
    self.moveTime = getCurrentTime()
    self.timeUntilMove = 1000
def __init__(self, GPIONum, maxBlinks=None):
    """LED on GPIO pin *GPIONum* that blinks up to *maxBlinks* times
    (default 4) at a 300 ms interval."""
    self.GPIONum = GPIONum
    # Avoid a mutable/shared default: 4 unless the caller overrides it.
    self.maxBlinks = 4 if maxBlinks is None else maxBlinks
    # numBlinks counts down; starting equal to maxBlinks means "not blinking".
    self.numBlinks = self.maxBlinks
    self.blinkTime = getCurrentTime()
    self.timeUntilBlink = 300
def __init__(self, GPIONum, maxBlinks=None):
    """Blinking LED bound to *GPIONum*; *maxBlinks* defaults to 4."""
    blinks = maxBlinks if maxBlinks is not None else 4
    self.GPIONum = GPIONum
    self.numBlinks = blinks      # remaining blink budget
    self.maxBlinks = blinks      # configured maximum
    self.blinkTime = getCurrentTime()  # timestamp of the last blink
    self.timeUntilBlink = 300          # ms between blinks
def gameLoop():
    """Main game loop: spawn/move targets, fire on button press, advance
    projectiles, score hits and drive the status LED until the game ends."""
    while gameInfo.gameStarted == True:
        gameInfo.gameTime = getCurrentTime()
        checkGameBlocksHit()
        if (len(gameInfo.targets) < gameInfo.maxTargets):
            createTarget()
        for target in gameInfo.targets:
            if (target.canMove() == True):
                target.move()
                target.moveTime = getCurrentTime()
        if (button.pressed() == True and player.canAttack() == True):
            player.attack()
        # Uncomment below two lines for auto-attack
        #if (player.canAttack() == True):
        #player.attack()
        # Fix: iterate over a snapshot — the original removed items from
        # player.attackBlocks while iterating it, which skips the element
        # that follows each removal.
        for attackBlock in list(player.attackBlocks):
            attackBlock.move()
            if (gameInfo.gameBoard.isInsideWithX(attackBlock.pos) == True):
                player.attackBlocks.remove(attackBlock)
                attackBlock.remove()
                mc.setBlock(attackBlock.pos.x, attackBlock.pos.y,
                            attackBlock.pos.z, block.STONE.id)
                for target in gameInfo.targets:
                    if target.wasHit(attackBlock.pos) == True:
                        gameInfo.hitTarget(target)
                        gameInfo.checkPlayerWin()
                        break
        if (ledLight.canBlink(gameInfo.gameTime) == True):
            ledLight.blink()
def attack(self):
    """Spawn an attack block at the player's block-aligned position and
    restart the attack cooldown timer."""
    # Snap the float player coordinates to integer block coordinates.
    bx, by, bz = map(floatToInt, self.getPos())
    self.attackBlocks.append(AttackBlock(bx, by, bz))
    # Reset attack timer
    self.attackTime = getCurrentTime()
def attack(self):
    """Launch a projectile from the player's current (integer) position."""
    px, py, pz = self.getPos()
    block_pos = (floatToInt(px), floatToInt(py), floatToInt(pz))
    self.attackBlocks.append(AttackBlock(*block_pos))
    # Restart the cooldown clock so canAttack() gates the next shot.
    self.attackTime = getCurrentTime()
def gameLoop():
    """Run the game: create and move targets, handle button-triggered
    attacks, advance projectiles, register hits and blink the LED."""
    while gameInfo.gameStarted == True:
        gameInfo.gameTime = getCurrentTime()
        checkGameBlocksHit()
        if (len(gameInfo.targets) < gameInfo.maxTargets):
            createTarget()
        for target in gameInfo.targets:
            if (target.canMove() == True):
                target.move()
                target.moveTime = getCurrentTime()
        if (button.pressed() == True and player.canAttack() == True):
            player.attack()
        # Uncomment below two lines for auto-attack
        #if (player.canAttack() == True):
        #player.attack()
        # Fix: removing from player.attackBlocks while iterating it skips
        # the next element after every removal — iterate a copy instead.
        for attackBlock in list(player.attackBlocks):
            attackBlock.move()
            if (gameInfo.gameBoard.isInsideWithX(attackBlock.pos) == True):
                player.attackBlocks.remove(attackBlock)
                attackBlock.remove()
                mc.setBlock(attackBlock.pos.x, attackBlock.pos.y,
                            attackBlock.pos.z, block.STONE.id)
                for target in gameInfo.targets:
                    if target.wasHit(attackBlock.pos) == True:
                        gameInfo.hitTarget(target)
                        gameInfo.checkPlayerWin()
                        break
        if (ledLight.canBlink(gameInfo.gameTime) == True):
            ledLight.blink()
def run_monkey(self):
    """Start a monkey-test run for the package named in monkey.conf.

    Creates a timestamped log directory under ./logs, kicks off the
    merged monkey command in a child process, and returns the ``tasks``
    dict (package name, device info, crash/ANR counters).
    """
    # add by jianan
    self.startTime = time.time()  # wall-clock start of this run
    # end add
    self.cf.read('monkey.conf')
    # Config value looks like "<something> <package>"; take the second token.
    package_name = self.cm.cf.get('monkey_conf', 'package_name').split(" ")[1]
    self.tasks["pkgName"] = package_name
    # self.tasks["duration"] = self.hours
    self.tasks["tasks"] = []
    self.tasks["deviceInfo"] = self.ad.get_devices()
    # Flip the UI buttons: stop becomes usable, start is disabled.
    self.stop_monkey['state'] = 'active'
    self.start_monkey['state'] = 'disable'
    try:
        current_path = os.getcwd()
        #package_name = self.cf.get('monkey_conf', 'package_name').split(" ")
        work_path = os.path.join(current_path, "logs")
        #package_name + utils.getCurrentTime()
        if not os.path.exists(work_path):
            os.mkdir(work_path)
        utils.clearLog()
        # Per-run subdirectory: <package><timestamp>.
        work_path = os.path.join(work_path, package_name + utils.getCurrentTime())
        # package_name + utils.getCurrentTime()
        if not os.path.exists(work_path):
            os.mkdir(work_path)
        self.workPath = work_path
        # Run the merged monkey command off the main process.
        t = multiprocessing.Process(target=lambda: self.mk.merge_command(
            work_path, *self.cm.collect(*ENTRYLIST)))
        t.start()
        #while True:
        #    utils.clearLog()
        #    if t.is_alive():
        #        time.sleep(10)
        #else:
        #self.collect(work_path)
        #self.analyse(work_path)
        #workPath = os.path.join(current_path, "logs", self.pkgName + utils.getCurrentTime())
        # os.mkdir(workPath)
    except:
        # NOTE(review): `status` is not defined anywhere in this scope, so
        # this handler itself raises NameError; confirm which variable was
        # intended. Bare `except:` also hides the original error.
        self.ad.stop_monkey(status, work_path)
        # print "status"
    finally:
        self.tasks['crashCount'] = str(self.tasksCrashCount)
        self.tasks["anrCount"] = str(self.tasksAnrCount)
        # NOTE(review): `return` inside `finally` swallows any in-flight
        # exception (including the one from the except block above).
        return self.tasks
def getPrinterLatestData():
    """Return the most recent batch of printersinfo rows, each annotated
    with a live port status and the current check time.

    Returns a list of dicts (one per printer row).
    """
    # Fix: the accumulator was named `list`, shadowing the builtin.
    rows = []
    with pymssql.connect(databaseip, username, password, databasename) as conn:
        with conn.cursor(as_dict=True) as cursor:  # rows come back as dicts
            # Only the rows sharing the latest createtime.
            selectsql = ('select printercount,printername,portname,createtime,checkstatus '
                         'from printersinfo where createtime = '
                         '(select max(createtime) from printersinfo )')
            cursor.execute(selectsql)
            for row in cursor:
                # Probe the port only for printers flagged for checking.
                row['portstatus'] = utils.getPrinterStatus(
                    row['portname']) if row['checkstatus'] == 'Y' else 'Disabled'
                row['checktime'] = utils.getCurrentTime()
                rows.append(row)
    return rows
def __init__(self, winScore=None):
    """Fresh game state: stopped, at most one target, win at *winScore*
    points (default 10)."""
    self.winScore = 10 if winScore is None else winScore
    self.gameStarted = False
    self.gameTime = getCurrentTime()
    self.maxTargets = 1
    self.targets = []
    self.gameBoard = Rect(0, 0, 0, 0, 0)  # placeholder until the board is set
def __init__(self, winScore=None):
    """Initial game bookkeeping; the score needed to win defaults to 10."""
    target_score = winScore if winScore is not None else 10
    self.gameStarted = False
    self.gameTime = getCurrentTime()   # clock snapshot for timing checks
    self.maxTargets = 1                # concurrent targets allowed
    self.targets = []                  # live targets
    self.winScore = target_score
    self.gameBoard = Rect(0, 0, 0, 0, 0)
def buildJournalEntry(entry, ignoreURL):
    """Build one journal line from *entry*.

    The entry is prefixed with the configured indent level, a timestamp
    (when utils.getCurrentTime() returns one) and run through the command
    mapping. Unless *ignoreURL* is set, a contained YouTube, Twitter or
    plain URL is rewritten into embed/tweet/bookmark markup, with the
    bookmark form chosen by config.journalsFilesExtension (.md or .org).
    Returns the finished line (also printed for visibility).
    """
    journalEntry = ""
    currentTime = utils.getCurrentTime()
    # Append a trailing space only when there is a timestamp at all.
    if currentTime:
        currentTime += " "
    else:
        currentTime = ""
    # print(processCommandsMapping('21:40 some non todo entry T'))
    journalEntry = config.defaultIndentLevel + " " + utils.processCommandsMapping(
        currentTime + entry)
    # if(TODOCommand in entry):
    #     journalEntry = config.defaultIndentLevel + " TODO " + currentTime + entry.replace(TODOCommand,'')
    # else:
    #     journalEntry = config.defaultIndentLevel + " " + currentTime + entry
    if (not (ignoreURL)):
        # print(entry)
        # URL detection cascade: YouTube first, then Twitter, then generic.
        journalEntryURL = utils.containsYTURL(entry)
        # print (journalEntryURL)
        if (journalEntryURL):
            #title = getWebPageTitle(journalEntryURL)
            journalEntry = journalEntry.replace(
                journalEntryURL, '{{youtube ' + journalEntryURL + '}}')
        else:
            journalEntryURL = utils.containsTWUrl(entry)
            if (journalEntryURL):
                # journalEntry = utils.generateTwitterIframe(journalEntryURL)
                tweetString = twitter.LogTwitterUrl(journalEntryURL)
                journalEntry = journalEntry.replace(journalEntryURL, tweetString)
            else:
                journalEntryURL = utils.containsURL(entry)
                if (journalEntryURL):
                    title = utils.getWebPageTitle(journalEntryURL)
                    # Bookmark markup differs between Markdown and Org files.
                    if (config.journalsFilesExtension == '.md'):
                        journalEntry = journalEntry.replace(
                            journalEntryURL,
                            '#' + config.BookmarkTag + ' [' + title + '](' +
                            journalEntryURL + ')')
                    elif (config.journalsFilesExtension == '.org'):
                        journalEntry = journalEntry.replace(
                            journalEntryURL,
                            '#' + config.BookmarkTag + ' [[' + journalEntryURL +
                            '][' + title + ']]')
    print(journalEntry)
    return journalEntry
def main():
    """Post the registration-stats report to the Telegram group when the
    current time falls inside [start, end] or matches one of the fixed send
    times. Returns True when a message was sent, False otherwise."""
    now = u.getCurrentTime()
    print("Current time: ", now)
    at_fixed_time = now in ("22:00:00", "08:00:00", "14:00:00")
    if not ((start <= now <= end) or at_fixed_time):
        return False
    rows = postgres.dbReader(connection)
    # One formatted line per row, then the grand-total footer.
    body = ''.join(u.messageFormat(row[0]) + f'{row[1]}\n' for row in rows)
    body += f"**UMUMIY REGISTRATSIYADAN O'TGAN USERLAR SONI:** {rows[0][3]}"
    entity = client.get_entity(PeerChannel(config.GROUP_ID))
    client.send_message(entity, body)
    print("Jo'natildi soat:", now)
    return True
def Refresh():
    """Redraw the e-paper display: date/week/city and weather on the left,
    big day-of-month and month plus a random poem couplet on the right."""
    try:
        #get current time
        dateStr, weekStr, dayStr, monthStr = getCurrentTime()
        #get information of weather
        city, temp, icon, text, tempMin, tempMax = getWeather()
        #initialize the image
        # 1-bit image the size of the panel, white (255) background.
        im = Image.new('1', (epd.width, epd.height), 255)
        draw = ImageDraw.Draw(im)
        # Left column: date, weekday, city.
        draw.text((30, 10), dateStr, font=blackFont20, fill=0)
        draw.text((30, 45), weekStr, font=blackFont20, fill=0)
        draw.text((30, 80), city, font=normalFont20, fill=0)
        # Weather icon bitmap chosen by the provider's icon code.
        bmp = Image.open(
            "/home/pi/waveshare/E-PaperWeatherCalendar/resources/pictures/" +
            icon + ".bmp")
        im.paste(bmp, (30, 115))
        draw.text((30, 190), text, font=normalFont20, fill=0)
        draw.text((30, 225), temp + "°C", font=blackFont20, fill=0)
        draw.text((30, 260), tempMin + "°C -" + tempMax + "°C",
                  font=blackFont20, fill=0)
        # Right column: large day number, month (x-position depends on the
        # month string width), and a two-line poem centered by length.
        draw.text((226, 30), dayStr, font=blackFont80, fill=0)
        draw.text((getMonthX(monthStr), 130), monthStr, font=blackFont30, fill=0)
        poemFirst, poemSecond = getRandomPoem()
        poemX = getPoemX(len(poemFirst))
        draw.text((poemX, 200), poemFirst, font=normalFont24, fill=0)
        draw.text((poemX, 240), poemSecond, font=normalFont24, fill=0)
        epd.display(epd.getbuffer(im))
    except Exception as e:
        print(e)
    # KeyboardInterrupt is a BaseException, so the handler above does not
    # catch it; this branch shuts the panel down cleanly on Ctrl+C.
    except KeyboardInterrupt:
        print("ctrl + c:")
        epd4in2.epdconfig.module_exit()
        exit()
def dumpData(self, data, download_queue_video, download_queue_captions,
             download_queue_assignment, skipped_important, skipped):
    """Persist every payload as its own JSON file inside a fresh,
    timestamp-named log directory under data/logs/."""
    stamp = utils.getFormattedDateTimeFile(utils.getCurrentTime().timestamp())
    path = "data/logs/" + "log_" + stamp + "/"
    Path(path).mkdir(parents=True, exist_ok=True)
    # Table of target filename -> object, dumped uniformly below.
    payloads = [
        ('data.json', data),
        ('download_queue_video.json', download_queue_video),
        ('download_queue_captions.json', download_queue_captions),
        ('download_queue_assignment.json', download_queue_assignment),
        ('skipped_important.json', skipped_important),
        ('skipped.json', skipped),
    ]
    for filename, payload in payloads:
        with open(path + filename, 'w') as outfile:
            json.dump(payload, outfile)
def post(self, analytic_id):
    '''
    Apply a certain analytic to the provided input data.
    The input must be a list of datasets, which can be matrices and/or results.

    Returns (result-metadata dict, 201) on success, (message, 400) for a
    malformed datasource, or (traceback string, 406) when the analysis or
    metadata storage fails.
    '''
    #get the analytic
    _, col = analytics_collection()
    isResultSource = False
    #get the input data
    data = request.get_json(force=True)
    # Only the first datasource in the request is used.
    datasrc = data['src'][0]
    if isinstance(datasrc, list):
        msg = "When Posting Analytic %s, datasrc was a list" % (analytic_id)
        print(msg)
        return msg, 400  #Bad Request
    else:
        print("Datasource is a <%s>" % type(datasrc))
    src_id = datasrc['src_id']
    sub_id = datasrc['id']
    parameters = data['parameters']
    inputs = data['inputs']
    name = data['name']
    res_id = utils.getNewId()
    #see if the input data is a result
    # A datasource carrying 'analytic_id' is itself a prior result; chain
    # under its original source id.
    if 'analytic_id' in datasrc:
        isResultSource = True
        mat_id = datasrc['src_id']
    else:
        mat_id = sub_id
    # Output directory for this result: RESULTS_PATH/<matrix>/<result>/.
    storepath = os.path.join(RESULTS_PATH, mat_id, res_id) + "/"
    os.makedirs(storepath)
    # print("Extracted info for analytic:%s\n %s"%(analytic_id, {
    #     'src_id': src_id,
    #     'sub_id': sub_id,
    #     'parameters': parameters,
    #     'inputs': inputs,
    #     'name': name,
    #     'res_id': res_id
    # }))
    #run analysis
    queue = Queue()
    try:
        #single process for now
        utils.run_analysis(queue, analytic_id, parameters, inputs,
                           storepath, name)
        #multiprocess solution from before
        # p = Process(target=utils.run_analysis, args=(queue, analytic_id, parameters, inputs, storepath, name))
        # p.start()
        # p.join() # this blocks until the process terminates
        outputs = queue.get()
    except:
        tb = traceback.format_exc()
        logging.error(tb)
        return tb, 406
    if outputs != None:
        #store metadata
        _, res_col = results_collection()
        # Fetch (or lazily create) the per-source results document.
        try:
            src = res_col.find({'src_id': mat_id})[0]
        except IndexError:
            src = {}
            src['rootdir'] = os.path.join(RESULTS_PATH, mat_id) + '/'
            src['src'] = data['src'][0]
            src['src_id'] = data['src'][0]['id']
            src['results'] = []
            res_col.insert(src)
            src = res_col.find({'src_id': mat_id})[0]
        # Metadata record for this individual result.
        res = {}
        res['id'] = res_id
        res['rootdir'] = storepath
        res['name'] = name
        res['src_id'] = mat_id
        res['created'] = utils.getCurrentTime()
        res['analytic_id'] = analytic_id
        res['parameters'] = parameters
        res['outputs'] = outputs
        if isResultSource:
            res['res_id'] = [el['id'] for el in data['src']]
        # Append the new result to the source's list and write it back.
        results = []
        for each in src['results']:
            results.append(each)
        results.append(res)
        res_col.update({
            'src_id': mat_id
        }, {'$set': {
            'results': results
        }})
        return res, 201
    else:
        tb = traceback.format_exc()
        logging.error(tb)
        return tb, 406
sqlConf = utils.loadConfig(utils.dbFile) if sqlConf: sqlCommon = sqlCommon % (sqlConf["db.user"], sqlConf["db.pass"], sqlConf["db.host"]) sqlExecuteFile = sqlCommon + " -i \"%s\"" sqlExecuteQuery = sqlCommon + " -Q \"%s\"" if operation == "executefile": sqlFile = sys.argv[2] executeSqlFile(sqlFile) elif operation == "executequery": sqlQuery = sys.argv[2] executeSqlQuery(sqlExecuteQuery % sqlQuery) elif operation == "backup": currentTime = utils.getCurrentTime() newDbBackUpPath = os.path.join(dbBackUpPath, currentTime) utils.show("Creating %s" % newDbBackUpPath) utils.createDirectory(newDbBackUpPath) backUpQuery = sqlExecuteQuery % sqlBackUpQuery executeSqlQuery( backUpQuery % ("OMG_GameWEB", os.path.join(newDbBackUpPath, "OMG_GameWeb.bak"))) executeSqlQuery( backUpQuery % ("PS_Billing", os.path.join(newDbBackUpPath, "PS_Billing.bak"))) executeSqlQuery( backUpQuery % ("PS_ChatLog", os.path.join(newDbBackUpPath, "PS_ChatLog.bak"))) executeSqlQuery(
def put(self, name, ingest_id, group_name=""):
    '''
    Saves a new resource with a ID. Payload can be either a file or JSON
    structured configuration data. Returns the metadata for the new source.

    Passing group_name == 'overwrite' replaces an existing source of the
    same name (its record and files are deleted and its src_id reused);
    otherwise a duplicate name returns the existing record with an error
    flag. On failure returns (traceback string, 406); on success the new
    source document and 201.
    '''
    client = db_client()
    col = db_collection(client, DATALOADER_DB_NAME, DATALOADER_COL_NAME)
    # If group_name == 'overwrite' then overwrite with same src_id
    overwrite = False
    if group_name == 'overwrite':
        group_name = ""
        overwrite = True
    # Check for an existing source with the same name. Do not overwrite unless specified
    existing_source = find_source(col, name)
    if existing_source is not None and not overwrite:
        logging.warn("Source Already Exists: {}".format(existing_source['src_id']))
        existing_source['error'] = 1
        existing_source['msg'] = "Source Already Exists"
        return existing_source
    try:
        if existing_source:
            # Reuse the old id; in overwrite mode drop the record and files.
            src_id = existing_source['src_id']
            if overwrite:
                col.delete_one({"src_id": src_id})
                file_path = '/'.join([DATALOADER_PATH, src_id])
                shutil.rmtree(file_path)
        else:
            src_id = utils.getNewId()
        t = utils.getCurrentTime()
        conn_info = request.get_json()
        # conn_info = request.get_json(force=True)
        filepath = None
        if conn_info == None:
            # No JSON body: the payload is an uploaded file.
            file = request.files['file']
            ext = re.split('\.', file.filename)[1]
            if not ext in ALLOWED_EXTENSIONS:
                print("WARN: File submitted %s is not of a supported filetype".format(file.filename))
                # return ('This filetype is not supported.', 415)
            if 'zip' in file.filename:
                src_type = 'zip'
            else:
                src_type = 'file'
            rootpath, filepath = write_source_file(DATALOADER_PATH, src_id, file)
        else:
            # JSON body: treat it as connection/configuration data.
            src_type = 'conf'
            rootpath, filepath = write_source_config(DATALOADER_PATH, src_id,
                                                     conn_info)
        rootpath = DATALOADER_PATH + src_id + '/'
        source = Source(name, rootpath, src_id, src_type, t, ingest_id,
                        group_name, filepath=filepath)
        source_insert_response = col.insert_one(source.dict())
        # NOTE(review): insert_one returns a result object, never False —
        # this branch looks unreachable; confirm the intended failure check.
        if (source_insert_response == False):
            logging.error("Source Insert Failed")
            tb = traceback.format_exc()
            return tb, 406
        # Re-read the stored document (without the Mongo _id) to return it.
        response = col.find_one({'_id': source_insert_response.inserted_id},
                                {"_id": 0})
    except:
        tb = traceback.format_exc()
        return tb, 406
    return response, 201
# Train and save one translation network per corpus structure.
# (Python 2 syntax: `print` statements.)
for structure in structures:
    # Model file path gets the structure name as a suffix.
    modelSavePath = utils.addSuffixToPath(config.get('Model', 'ModelPath'), structure)
    print "Creating model %s" % modelSavePath
    # Layers: source vocab size -> hidden layer sized as a configured
    # proportion of (source + target vocab) -> sigmoid output over the
    # target vocab.
    model = \
        Network(
            [
                sourceCorpus.wordCount(structure),
                int(((sourceCorpus.wordCount(structure) +
                      targetCorpus.wordCount(structure))) *
                    float(config.get('Model', 'HiddenLayerSizeProportion'))),
                (targetCorpus.wordCount(structure), 'sigmoid')
            ],
            loss='crossentropyloss'
        )
    print "Training model %s" % modelSavePath
    print " - start time: %s\n" % utils.getCurrentTime()
    # Input/target word vectors as float32 arrays.
    data = [
        np.asarray(sourceCorpus.getWordVectors(structure), dtype=np.float32),
        np.asarray(targetCorpus.getWordVectors(structure), dtype=np.float32)
    ]
    # Log losses after every validation pass; stop via patience=5.
    for train, valid in model.itertrain(data, algo='adadelta',
                                        validate_every=1, patience=5):
        print " time: %s" % utils.getCurrentTime()
        print " training loss: %s" % train['loss']
        print " validation loss: %s" % valid['loss']
    print " - stop time: %s" % utils.getCurrentTime()
    print "Saving model %s" % modelSavePath
    model.save(modelSavePath)