def update_file(base_dir, uuid, real_path):
    """Recompute SHA-1 hash, size and mtime for real_path and rebuild its
    index row; if the file disappeared while hashing, drop its record
    instead.

    NOTE: written for Python 2 (str/unicode mixing, e.message).
    """
    hasher = hashlib.sha1()
    try:
        with open(real_path, "rb") as afile:
            stat = os.fstat(afile.fileno())
            size = stat.st_size
            mtime = stat.st_mtime
            buf = afile.read(blocksize)
            while len(buf) > 0:
                hasher.update(buf)
                buf = afile.read(blocksize)
    except IOError:  # e.g. the file was deleted at just the wrong moment
        logging.exception("calculating hash")
        with oscar.context(base_dir, oscar.min_free_blocks) as context:
            delete.delete_by_uuid(context, uuid)
        # Bail out here: the record was just deleted, and `size`/`mtime`
        # may never have been assigned — continuing used to risk a
        # NameError and a bogus row built from a partial hash.
        return
    row = {"_key": uuid, "size": size, "mtime": mtime, "dirty": False}
    hashval = hasher.hexdigest()
    extracted_content = None
    if fulltext_already_exists(base_dir, hashval):
        # Content with this hash is already indexed; just reference it.
        row["fulltext"] = hashval
    else:
        try:
            # Extract fulltext only when the file is small enough.
            if size <= fulltext_max_file_size:
                extracted_content = extract.extract(real_path)
        except Exception as e:
            # Many file formats are handled, so any exception may occur here.
            # (`except ... as` for consistency with the other handlers in
            # this file; works on Python 2.6+.)
            log.create_log(base_dir, "extract", u"%s (%s): %s" % (real_path.decode("utf-8"), hashval, e.message.decode("utf-8")))
def listening_messages(self):
    # Poll the 'LTC Click Bot' Telegram chat for a "new site to visit"
    # notification; when one arrives, log it and run the main work cycle.
    # After ~10 quiet polls the loop ends and the main cycle runs once more.
    # NOTE(review): indentation reconstructed from whitespace-mangled
    # source — placing the final Bot.main_cycle(self) after the loop is
    # the most plausible reading, but should be confirmed.
    i = 0
    while True:
        i = i+1
        # Random delay between polls to look less bot-like — presumably;
        # verify intent with the author.
        sleep(random.randint(0, 15))
        #client.send_message('LTC Click Bot', '🖥 Visit sites')
        dp = self.client.get_entity('LTC Click Bot')
        messages = self.client.get_messages(dp, limit=1)
        for message1 in messages:
            text = message1.message
            if text == 'There is a new site for you to /visit! 🖥':
                print('Есть новое задание')
                # Reset per-run counters before starting the work cycle.
                self.i =0
                self.start_time = time()
                log.create_log(bot_id = self.bot_id, date = str(datetime.datetime.now()))
                sleep(2)
                Bot.main_cycle(self)
        if i >=10:
            i = 0
            break
    Bot.main_cycle(self)
def set_linode_rdns(LinodeID, Hostname, IPAddressID=0):
    """Set reverse DNS on a Linode's first IP address.

    IPAddressID is accepted for call compatibility but unused — the first
    address returned by the API is always targeted.  Failures (typically a
    missing forward lookup record) are logged at severity 3, not raised.
    """
    api = get_api()
    ip_obj = api.linode.ip.list(LinodeID=LinodeID)
    try:
        api.linode.ip.setrdns(IPAddressID=ip_obj[0]['IPADDRESSID'], Hostname=Hostname)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        message = "Attempted to set rDNS for Linode %s, couldn't do so because a forward lookup record doesn't exist for that hostname." % str(LinodeID)
        log.create_log(message, 3)
def boot_linode(LinodeID):
    """Submit a boot job for the given Linode.

    On API rejection the error is logged at severity 3 and 0 is returned;
    a successful submission returns None (original contract preserved).
    """
    api = get_api()
    try:
        api.linode.boot(LinodeID=LinodeID)
    except linode.api.LinodeException as err:
        log.create_log(
            "Tried to boot Linode %s but failed, API returned: %s"
            % (str(LinodeID), str(err)),
            3,
        )
        return 0
def boot_linode(LinodeID):
    # Ask the Linode API to boot the given instance.
    # On API failure the error is logged (severity 3) and 0 is returned;
    # on success the function implicitly returns None.
    # NOTE(review): duplicate of another boot_linode definition in this
    # source — consider deduplicating.
    api = get_api()
    try:
        api.linode.boot(LinodeID=LinodeID)
    except linode.api.LinodeException as e:
        message = "Tried to boot Linode %s but failed, API returned: %s" % (
            str(LinodeID), str(e))
        log.create_log(message, 3)
        return 0
def set_linode_rdns(LinodeID, Hostname, IPAddressID=0):
    """Point reverse DNS for a Linode's first IP address at Hostname.

    The IPAddressID parameter is unused (kept for interface
    compatibility).  A failed attempt is logged at severity 3 rather than
    raised.
    """
    api = get_api()
    ip_obj = api.linode.ip.list(LinodeID=LinodeID)
    try:
        api.linode.ip.setrdns(IPAddressID=ip_obj[0]['IPADDRESSID'],
                              Hostname=Hostname)
    except Exception:
        # Was a bare `except:`, which also caught SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        message = "Attempted to set rDNS for Linode %s, couldn't do so because a forward lookup record doesn't exist for that hostname." % str(
            LinodeID)
        log.create_log(message, 3)
def __init__(self, bot_id, client):
    # Bot worker: binds a Telegram client and bot id, initialises per-run
    # bookkeeping, starts a headless Firefox and logs the start time.
    self.client = client
    self.bot_id = bot_id
    self.time_list = []      # per-cycle durations — presumably; see usage in bot()
    self.earnings_list = []  # per-cycle earnings — presumably; see usage in bot()
    self.start_time = time()
    self.i = 0
    opts = Options()
    # NOTE(review): Options.set_headless() is deprecated in newer Selenium
    # releases (the `headless` property / --headless argument replaced it).
    opts.set_headless()
    assert opts.headless
    self.driver = Firefox(options = opts)
    log.create_log(bot_id = bot_id, date = str(datetime.datetime.now()))
def execute(self):
    """Record the execution outcome of this order.

    A trade-account ('T') log entry is written whenever the order has
    reached a terminal state (status 1 or -1); a cash-account ('C')
    entry is added only on successful execution (status 1), recording a
    deposit for a sale (negative shares) or a withdrawal for a purchase.
    Nothing is logged while the order is neither executed nor expired.

    :return: Order status after execution
    """
    # Checking with server
    # ...
    # TODO: API
    status = self.order_status
    if status in (1, -1):
        # Terminal state: log against the trade account.
        log.create_log(self.executed_time, 'T', self.order_id)
    if status == 1:
        # Successful trade: mirror the cash movement on the cash account.
        cash_action = 'D' if self.shares < 0 else 'W'
        log.create_log(self.executed_time, 'C',
                       account_id='something',
                       action=cash_action,
                       amount=abs(self.shares * self.price))
    return status
def manipulate_service(service_name, action):
    """Start, restart or stop a system service via the `service` command.

    action: 1 = start, 2 = restart, 0 = stop.
    Returns 0 (refused) when the service is not registered in the
    database, is one of the infrastructure services this application
    depends on, or the action code is unknown.  Otherwise runs the
    command and logs its output at severity 1.
    """
    check = data.get_service_id(service_name)
    # Only services defined in the database may be managed, and never the
    # services we ourselves need to keep running.
    if check.count() == 0 or service_name in ("celery", "gunicorn", "mongod"):
        return 0
    actions = {1: "start", 2: "restart", 0: "stop"}
    if action not in actions:
        # Previously an unknown code ran "service <name> " with an empty
        # verb; refuse explicitly instead.
        return 0
    action_perf = actions[action]
    command = "service %s %s" % (str(service_name), str(action_perf))
    # List form (no shell) keeps the service name from being interpreted
    # by a shell.
    p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
    output = p.stdout.read()
    message = "Attempted to %s %s, received output: %s" % (
        str(action_perf), str(service_name), str(output))
    log.create_log(message, 1)
def sync_log(base_dir, path, success, what=None, code=None):
    """Append a sync result line to the 'sync' log:
    "<path> = Success" or "<path> = Fail (<what>:code=<n>)".

    `what` and `code` are only formatted on failure, so they may remain
    None on success.
    """
    # Fix: the original combined u"%s = %s" with branch strings that ALSO
    # began with "= ", producing "path =  = Success" / "path = = Fail...".
    result = u"Success" if success else u"Fail (%s:code=%d)" % (what, code)
    content = u"%s = %s" % (path.decode("utf-8"), result)
    log.create_log(base_dir, "sync", content)
def bot(x, opts, cur, client):
    """Main click-bot loop: request a site from the 'LTC Click Bot'
    Telegram bot, visit it in a headless browser, measure time/earnings
    per cycle, and periodically check for repeated links.

    NOTE(review): structure reconstructed from whitespace-mangled source.
    """
    # date.today() is a classmethod — the 2020-01-20 instance is only a
    # vehicle for calling it (kept as in the original).
    d = datetime.date(2020, 1, 20)
    log.create_log(bot_id=x, date=d.today())
    start_time = time()
    earnings_list = []
    time_list = []
    cycles_passed = 0
    driver = Firefox(options=opts)
    link_list = []
    try:
        while True:
            startTime = time()
            start_balance = balance(client)
            try:
                sleep(random.randint(1, 2))
                client.send_message('LTC Click Bot', '🖥 Visit sites')
                sleep(1.5)
                dp = client.get_entity('LTC Click Bot')
                messages = client.get_messages(dp, limit=1)
                for message in messages:
                    a = message.reply_markup
                    url = a.rows[0].buttons[0].url
                    print(url)
                    link_list.append(url)
                    browser(driver, url)
                    end_balance = balance(client)
                    total_balance = (end_balance - start_balance) * 11590
                    earnings_list.append(total_balance)
                    endTime = time()  # end of timing
                    totalTime = endTime - startTime  # elapsed time this cycle
                    print(totalTime)
                    time_list.append(totalTime)
                    log.cycle_log(bot_id=x, link=url, time_work=totalTime,
                                  revenue=total_balance,
                                  cycle_number=cycles_passed)
            except Exception:
                # Best-effort: any failure in a cycle hands control to the
                # no-task path (kept broad as in the original, but no
                # longer a bare except).
                driver.quit()
                no_task(x, cur, opts, client, driver, time_list,
                        earnings_list, cycles_passed, start_time)
                #bot(x, opts, cur, client)
            if cycles_passed >= 5:
                # BUGFIX: the original assigned the result back to the name
                # `checking_link_list`, which made the name function-local
                # and raised UnboundLocalError on the call itself (silently
                # swallowed by the outer handler).  Use a distinct local.
                links_repeated = checking_link_list(link_list)
                link_list = []
                if links_repeated == True:
                    no_task(x, cur, opts, client, driver, time_list,
                            earnings_list, cycles_passed, start_time)
                    break
            cycles_passed = cycles_passed + 1
            print('Цикл пройден за: ' + str(totalTime) + ' Циклов пройдено: ' + str(cycles_passed) + ' Заработанно: ' + str(total_balance))
    except Exception:
        # Outer guard: keep the bot from crashing the caller.
        pass
import log


def get_config():
    # Load ./config.json (UTF-8) and return its contents as a dict.
    file_path = os.path.join('.', "config.json")
    with open(file_path, 'r', encoding="UTF8") as f:
        config = json.load(f)
    return config


CONFIG = get_config()
TIMESTAMP = int(time.time())

# Log initialisation
lg = log.create_log(debug=CONFIG["logger"]["debug"], filename=__file__[:-3])


def callback_two_factor_auth():
    # Ask the admin over Telegram for a 2FA code; return the code plus the
    # "remember this device" flag (always True here).
    offset = telegram.get_last_update_id(CONFIG["telegram"]["key"])
    telegram.send_text_message(CONFIG["telegram"]["key"], "Enter authentication code: ", CONFIG["telegram"]["admin_id"])
    key = telegram.get_text_message(CONFIG["telegram"]["key"], CONFIG["telegram"]["admin_id"], offset)
    remember_device = True
    return key, remember_device


def send_messages(data, config):
    # NOTE(review): definition truncated in this view — the body continues
    # beyond the visible source; kept as-is.
    def find_max_size_photo(data):
def resize_disk(vmid, size):
    # Resize a VM's disk image to `size` GB.
    # Flow: block the server record, register a resize event, create a
    # temporary disk, run the resize (only when growing), rename the disk
    # back, persist the new size and close out the event.
    # Returns 1 on success, 0 when the requested size is not larger than
    # the current one, and None when the outer handler fires.
    try:
        server = data.get_server_id(vmid)
        data.set_server_blocked(vmid, 1)  # prevent concurrent operations on this VM
        resize_event_id = resize_event(str(server[0]['_id']))
        make_temp_disk(size)
        log1 = "Created temporary disk temp.img of size %sGB" % str(size)
        create_log(log1, 1)
        if int(server[0]['disk_size']) < int(size):
            log2 = "Starting resize process for vm%s to size %sGB" % (str(server[0]['_id']), str(size))
            create_log(log2, 1)
            try:
                do_resize(server, 1)
            except Exception as e:
                # NOTE(review): the failure is only logged — execution
                # falls through, still logs "completed" and commits the
                # new size below; probably should abort here instead.
                log3 = "Resize process failed for vm%s: %s" % (str(server[0]['_id']), str(e.args))
                create_log(log3, 3)
            log4 = "Resize process for vm%s completed." % (str(server[0]['_id']))
            create_log(log4, 1)
        else:
            log5 = "Couldn't resize vm%s, chosen size is smaller than current size." % str(server[0]['_id'])
            create_log(log5, 3)
            return 0
        do_rename(server)
        data.set_server_disksize(vmid, size)
        data.set_event_status(resize_event_id, 1)
        data.set_server_blocked(vmid, 0)
        data.set_event_complete(resize_event_id, str(datetime.datetime.now()))
        return 1
    except Exception as e:
        # NOTE(review): if the failure occurred before `server` or
        # `resize_event_id` were assigned, the lines below raise
        # NameError inside this handler.
        log6 = "Resize process failed for vm%s: %s" % (str(server[0]['_id']), str(e.args))
        create_log(log6, 3)
        data.set_server_blocked(vmid, 0)
        data.set_event_status(resize_event_id, 99)
        data.set_event_complete(resize_event_id, str(datetime.datetime.now()))
def add_file(context, base_dir, filename, utf8_check=False):
    # Register `filename` (relative to base_dir) in the groonga "Files"
    # table; for files under 50MB, extract and store fulltext in the
    # "Fulltext" table unless content with the same hash is already there.
    # Returns False when the file does not exist, True otherwise.
    # NOTE: written for Python 2 (str/unicode mixing, e.message).
    filename = oscar.remove_preceding_slash(filename)
    exact_filename = os.path.join(base_dir, filename)
    if not os.path.isfile(exact_filename):
        oscar.log.error("File %s does not exist" % exact_filename)
        return False
    stat = os.stat(exact_filename)
    if stat.st_size < 50000000:  # 50MB or more is too large
        file_hash = calc_file_hash(exact_filename)
        oscar.log.debug("File hash: %s" % file_hash)
        # Check whether this content hash is already registered.
        with oscar.command(context, "select") as command:
            command.add_argument("table", "Fulltext")
            command.add_argument("output_columns", "_id")
            command.add_argument("filter", "_key == '%s'" % file_hash)
            num_hits = json.loads(command.execute())[0][0][0]
        if num_hits == 0:  # not registered yet
            extractor = extract.get_extractor(exact_filename)
            if extractor:
                try:
                    title, text = extractor(exact_filename)
                except Exception as e:
                    oscar.log.exception("extractor")
                    log.create_log(
                        base_dir, "extract",
                        u"%s (%s): %s" % (filename.decode("utf-8"), file_hash,
                                          e.message.decode("utf-8")))
                else:
                    # Extraction succeeded: optionally validate UTF-8, then
                    # store (truncated) fulltext.
                    if utf8_check:
                        utf8_check_by_iconv(text)
                    if len(
                            text
                    ) > 3000000:  # discard text over 3MB (groonga crashes when searching with snippets otherwise)
                        text = text.decode("utf-8")[0:1000000].encode("utf-8")
                    row = {"_key": file_hash, "title": title, "content": text}
                    with oscar.command(context, "load") as command:
                        command.add_argument("table", "Fulltext")
                        command.add_argument("values", oscar.to_json([row]))
                        command.execute()
    else:
        # NOTE(review): this log string spanned a line break in the
        # mangled source and was rejoined here — confirm original wording.
        oscar.log.debug("%s is too large (%d). the content is ignored" % (filename, stat.st_size))
        file_hash = ""
    # select Files --filter 'name @^ \"walbrix\"'
    path = os.path.dirname(filename)
    if not path.endswith('/'):
        path += '/'
    row = {
        "_key": oscar.sha1(filename),
        "path": path,
        "path_ft": path,
        "name": os.path.basename(filename),
        "mtime": stat.st_mtime,
        "size": stat.st_size,
        "fulltext": file_hash
    }
    oscar.log.info("Adding: %s" % exact_filename)
    with oscar.command(context, "load") as command:
        command.add_argument("table", "Files")
        command.add_argument("values", oscar.to_json([row]))
        command.execute()
    return True
# NOTE(review): the lines down to the `return` are the tail of a function
# whose `def` line (and any enclosing loop header for the data_index
# aggregation) lies outside this view; indentation below is a best guess
# and the fragment is kept as-is.
today_open = open_data[data_index]
today_close = close_data[data_index]
if(today_high < high_data[data_index]):
    today_high = high_data[data_index]
if(today_low == 0 or today_low > low_data[data_index]):
    today_low = low_data[data_index]
today_volume = today_volume + volume_data[data_index]
data_index += 1
#logger.info(today_open,today_close,today_high,today_low,today_volume)
logger.info("end")
return today_open,today_close,today_high,today_low,today_volume


# Initialise the log file
logger = create_log()

########### Log record info ##########
logger.info("------"*3)
# NOTE(review): "stokc" is a typo for "stock", and "%H:%m" formats
# hour:MONTH — "%H:%M" was almost certainly intended.  Both are runtime
# strings, left unchanged here.
logger.info("Start find stokc,date=%s!" % datetime.datetime.now().strftime("%Y-%m-%d %H:%m"))
################################

# Fetch the stock code list via tushare — all A-shares.
totList = ts.get_stock_basics().index

# Trading-day backtracking starts from today's date.
last = datetime.datetime.today().strftime('%Y-%m-%d')