def user_create(username, email, password, tenant_id=None, enabled=True):
    """Create a Keystone user, provisioning a default tenant when none is given.

    Returns the new user object, annotated with ``tenant_id`` (read later for
    account-balance initialization).  Role assignment is best-effort: failures
    are logged and the created user is still returned.
    """
    c = _keystoneclient_admin()
    if tenant_id is None:
        # No tenant supplied: create a personal default tenant for this user.
        tenant = c.tenants.create('%s_default_tenant' % username,
                                  'default tenant for %s' % username,
                                  enabled=True)
        tenant_id = tenant.id
    new_user = c.users.create(username, password, email,
                              tenant_id=tenant_id, enabled=enabled)
    new_user.tenant_id = tenant_id  # for account balance initialization
    # Initialized before the try block: if get_default_role() itself raises,
    # the old code hit a NameError inside the except handler.
    default_role = None
    try:
        default_role = get_default_role(c)
        if default_role:
            c.roles.add_user_role(new_user.id, default_role.id, tenant_id)
    except Exception as e:
        LOG.error('Error when adding role %s for user %s at tenant %s'
                  % (default_role, new_user, tenant_id))
        LOG.error(e)
    return new_user
def some_func():
    """Run the SPLIT step for the configured data source, logging start/end times."""
    CFG.start_clock_module = datetime.datetime.now()
    started_at = datetime.datetime.now().strftime("%y-%m-%d | %H:%M")
    LOG.write_me("\tSTART - SPLIT.py (" + started_at + ")")
    # Only the "ABC" data source is handled by this step.
    if CFG.source_of_data == "ABC":
        LOG.write_me("\t\tParameters set for CFG.source_of_data : "
                     + CFG.source_of_data)
        abc_file_list()
    elapsed_formatted = UTL.format_elapsed(CFG.start_clock_module)
    finished_at = datetime.datetime.now().strftime("%y-%m-%d | %H:%M")
    LOG.write_me("\tEND - INPUT_SPLIT.py (" + finished_at
                 + " | hh.mm.ss.ms " + elapsed_formatted + ")")
    LOG.write_me("")
    LOG.write_me("")
def request(url, timeout=60):
    """Fetch *url* and return the response body decoded as UTF-8.

    Returns "" on any URLError.  *timeout* is in seconds and now applies to
    this request only: the old code called socket.setdefaulttimeout(), which
    leaked the setting into every other socket in the process.  The response
    is closed on all paths via the context manager.
    """
    try:
        with urllib.request.urlopen(url, timeout=timeout) as s:
            content = s.read().decode('utf-8')
    except urllib.error.URLError as ex:
        LOG.error("Failed to request URL %s, %s" % (url, ex))
        return ""
    return content
def System_scan():
    """Scan the user profile directory and delete any regular file whose name
    contains a known signature from Is_virus."""
    LOG.CORELET_LOG(f"Scanning {Userprofile}")
    Listall_items()
    os.chdir(Userprofile)
    for entry in os.listdir():
        # any() replaces the old nested loop, which called os.remove() once per
        # matching signature and crashed (FileNotFoundError) when a file name
        # matched two signatures.  `== True` dropped in favour of plain truth.
        if os.path.isfile(entry) and any(sig in entry for sig in Is_virus):
            os.remove(entry)
            # Log after removal so the message is only emitted once it succeeded.
            LOG.CORELET_ERROR_LOG(f"Virus found, removed {entry}")
    os.chdir(RootDir)
    LOG.CORELET_LOG(f"{Userprofile} scanned successfully.")
    LOG.CORELET_LOG(Fore.GREEN + "Your System is safe!\n")
def connect(self):
    """
    Connects the defined blocks in the flow graph

    Signal Source > Signal conditioner > Channels >> Observables >> PVT > Output filter
    """
    LOG.INFO("Connecting flowgraph")
    # Must not already be connected.
    assert not self.connected_
    #ifndef ENABLE_FPGA
    # Non-FPGA sources/conditioners first connect their own internal blocks.
    for ss in self.sig_source_:
        if self.configuration_.get(ss.role_ + ".enable_FPGA", False) == False:
            ss.connect(self.top_block_)
    for sc in self.sig_conditioner_:
        if self.configuration_.get(sc.role_ + ".enable_FPGA", False) == False:
            sc.connect(self.top_block_)
    #endif
    for ch in self.channels_:
        ch.connect(self.top_block_)
    self.observables_.connect(self.top_block_)
    self.pvt_.connect(self.top_block_)
    DLOG.INFO("blocks connected internally")
    #ifndef ENABLE_FPGA
    # Wire each signal-source output stream to a signal-conditioner input.
    RF_Channels = 0
    signal_conditioner_ID = 0
    for i, ss in enumerate(self.sig_source_):
        if ss.implementation() == "Raw_Array_Signal_Source":
            # Array signal source is not supported in this port.
            assert False
        else:
            RF_Channels = self.configuration_.get(
                ss.role_ + ".RF_channels", 1)
            for j in range(RF_Channels):
                # if ss.get_right_block().output_signature( )[1] > 1 or ss.get_right_block().output_signature( )[1] == -1:
                if len(self.sig_conditioner_) > signal_conditioner_ID:
                    # NOTE(review): the log prints "to conditioner " + str(j),
                    # not signal_conditioner_ID — confirm which was intended.
                    LOG.INFO("connecting sig_source_ " + str(i) + " stream " +
                             str(j) + " to conditioner " + str(j))
                    self.top_block_.connect(
                        ss.get_right_block(), j,
                        self.sig_conditioner_[signal_conditioner_ID].
                        get_left_block(), 0)
                else:
                    assert False
                # NOTE(review): placeholder below, and signal_conditioner_ID is
                # never advanced — this looks like an unfinished port.
                _ = 2 + 2
    # NOTE(review): unconditional assert — connect() can never complete as
    # written; confirm intended control flow before relying on this method.
    assert False
    #endif
def __init__(self):
    """Set up buffers, signal handling, logging and events, then boot modules."""
    self.buffer = Fifo()
    self.configfile = "./configs/bird.conf"
    # Install the Ctrl-C handler before anything long-running starts.
    signal.signal(signal.SIGINT, self.signal_handler)
    # The log system needs the config immediately.
    self.m_args = FILEIO.FileIO().ReadLine(self.configfile)
    self.sLog = LOG.sLog()
    self.sLog.config(self.m_args, self)
    # Installed mods; each list entry is indexed as:
    # 0 = Root of Module
    # 1 = Module().Master()
    # 2 = Module().CLI_Dict()
    # 3 = Name
    self.installed_mods = []
    # Event system
    self.m_Events = Events()
    self.Drone_Name = ""
    # Trailing semicolons removed — they are noise in Python.
    self.InitMods()
    self.ReadConfig()
    self.StartUP()
def __init__(self):
    """Initialize buffers, signal handling, logging and events, then start modules."""
    self.buffer = Fifo()
    self.configfile = "./configs/bird.conf"
    # Install Ctrl-C handler early, before any blocking startup work.
    signal.signal(signal.SIGINT, self.signal_handler)
    # The log system needs the config immediately.
    self.m_args = FILEIO.FileIO().ReadLine(self.configfile)
    self.sLog = LOG.sLog()
    self.sLog.config(self.m_args, self)
    # Installed mods; each list entry is indexed as:
    # 0 = Root of Module
    # 1 = Module().Master()
    # 2 = Module().CLI_Dict()
    # 3 = Name
    self.installed_mods = []
    # Event system
    self.m_Events = Events()
    # Drone name — presumably filled in by ReadConfig(); verify.
    self.Drone_Name = ""
    self.InitMods()
    self.ReadConfig()
    self.StartUP()
def __init__(self):
    """Construct the server: helper components, config, signal handling, logging, modules."""
    # Init the helper components
    self.m_buffer = Fifo()
    self.m_Events = Events()
    self.m_SockAPI = NET.API.SocketApi(self)
    self.m_Modules = MODULESYSTEM(self)
    self.m_WebServer = None
    # load config
    self.configfile = "./CONFIGS/bird.conf"
    # install the signal handler (Ctrl-C)
    signal.signal(signal.SIGINT, self.signal_handler)
    # the log system needs the config immediately
    self.m_args = FILEIO.FileIO().ReadLine(self.configfile)
    self.sLog = LOG.sLog()
    self.sLog.config(self.m_args, self)
    # the name of our drone — presumably set later from config; verify
    self.Drone_Name = ""
    # initialize the modules
    self.InitMods()
    self.ReadConfig()
    self.StartUP()
def GetSignalSource(self, configuration, queue, ID):
    """Build and return the SignalSource block (a GNSSBlockInterface)."""
    assert ID == -1  # multiple signal-source IDs are not supported here
    role = "SignalSource"
    implementation = configuration.get(role + ".implementation",
                                       "File_Signal_Source")
    LOG.INFO("Getting SignalSource with implementation " + implementation)
    # 0 input streams, 1 output stream.
    return self.GetBlock(configuration, role, implementation, 0, 1, queue)
def get_default_role(client):
    """
    Gets the default role object from Keystone and saves it as a global
    since this is configured in settings and should not change from
    request to request. Supports lookup by name or id.
    """
    global DEFAULT_ROLE
    default = getattr(settings, "OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
    if default and DEFAULT_ROLE is None:
        try:
            roles = client.roles.list()
        except Exception as e:
            LOG.error('Error when get roles list')
            LOG.error(e)
        else:
            for role in roles:
                # Match by exact id, or case-insensitively by name.  Both
                # sides are lowered now: the old code lowered only the role
                # name, so a setting like "Member" could never match "member".
                if role.id == default or role.name.lower() == default.lower():
                    DEFAULT_ROLE = role
                    break
    return DEFAULT_ROLE
def UpdateCodes():
    """Scrape the code/name of every index in the configured range and upsert
    them into the local index table (and the __indexs cache)."""
    db = SQL.sql(__index_db)
    db.set("CREATE TABLE IF NOT EXISTS {}(code char(6) unique, name char(36))"
           .format(__index_table))
    # Offsets derived from the marker strings themselves, replacing the old
    # hard-coded 19/17 which silently broke if a marker ever changed.
    symbol_marker = 'var STOCKSYMBOL = \''
    name_marker = 'var STOCKNAME = \''
    for index in range(__begin_index, __end_index):
        index_str = '{:0>6}'.format(index)
        content = URL.request(__index_url.format(index_str))
        pos = content.find(symbol_marker)
        if pos == -1:
            continue  # page has no symbol marker: not a valid index page
        start = pos + len(symbol_marker)
        index_code = content[start:content.find('\'', start)]
        pos = content.find(name_marker)
        if pos == -1:
            continue
        start = pos + len(name_marker)
        index_name = content[start:content.find('\'', start)]
        LOG.info('%s %s' % (index_code, index_name))
        __indexs.append([index_code, index_name])
        # NOTE(review): scraped values are interpolated straight into SQL; a
        # quote in the name would break (or inject into) the statement.
        db.set("REPLACE INTO {} VALUES(\'{}\',\'{}\')".format(
            __index_table, index_code, index_name))
    db.close()
def POST(self): #if web.ctx['ip'] != IP: # return render.login() account = getCookieName() if account == None: return web.seeother('login') if session.count == 0: return web.seeother('login') i = web.input() leftcount = None try: #检查数字是否输入正确 leftcount = int(i.leftcount) except: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u8bf7\u8f93\u5165\u6b63\u786e\u683c\u5f0f\u7684\u6570\u5b57\uff01\"); \ window.location.href=\"accountdetail\"; </script>" myvar = dict(ID=i.parentid) results = config.DB.select('userinfo', myvar, where="ID = $ID") if (len(results) == 0): return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u53c2\u6570\u63d0\u4ea4\u9519\u8bef\uff01\u6ca1\u6709\u6743\u9650\uff01\"); \ window.location.href=\"subuserlist\"; </script>" #subuser = results[0] parentID = i.parentid myvar = dict(parentID=i.parentid) results2 = config.DB.select('userinfo', myvar, where="ID = $parentID") author = results2[0] if i.ope == "sub": leftcount = -1 * leftcount '''if author.LEFTCOUNT - leftcount < 0: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u60a8\u7684\u5269\u4f59\u8c03\u7528\u6b21\u6570\u4e0d\u8db3\uff01\"); \ window.location.href=\"accountdetail\"; </script>"''' # add for memcached key = str(parentID) + config._LEFTCOUNT print(key + "-" + str(leftcount)) print("In mem %s" % config.mc.get(key)) LOG.info("======WEB update begin=====") LOG.info("Before update, KEY %s, LEFT %s, delta %s" % (key, config.mc.get(key), leftcount)) if leftcount >= 0: config.mc.incr(key, leftcount) else: config.mc.decr(key, -1 * leftcount) LOG.info("After update, KEY %s, LEFT %s, delta %s" % (key, config.mc.get(key), leftcount)) LOG.info("Add %s for ID=%s, username=%s, time=%s" % (str(leftcount), str(parentID), author.ACCOUNT, time.strftime('%Y-%m-%d %X', time.localtime()))) return "<script type=\"text/javascript\"> alert(\"\u4fee\u6539\u6210\u529f\"); \
def server_suspend_for_admin(deficit_tenants):
    """Suspend every ACTIVE instance belonging to a tenant in *deficit_tenants*.

    Best-effort sweep: a failure on one instance is logged and does not stop
    the loop.  No-op when *deficit_tenants* is empty/None.
    """
    if not deficit_tenants:
        return
    c = _novaclient_admin()
    servers = c.servers.list(True, {'all_tenants': True})
    LOG.debug('Got %s servers: %s', len(servers), servers)
    for s in servers:
        try:
            LOG.debug('to suspend instance #%s under tenant #%s',
                      s.id, s.tenant_id)
            if s.status.lower() == 'active' and s.tenant_id in deficit_tenants:
                s.suspend()
        # Python 3 syntax: the old "except Exception, e" is py2-only and a
        # SyntaxError on every py3 interpreter.
        except Exception as e:
            LOG.error('error while suspending instance #%s under tenant #%s',
                      s.id, s.tenant_id)
            LOG.exception(e)
def read_write_by(job_type, n_file, path_e_file, file, dict_too_small, dict_big_enough):
    """Split *path_e_file* into numbered chunk files of ~CFG.wanted_file_size bytes.

    NOTE: parameter order changed to (job_type, n_file, ...) to match the call
    sites in abc_file_list(), which pass the job type first; with the old
    (n_file, job_type, ...) order `job_type` always received the file counter
    and the "split_by_fraction" branch never ran.

    Files smaller than the target size are recorded in *dict_too_small* and
    skipped; processed files go to *dict_big_enough*.  Returns both dicts.
    """
    file_size = os.path.getsize(path_e_file)
    if job_type == "split_by_fraction":
        # Derive the chunk size from the requested number of splits.
        CFG.wanted_file_size = file_size / CFG.number_of_split
        # str() added: the old code concatenated a float to a str -> TypeError.
        LOG.write_me("CFG.wanted_file_size : " + str(CFG.wanted_file_size))
    if file_size < CFG.wanted_file_size:
        dict_too_small[file] = file_size
    else:
        dict_big_enough[file] = file_size
        c_file = 1
        directory_out_path = os.getcwd() + CFG.output_path_abc + "/" + CFG.split_file
        path_name_file_out = (directory_out_path + "\\"
                              + os.path.splitext(file.decode())[0]
                              + "_" + str(c_file) + ".txt")
        file_out = open(path_name_file_out, 'w')
        with open(path_e_file) as f:
            lines = f.readlines()
        for line in lines:
            # Strip non-ASCII chars, spaces and newlines only to decide
            # emptiness; the original line is what gets written.
            cleaned = ''.join(x for x in line if ord(x) < 128)
            cleaned = cleaned.replace(" ", "").replace("\n", "")
            if cleaned != "":
                file_out.write(line)
                # Roll over to the next chunk once the target size is reached.
                if file_out.tell() >= CFG.wanted_file_size:
                    file_out.close()
                    c_file = c_file + 1
                    path_name_file_out = (directory_out_path + "\\"
                                          + os.path.splitext(file.decode())[0]
                                          + "_" + str(c_file) + ".txt")
                    file_out = open(path_name_file_out, 'w')
        # Always close the last chunk (the old end-of-loop check leaked the
        # handle when the input had zero lines).
        file_out.close()
    return (dict_too_small, dict_big_enough)
def POST(self): ID = getCookieID() if ID == None: return web.seeother('login') i = web.input() leftcount = None try: #检查数字是否输入正确 leftcount = int(i.leftcount) except: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u8bf7\u8f93\u5165\u6b63\u786e\u683c\u5f0f\u7684\u6570\u5b57\uff01\"); \ window.location.href=\"subuserlist\"; </script>" #确保这个id是该作者下面的才行 myvar = dict(ID=i.subuserid, PARENTID=ID) results = config.DB.select('userinfo', myvar, where="ID = $ID and PARENTID=$PARENTID") if(len(results) == 0): return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u53c2\u6570\u63d0\u4ea4\u9519\u8bef\uff01\u6ca1\u6709\u6743\u9650\uff01\"); \ window.location.href=\"subuserlist\"; </script>" subuser = results[0] #检测子账户的状态是否被冻结 subStatus = subuser.STATUS if subStatus != 0: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u5b50\u7528\u6237\u88ab\u51bb\u7ed3\uff0c\u4e0d\u80fd\u4fee\u6539\u7801\u6570\uff01\"); \ window.location.href=\"subuserlist\"; </script>" myvar = dict(ID=ID) results2 = config.DB.select('userinfo', myvar, where="ID = $ID and PARENTID=-1") author = results2[0] if i.ope == "sub": leftcount = -1 * leftcount if author.LEFTCOUNT - leftcount <= 0: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u60a8\u7684\u5269\u4f59\u8c03\u7528\u6b21\u6570\u4e0d\u8db3\uff01\"); \ window.location.href=\"subuserlist\"; </script>" if subuser.LEFTCOUNT + leftcount < 0: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u5b50\u7528\u6237\u5269\u4f59\u8c03\u7528\u6b21\u6570\u4e0d\u80fd\u4e3a\u8d1f\u503c\uff01\"); \ window.location.href=\"subuserlist\"; </script>" '''config.DB.update('userinfo',\ where='id=$id',vars={'id':i.subuserid}, \ leftCount = subuser.LEFTCOUNT+int(i.leftcount)) config.DB.update('userinfo',\ where='id=$id',vars={'id':ID}, \ leftCount = author.LEFTCOUNT-int(i.leftcount))''' # add for memcached LOG.info("======Web update begin=====\n") LOG.info("Before update, SUBID %s, DELTA %s, subuser 
leftconut %s, parent leftcount %s" % (str(i.subuserid), str(leftcount), config.mc.get(str(i.subuserid) + config._LEFTCOUNT), config.mc.get(str(ID) + config._LEFTCOUNT))) print("In mem %s" % config.mc.get((str(i.subuserid) + config._LEFTCOUNT))) print("In mem %s" % config.mc.get((str(ID) + config._LEFTCOUNT))) if leftcount >= 0: config.mc.incr(str(i.subuserid) + config._LEFTCOUNT, delta=leftcount) config.mc.decr(str(ID) + config._LEFTCOUNT, delta=leftcount) else: config.mc.decr(str(i.subuserid) + config._LEFTCOUNT, delta = -1 * leftcount) config.mc.incr(str(ID) + config._LEFTCOUNT, delta= -1 * leftcount) LOG.info("After update, subuser leftconut %s, parent leftcount %s" % (config.mc.get(str(i.subuserid) + config._LEFTCOUNT), config.mc.get(str(ID) + config._LEFTCOUNT))) return "<script type=\"text/javascript\"> alert(\"\u4fee\u6539\u6210\u529f\"); \
def POST(self):
    """DLL endpoint: move call quota between a parent account and one of its
    sub-accounts, authenticated by the parent's username/password.

    Returns string status codes: '0' ok, '1001' wrong password, '1002'
    parent quota insufficient, '1007' unknown user or sub-user, '1009'
    sub-user quota would go negative, '1013' sub-user frozen.
    Python 2 code (print statements, str-based md5 input).
    """
    x = web.input()
    username = x.username
    passwd = x.password
    pwdhash = hashlib.md5(passwd).hexdigest()
    subname = x.subname
    subpasswd = x.subpassword
    subpwdhash = hashlib.md5(subpasswd).hexdigest()
    subleftcount_delt = x.subleftcount_delt
    # Account lookup and authentication go through memcached, not the DB.
    USERID = config.mc.get(username.encode("utf-8"))
    if USERID == None:
        print 'user not exist'
        return "1007"
    elif pwdhash != config.mc.get(str(USERID) + "_PASSWORD"):
        return "1001"
    parentLeftCount = config.mc.get(str(USERID) + config._LEFTCOUNT)
    SUBID = config.mc.get(subname.encode("utf-8"))
    if SUBID == None:
        return "1007"
    elif USERID != config.mc.get(str(SUBID) + "_PARENTID"):
        # The sub-account must belong to the authenticated parent.
        return "1007"
    SUBLEFTCOUNT = config.mc.get(str(SUBID) + config._LEFTCOUNT)
    SUBSTATUS = config.mc.get(str(SUBID) + "_STATUS")
    if SUBSTATUS != 0:
        # Non-zero status means the sub-account is frozen/deleted.
        return "1013"
    if (parentLeftCount - int(subleftcount_delt) < 0):
        #print 'leftcount not enough'
        return "1002"
    else:
        if ((SUBLEFTCOUNT + int(subleftcount_delt)) < 0):
            return "1009"
        # add for memcached
        LOG.info("======DLL update begin=====\n")
        LOG.info("Before update, SUBID %s, DELTA %s, subuser leftconut %s, parent leftcount %s" % (str(SUBID), int(subleftcount_delt.encode("utf-8")), config.mc.get(str(SUBID) + config._LEFTCOUNT), config.mc.get(str(USERID) + config._LEFTCOUNT)))
        # Paired incr/decr: quota moves between sub-account and parent.
        # NOTE(review): the two operations are not atomic together — a crash
        # between them loses/creates quota; confirm acceptable.
        if int(subleftcount_delt.encode("utf-8")) > 0:
            config.mc.incr(str(SUBID) + config._LEFTCOUNT, abs(int(subleftcount_delt.encode("utf-8"))))
            config.mc.decr(str(USERID) + config._LEFTCOUNT, abs(int(subleftcount_delt.encode("utf-8"))))
            LOG.info("After update, subuser leftconut %s, parent leftcount %s" % (config.mc.get(str(SUBID) + config._LEFTCOUNT), config.mc.get(str(USERID) + config._LEFTCOUNT)))
            return '0'
        else:
            config.mc.decr(str(SUBID) + config._LEFTCOUNT, abs(int(subleftcount_delt.encode("utf-8"))))
            config.mc.incr(str(USERID) + config._LEFTCOUNT, abs(int(subleftcount_delt.encode("utf-8"))))
            LOG.info("After update, subuser leftconut %s, parent leftcount %s" % (config.mc.get(str(SUBID) + config._LEFTCOUNT), config.mc.get(str(USERID) + config._LEFTCOUNT)))
            return '0'
def UpdateIndexs():
    """Refresh the per-index history tables with (date, price, volume) rows
    for every index collected in __indexs."""
    db = SQL.sql(__index_db)
    for index_code, index_name in __indexs:
        LOG.info("Updating index %s %s" % (index_code, index_name))
        command = "CREATE TABLE IF NOT EXISTS `{}`(date char(8) unique, price double, volume double)".format(index_code)
        db.set(command)
        historys = GetIndex(index_code, __begin_year, __end_year)
        for record in historys:
            date = record[0]
            # Narrowed from bare "except:": only conversion/shape problems
            # fall back to 0; KeyboardInterrupt etc. propagate again.
            try:
                price = float(record[4])
            except (ValueError, TypeError, IndexError):
                price = 0
            try:
                volume = float(record[7])
            except (ValueError, TypeError, IndexError):
                volume = 0
            command = "REPLACE INTO `{}` VALUES({},{},{})".format(
                index_code, date, str(price), str(volume))
            db.set(command)
    db.close()
def GetPVT(self, configuration):
    """Instantiate the PVT block, sized for the total configured channel count."""
    implementation = configuration.get("PVT.implementation", "RTKLIB_PVT")
    LOG.INFO("Getting PVT with implementation " + implementation)
    # Sum the per-signal channel counts over all supported constellations.
    signals = ("1B", "5X",        # Galileo
               "1C", "2S", "L5",  # GPS
               "1G", "2G",        # Glonass
               "B1", "B3")        # Beidou
    total_channels = sum(
        configuration.get("Channels_{}.count".format(s), 0) for s in signals)
    # total_channels inputs, 0 outputs.
    return self.GetBlock(configuration, "PVT", implementation,
                         total_channels, 0)
def GetSignalConditioner(self, configuration, ID=-1):
    """Build the SignalConditioner chain: DataTypeAdapter -> InputFilter -> Resampler."""
    assert ID == -1  # per-source conditioner IDs are not supported here
    default_implementation = "Pass_Through"
    role_conditioner = "SignalConditioner"
    role_datatypeadapter = "DataTypeAdapter"
    role_inputfilter = "InputFilter"
    role_resampler = "Resampler"
    signal_conditioner = configuration.get(
        role_conditioner + ".implementation", default_implementation)
    if signal_conditioner == "Pass_Through":
        # A pass-through conditioner forces every stage to pass-through too.
        data_type_adapter = input_filter = resampler = "Pass_Through"
    else:
        data_type_adapter = configuration.get(
            role_datatypeadapter + ".implementation", default_implementation)
        input_filter = configuration.get(
            role_inputfilter + ".implementation", default_implementation)
        resampler = configuration.get(
            role_resampler + ".implementation", default_implementation)
    LOG.INFO("Getting SignalConditioner with DataTypeAdapter implementation: " \
             + data_type_adapter + ", InputFilter implementation: " \
             + input_filter + ", and Resampler implementation: " \
             + resampler)
    # Array (multi-antenna) version is not implemented in this port.
    assert signal_conditioner != "Array_Signal_Conditioner"
    # Single-antenna version: each stage is a 1-in/1-out block.
    return SignalConditioner(
        configuration,
        self.GetBlock(configuration, role_datatypeadapter,
                      data_type_adapter, 1, 1),
        self.GetBlock(configuration, role_inputfilter, input_filter, 1, 1),
        self.GetBlock(configuration, role_resampler, resampler, 1, 1),
        role_conditioner,
        "Signal_Conditioner")
def GetObservables(self, configuration):
    """Instantiate the Observables block; input side gets one extra monitor channel."""
    implementation = configuration.get("Observables.implementation",
                                       "Hybrid_Observables")
    LOG.INFO("Getting Observables with implementation " + implementation)
    # Sum the per-signal channel counts over all supported constellations.
    signals = ("1B", "5X",        # Galileo
               "1C", "2S", "L5",  # GPS
               "1G", "2G",        # Glonass
               "B1", "B3")        # Beidou
    total_channels = sum(
        configuration.get("Channels_{}.count".format(s), 0) for s in signals)
    extra_channels = 1  # For monitor channel sample counter
    return self.GetBlock(configuration, "Observables", implementation,
                         total_channels + extra_channels,
                         total_channels)
def GetChannels(self, configuration, queue):
    """Instantiate every configured channel; currently only GPS L1 C/A ('1C')."""
    default_implementation = "Pass_Through"
    # Per-signal configured channel counts, keyed by signal id.
    counts = {sig: configuration.get("Channels_{}.count".format(sig), 0)
              for sig in jld_channels.cid}
    channels = [None] * sum(counts.values())  # vector GNSSBlockInterface
    channel_absolute_id = 0
    # LOG.INFO("Getting " + str(counts['1C']) + " GPS L1 C/A channels")
    # Per-signal defaults; each absolute channel id may override them below.
    acq_default = configuration.get("Acquisition_1C.implementation",
                                    default_implementation)
    trk_default = configuration.get("Tracking_1C.implementation",
                                    default_implementation)
    tlm_default = configuration.get("TelemetryDecoder_1C.implementation",
                                    default_implementation)
    for _ in range(counts['1C']):
        cid = str(channel_absolute_id)
        acq = configuration.get("Acquisition_1C" + cid + ".implementation",
                                acq_default)
        trk = configuration.get("Tracking_1C" + cid + ".implementation",
                                trk_default)
        tlm = configuration.get("TelemetryDecoder_1C" + cid + ".implementation",
                                tlm_default)
        channels[channel_absolute_id] = self.GetChannel_1C(
            configuration, acq, trk, tlm, channel_absolute_id, queue)
        channel_absolute_id += 1
    return channels
def read_write(path_e_file, file):
    """Write the CFG.number_of_n_gram-grams of every line of *path_e_file*
    to an output file under CFG.output_path_ngram, then log the step timing.

    Bug fixes vs. the previous version:
    - `line_as_ngram` was never defined, so the first write raised NameError;
    - `my_index` was clobbered by the Series assignment inside the loop;
    - `tmp_input_file` in the END log line was undefined (now file.decode());
    - the unused pandas DataFrame accumulation was dropped.
    """
    directory_out_path = os.getcwd() + CFG.output_path_ngram
    path_name_file_out = (directory_out_path + "\\"
                          + os.path.splitext(file.decode())[0]
                          + "_ngram_" + str(CFG.number_of_n_gram) + "grams.txt")
    file_out = open(path_name_file_out, 'w')
    with open(path_e_file) as f:
        lines = f.readlines()
    k = CFG.number_of_n_gram
    for line in lines:
        tokens = line.split()
        # Sliding-window n-grams (stdlib zip replaces nltk.ngrams): one n-gram
        # per output line, tokens space-separated.
        for gram in zip(*(tokens[i:] for i in range(k))):
            file_out.write(" ".join(gram) + "\n")
    file_out.close()
    elapsed_formatted = UTL.format_elapsed(CFG.start_clock_module)
    LOG.write_me("\tEND - NGRAM.py (" + file.decode() + " | " +
                 datetime.datetime.now().strftime("%y-%m-%d | %H:%M") +
                 " | hh.mm.ss.ms " + elapsed_formatted + ")")
    LOG.write_me("")
    LOG.write_me("")
def GET(self): account = getCookieName() if account == None: return web.seeother('login') ID = getCookieID() if ID == None: return web.seeother('login') i = web.input(subuserid=None) if i.subuserid == None: return "<script type=\"text/javascript\"> alert(\"\u53c2\u6570\u4f20\u9012\u9519\u8bef\uff01\"); \ window.location.href=\"subuserlist\"; </script>" #确保这个id是该作者下面的才行 myvar = dict(ID=i.subuserid, PARENTID=ID) parentLeftCount=0 results = config.DB.select('userinfo', myvar, where="ID = $ID and PARENTID=$PARENTID") if(len(results) == 0): return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u53c2\u6570\u63d0\u4ea4\u9519\u8bef\uff01\u6ca1\u6709\u6743\u9650\uff01\"); \ window.location.href=\"subuserlist\"; </script>" #获取作者leftcount res1=config.DB.query('select * from userinfo where ID = $ID', vars={'ID':ID}) if (len(res1)==0): return web.seeother('login') res = results[0] subStatus = res.STATUS if subStatus != 0: return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u5b50\u7528\u6237\u88ab\u51bb\u7ed3\uff0c\u4e0d\u80fd\u5220\u9664\uff01\"); \ window.location.href=\"subuserlist\"; </script>" parentLeftCount = res1[0].LEFTCOUNT delSubUser(i.subuserid) '''config.DB.update('userinfo',\ where='id=$id',vars={'id':ID}, \ leftCount = parentLeftCount + res.LEFTCOUNT)''' config.mc.set(str(i.subuserid) + "_STATUS", 2) config.mc.set(str(i.subuserid) + "_LEFTCOUNT", 0) LOG.info("======WEB delete begin=====\n") LOG.info("Before delete, SUBID %s, SUBCOUNT = %s, parent leftcount %s" % (str(i.subuserid), str(res.LEFTCOUNT), config.mc.get(str(ID) + config._LEFTCOUNT))) # add for memcached config.mc.incr(str(ID) + config._LEFTCOUNT, delta=res.LEFTCOUNT) LOG.info("After delete, parent leftcount %s" % (config.mc.get(str(ID) + config._LEFTCOUNT))) return "<script type=\"text/javascript\"> alert(\"\u5220\u9664\u6210\u529f\uff01\"); \
def POST(self):
    """DLL endpoint: delete (soft-delete) a sub-account and return its
    remaining quota to the parent account.

    Returns string status codes: '0' ok, '1001' bad parent credentials,
    '1007' no such sub-user under this parent, '1013' sub-user frozen.
    """
    x = web.input()
    username = x.username
    passwd = x.password
    pwdhash = hashlib.md5(passwd).hexdigest()
    subname = x.subname
    parentid = -2
    parentLeftCount = 0
    # Authenticate the parent (parentid=-1 marks top-level author accounts).
    res1 = config.DB.query('select * from userinfo where account=$account and password=$password and parentid=-1', vars={'account': username, 'password': pwdhash})
    if (len(res1) == 0):
        #print 'user error'
        return "1001"
    else:
        # NOTE(review): `re` shadows the stdlib re module if it is imported
        # in this file — consider renaming.
        re = res1[0]
        parentid = re.ID
        parentLeftCount = re.LEFTCOUNT
    # The sub-account must belong to the authenticated parent.
    res2 = config.DB.query('select * from userinfo where account=$account and parentid=$parentid', vars={'account': subname, 'parentid': parentid})
    if (len(res2) == 0):
        #print 'user not exist'
        return "1007"
    else:
        res = res2[0]
        if res.STATUS != 0:
            # Non-zero status: already frozen/deleted.
            return "1013"
        delSubUser(res.ID)
        '''config.DB.update('userinfo',\
        where='id=$id',vars={'id':parentid}, \
        leftCount = parentLeftCount+res.LEFTCOUNT)'''
        #just mark status = 2 and leftcount = 0 for delete
        config.mc.set(str(res.ID) + "_STATUS", 2)
        config.mc.set(str(res.ID) + "_LEFTCOUNT", 0)
        # add for memcached: return the sub-account's remaining quota to the parent
        LOG.info("======DLL delete begin=====\n")
        LOG.info("Before delete, SUBID %s, LEFT %s, parent leftcount %s" % (str(res.ID), str(res.LEFTCOUNT), config.mc.get(str(parentid) + config._LEFTCOUNT)))
        config.mc.incr(str(parentid) + config._LEFTCOUNT, delta=res.LEFTCOUNT)
        LOG.info("After delete, parent leftcount %s" % (config.mc.get(str(parentid) + config._LEFTCOUNT)))
        return '0'
def some_func():
    """ASCII step driver: classify every input file as ASCII-only or not,
    log both lists and the step timing."""
    CFG.start_clock_module = datetime.datetime.now()
    LOG.write_me("\tSTART - ASCII.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M") + ")")
    directory_in_path = os.getcwd() + CFG.input_path_ascii
    directory_in = os.fsencode(directory_in_path)
    list_not_only_ascii = []
    list_only_ascii = []
    rel_in = os.path.relpath(directory_in.decode(),
                             os.path.dirname(os.path.abspath(__file__)))
    LOG.write_me("\t\tWorking on the files in this path : " + rel_in)
    LOG.write_me("\t\tdummy : '" + CFG.dummy + "'")
    empty_folder = True
    for n_file, file, c_files in UTL.list_of_files(directory_in):
        empty_folder = False
        read_write(directory_in.decode() + "\\" + file.decode(), file,
                   list_only_ascii, list_not_only_ascii)
    if empty_folder:
        # Aborts the run (UTL.empty_input_folder exits).
        UTL.empty_input_folder(directory_in_path)
    LOG.write_me("\t\tTotal Number of files : " + str(c_files))
    LOG.write_me(" ")
    LOG.write_me("\t\tList of file with only ASCII chars :")
    for entry in list_only_ascii:
        LOG.write_me("\t\t" + str(entry).rjust(35, ' '))
    LOG.write_me(" ")
    LOG.write_me("\t\tList of file with also NON-ASCII chars ():")
    for entry in list_not_only_ascii:
        LOG.write_me("\t\t" + str(entry).rjust(35, ' '))
    LOG.write_me(" ")
    LOG.write_me("\t\tFile created at the following location: "
                 + CFG.output_path_ascii)
    LOG.write_me(" ")
    elapsed_formatted = UTL.format_elapsed(CFG.start_clock_module)
    LOG.write_me("\tEND - ASCII_ONLY.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M")
                 + " | hh.mm.ss.ms " + elapsed_formatted + ")")
    LOG.write_me("")
    LOG.write_me("")
t.STATUS = "未知状态!" if t.CALLEDCOUNT == 0: t.COLUMN1 = 100.00 else: t.COLUMN1 = "%10.2f" % (100.0 * t.SUCCESSCOUNT / t.CALLEDCOUNT) t.COLUMN2 = "updateparentcount?parentid=" + str(t.ID) if t.PARENTID == -1: t.COLUMN3 = "作者账户" else: t.COLUMN3 = "子账户" views.append(t) return render.accountdetail(views) class logout: def GET(self): #if web.ctx['ip'] != IP: # return render.login() web.setcookie(cookieName, '', expires=-1) web.setcookie(cookieID, '', expires=-1) session.count = 0 return web.seeother('login') if __name__ == "__main__": LOG.info("Admin Web Server Start") #app = web.application(urls, globals()) print(sys.argv) PASSWORD = sys.argv[2] app.run()
WinDir = os.environ['SYSTEMDRIVE'] Username = os.environ['USERNAME'] CONSOLE_DATA = "RealTime Protection is activated." Logo = pygame.image.load("Logo.png") Circle = pygame.image.load("Circle.png") clock = pygame.time.Clock() IsPressed = {'System_scan': 0, 'Custom_scan': 0, 'Realtime_protection': 0} # Setting up window Display = pygame.display.set_mode((475, 650)) pygame.display.set_caption("Corelet") pygame.display.set_icon(Logo) # Setting up Terminal os.system('title Corelet') LOG.CORELET_LOG(Fore.GREEN + "Corelet") LOG.CORELET_LOG( Fore.GREEN + f"Your Windows installtion drive \"{WinDir}\\\" has been detected for scan." ) with open("PROPERTIES.json", "r") as READ: Content = json.load(READ) for Data in Content["PROPERTIES"]: if Data["realtime_protection"] == True and Data[ "startup_protection"] == True: LOG.CORELET_LOG("Startup Protection is enabled.") LOG.CORELET_LOG("Realtime Protection is enabled.\n") RealTime_Protection(Count) elif Data["realtime_protection"] == False and Data[ "startup_protection"] == False:
def some_func():
    """BINARY step driver: feed every input file through read_write and log timings."""
    CFG.start_clock_module = datetime.datetime.now()
    LOG.write_me("\tSTART - BINARY.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M") + ")")
    directory_in_path = os.getcwd() + CFG.input_path_binary
    directory_in = os.fsencode(directory_in_path)
    rel_in = os.path.relpath(directory_in.decode(),
                             os.path.dirname(os.path.abspath(__file__)))
    LOG.write_me("\t\tWorking on the files in this path : " + rel_in)
    empty_folder = True
    for n_file, file, c_files in UTL.list_of_files(directory_in):
        empty_folder = False
        read_write(directory_in.decode() + "\\" + file.decode(), file)
    if empty_folder:
        # Aborts the run (UTL.empty_input_folder exits).
        UTL.empty_input_folder(directory_in_path)
    LOG.write_me("\t\tTotal Number of files : " + str(c_files))
    LOG.write_me("\t\tFile created at the following location: "
                 + CFG.output_path_binary)
    elapsed_formatted = UTL.format_elapsed(CFG.start_clock_module)
    LOG.write_me("\tEND - BINARY.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M")
                 + " | hh.mm.ss.ms " + elapsed_formatted + ")")
    LOG.write_me("")
    LOG.write_me("")
def abc_file_list():
    """Dispatch every file of the ABC input folder to the configured split
    strategy, then log which files were processed (big enough) or skipped
    (too small)."""
    directory_in_path = os.getcwd() + CFG.input_path_abc
    directory_in = os.fsencode(directory_in_path)
    dict_too_small = dict()    # files below the target size: skipped
    dict_big_enough = dict()   # files actually split
    LOG.write_me("\t\tWorking on the files in this path : " + os.path.relpath(directory_in.decode(), os.path.dirname(os.path.abspath(__file__))))
    empty_folder = True
    for n_file, file, c_files in UTL.list_of_files(directory_in):
        empty_folder = False
        path_e_file = directory_in.decode() + "\\" + file.decode()
        if CFG.split_file == "split_by_line":
            read_write_single_line(n_file, path_e_file, file, dict_too_small, dict_big_enough)
        # NOTE(review): the job type is passed as the FIRST argument here, but
        # read_write_by() declares (n_file, job_type, ...) — one of the two
        # sides is wrong; as written, job_type receives the file counter.
        if CFG.split_file == "split_by_size":
            read_write_by("split_by_size", n_file, path_e_file, file, dict_too_small, dict_big_enough)
        if CFG.split_file == "split_by_fraction":
            read_write_by("split_by_fraction", n_file, path_e_file, file, dict_too_small, dict_big_enough)
    if empty_folder:
        # Aborts the run (UTL.empty_input_folder exits).
        UTL.empty_input_folder(directory_in_path)
    LOG.write_me("\t\tTotal Number of files : " + str(c_files))
    LOG.write_me("")
    LOG.write_me("\t\tProcessed files - dict_big_enough[file]:")
    LOG.write_me("\t\tname".rjust(35, ' ') + "\t|\t size:")
    for key, value in dict_big_enough.items():
        LOG.write_me("\t\t" + str(key).rjust(35, ' ') + "\t|\t" + str(value))
    LOG.write_me("\t\tFile created at the following location: " + CFG.output_path_abc)
    LOG.write_me(" ")
    LOG.write_me("\t\tNot processed files - dict_too_small[file]:")
    LOG.write_me("\t\tname".rjust(35, ' ') + "\t|\t size:")
    for key, value in dict_too_small.items():
        LOG.write_me("\t\t" + str(key).rjust(35, ' ') + "\t|\t" + str(value))
    LOG.write_me(" ")
def some_func():
    """CLEAN step: delete every *.txt under OUTPUT_* folders (ABACUS excluded)
    and log each deleted file."""
    CFG.start_clock_module = datetime.datetime.now()
    LOG.write_me("\tSTART - CLEAN.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M") + ")")
    my_root_dir = os.getcwd()
    list_output_dir = []
    list_of_files = []
    LOG.write_me("\t\tList of the files deleted from the 'OUTPUT' folders:")
    for root, dirs, files in os.walk(my_root_dir):
        # Guard clauses: skip the ABACUS tree and anything not an OUTPUT_ folder.
        if str(root).endswith("ABACUS") or "OUTPUT_" not in str(root):
            continue
        for file in files:
            if not str(file).endswith(".txt"):
                continue
            rel_path_file = os.path.relpath(root, my_root_dir) + "/" + file
            LOG.write_me("\t\t- " + rel_path_file)
            os.remove(root + "\\" + file)
            list_of_files.append(rel_path_file)
    if len(list_of_files) == 0:
        LOG.write_me("\t\t\t- No output file to clean")
    elapsed_formatted = UTL.format_elapsed(CFG.start_clock_module)
    LOG.write_me("\tEND - CLEAN.py ("
                 + datetime.datetime.now().strftime("%y-%m-%d | %H:%M")
                 + " | hh.mm.ss.ms " + elapsed_formatted + ")")
    LOG.write_me("")
    LOG.write_me("")
####################################### S T A R T #######################################
if __name__ == '__main__':
    # Pipeline entry point: CLEAN -> SPLIT (-> later steps elsewhere).
    print("PRG START: __init________")
    import datetime
    import _cfg_GLOBAL as CFG
    import LOG
    import CLEAN
    import SPLIT
    import ASCII
    import BINARY
    import NGRAM
    CFG.start_clock_prg = datetime.datetime.now()
    # 05 - LOG
    LOG.open_my_log()
    # 08 - DELETE ALL OUTPUT FILES
    CLEAN.some_func()
    # 10 - SPLIT (set available options in _cfg_GLOBAL.py )
    LOG.write_me("\tCFG.split_file : " + CFG.split_file)
    # `!=` replaces `is not`: identity comparison against a string literal
    # depends on interning (and raises SyntaxWarning on modern CPython).
    if CFG.split_file != "none":
        SPLIT.some_func()
    else:
        LOG.write_me("\t\tParameters set for CFG.split_file : " + CFG.split_file)
        LOG.write_me("\t\tYou have chosen to make '" + CFG.split_file
                     + "' split and therefore nothing there will be in the "
                     + CFG.output_path_abc + " folders")
def empty_input_folder(directory_in_path):
    """Abort the run: log that *directory_in_path* contains no input files,
    then terminate via sys.exit (the exit message is printed to stderr)."""
    LOG.write_me(
        "\t\t The input folder is empty, please check | The program execution is interrupted"
    )
    # Closing quote added around the path (the old message left it unbalanced).
    sys.exit("\t\tEmpty Input Folder '" + directory_in_path + "'")
if(len(results) == 0): return "<script type=\"text/javascript\" charset=”utf-8″> alert(\"\u539f\u59cb\u5bc6\u7801\u9519\u8bef\uff0c\u8bf7\u91cd\u65b0\u8f93\u5165\"); \ window.location.href=\"changepwd\"; </script>" pwdhash = hashlib.md5(i.password).hexdigest() n = config.DB.update('userinfo', where="account = '" + account + "'", password=pwdhash) return "<script type=\"text/javascript\"> alert(\"\u5bc6\u7801\u4fee\u6539\u6210\u529f\"); \ window.location.href=\"changepwd\"; </script>" class logout: def GET(self): web.setcookie(cookieName, '', expires=-1) web.setcookie(cookieID, '', expires=-1) return web.seeother('login') if __name__ == "__main__": #threading.Thread(target = demaonThread, args = (), name = 'demonthread').start() LOG.info("Web server start") print(config.mc) print(str(146) + config._LEFTCOUNT) print(config.mc.get("146_LEFTCOUNT")) print(config.mc.get("144_LEFTCOUNT")) print(config.mc.get("145_LEFTCOUNT")) config.mc.set("test", "1") print(config.mc.get("test")) #print("In mem %s" % config.mc.get(str(144) + config._LEFTCOUNT)) #alipay.alipayDeamon().start() app = web.application(urls, globals()) app.run() app.run()