def change_master(self, failoverid, new_master, method):
    """
    Trigger a MySQL master switch by running mha_switch.py on the manager host.

    Remote usage:
        python mha_switch.py --group=xx --old_master=ip:port \
            --new_master=ip:port --type=xx --record=xx

    :param failoverid: id of the failover row describing this cluster
    :param new_master: instance id of the server that should become master
    :param method: switch type, passed through as --type
    :return: True if the remote script was invoked, False otherwise
    """
    cmd = 'mha_switch.py'
    cmd_type = 'python'
    # NOTE(review): string-formatted SQL; failoverid is presumably an int,
    # but a parameterized query would be safer -- verify fetchRow API.
    sql = ('select s.name,s.ip,master from failover f ,server s '
           'where f.manager=s.id and f.id=%s' % failoverid)
    (group, ip, old_master) = self.q.fetchRow(sql)
    _old = InstanceGet().get_instance_by_id(old_master)
    _new = InstanceGet().get_instance_by_id(new_master)
    _old_m = "%s:%s" % (_old.get('ip'), _old.get('port'))
    _new_m = "%s:%s" % (_new.get('ip'), _new.get('port'))
    record_id = self.add_failover_record(failoverid, method, _old_m, _new_m)
    self.add_failover_record_detail(record_id, 'mega', now(), 0, 'Y',
                                    'Start the switch task.')
    # BUGFIX: the old format string was
    #   "--group=%s --old_master= --new_master=%s:%s --type=%s --record=%s"
    # which left --old_master empty and built a malformed
    # --new_master of the form ip:port:ip:port.
    args = ("--group=%s --old_master=%s --new_master=%s --type=%s --record=%s"
            % (group, _old_m, _new_m, method, record_id))
    # group name, old master, new master, action
    result = remote_cmd(ip, None, cmd, cmd_type, args)
    if result == 0:
        # NOTE(review): 0 is treated as failure here -- confirm remote_cmd
        # returns a truthy value on success (not a shell-style exit code).
        self.stat_failover_record(record_id, 'Failed')
        self.add_failover_record_detail(
            record_id, 'mega', now(), 0, 'Y',
            'End the task as failed to call remote script')
        return False
    else:
        self.add_failover_record_detail(
            record_id, 'mega', now(), 0, 'Y',
            'Call the remote script on %s' % ip)
        return True
def test_insert(): print now() for i in range(1000000): insert("test", {"test":1, "test2": "222xxxx", "test3":"kaonima", "test4": "qusiba"}, safe=False ) print now()
def test_task_user_find(): print "start" print now() result = find("test_task", {"userList.userSeq": 5}, return_type="cusor") print now() #查询没问题 result = find("test_task", {"userList.userSeq": 5}) #取数据很慢非常慢 print "end" print result print result.count()
def _create_epub_single(files, output, title):
    """Bundle every file in *files* as one chapter each into a single epub."""
    import pypub
    creator = "Anonymous"
    language = 'cn'
    rights = now()
    publisher = 'Anonymous'
    print('Creating epub "%s" include %s chapters' % (title, len(files)))
    book = pypub.Epub(title, creator=creator, language=language,
                      rights=rights, publisher=publisher)
    for path in files:
        # Chapter title is the file's base name without its extension.
        chapter_title = os.path.splitext(os.path.basename(path))[0]
        book.add_chapter(pypub.create_chapter_from_file(path, chapter_title))
    book.create_epub(output, epub_name=title)
def index():
    """News index page; serves only the list fragment for ajax requests."""
    query = get_query(request.values, session.get('boot_time', now()))
    pages = get_query_page(session.get('page', None), query,
                           session.get('sid'), 'news')
    # Ajax callers only want the refreshed list markup, not the full page.
    template = "news/list.html" if request.values.get("ajax", None) else "index.html"
    session.update(set_page_session(request.url, pages))
    return render_template(
        template,
        pages=pages,
        search_value=request.args.get('title', ''),
    )
def GET(self):
    """Record an incoming callback: 'id' selects the project, the rest is payload."""
    web_input = web.input()
    try:
        project_id = int(web_input.pop('id'))
    except (ValueError, KeyError):
        # Missing or non-numeric project id: silently ignore the hit.
        return ''
    env = web.ctx.env
    server_data = json.dumps({
        'User-Agent': env.get('HTTP_USER_AGENT'),
        'Request-IP': env.get('REMOTE_ADDR')
    })
    save_raw_data(
        project_id=project_id,
        raw_data=json.dumps(web_input),
        server_data=server_data,
        got_time=now()
    )
    return ''
def buy_index():
    """Buy-section index page; serves only the list fragment for ajax requests."""
    pages = get_query_page(
        session.get('page', None),
        get_query(request.values, session.get('boot_time', now())),
        session.get('sid'),
        'buy'
    )
    # Ajax callers only want the refreshed list markup, not the full page.
    template = "buy/list.html" if request.values.get("ajax", None) else "buy/index.html"
    session.update(set_page_session(request.url, pages))
    # f_id "0" = top level (provinces); f_id "35" = that province's cities.
    provinces = get_info_list("city", query={'f_id': "0"}, return_type="list", sort=1)
    cities = get_info_list("city", query={'f_id': "35"}, return_type="list", sort=1)
    return render_template(
        template,
        pages=pages,
        provice_list=provinces,
        city_list=cities,
        search_value=request.args.get('title', '')
    )
def get_page(collection, query={}, limit=settings_run.DEFAULT_LIMIT): """ @todo:获取分页 @params collection:集合名称 @params query:查询 @params page: 页数 @params limit: 数据数量 @return: """ page = int(query.get("page", 1)) boot_time = query.get("boot_time", None) if not boot_time or page == 1: boot_time = now() query["create_time"] = {"$lt": str(boot_time)} if query.has_key("page"): query.pop("page") if query.has_key("old_page"): query.pop("old_page") if query.has_key("boot_time"): query.pop("boot_time") collection_data = find(collection, query, limit=limit) data = list(collection_data) length = len(data) if length > 0: boot_time = data[length - 1].get("create_time") else: boot_time = "" if query.has_key("create_time"): query.pop("create_time") return { "count": find(collection, query, return_type="cusor").count(), "data": data, "page": page, "limit": limit, "boot_time": boot_time, }
def get_page(collection, query=None, limit=settings_run.DEFAULT_LIMIT):
    '''
    Fetch one page of *collection* using create_time-based pagination.

    @params collection: collection name
    @params query: filter dict; may carry transient 'page', 'old_page' and
        'boot_time' keys, which are consumed here. The caller's dict is no
        longer mutated -- this function works on a copy.
    @params limit: page size
    @return: dict with count / data / page / limit / boot_time
    '''
    # BUGFIX: the old signature used a shared mutable default ({}) and
    # mutated the caller's dict in place (pop()s plus a temporary
    # 'create_time' key). Work on a private copy instead.
    query = dict(query) if query else {}
    page = int(query.pop('page', 1))
    boot_time = query.pop('boot_time', None)
    query.pop('old_page', None)
    if not boot_time or page == 1:
        boot_time = now()
    # Page window: everything strictly older than the boot_time anchor.
    paged_query = dict(query)
    paged_query['create_time'] = {'$lt': str(boot_time)}
    data = list(find(collection, paged_query, limit=limit))
    if data:
        # Next page anchors on the oldest create_time we just returned.
        boot_time = data[-1].get('create_time')
    else:
        boot_time = ''
    return {
        # Total count deliberately ignores the create_time window.
        'count': find(collection, query, return_type="cusor").count(),
        'data': data,
        'page': page,
        'limit': limit,
        'boot_time': boot_time,
    }
from lib.store import update
from lib.utils import now
from pymongo.objectid import ObjectId

# Benchmark/backfill: restamp create_time on matching documents of all three
# collections, 1000 rounds. now() is called per update so timestamps differ.
for _ in range(1000):
    for collection in ('news', 'users', 'sell'):
        update(collection, {'create_time': {'$ne': 'a'}},
               {'create_time': str(now())})
model = RatLesNetv2(modalities=modalities, filters=filters) model.to(device) for i, m in enumerate(args.model): if len(args.model) > 1: outputPath = baseOutputPath + str(i + 1) + "/" os.makedirs(outputPath) else: outputPath = baseOutputPath # Loading model model.load_state_dict(torch.load(m)) model.eval() print(now() + "Start generating masks (model " + str(i + 1) + ")") with torch.no_grad(): for te_i in range(len(test_data)): if te_i % 10 == 0: print("Masks generated: {}/{}".format(te_i, len(test_data))) X, Y, id_ = test_data[te_i] output = model(X) pred = output[0].cpu().numpy() # BCDHW # Optional Post-processing if removeSmallIslands_thr != -1: pred = removeSmallIslands(pred, thr=removeSmallIslands_thr) if type(Y) != type(None):
) else: print("> Available GPUs:") for i in range(torch.cuda.device_count()): print(" > GPU #" + str(i) + " (" + torch.cuda.get_device_name(i) + ")") raise Exception("The GPU #" + str(args.gpu) + " does not exist. Check available GPUs.") if args.gpu > -1: device = torch.device("cuda:" + str(args.gpu)) else: device = torch.device("cpu") # Parsing the data print(now() + "Loading data") train_data = DataWrapper(args.input, "train", device, loadMemory=args.loadMemory) if args.validation != -1: val_data = DataWrapper(args.validation, "validation", device, loadMemory=args.loadMemory) else: val_data = [] ### Creating a new folder for the current run outputPath = os.path.join( args.output,
db.execute(""" create table project_results ( id integer not null primary key autoincrement, project_id integer not null, raw_data longtext default null, server_data longtext default null, got_time char(50) default null ) """) db.execute(""" create table xss_core ( id integer not null primary key autoincrement, name char(50) not null, script longtext default null, owner integer not null default 0 ) """) db.execute('insert into xss_core(name, script) values (?, ?)', ('Get Cookies', get_cookie)) db.execute('insert into projects(name, type, owner, created_date) values (?, ?, ?, ?)', ('test', 1, 1, now())) db.commit() db.close() #todo add some data of xss-scripts and test account.