class Provision(object):
    """Provision an instack (TripleO undercloud) virtual environment."""

    def __init__(self, repolist):
        # NOTE(review): repolist is stored but provision() uses a hard-coded
        # delorean-deps repo; confirm whether repolist was meant to be used.
        self.repolist = repolist
        self.yum = YumUtils()
        self.shell = ShellUtils()
        self.com = Common()

    def provision(self):
        """Create the 'stack' user, add repos, install and deploy instack.

        Returns the instack VM's IP address (output of _get_instack_ip).
        """
        # run instack
        self.com.set_user('stack', 'stack')
        self.com.set_repo(
            ['http://trunk.rdoproject.org/centos7/delorean-deps.repo'])
        self._install_pkg(['epel-release', 'instack-undercloud'])
        self._deploy_instack()
        return self._get_instack_ip()

    def is_instack(self):
        """Return True when an instack VM IP is found, else False.

        Fix: the original returned None implicitly in the negative case;
        it now always returns a bool (falsy either way, so callers that
        relied on truthiness are unaffected).
        """
        return self._get_instack_ip() != ''

    def _install_pkg(self, pkgs):
        # Install each package individually via yum.
        for pkg in pkgs:
            self.yum.yum_install(pkg)

    def _deploy_instack(self):
        # Run the instack libvirt setup as the 'stack' user.
        return self.shell._exec_shell_cmd('su stack instack-virt-setup')

    def _get_instack_ip(self):
        # First ARP entry on virbr0 is assumed to belong to the instack VM.
        return self.shell._exec_shell_cmd(
            "arp -n | grep virbr0 | awk '{print $1}'")
def _downloader(self):
    """Worker loop: pull download slips off the queue until it is empty or
    the project signals shutdown, fetch each file, and hand the response
    to storage_callback.
    """
    while (not self.empty()) and (not self.project.shutdown_signal):
        t = threading.current_thread()
        # Block here while the project is paused.
        Common.check_for_pause(self.project)
        slip = self.get()
        # A slip's url may be a callable (lazily resolved) or a plain value.
        if callable(slip.url):
            file_url = slip.url()
        else:
            file_url = slip.url
        t.name = "Downloading: " + slip.item[slip.filename_key]
        self.project.log("transaction", "Downloading " + slip.item[slip.filename_key], "info", True)
        try:
            data = Common.webrequest(
                file_url,
                self.headers(),
                self.http_callback,
                None,
                False,
                True
            )  # Response object gets passed to shutil.copyfileobj
            self.storage_callback(data, slip)
        except urllib.error.HTTPError as err:
            # Log and skip this item; keep the worker alive for the rest.
            self.project.log(
                "exception",
                "{} failed to download - HTTPError {}".format(slip.item[slip.filename_key], err.code),
                "warning",
            )
    if self.project.shutdown_signal:
        self.project.log(
            "exception",
            "{} received shutdown signal. Stopping...".format(threading.current_thread().name),
            "warning",
        )
    else:
        self.project.log("transaction", "{} has completed.".format(threading.current_thread().name), "info")
def main():
    """Entry point (Python 2): parse CLI args and start Tor Browser
    Launcher in either settings mode or launcher mode.
    """
    # parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--settings', action='store_true', dest='settings', help='Open Tor Browser Launcher settings')
    parser.add_argument('url', nargs='*', help='URL to load')
    args = parser.parse_args()

    settings = bool(args.settings)
    url_list = args.url

    # load the version and print the banner
    with open(os.path.join(SHARE, 'version')) as buf:
        tor_browser_launcher_version = buf.read().strip()

    print _('Tor Browser Launcher')
    print _('By Micah Lee, licensed under MIT')
    print _('version {0}').format(tor_browser_launcher_version)
    print 'https://github.com/micahflee/torbrowser-launcher'

    common = Common(tor_browser_launcher_version)

    # is torbrowser-launcher already running?
    tbl_pid = common.get_pid(common.paths['tbl_bin'], True)
    if tbl_pid:
        # Hand off to the running instance instead of starting a second one.
        print _('Tor Browser Launcher is already running (pid {0}), bringing to front').format(tbl_pid)
        common.bring_window_to_front(tbl_pid)
        sys.exit()

    if settings:
        # settings mode
        app = Settings(common)
    else:
        # launcher mode
        app = Launcher(common, url_list)
def run(self):
    """Sign the puppet certificate for this client after a short delay."""
    helper = Common()
    sleep(3)
    fqdn = '%s.%s' % (helper.client_name(), self.domain)
    subprocess.call(['/usr/sbin/puppetca', '--sign', fqdn])
def getUserInfo(self, username):
    '''
    Look up the basic account info for a WeChat user.

    :param username: WeChat user name supplied by the user
    :return: ('success', res dict) or ('fail', error code)
    '''
    if not username:
        return 'fail', errors.NOT_BIND
    # Check whether the user has bound an account.
    r = Db.select('t_user', where="wx_name=$username", vars=locals(), limit=1)
    if not r:  # not bound yet, i.e. the user has not registered an account
        return 'fail', errors.NOT_BIND
    u = r[0]  # first row is the current user
    res = dict()  # result dictionary
    res['id'] = u['id']  # user ID
    res['bt'] = Common.secToStr(u['bind_time'])  # bind time
    res['bs'] = '已绑定'  # bind status (runtime string, intentionally untouched)
    # Login count.
    # NOTE(review): $u['id'] inside the where string relies on web.py's
    # $-substitution with vars=locals(); confirm it expands as intended.
    r2 = Db.select('t_user_attribute', what='count(*)', where="user_id=$u['id']", vars=locals())
    if not r2:
        res['count'] = 0  # login count
        res['last'] = Common.secToLast(0)  # last login time
    else:
        res['count'] = r2[0]['count(*)']  # login count
        # Most recent login record.
        r3 = Db.select('t_user_attribute', where="user_id=$u['id']", vars=locals(), order="time desc", limit=1)
        res['last'] = Common.secToLast(r3[0].time)  # last login time
    return 'success', res
def getDeviceInfo(self, username):
    '''
    Look up the device information bound to a WeChat user.

    :param username: WeChat user name supplied by the user
    :return: ('success', res dict) or ('fail', error code)
    '''
    if not username:
        return 'fail', errors.NOT_BIND
    # Check whether the user has bound an account.
    r = Db.select('t_user', where='wx_name = $username', vars=locals(), limit=1)
    if not r:  # user has not bound a device yet
        return 'fail', errors.NOT_BIND
    u = r[0]  # first row is the current user
    # Basic info of the bound device.
    r1 = Db.select('t_device', where="id=$u['device_id']", vars=locals(), limit=1)
    if not r1:  # device row missing for a bound user -> internal inconsistency
        return 'fail', errors.ERROR_SYSTEM
    d = r1[0]  # first row is the current device
    res = dict()  # result dictionary
    res['id'] = d['id']  # device ID
    res['ct'] = Common.secToStr(d['create_time'])  # manufacture date
    res['bs'] = '已绑定'  # bind status (runtime string, intentionally untouched)
    res['rf'] = d['delay']  # data collection frequency
    # Upload count.
    r2 = Db.select('t_device_attribute', what='count(*)', where="device_id=$d['id']", vars=locals())
    if not r2:
        res['count'] = 0  # upload count
        res['last'] = 0  # last upload time
    else:
        res['count'] = r2[0]['count(*)']  # upload count
        # Most recent upload record.
        r3 = Db.select('t_device_attribute', where="device_id=$d['id']", vars=locals(), order="time desc", limit=1)
        res['last'] = Common.secToLast(r3[0].time)  # last upload time
    return 'success', res
def get_access_token(self, client_id, client_secret):
    """Run the interactive OAuth2 authorization-code flow.

    Builds the provider-specific authorization URL, opens it in a browser,
    prompts the user for the authorization code, exchanges the code for
    tokens at TOKEN_ENDPOINT, then parses and persists the result.
    """
    response_type = 'code'
    query_string = {}
    if self.provider == "google":
        query_string = (
            {'redirect_uri': self.config['REDIRECT_URI'],
             'response_type': response_type,
             'client_id': client_id,
             'scope': self.project.config['OAUTH_SCOPE'],
             # Force the consent screen so a refresh token is issued.
             'approval_prompt': 'force',
             'access_type': 'offline'})
    elif self.provider == "dropbox":
        query_string = ({'response_type': response_type, 'client_id': client_id})
    params = urllib.parse.urlencode(query_string)
    step1 = self.config['OAUTH_ENDPOINT'] + '?' + params
    Common.launch_browser(step1)
    code = IO.get("Authorization Code:")
    # Exchange the authorization code for access/refresh tokens.
    query_string = ({'code': code,
                     'grant_type': 'authorization_code',
                     'client_id': client_id,
                     'client_secret': client_secret})
    if self.provider == "google":
        query_string['scope'] = ''
        query_string['redirect_uri'] = self.config['REDIRECT_URI']
    params = urllib.parse.urlencode(query_string)
    response = Common.webrequest(
        self.config['TOKEN_ENDPOINT'],
        {'content-type': 'application/x-www-form-urlencoded;charset=utf-8'},
        self.http_intercept,
        params)
    json_response = json.loads(response)
    self.parse_token(json_response)
    self.project.save("OAUTH", self.oauth)
def ConstraintNotDominatedByX(self, model):
    """
    Creates a constraint preventing search in dominated regions.
    """
    improvements = []
    for i, metric in enumerate(self.metrics_variables):
        # Current value of this metric under `model`, as an SMT constant.
        current = SMTLib.SMT_IntConst(
            Common.evalForNum(model, metric.convert(self.cfr.solver.converter)))
        if self.metrics_objective_direction[i] == Common.METRICS_MAXIMIZE:
            improvements.append(SMTLib.SMT_GT(metric, current))
        else:
            improvements.append(SMTLib.SMT_LT(metric, current))
    # At least one metric must strictly improve on the model's value.
    return SMTLib.SMT_Or(*improvements)
def main():
    """Entry point (Python 2): print the banner and start Tor Browser
    Launcher in settings or launcher mode.
    """
    with open(os.path.join(SHARE, 'version')) as buf:
        tor_browser_launcher_version = buf.read().strip()

    print _('Tor Browser Launcher')
    print _('By Micah Lee, licensed under MIT')
    print _('version {0}').format(tor_browser_launcher_version)
    print 'https://github.com/micahflee/torbrowser-launcher'

    common = Common(tor_browser_launcher_version)

    # is torbrowser-launcher already running?
    tbl_pid = common.get_pid(common.paths['tbl_bin'], True)
    if tbl_pid:
        # Hand off to the running instance instead of starting a second one.
        print _('Tor Browser Launcher is already running (pid {0}), bringing to front').format(tbl_pid)
        common.bring_window_to_front(tbl_pid)
        sys.exit()

    if '-settings' in sys.argv:
        # settings mode
        app = Settings(common)
    else:
        # launcher mode
        app = Launcher(common)
def verify(self):
    """Verify every downloaded file against its remote MD5 hash and write
    a CSV verification report to the project working directory.

    Fixes: local file handles are now closed via a context manager, and a
    run in which no item carried a remote hash no longer raises
    ZeroDivisionError when computing the success rate.
    """
    self.project.log("transaction", "Verifying all downloaded files...", "highlight", True)
    verification_file = os.path.join(self.project.working_dir, Common.timely_filename("verification", ".csv"))
    errors = 0
    tot_hashes = 0
    with open(verification_file, 'w') as f:
        f.write("TIME_PROCESSED,REMOTE_FILE,LOCAL_FILE,REMOTE_HASH,LOCAL_HASH,MATCH\n")
        for item in self.verification:
            # Hash the local copy; close the handle promptly (was leaked).
            with open(item['local_file'], 'rb') as local_fh:
                lh = Common.hashfile(local_fh, hashlib.md5())
            lf = item['local_file']
            rf = item['remote_file']
            if 'remote_hash' in item:
                tot_hashes += 1
                rh = item['remote_hash']
                if lh == item['remote_hash']:
                    match = "YES"
                else:
                    match = "NO"
                    errors += 1
                    self.project.log("exception", "Verification failed for remote file {} and local file {}".format(rf,lf), "critical", True)
            else:
                rh = "NONE PROVIDED"
                match = "N/A"
            f.write('"{date}","{rf}","{lf}","{rh}","{lh}","{m}"\n'.format(date=Common.utc_get_datetime_as_string(),rf=rf,lf=lf,rh=rh,lh=lh,m=match))
    # Avoid ZeroDivisionError when no remote hashes were provided.
    pct = ((tot_hashes - errors) / tot_hashes) * 100 if tot_hashes else 100.0
    self.project.log("transaction", "Verification of {} items completed with {} errors. ({:.2f}% Success rate)".format(tot_hashes, errors, pct), "highlight", True)
def op_substring(whole_string, left_index, right_index):
    """Build the symbolic Substring term for whole_string[left:right]."""
    (_, whole_mask) = whole_string.getInstanceSort(0)
    base = whole_mask.get(0)
    # Register fresh string ids for the two index expressions.
    left_id = Common.STRCONS_SUB + str(Common.getStringUID())
    right_id = Common.STRCONS_SUB + str(Common.getStringUID())
    Common.string_map[left_id] = left_index
    Common.string_map[right_id] = right_index
    token = "$".join(["Substring", base, left_id, right_id])
    return IntArg([token])
def __init__(self, short, priest):
    """Load all 150 psalms from the Slavonic psalter text files."""
    super().__init__(short, priest)
    self.common = Common(short, priest)
    self.show_numbers = False
    # Psalm n (1-based) is stored at index n-1 as a list of its lines.
    self._psalms = [
        load("texts/slav/psalter/" + str(num) + ".txt").split("\n")
        for num in range(1, 151)
    ]
def op_substring(whole_string, left_index, right_index):
    """Create a symbolic Substring expression over whole_string."""
    _, mask = whole_string.getInstanceSort(0)
    whole = mask.get(0)

    def _register(index_expr):
        # Allocate a unique string id and remember the index expression.
        sid = Common.STRCONS_SUB + str(Common.getStringUID())
        Common.string_map[sid] = index_expr
        return sid

    lid = _register(left_index)
    rid = _register(right_index)
    return IntArg(["Substring$" + whole + "$" + lid + "$" + rid])
def __init__(self):
    """Set up helpers and run the segment-line conversion for the SBM data set."""
    super().__init__()
    self.common = Common()
    self.convert_into_lines(
        self.datapath_sbm, self.readfile_sbm, self.newfile_sbm, self.newdir_path)
def ConstraintMustDominatesX(self, model):
    """
    Returns a constraint that a new instance has to be better than the
    instance represented by model in at least one dimension, and better
    or equal in all the other ones.
    """
    def model_value(var):
        # Value of the metric variable under `model`, as an SMT constant.
        return SMTLib.SMT_IntConst(
            Common.evalForNum(model, var.convert(self.cfr.solver.converter)))

    disjuncts = []
    for i, strict_var in enumerate(self.metrics_variables):
        # Strict improvement in dimension i...
        if self.metrics_objective_direction[i] == Common.METRICS_MAXIMIZE:
            conjuncts = [SMTLib.SMT_GT(strict_var, model_value(strict_var))]
        else:
            conjuncts = [SMTLib.SMT_LT(strict_var, model_value(strict_var))]
        # ...and at-least-equal in every other dimension.
        for j, other_var in enumerate(self.metrics_variables):
            if j == i:
                continue
            if self.metrics_objective_direction[j] == Common.METRICS_MAXIMIZE:
                conjuncts.append(SMTLib.SMT_GE(other_var, model_value(other_var)))
            else:
                conjuncts.append(SMTLib.SMT_LE(other_var, model_value(other_var)))
        disjuncts.append(SMTLib.SMT_And(*conjuncts))
    return SMTLib.SMT_Or(*disjuncts)
def get_status(self):
    """Return 'HIGH' if any GPU's load percentage reaches the configured
    HIGH_LOAD_GPU threshold, otherwise 'OK'.

    Side effect (preserved from the original): self.device is set to
    [[name, load_pct]] for the GPU that triggered 'HIGH', or for the last
    GPU inspected when all are OK; it stays '' when there are no GPUs.

    Fix: Common.get_percentage(gpu.load) is computed once per GPU instead
    of twice; the threshold lookup is hoisted out of the loop.
    """
    self.device = ''
    threshold = self.params_obj.config_params['HIGH_LOAD_GPU']
    for gpu in self.gpus:
        load_pct = Common.get_percentage(gpu.load)
        self.device = [[gpu.name, load_pct]]
        if load_pct >= threshold:
            return 'HIGH'
    return 'OK'
def readBlog(server, poster, args, ts):
    """Print each blog entry for the given poster/topic, sorted and de-duplicated."""
    tag_ts = Common.getTsFromNaming(server, Common.TagAdapter, ts)
    if tag_ts is None:
        return
    criteria = [poster, args.topic, None]
    for entry in Common.getSortedUnique(tag_ts, criteria):
        print(f'read blog: {entry}')
def main():
    """Collect issue status for one language (from $LANGUAGE) or all of them."""
    selected = os.getenv('LANGUAGE')
    # Restrict to a single language only when the env var names a known one.
    if selected in _LANGUAGES:
        targets = {selected: _LANGUAGES[selected]}
    else:
        targets = _LANGUAGES
    for name, collector in targets.items():
        try:
            collector()
        except Exception as exc:
            _LOG.info(f'Failed to collect issues status for {name}: {exc}')
    Common.push_md_to_storage()
def resume(self):
    """Start (resume) the virtual machine via `virsh start`.

    Returns True on success, False otherwise.

    Fix: the original referenced `p` without ever assigning it (NameError at
    runtime) — the virsh command was built but never executed. The command
    is now run and its exit status checked.
    """
    import subprocess  # local import: file-level import list not visible here
    cmd = shlex.split('virsh start ' + self.vm_identifier)
    p = subprocess.call(cmd)
    if p == 0:
        Common.msg_to_user('virtual machine was resumed', Common.INFO_MSG)
    else:
        Common.msg_to_user('was not possible resume virtual machine', Common.ERRO_MSG)
        return False
    return True
def __init__(self):
    """Initialise the helper components used throughout this class."""
    self.utility = Utility()
    self.file_handling = FileHandling()
    self.common = Common()
    self.query_highlight = Query_Highlight()
    self.baseline_runs = Baseline_Runs()
class Chassis(ChassisBase):
    """Platform-specific Chassis class."""

    FAN_CONFIG = 'fan.json'
    PSU_CONFIG = 'psu.json'

    def __init__(self):
        ChassisBase.__init__(self)
        self._api_common = Common()
        self._api_config = self._api_common.get_config_path()
        self.__initialize_fan()
        self.__initialize_psu()

    def __initialize_fan(self):
        # Build one Fan object per (drawer, fan) pair described in fan.json.
        from sonic_platform.fan import Fan
        fan_config = self._api_common.load_json_file(
            os.path.join(self._api_config, self.FAN_CONFIG))
        if not fan_config:
            return
        fan_index = 0
        for drawer_index in range(fan_config['drawer_num']):
            for _ in range(fan_config['fan_num_per_drawer']):
                self._fan_list.append(
                    Fan(fan_index, drawer_index, conf=fan_config))
                fan_index += 1

    def __initialize_psu(self):
        # Build one Psu object per PSU described in psu.json.
        from sonic_platform.psu import Psu
        psu_config = self._api_common.load_json_file(
            os.path.join(self._api_config, self.PSU_CONFIG))
        if not psu_config:
            return
        for psu_index in range(psu_config['psu_num']):
            self._psu_list.append(Psu(psu_index, conf=psu_config))
def translateWords(self, txt: str):
    """Fuzzy-translate every symbol-delimited word found in txt.

    Returns the translated text, or None when nothing was translated.
    """
    matches = cm.findInvert(df.SYMBOLS, txt, is_reversed=True)
    result = str(txt)
    for position, match in matches.items():
        candidate, _item, _ratio, _untranslated = self.simpleFuzzyTranslate(match.txt)
        if candidate:
            result = cm.jointText(result, candidate, position)
    return result if result != txt else None
def _save_file(self, data, slip, stream=True):
    """Persist a downloaded payload to the slip's save path.

    Fix: the race-prone isdir() pre-check is dropped —
    os.makedirs(..., exist_ok=True) already handles the
    directory-already-exists case without a TOCTOU window.
    """
    Common.check_for_pause(self.project)
    savepath = slip.savepath
    # Ensure the target directory exists (directory part only, not the filename).
    os.makedirs(os.path.dirname(savepath), exist_ok=True)
    self.project.savedata(data, savepath, stream)
    self.project.log("transaction", "Saved file to " + savepath, "info", True)
def __init__(self, index, drawer_index, is_psu_fan=False, psu_index=0, conf=None):
    """Fan initialiser.

    index: fan position; drawer_index: drawer position; is_psu_fan /
    psu_index: set for PSU-mounted fans; conf: parsed fan configuration.
    """
    FanBase.__init__(self)
    self.fan_index = index
    self.drawer_index = drawer_index
    self._config = conf
    self._api_common = Common()
    # Resolve the display name up front (before _is_psu_fan is assigned,
    # exactly as in the original ordering).
    self._name = self.get_name()
    self._is_psu_fan = is_psu_fan
    # NOTE(review): psu_index is accepted but never stored — confirm intended.
def main():
    """Collect open issues and dispatch them to per-language handlers."""
    issues = collect_open_issues()
    env_lang = os.getenv('LANGUAGE')
    if env_lang in _CONVERT:
        # Only the single language requested via the environment.
        key = _CONVERT[env_lang]
        targets = {key: _LANGUAGES[key]}
    else:
        targets = _LANGUAGES
    for lang, handler in targets.items():
        handler(select_language_issues(issues, lang))
    Common.push_md_to_storage()
def __init__(self, short, priest):
    """Typika service state: defaults to a ferial Saturday outside Lent."""
    super().__init__(short, priest)
    # Opening prayers (Heavenly King, Trisagion, Our Father, etc.) come
    # from the superclass initialisation above.
    self.lent = False
    self.alleluia = False
    self.typika_sign = TypikaSign.Ferial
    self.common = Common(short, priest)
    self.ectenia = Ectenia(short, priest)
    self.psalter = Psalter(short, priest)
    self.day = Day.Sat
def __init__(self, master=None):
    """Build the GUI: text box and widgets, then show the welcome message."""
    tk.Frame.__init__(self, master)
    self.pack()
    self.config_text_box()
    self.common = Common(self.logger)
    self.create_widgets()
    self.welcome()
def bc():
    """Ad-hoc driver: run blink-detection coverage on a hard-coded set of
    ground-truth annotations and left/right-eye blink detections, then
    print the coverage report.
    """
    # (A commented-out dump of the raw annotation dictionaries that annotsL
    # was derived from previously lived here; removed as dead commented-out
    # code.)
    # Ground truth: one dict per blink — 'bs'/'be' are start/end frames,
    # 'bi' is the blink index.
    annotsL = [{'be': 1289, 'bi': 12, 'bs': 1284}, {'be': 4887, 'bi': 60, 'bs': 4880}, {'be': 1817, 'bi': 19, 'bs': 1809}, {'be': 280, 'bi': 3, 'bs': 274}, {'be': 3252, 'bi': 39, 'bs': 3247}, {'be': 3363, 'bi': 42, 'bs': 3359}, {'be': 1844, 'bi': 20, 'bs': 1832}, {'be': 2356, 'bi': 27, 'bs': 2350}, {'be': 4408, 'bi': 55, 'bs': 4401}, {'be': 4670, 'bi': 57, 'bs': 4662}, {'be': 3651, 'bi': 45, 'bs': 3645}, {'be': 2632, 'bi': 32, 'bs': 2625}, {'be': 1606, 'bi': 15, 'bs': 1602}, {'be': 2890, 'bi': 35, 'bs': 2883}, {'be': 2124, 'bi': 25, 'bs': 2119}, {'be': 1113, 'bi': 7, 'bs': 1105}, {'be': 4194, 'bi': 48, 'bs': 4179}, {'be': 1129, 'bi': 8, 'bs': 1122}, {'be': 617, 'bi': 4, 'bs': 613}, {'be': 3609, 'bi': 44, 'bs': 3602}, {'be': 2500, 'bi': 29, 'bs': 2493}, {'be': 4729, 'bi': 58, 'bs': 4723}, {'be': 3196, 'bi': 38, 'bs': 3191}, {'be': 4991, 'bi': 61, 'bs': 4986}, {'be': 4227, 'bi': 49, 'bs': 4220}, {'be': 3979, 'bi': 46, 'bs': 3972}, {'be': 4234, 'bi': 50, 'bs': 4229}, {'be': 2702, 'bi': 33, 'bs': 2696}, {'be': 4307, 'bi': 51, 'bs': 4301}, {'be': 922, 'bi': 6, 'bs': 916}, {'be': 1178, 'bi': 9, 'bs': 1173}, {'be': 1946, 'bi': 21, 'bs': 1942}, {'be': 4003, 'bi': 47, 'bs': 3996}, {'be': 2472, 'bi': 28, 'bs': 2466}, {'be': 1457, 'bi': 13, 'bs': 1444}, {'be': 1968, 'bi': 22, 'bs': 1959}, {'be': 176, 'bi': 1, 'bs': 168}, {'be': 1204, 'bi': 10, 'bs': 1199}, {'be': 3514, 'bi': 43, 'bs': 3509}, {'be': 3263, 'bi': 40, 'bs': 3257}, {'be': 1728, 'bi': 17, 'bs': 1723}, {'be': 4801, 'bi': 59, 'bs': 4797}, {'be': 3012, 'bi': 36, 'bs': 3006}, {'be': 2761, 'bi': 34, 'bs': 2755}, {'be': 1492, 'bi': 14, 'bs': 1485}, {'be': 2516, 'bi': 30, 'bs': 2509}, {'be': 2010, 'bi': 23, 'bs': 2002}, {'be': 4316, 'bi': 52, 'bs': 4312}, {'be': 1248, 'bi': 11, 'bs': 1241}, {'be': 3283, 'bi': 41, 'bs': 3279}, {'be': 2531, 'bi': 31, 'bs': 2524}, {'be': 2019, 'bi': 24, 'bs': 2014}, {'be': 2279, 'bi': 26, 'bs': 2272}, {'be': 232, 'bi': 2, 'bs': 225}, {'be': 4328, 'bi': 53, 'bs': 4322}, {'be': 3053, 'bi': 37, 'bs': 3047}, {'be': 1664, 'bi': 16, 'bs': 1660}, {'be': 763, 'bi': 5, 'bs': 756}, {'be': 1791, 'bi': 18, 'bs': 1785}, {'be': 4610, 'bi': 56, 'bs': 4603}, {'be': 4357, 'bi': 54, 'bs': 4351}]
    # Left-eye detections: 'fs'/'fe' are start/end frames, 'start'/'end'/
    # 'duration' are times — presumably milliseconds at ~30 fps (frame gaps
    # match 33.3 ms steps) — TODO confirm units.
    lBlinks = [{'duration': 300.0, 'start': 5700.0, 'fs': 170, 'end': 6000.0, 'fe': 179}, {'duration': 466.666667, 'start': 18766.67, 'fs': 562, 'end': 19233.333333, 'fe': 576}, {'duration': 366.666667, 'start': 25200.0, 'fs': 755, 'end': 25566.666667, 'fe': 766}, {'duration': 500.0, 'start': 30533.33, 'fs': 915, 'end': 31033.333333, 'fe': 930}, {'duration': 466.666667, 'start': 36033.33, 'fs': 1080, 'end': 36500.0, 'fe': 1094}, {'duration': 333.333333, 'start': 36866.67, 'fs': 1105, 'end': 37200.0, 'fe': 1115}, {'duration': 466.666667, 'start': 37333.33, 'fs': 1119, 'end': 37800.0, 'fe': 1133}, {'duration': 333.333333, 'start': 39133.33, 'fs': 1173, 'end': 39466.666667, 'fe': 1183}, {'duration': 200.0, 'start': 49466.67, 'fs': 1483, 'end': 49666.666667, 'fe': 1489}, {'duration': 266.666667, 'start': 55333.33, 'fs': 1659, 'end': 55600.0, 'fe': 1667}, {'duration': 400.0, 'start': 57466.67, 'fs': 1723, 'end': 57866.666667, 'fe': 1735}, {'duration': 433.333333, 'start': 59466.67, 'fs': 1783, 'end': 59900.0, 'fe': 1796}, {'duration': 466.666667, 'start': 64700.0, 'fs': 1940, 'end': 65166.666667, 'fe': 1954}, {'duration': 366.666667, 'start': 66766.67, 'fs': 2002, 'end': 67133.333333, 'fe': 2013}, {'duration': 400.0, 'start': 67133.33, 'fs': 2013, 'end': 67533.333333, 'fe': 2025}, {'duration': 366.666667, 'start': 70600.0, 'fs': 2117, 'end': 70966.666667, 'fe': 2128}, {'duration': 466.666667, 'start': 75733.33, 'fs': 2271, 'end': 76200.0, 'fe': 2285}, {'duration': 433.333333, 'start': 78366.67, 'fs': 2350, 'end': 78800.0, 'fe': 2363}, {'duration': 466.666667, 'start': 82100.0, 'fs': 2462, 'end': 82566.666667, 'fe': 2476}, {'duration': 333.333333, 'start': 84100.0, 'fs': 2522, 'end': 84433.333333, 'fe': 2532}, {'duration': 500.0, 'start': 87533.33, 'fs': 2625, 'end': 88033.333333, 'fe': 2640}, {'duration': 300.0, 'start': 89866.67, 'fs': 2695, 'end': 90166.666667, 'fe': 2704}, {'duration': 233.333333, 'start': 91866.67, 'fs': 2755, 'end': 92100.0, 'fe': 2762}, {'duration': 400.0, 'start': 100166.67, 'fs': 3004, 'end': 100566.666667, 'fe': 3016}, {'duration': 333.333333, 'start': 101533.33, 'fs': 3045, 'end': 101866.666667, 'fe': 3055}, {'duration': 333.333333, 'start': 106366.67, 'fs': 3190, 'end': 106700.0, 'fe': 3200}, {'duration': 433.333333, 'start': 108133.33, 'fs': 3243, 'end': 108566.666667, 'fe': 3256}, {'duration': 466.666667, 'start': 108566.67, 'fs': 3256, 'end': 109033.333333, 'fe': 3270}, {'duration': 400.0, 'start': 109233.33, 'fs': 3276, 'end': 109633.333333, 'fe': 3288}, {'duration': 333.333333, 'start': 111933.33, 'fs': 3357, 'end': 112266.666667, 'fe': 3367}, {'duration': 333.333333, 'start': 116900.0, 'fs': 3506, 'end': 117233.333333, 'fe': 3516}, {'duration': 300.0, 'start': 121500.0, 'fs': 3644, 'end': 121800.0, 'fe': 3653}, {'duration': 266.666667, 'start': 129366.67, 'fs': 3880, 'end': 129633.333333, 'fe': 3888}, {'duration': 366.666667, 'start': 131133.33, 'fs': 3933, 'end': 131500.0, 'fe': 3944}, {'duration': 166.666667, 'start': 140700.0, 'fs': 4220, 'end': 140866.666667, 'fe': 4225}, {'duration': 333.333333, 'start': 140966.67, 'fs': 4228, 'end': 141300.0, 'fe': 4238}, {'duration': 333.333333, 'start': 143366.67, 'fs': 4300, 'end': 143700.0, 'fe': 4310}, {'duration': 333.333333, 'start': 153433.33, 'fs': 4602, 'end': 153766.666667, 'fe': 4612}, {'duration': 266.666667, 'start': 157500.0, 'fs': 4724, 'end': 157766.666667, 'fe': 4732}, {'duration': 333.333333, 'start': 159900.0, 'fs': 4796, 'end': 160233.333333, 'fe': 4806}, {'duration': 400.0, 'start': 162600.0, 'fs': 4877, 'end': 163000.0, 'fe': 4889}, {'duration': 300.0, 'start': 166200.0, 'fs': 4985, 'end': 166500.0, 'fe': 4994}]
    # Right-eye detections, same record layout as lBlinks.
    rBlinks = [{'duration': 400.0, 'start': 5633.33, 'fs': 168, 'end': 6033.333333, 'fe': 180}, {'duration': 333.333333, 'start': 18900.0, 'fs': 566, 'end': 19233.333333, 'fe': 576}, {'duration': 366.666667, 'start': 25200.0, 'fs': 755, 'end': 25566.666667, 'fe': 766}, {'duration': 366.666667, 'start': 30533.33, 'fs': 915, 'end': 30900.0, 'fe': 926}, {'duration': 366.666667, 'start': 36833.33, 'fs': 1104, 'end': 37200.0, 'fe': 1115}, {'duration': 433.333333, 'start': 37400.0, 'fs': 1121, 'end': 37833.333333, 'fe': 1134}, {'duration': 166.666667, 'start': 40000.0, 'fs': 1199, 'end': 40166.666667, 'fe': 1204}, {'duration': 366.666667, 'start': 47666.67, 'fs': 1429, 'end': 48033.333333, 'fe': 1440}, {'duration': 300.0, 'start': 49533.33, 'fs': 1485, 'end': 49833.333333, 'fe': 1494}, {'duration': 400.0, 'start': 57433.33, 'fs': 1722, 'end': 57833.333333, 'fe': 1734}, {'duration': 500.0, 'start': 59466.67, 'fs': 1783, 'end': 59966.666667, 'fe': 1798}, {'duration': 366.666667, 'start': 64666.67, 'fs': 1939, 'end': 65033.333333, 'fe': 1950}, {'duration': 466.666667, 'start': 66666.67, 'fs': 1999, 'end': 67133.333333, 'fe': 2013}, {'duration': 366.666667, 'start': 67133.33, 'fs': 2013, 'end': 67500.0, 'fe': 2024}, {'duration': 400.0, 'start': 70633.33, 'fs': 2118, 'end': 71033.333333, 'fe': 2130}, {'duration': 433.333333, 'start': 78333.33, 'fs': 2349, 'end': 78766.666667, 'fe': 2362}, {'duration': 233.333333, 'start': 82200.0, 'fs': 2465, 'end': 82433.333333, 'fe': 2472}, {'duration': 233.333333, 'start': 83133.33, 'fs': 2493, 'end': 83366.666667, 'fe': 2500}, {'duration': 433.333333, 'start': 83666.67, 'fs': 2509, 'end': 84100.0, 'fe': 2522}, {'duration': 433.333333, 'start': 84100.0, 'fs': 2522, 'end': 84533.333333, 'fe': 2535}, {'duration': 333.333333, 'start': 87533.33, 'fs': 2625, 'end': 87866.666667, 'fe': 2635}, {'duration': 266.666667, 'start': 89900.0, 'fs': 2696, 'end': 90166.666667, 'fe': 2704}, {'duration': 366.666667, 'start': 91866.67, 'fs': 2755, 'end': 92233.333333, 'fe': 2766}, {'duration': 500.0, 'start': 96033.33, 'fs': 2880, 'end': 96533.333333, 'fe': 2895}, {'duration': 400.0, 'start': 100200.0, 'fs': 3005, 'end': 100600.0, 'fe': 3017}, {'duration': 466.666667, 'start': 101500.0, 'fs': 3044, 'end': 101966.666667, 'fe': 3058}, {'duration': 333.333333, 'start': 106366.67, 'fs': 3190, 'end': 106700.0, 'fe': 3200}, {'duration': 333.333333, 'start': 108233.33, 'fs': 3246, 'end': 108566.666667, 'fe': 3256}, {'duration': 200.0, 'start': 108566.67, 'fs': 3256, 'end': 108766.666667, 'fe': 3262}, {'duration': 466.666667, 'start': 109233.33, 'fs': 3276, 'end': 109700.0, 'fe': 3290}, {'duration': 266.666667, 'start': 111966.67, 'fs': 3358, 'end': 112233.333333, 'fe': 3366}, {'duration': 200.0, 'start': 116966.67, 'fs': 3508, 'end': 117166.666667, 'fe': 3514}, {'duration': 266.666667, 'start': 120100.0, 'fs': 3602, 'end': 120366.666667, 'fe': 3610}, {'duration': 333.333333, 'start': 121533.33, 'fs': 3645, 'end': 121866.666667, 'fe': 3655}, {'duration': 66.666667, 'start': 131233.33, 'fs': 3936, 'end': 131300.0, 'fe': 3938}, {'duration': 266.666667, 'start': 131400.0, 'fs': 3941, 'end': 131666.666667, 'fe': 3949}, {'duration': 166.666667, 'start': 131800.0, 'fs': 3953, 'end': 131966.666667, 'fe': 3958}, {'duration': 366.666667, 'start': 139266.67, 'fs': 4177, 'end': 139633.333333, 'fe': 4188}, {'duration': 300.0, 'start': 140666.67, 'fs': 4219, 'end': 140966.666667, 'fe': 4228}, {'duration': 500.0, 'start': 140966.67, 'fs': 4228, 'end': 141466.666667, 'fe': 4243}, {'duration': 333.333333, 'start': 143400.0, 'fs': 4301, 'end': 143733.333333, 'fe': 4311}, {'duration': 266.666667, 'start': 143733.33, 'fs': 4311, 'end': 144000.0, 'fe': 4319}, {'duration': 133.333333, 'start': 144100.0, 'fs': 4322, 'end': 144233.333333, 'fe': 4326}, {'duration': 200.0, 'start': 153433.33, 'fs': 4602, 'end': 153633.333333, 'fe': 4608}, {'duration': 300.0, 'start': 157500.0, 'fs': 4724, 'end': 157800.0, 'fe': 4733}, {'duration': 333.333333, 'start': 159900.0, 'fs': 4796, 'end': 160233.333333, 'fe': 4806}]
    # Compute per-eye and overall coverage of the annotations and print it.
    l, r, o = Cmn.detectionCoverageF(annotsL, lBlinks, rBlinks)
    Cmn.displayDetectionCoverage(l, r, o)
    return
def get_test_data(self, field_type):
    """Return (developer, user) test-file contents for a field-type plugin.

    Returns Error.PLUGINERR when the field type is unknown.

    Fix: removed leftover debug print() calls that polluted stdout.
    """
    detail = self.core.get_field_detail(field_type)
    if detail is None:
        return Error.PLUGINERR
    path = './plugins/fieldtype/' + detail['name'].lower()
    dev = Common.get_file_content(path + '/developer.test', default='')
    usr = Common.get_file_content(path + '/user.test', default='')
    return dev, usr
def __init__(self):
    """
    Constructor: Used to initialize all the class variables
    """
    self.utility = Utility()
    # NOTE(review): defaultdict() without a default_factory behaves like a
    # plain dict (missing keys raise KeyError) — confirm that a factory such
    # as defaultdict(int) / defaultdict(list) was not intended here.
    self.frequency_map = defaultdict()
    self.synonyms_map = defaultdict()
    self.file_handling = FileHandling()
    self.common = Common()
    self.indexer = Indexer()
def construct_common_model(finetune, conv_model, sent_embedder):
    """Build the Common wrapper around *conv_model*.

    When *finetune* is truthy the sentence embedder is attached as the
    encoder; otherwise the model is built without one. The filter count
    is derived from the conv model's block count and the global args.
    """
    n_filters = conv_model.get_n_blocks() * args.n_filters
    if finetune:
        return Common(conv_model, n_filters=n_filters, encoder=sent_embedder)
    return Common(conv_model, n_filters=n_filters)
def __init__(self):
    """
    Constructor: Used to initialize all the class variables
    """
    self.utility = Utility()
    self.file_handling = FileHandling()
    self.common = Common()
    self.indexer = Indexer()
    self.baseline_runs = Baseline_Runs()
    # Length cutoff used by downstream processing (presumably a maximum
    # query/document length — TODO confirm against callers).
    self.threshold_length = 20
def setUp(self):
    """Prepare a BasicCalculation test: code stub, PAW data, temp file."""
    self.calc_cls = CalculationFactory('vasp.base.BasicCalculation')
    Common.import_paw()
    Paw = DataFactory('vasp.paw')
    self.code = Code()
    self.code.set_computer(self.computer)
    self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
    # First matching PAW for each element used in the test structure.
    self.paw_in = Paw.load_paw(element='In')[0]
    self.paw_as = Paw.load_paw(element='As')[0]
    # mkstemp() returns (os-level fd, path); the fd in self.tmp is never
    # closed here — presumably released in tearDown (TODO confirm).
    self.tmp, self.tmpf = tempfile.mkstemp()
def ConstraintMustDominatesX(self, model):
    """
    Returns a constraint that a new instance has to be better than the
    instance represented by model in at least one dimension, and better
    or equal in all the other ones (i.e. Pareto domination of *model*).
    """
    def model_value(metric_var):
        # SMT constant holding the value *model* assigns to this metric.
        return SMTLib.SMT_IntConst(
            Common.evalForNum(
                model, metric_var.convert(self.cfr.solver.converter)))

    dominationDisjunction = []
    # enumerate() replaces the original hand-maintained i/j counters.
    for i, strict_metric in enumerate(self.metrics_variables):
        dominationConjunction = []
        # Strictly better in dimension i (direction-aware).
        if self.metrics_objective_direction[i] == Common.METRICS_MAXIMIZE:
            dominationConjunction.append(
                SMTLib.SMT_GT(strict_metric, model_value(strict_metric)))
        else:
            dominationConjunction.append(
                SMTLib.SMT_LT(strict_metric, model_value(strict_metric)))
        # At least as good in every other dimension.
        for j, other_metric in enumerate(self.metrics_variables):
            if j == i:
                continue
            if self.metrics_objective_direction[j] == Common.METRICS_MAXIMIZE:
                dominationConjunction.append(
                    SMTLib.SMT_GE(other_metric, model_value(other_metric)))
            else:
                dominationConjunction.append(
                    SMTLib.SMT_LE(other_metric, model_value(other_metric)))
        dominationDisjunction.append(SMTLib.SMT_And(*dominationConjunction))
    return SMTLib.SMT_Or(*dominationDisjunction)
def setUp(self):
    """Prepare a 'vasp.amn' calculation test: code stub, PAWs, wannier data."""
    self.calc_cls = CalculationFactory('vasp.amn')
    self.code = Code()
    self.code.set_computer(self.computer)
    self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
    Common.import_paw()
    # mkstemp() returns (os-level fd, path); the fd in self.tmpd is never
    # closed here — presumably released in tearDown (TODO confirm).
    self.tmpd, self.tmpf = tempfile.mkstemp()
    # Archive containing the same temp file under two member names.
    self.wdat = Common.wdat()
    self.wdat.add_file(self.tmpf, 'test1')
    self.wdat.add_file(self.tmpf, 'test2')
    self.wdat._make_archive()
def handleEventForEachMessage(message, ts):
    """Replay one logged message against the tuple space *ts*.

    Write events are re-inserted via ``_out``; take events are removed
    via ``_inp``; start/adapter events are re-inserted and additionally
    refresh the server list. Unknown events are ignored.
    """
    event = message[Common.MessageEvent]
    if event == Common.EventWrite:
        ts._out(Common.messageToTuple(message))
    elif event == Common.EventTake:
        # return value intentionally ignored
        ts._inp(Common.messageToTuple(message))
    elif event in (Common.EventStart, Common.EventAdapter):
        ts._out(Common.messageToTuple(message))
        Common.updateServerList(ts, message[Common.MessageEntity])
def duplicateMSGIDToUntranslatedMSGSTR(self, title_list):
    """Append the English msgid to untranslated title msgstr entries.

    For every text block whose msgid appears in *title_list* (and is not
    numeric or on the ignore list), ensure the msgstr has the form
    ``<translation> -- <msgid>``. Entries already containing ``-- `` are
    left alone; a single ``" - "`` separator is normalised to ``" -- "``.
    When anything changed, the result is saved to a temporary .po file.

    :param title_list: msgid strings that are treated as titles
    :return: False when *title_list* is empty/None, otherwise None.
    """
    if title_list is None or len(title_list) < 1:  # `is None`, not `== None`
        return False
    self.printTitleOnce()
    for text_block in self.block_list:  # unused block_index dropped
        print("Processing block:{}".format(text_block.getTextWithID()))
        msgid = text_block.msgid
        id_text = cm.stripQuote(msgid.flatText())
        is_numeric = cm.isNumber(id_text)
        is_title = (id_text in title_list)
        is_ignore = cm.isIgnored(id_text)
        if (not is_title) or is_ignore or is_numeric:
            continue
        msgstr = text_block.msgstr
        str_text = cm.stripQuote(msgstr.flatText())
        # Raw strings: '\ ' in a plain literal is an invalid escape;
        # the regex value is unchanged ([\ ] == [ ]).
        is_translated = (re.search(r"[ ]?-- ", str_text) is not None)
        inconsistent = (re.search(r" - ", str_text) is not None)
        if is_translated:
            continue
        if inconsistent:
            replace_text = re.sub(r" - ", " -- ", str_text)
        elif len(str_text) > 0:
            replace_text = "{} -- {}".format(str_text, id_text)
        else:
            replace_text = "-- {}".format(id_text)
        print("-" * 80)
        print("OLD:{}\n".format(text_block.getTextWithID()))
        msgstr.setText(replace_text)
        self.setDirty()
        print("NEW:{}".format(text_block.getTextWithID()))
    if self.isDirty():
        # FIXME: user-specific hard-coded path; should be configurable.
        temp_path = "/home/htran/temp.po"
        print("Saving Changes:{}".format(temp_path))
        self.saveText(out_path=temp_path)
        print("-" * 80)
def pause(self):
    """Shut down the managed VM via ``virsh shutdown``.

    :return: True when virsh exits with status 0, False otherwise.
    """
    command = shlex.split('virsh shutdown ' + self.vm_identifier)
    exit_status = subprocess.Popen(command).wait()
    if exit_status != 0:
        Common.msg_to_user('was not possible pause virtual machine', Common.ERRO_MSG)
        return False
    Common.msg_to_user('virtual machine was turned off', Common.INFO_MSG)
    return True
def get_attention_per_path(source_strings, path_strings, target_strings, attention_weights):
    """Map each timestep's attention weights onto decoded context triplets.

    :param attention_weights: iterable of per-timestep weight vectors,
        one weight per (source, path, target) context — shape (time, contexts).
    :return: list with one dict per timestep, mapping the decoded
        (source, path, target) string triplet to its weight.
    """
    results = []
    for step_weights in attention_weights:
        contexts = zip(source_strings, path_strings, target_strings, step_weights)
        results.append({
            (Common.binary_to_string(src),
             Common.binary_to_string(path),
             Common.binary_to_string(tgt)): weight
            for src, path, tgt, weight in contexts
        })
    return results
def deleteUser(userId):
    """Delete a user row and report the outcome as JSON.

    Returns a failure payload when the user does not exist or the delete
    did not take effect; otherwise returns the refreshed user listing.
    (This codebase calls unbound methods with the class passed
    explicitly, e.g. ``Users.get(Users, userId)`` — kept as-is.)
    """
    user = Users.get(Users, userId)
    if user is None:
        return jsonify(Common.falseReturn(Common, None, '找不到要删除的数据'))
    deleteRow = Users.delete(Users, userId)
    if Users.get(Users, userId) is None:
        return getUsers()
    return jsonify(Common.falseReturn(Common, None, '删除失败'))
def __run():
    # Evaluate a trained model over the RuSentRel Test split with a
    # three-class F1(p,n,u) evaluator; skips silently when the model
    # directory does not exist on disk.
    # Setup default evaluator.
    evaluator = ThreeClassEvaluator(DataType.Test)
    experiment_data = RuSentRelTrainingData(
        labels_scaler=labels_scaler,
        stemmer=stemmer,
        evaluator=evaluator,
        opinion_formatter=RuSentRelOpinionCollectionFormatter(),
        callback=CallbackEvalF1NPU(DataType.Test))
    # Name suffix encodes the sampling/context settings of this run.
    extra_name_suffix = Common.create_exp_name_suffix(
        use_balancing=balanced_input,
        terms_per_context=terms_per_context,
        dist_in_terms_between_att_ends=dist_in_terms_between_attitude_ends)
    # Composing experiment.
    experiment = create_experiment(exp_type=exp_type,
                                   experiment_data=experiment_data,
                                   folding_type=folding_type,
                                   rusentrel_version=rusentrel_version,
                                   experiment_io_type=CustomNetworkExperimentIO,
                                   ruattitudes_version=ra_version,
                                   load_ruattitude_docs=False,
                                   extra_name_suffix=extra_name_suffix)
    full_model_name = Common.create_full_model_name(folding_type=folding_type,
                                                    model_name=model_name,
                                                    input_type=model_input_type)
    model_io = NeuralNetworkModelIO(
        full_model_name=full_model_name,
        target_dir=experiment.ExperimentIO.get_target_dir(),
        # From this depends on whether we have a specific dir or not.
        source_dir=None if model_name_tag is None else u"",
        model_name_tag=ModelNameTagArg.NO_TAG if model_name_tag is None else model_name_tag)
    # Setup model io.
    experiment_data.set_model_io(model_io)
    # Check dir existence in advance.
    model_dir = model_io.get_model_dir()
    if not exists(model_dir):
        print u"Skipping [path not exists]: {}".format(model_dir)
        return
    engine = ExperimentF1pnuEvaluator(experiment=experiment,
                                      data_type=DataType.Test,
                                      max_epochs_count=max_epochs_count,
                                      forced=force_eval)
    # Starting evaluation process.
    engine.run()
def check_stuck_render():
    # Watchdog closure: while the render is flagged as executing, poll
    # once a second; if the finish-expiry deadline has passed, assume
    # Nuke hung after finishing and force-kill it with a success code.
    while self.executing:
        time.sleep(1.0)
        # `x is not None` replaces the original `not x is None`.
        if (self.executing
                and self.date_finish_expire is not None
                and self.date_finish_expire < datetime.datetime.now()):
            Common.warning(
                'Nuke finished but still running (hung?), finishing up.'
            )
            self.exitcode_force = 0
            self.kill()
            break  # We are done
def setUp(self):
    """
    Per-test preparation: record the start time, build the logger and
    derive the timestamp / due-date values used by the test case.
    :return:
    """
    print(self.case_name + "測試開始前準備")
    self.startTime = time.time()
    self.log = Log.MyLog.get_log()
    self.logger = self.log.get_logger()
    # e.g. '20240101123045123456'; truncated to second precision below.
    currenttime = Common.get_time("%Y%m%d%H%M%S%f")
    self.timestamp = currenttime[0:14]
    self.duedate = Common.get_duedate(self.duedate)
def run(self):
    """Worker thread: convert a queued book to mobi and mail the result.

    Loads the pending conversion record, refreshes book image paths,
    invokes the html->mobi converter, updates status/paths in the
    database and finally sends the notification e-mail. On any failure
    the error is logged and the process exits with -1.
    """
    try:
        # Load the pending conversion record.
        wait_converts = Tbl_Wait_Converts()
        wait_converts_info = wait_converts.get(self.convert_id)
        if not wait_converts_info:
            # Call-style raise is valid in both Python 2 and 3
            # (the old `raise Exception, msg` form is Python-2-only).
            raise Exception('未找到待转换的书籍信息')
        # Load the images required by the book.
        book_img = Tbl_Book_Img()
        book_img_info = book_img.get(self.book_id)
        if book_img_info:
            # Update the locally-downloaded image paths.
            book_img.update_local_path(self.book_id, self.book_images_task.get())
        # Load the book record itself.
        books = Tbl_Books()
        book_info = books.get_by_book_id(self.book_id)
        ## Run the actual html -> mobi conversion.
        out_file_path = proc_helper.convert(
            str(wait_converts_info['book_html_local_path']),
            self.out_dir, book_info['book_author'],
            book_info['book_cover_local_path'])
        if out_file_path is None:
            # Conversion failed: flag the record and bail out.
            wait_converts.update_status(gk7.STATUS.get('error'), self.convert_id)
            raise Exception('转换html to mobi失败')
        # Success: mark complete and record the output path.
        wait_converts.update_status(gk7.STATUS.get('complete'), self.convert_id, out_file_path)
        # Update the book's file path.
        books.update_file_path(self.book_id, out_file_path)
        wait_email = Tbl_Wait_Emails()
        # Attach the converted file to the pending e-mail.
        wait_email.update_attach_file(self.email_id, out_file_path)
        # Load the pending e-mail record.
        wait_email_info = wait_email.get(self.email_id)
        if not wait_email_info:
            raise Exception('未找到待发送邮件信息,邮件ID:%s' % self.email_id)
        # Send the e-mail.
        Common.send_mail(self.send_mail_type, self.email_id,
                         wait_email_info['email_attach_file'],
                         str(wait_email_info['email_to_user']),
                         str(wait_email_info['email_title']),
                         str(wait_email_info['email_auth']))
    except Exception as err:
        logger.error(u'异步线程出错,转换ID:%s,错误信息:%s', self.convert_id, err)
        exit(-1)
def checkCertificate(self): """ Method used to check if the host is already linked on master on not. return 'False' or 'True'. """ c = Common() try: checkCert = subprocess.Popen(['/usr/sbin/puppetca', '-la'],stdout=subprocess.PIPE) checkCertPIPE = checkCert.communicate()[0] clientCert = re.search('.*\+.*%s\.%s' % (c.client_name(),self.re_domain), checkCertPIPE) except Exception, e: print 'error :', e
def _build_fs(self, link):
    # Walk the GMail threads listing, accumulating thread items; recurses
    # into the next page while the API response carries a nextPageToken.
    self.project.log("transaction", "Calculating total GMail items...", "info", True)
    response = Common.webrequest(link, self.oauth_provider.get_auth_header(), self.oauth_provider.http_intercept)
    json_response = json.loads(response)
    if 'nextPageToken' in json_response:
        threads = json_response['threads']
        self._add_items_to_threads(threads)
        # Follow pagination using the token from this page.
        next_url = Common.joinurl(self.project.config['API_ENDPOINT'], "users/me/threads?userId=me&includeSpamTrash=true&pageToken={}".format(json_response['nextPageToken']))
        self._build_fs(next_url)
    else:
        # Last page: no token, collect the remaining threads and stop.
        items = json_response['threads']
        self._add_items_to_threads(items)
def _build_fs(self, link, cursor=None):
    """Enumerate all dropbox delta entries into ``self.files``.

    Iterates (rather than recursing, which could exhaust the stack on
    accounts with many pages) over delta pages, passing the
    server-supplied cursor back until ``has_more`` is False. The
    signature ``(link, cursor)`` is preserved for existing callers.
    """
    while True:
        # Logged once per page, matching the original per-call log.
        self.project.log("transaction", "Calculating total dropbox items...", "info", True)
        if cursor:
            body = urllib.parse.urlencode({'cursor': cursor})
        else:
            body = ""
        response = Common.webrequest(link, self.oauth_provider.get_auth_header(), self.oauth_provider.http_intercept, body)
        json_response = json.loads(response)
        cursor = json_response['cursor']
        for item in json_response['entries']:
            # Entries look like [path, metadata] pairs; keep the metadata
            # element (presumably — TODO confirm against the API schema).
            self.files.append(item[1])
        if not json_response['has_more']:
            break
def parseEntries(contents):
    """
    Return entries listed in given wikipage.

    :param contents: wikicode of page with lists
    :return: list of entry-dict items
    """
    units = []
    header_t = u'{{user:Lokal Profil/LSH2'
    row_t = u'{{User:Lokal Profil/LSH3'
    # Outer loop: one LSH2 template == one table of entries.
    while(True):
        table, contents, lead_in = Common.findUnit(contents, header_t, u'|}')
        if not table:
            break
        # Middle loop: one LSH3 template == one entry row of the table.
        while(True):
            unit, table, dummy = Common.findUnit(table, row_t, u'}}',
                                                 brackets={u'{{': u'}}'})
            if not unit:
                break
            # Defaults for every recognised template parameter.
            params = {u'name': '',
                      u'more': '',
                      u'frequency': '',
                      u'technique': '',
                      u'creator': '',
                      u'link': '',
                      u'category': '',
                      u'other': ''
                      }
            # Inner loop: one '|'-delimited parameter of the row.
            while(True):
                part, unit, dummy = Common.findUnit(
                    unit, u'|', u'\n', brackets={u'[[': u']]', u'{{': u'}}'})
                if not part:
                    break
                if u'=' in part:
                    part = part.replace(u'<small>', '').replace(u'</small>', '')
                    part = part.strip(' \n\t')
                    # can't use split as coord uses second equality sign
                    pos = part.find(u'=')
                    key = part[:pos].strip()
                    value = part[pos + 1:].strip()
                    if value:
                        if (key) in params.keys():
                            # multi-valued fields are '/'-separated
                            params[key] = value.split(u'/')
                        else:
                            print u'Unrecognised parameter: %s = %s' \
                                % (key, value)
            units.append(params.copy())
        # end units
    # end tables
    return units
def sync(self):
    """Acquire dropbox metadata (and file content in "full" mode) by
    queueing items onto a threaded Downloader, then block until done."""
    d1 = datetime.now()
    d = Downloader.Downloader(self.project, self.oauth_provider.http_intercept, self._save_file, self.oauth_provider.get_auth_header, self.project.threads)
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.files)
    self.project.log("transaction", "Total items queued for acquisition: " + str(cnt), "info", True)
    self.metadata()
    for file in self.files:
        self.project.log("transaction", "Calculating " + file['path'], "info", True)
        if file['is_dir'] == False:
            # Defer the download-URI lookup until a worker dequeues the
            # slip; `file` is bound as a default arg to avoid the
            # late-binding-closure pitfall.
            download_uri = lambda f=file: self._get_download_uri(f)
            metadata_download_uri = self.oauth_provider.config['API_ENDPOINT'] + '/metadata/auto' + file['path']
            parentmap = self._get_parent_mapping(file)
            filetitle = self._get_file_name(file)
            orig = os.path.basename(file['path'])
            if filetitle != orig:
                # Name had to be sanitised for the local filesystem.
                self.project.log("exception", "Normalized '{}' to '{}'".format(orig, filetitle), "warning", True)
            if 'bytes' in file:
                self.file_size_bytes += int(file['bytes'])
            # Metadata is saved next to the data tree as <name>.json.
            save_metadata_path = Common.assert_path(os.path.normpath(os.path.join(os.path.join(self.project.project_folders['metadata'], parentmap), filetitle + ".json")), self.project)
            if save_metadata_path:
                self.project.log("transaction", "Queueing {} for download...".format(orig), "info", True)
                d.put(Downloader.DownloadSlip(metadata_download_uri, file, save_metadata_path, 'path'))
            if self.project.args.mode == "full":
                # Full mode also queues the file content itself.
                save_download_path = Common.assert_path(os.path.normpath(os.path.join(os.path.join(self.project.project_folders['data'], parentmap), filetitle)), self.project)
                if save_download_path:
                    self.project.log("transaction", "Queueing {} for download...".format(orig), "info", True)
                    d.put(Downloader.DownloadSlip(download_uri, file, save_download_path, 'path'))
    self.project.log("transaction", "Total size of files to be acquired is {}".format(Common.sizeof_fmt(self.file_size_bytes, "B")), "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    d.start()
    d.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.project.log("transaction", "Acquisition completed in {}".format(str(delt)), "highlight", True)
def op_intersection(left, right):
    '''
    :param left:
    :type left: :class:`~ExprArg`
    :param right:
    :type right: :class:`~ExprArg`
    :returns: :class:`~ExprArg`

    Computes the set intersection (left & right)
    '''
    assert isinstance(left, ExprArg)
    assert isinstance(right, ExprArg)
    if left.getInts() or right.getInts():
        sys.exit("FIXME ints intersection")
    pairs = getSetInstancePairs(left, right)
    intersection = {}
    for key, ((lexpr, lpol), (rexpr, rpol)) in pairs.items():
        # An instance definitely absent from either operand cannot be in
        # the intersection.
        if Common.DEFINITELY_OFF in (lpol, rpol):
            continue
        intersection[key] = (mAnd(lexpr, rexpr),
                             Common.aggregate_polarity(lpol, rpol))
    return ExprArg(intersection)
def __init__(self):
    """Title-screen setup: load the background image, spawn the prince
    sprite, draw the start prompt and enter the event loop."""
    self.common = Common('title_screen.gif')
    self.level_to_load = 1
    self.screen = Screen()
    self.level_sprites = pg.sprite.Group()
    self.prince = Prince()
    self.level_sprites.add(self.prince)
    surface = pg.display.get_surface()
    background = self.common.get_image(x=0, y=0, width=800, height=600, scale_to=1.3)
    surface.blit(background, (0, 0))
    prompt_font = self.common.get_font()
    prompt = prompt_font.render("Press space to start!", 1, (255, 255, 255))
    surface.blit(prompt, (280, 530))
    pg.display.update()
    self.events()
def _save_raw_mail(self, data, slip):
    # Decode a GMail API "raw" message response, append it to one mbox
    # per label, save the raw message to disk and extract attachments.
    data = data.read().decode('utf-8')
    msg = json.loads(data)
    msg_data = msg["raw"]
    # "raw" is URL-safe base64 of the full RFC-2822 message.
    msg_data = base64.urlsafe_b64decode(msg_data).decode('utf-8')
    labels = msg["labelIds"]
    data_dir = self.project.project_folders["data"]
    for label in labels:
        # One mbox file per GMail label.
        mbox = mailbox.mbox(os.path.join(self.mbox_dir, label))
        mbox_msg = email.message_from_bytes(msg_data.encode(), mailbox.mboxMessage)
        mbox.add(mbox_msg)
        label_path = os.path.join(data_dir, label)
        save_path = os.path.join(label_path, slip.savepath)
        save_path = Common.assert_path(save_path, self.project)
        if save_path:
            if not os.path.isdir(os.path.dirname(save_path)):
                os.makedirs(os.path.dirname(save_path), exist_ok=True)
            self.project.savedata(msg_data, save_path, False)
            self.project.log("transaction", "Saved file to " + save_path, "info", True)
            # Walk MIME parts; anything with a Content-Disposition is
            # treated as an attachment.
            for part in mbox_msg.walk():
                content_disposition = part.get("Content-Disposition", None)
                if content_disposition:
                    data = part.get_payload(decode=True)
                    att_name = part.get_filename()
                    if att_name:
                        # Attachments go in a dir named after the message
                        # file (savepath up to its first '.').
                        att_dir = os.path.join(label_path, slip.savepath[:slip.savepath.index('.')])
                        att_path = os.path.join(att_dir, att_name)
                        os.makedirs(att_dir, exist_ok=True)
                        with open(att_path, 'wb') as f:
                            f.write(data)
                        # NOTE(review): logs save_path, not att_path —
                        # looks like a copy/paste slip; confirm intent.
                        self.project.log("transaction", "Saved attachment to " + save_path, "info", True)
        mbox.flush()
def __init__(self):
    """Initialise pygame, show the title screen with a start prompt,
    then hand control to the splash-screen loop."""
    pg.init()
    pg.display.set_caption('Prince of Persia!')
    self.screen = pg.display.set_mode((800, 600))
    self.level_to_load = 1
    self.level_sprites = pg.sprite.Group()
    self.prince = Prince()
    self.level_sprites.add(self.prince)
    self.common = Common('title_screen.gif')
    background = self.common.get_image(x=0, y=0, width=800, height=600, scale_to=1.3)
    self.screen.blit(background, (0, 0))
    title_font = self.common.get_font()
    prompt = title_font.render("Press space to start!", 1, (255, 255, 255))
    self.screen.blit(prompt, (280, 530))
    pg.display.flip()
    self.splash_screen()
def getSoundStatus(self, username):
    '''
    Fetch the sound on/off status of the device bound to a WeChat user.

    :param username: WeChat user name
    :return: ('success', status, seconds_since_pending_order) or
             ('fail', error_code, '') when unbound / on system error.
    '''
    if not username:
        return 'fail', errors.NOT_BIND, ''
    r = Db.select('t_user', where="wx_name=$username", vars=locals(), limit=1)  # check whether the user is bound
    if not r:  # user has not bound a device yet
        return 'fail', errors.NOT_BIND, ''
    u = r[0]  # first row is the current user
    r1 = Db.select('t_device', where="id=$u['device_id']", vars=locals(), limit=1)  # load basic device info
    if not r1:  # a missing device counts as a system error
        return 'fail', errors.ERROR_SYSTEM, ''
    d = r1[0]  # first row is the current device
    s = d['sound']  # status code to return
    t = 0
    # Check the order queue for the latest still-pending (status=1) command.
    r2 = Db.select('t_order_quene', what="code,time", where="device_id=$d['id'] and status=1", vars=locals(), order="time desc", limit=1)
    if r2:
        order = r2[0]  # most recent pending order
        t = Common.secToLast(order['time'])
        if order['code'] == orders.OPEN_SOUND:
            s = 3  # waiting to be switched on
        elif order['code'] == orders.CLOSE_SOUND:
            s = 4  # waiting to be switched off
    return 'success', s, t
def getDeviceLocationInfo(self, username):
    '''
    Read the current GPS coordinates of the device bound to a user.

    :param username: user name supplied by the WeChat user
    :return: ('success', result_dict) or ('fail', error_code)
    '''
    if not username:
        return 'fail', errors.NOT_BIND
    r = Db.select('t_user', where="wx_name=$username", vars=locals(), limit=1)  # check whether the user is bound
    if not r:  # user has not bound a device yet
        return 'fail', errors.NOT_BIND
    u = r[0]  # first row is the current user
    r1 = Db.select('t_device', where="id=$u['device_id']", vars=locals(), limit=1)  # load basic device info
    if not r1:  # a missing device counts as a system error
        return 'fail', errors.ERROR_SYSTEM
    d = r1[0]  # first row is the current device
    res = dict()  # result dictionary
    res['id'] = d['id']  # device ID
    # Latest attribute row that carries a real GPS fix ('-1,-1' == no fix).
    r3 = Db.select('t_device_attribute', where="device_id=$d['id'] and gps!='-1,-1'", vars=locals(), order="time desc", limit=1)
    if not r3:
        # No fix recorded yet: fall back to zero coordinates.
        res['lat'] = 0  # latitude
        res['lon'] = 0  # longitude
        res['last'] = 0  # time of last upload
    else:
        l = r3[0]
        gps = l['gps']
        g = gps.split(',')  # parse the 'lat,lon' pair
        res['lat'] = g[0]  # latitude
        res['lon'] = g[1]  # longitude
        res['last'] = Common.secToLast(l['time'])  # seconds since last upload
    return 'success', res
def _get_file_name(self, file):
    """Build a safe on-disk file name for a Drive item, embedding the
    export extension for Google-native types and a version suffix for
    duplicate titles."""
    mime_type = file['mimeType']
    title = file['title']
    version = ""
    drivetype = ""
    ext = ""
    if self.is_duplicate(file):
        # Disambiguate duplicates with their Drive version number.
        version = ' (' + file['version'] + ')'
    if ('application/vnd.google-apps' in mime_type) and (mime_type != "application/vnd.google-apps.folder"):
        if 'exportLinks' in file:
            # e.g. '...&exportFormat=docx' -> ext '.docx'; 13 == len('exportFormat=')
            export_link = self._get_download_url(file)
            ext = '.' + export_link[export_link.index('exportFormat=') + 13:]
            # Marker like '.document' taken from the google-apps mimeType.
            drivetype = mime_type[mime_type.rindex('.'):]
    if '.' in title:
        extension = title[title.rindex('.'):]
        base = title[:title.rindex('.')]
        filename = "{base}{extension}{drivetype}{ext}".format(base=base, extension=extension, drivetype=drivetype, ext=ext)
    else:
        filename = "{title}{drivetype}{ext}".format(title=title, drivetype=drivetype, ext=ext)
    # Second pass inserts the version suffix before the last extension.
    if '.' in filename:
        extension = filename[filename.rindex('.'):]
        base = filename[:filename.rindex('.')]
        filename = "{base}{version}{extension}".format(base=base, version=version, extension=extension)
    else:
        filename = "{title}{version}".format(title=title, version=version)
    return Common.safe_file_name(filename)
def _get_download_uri(self, file):
    """Resolve a direct-download URL for *file* via the /media endpoint.

    :return: the download URL string, or None when the response has none.
    """
    media_url = self.oauth_provider.config['API_ENDPOINT'] + '/media/auto' + file['path']
    raw = Common.webrequest(media_url, self.oauth_provider.get_auth_header(), self.oauth_provider.http_intercept)
    # dict.get returns None for a missing key, matching the old branch.
    return json.loads(raw).get('url')
def get(cls):
    """Return the lazily-created singleton system handler.

    On first use, instantiates either the class configured under
    services/system_class or the default ``_System``; subsequent calls
    reuse the cached instance.
    """
    if cls.system_handler:
        return cls.system_handler
    configured = Config.get_value("services", "system_class")
    if configured:
        cls.system_handler = Common.create_from_class_path(configured)
    else:
        cls.system_handler = _System()
    return cls.system_handler
def copy_files_upgrade(self):
    # Upgrade every listed component: optionally back up the existing
    # install (minus logs/service dirs), replace it with the shipped
    # files, then refresh the global version file.
    # NOTE(review): `except Exception, e` is Python-2-only syntax.
    try:
        upgrade_modules = self.get_upgrade_modules_list()
        backup_all = "-backup" in sys.argv or "-rpm" in sys.argv
        ver_dict = Common.get_versions_dict()
        backup_dir = "/".join((Common.Paths.LOGMIND_PATH, "backup", "components"))
        # Start from an empty backup directory.
        if not os.path.exists(backup_dir):
            os.makedirs(backup_dir)
        else:
            shutil.rmtree(backup_dir)
            os.makedirs(backup_dir)
        for module in upgrade_modules:
            module = module.strip()
            print "Upgrading " + module + " from version '" + str(ver_dict[module]) + "' to version '" + str(Version.VERSION[module]) + "'"
            for d in Common.Paths.COMPONENTS_DIRS_DICT[module]:
                # Source ships next to the installer; dest is the install.
                src = "/".join((os.path.dirname(sys.argv[0]), "logmind", d))
                dst = "/".join((Common.Paths.LOGMIND_PATH, d))
                if backup_all:
                    print "Creating backup of", d
                    #shutil.copytree(dst, "/".join((backup_dir,d)), ignore=shutil.ignore_patterns("log", "service"))
                    if os.path.isdir(dst):
                        for sub in os.listdir(dst):
                            print "backing up:", sub
                            # Skip runtime state directories.
                            if not sub in ["log", "logs", "service"]:
                                sub_src = "/".join((dst, sub))
                                sub_dst = "/".join((backup_dir, d, sub))
                                if not os.path.exists("/".join((backup_dir, d))):
                                    os.makedirs("/".join((backup_dir, d)))
                                if os.path.isdir(sub_src):
                                    shutil.copytree(sub_src, sub_dst)
                                else:
                                    shutil.copy(sub_src, sub_dst)
                    else:
                        shutil.copy(dst, "/".join((backup_dir, d)))
                print "Upgrading", d
                # Replace the destination wholesale with the new files.
                if os.path.isdir(dst):
                    shutil.rmtree(dst)
                    shutil.copytree(src, dst)
                else:
                    os.remove(dst)
                    shutil.copy(src, dst)
        # Updating global version file.
        ver_file = "/".join((os.path.dirname(sys.argv[0]), "logmind", "version"))
        shutil.copy(ver_file, Common.Paths.LOGMIND_PATH)
        return True
    except Exception, e:
        print "ERROR: ", e
        return False
def run(self):
    """Worker thread: convert a queued book to mobi and mail the result.

    Loads the pending conversion record, refreshes book image paths,
    invokes the html->mobi converter, updates status/paths in the
    database and finally sends the notification e-mail. On any failure
    the error is logged and the process exits with -1.
    """
    try:
        # Load the pending conversion record.
        wait_converts = Tbl_Wait_Converts()
        wait_converts_info = wait_converts.get(self.convert_id)
        if not wait_converts_info:
            # Call-style raise is valid in both Python 2 and 3
            # (the old `raise Exception, msg` form is Python-2-only).
            raise Exception('未找到待转换的书籍信息')
        # Load the images required by the book.
        book_img = Tbl_Book_Img()
        book_img_info = book_img.get(self.book_id)
        if book_img_info:
            # Update the locally-downloaded image paths.
            book_img.update_local_path(self.book_id, self.book_images_task.get())
        # Load the book record itself.
        books = Tbl_Books()
        book_info = books.get_by_book_id(self.book_id)
        ## Run the actual html -> mobi conversion.
        out_file_path = proc_helper.convert(
            str(wait_converts_info['book_html_local_path']),
            self.out_dir, book_info['book_author'],
            book_info['book_cover_local_path'])
        if out_file_path is None:
            # Conversion failed: flag the record and bail out.
            wait_converts.update_status(gk7.STATUS.get('error'), self.convert_id)
            raise Exception('转换html to mobi失败')
        # Success: mark complete and record the output path.
        wait_converts.update_status(gk7.STATUS.get('complete'), self.convert_id, out_file_path)
        # Update the book's file path.
        books.update_file_path(self.book_id, out_file_path)
        wait_email = Tbl_Wait_Emails()
        # Attach the converted file to the pending e-mail.
        wait_email.update_attach_file(self.email_id, out_file_path)
        # Load the pending e-mail record.
        wait_email_info = wait_email.get(self.email_id)
        if not wait_email_info:
            raise Exception('未找到待发送邮件信息,邮件ID:%s' % self.email_id)
        # Send the e-mail.
        Common.send_mail(self.send_mail_type, self.email_id,
                         wait_email_info['email_attach_file'],
                         str(wait_email_info['email_to_user']),
                         str(wait_email_info['email_title']),
                         str(wait_email_info['email_auth']))
    except Exception as err:
        logger.error(u'异步线程出错,转换ID:%s,错误信息:%s', self.convert_id, err)
        exit(-1)