def event(event_type, *args, **kwargs):
    if event_type in girls_data.girl_events:
        if girls_data.girl_events[event_type] is not None:
            call(girls_data.girl_events[event_type], *args, **kwargs)
    else:
        raise Exception("Unknown event: %s" % event_type)
def build_platform(rel_info):
    """Do a clean build of the platform (w/ experiment changes)."""
    args = rel_info.args
    logger.info("Building platform.")
    os.chdir(args.aosp_root)
    utils.call('make clean', verbose=args.verbose)
    utils.call('make -j %d' % (args.j), verbose=args.verbose)
def merge_branches(rel_info):
    """Merge the experiment branch.

    First figure out the exact branch name, then merge the experiment
    into the test branch.

    Throws:
        Exception: if the experiment branch does not exist.
    """
    args = rel_info.args
    os.chdir(os.path.join(args.aosp_root, 'frameworks', 'base'))
    logger.debug("Parsing experiment branches.")
    lines = subprocess.check_output('git branch -a', shell=True)
    exp_branches = []
    logging_branches = []
    for line in sorted(lines.split('\n')):
        line = line.strip()
        b = '/'.join(line.split('/')[1:])
        if line.startswith('remotes/%s/%s/android-%s' %
                           (args.remote, LOGGING_BRANCH_PREFIX, args.aosp_base)):
            logger.info("Found logging branch %s" % (b))
            logging_branches.append(b)
        if line.startswith('remotes/%s/%s/android-%s' %
                           (args.remote, EXPERIMENT_BRANCH_PREFIX, args.aosp_base)):
            if any([e in b for e in args.exp]):
                logger.info("Found experiment branch %s" % (b))
                exp_branches.append(b)
    if len(exp_branches) == 0:
        raise Exception(
            "No experiment branch found for experiment %s" % (args.exp))
    os.chdir(args.aosp_root)
    logger.info("Merging logging branches...")
    for b in logging_branches:
        utils.repo_forall('git merge %s -m "merge"' % (b), verbose=args.verbose)
    projs = utils.get_repo_projs(args.aosp_root)
    for exp in exp_branches:
        logger.info("Merging %s ..." % (exp))
        for proj in projs:
            os.chdir(os.path.join(args.aosp_root, proj))
            try:
                utils.call('git merge %s -m "test merge"' % (exp),
                           verbose=args.verbose)
            except Exception:
                logger.error("Failed to merge %s in repo %s" % (exp, proj))
                raise
            finally:
                os.chdir(args.aosp_root)
def _set_volume(self, volume):
    # remember
    old_volume = self.volume
    self.volume = volume
    # actually set the volume
    call('/usr/bin/amixer', '-c 0 set Master %s' % self.volume_map[volume])
    # sync the LEDs
    self._update_leds(volume, old_volume)
def __init__(self, objpath):
    self.objpath = objpath
    self.symbols = {}
    (dir, name) = os.path.split(objpath)
    (rawline, err_rawline) = utils.call(dir, 'objdump', '--dwarf=rawline', name)
    if err_rawline.strip().endswith('No such file'):
        raise IOError()
    self.files = {}
    self.collectFiles(rawline)
    (info, err_info) = utils.call(dir, 'objdump', '--dwarf=info', name)
    self.collectSymbols(info)
def level(self, value):
    value = int(value)
    if value >= 0:
        if value > self._lvl:
            self.max = value
            self._lvl = value
            if game_events["mobilization_increased"] is not None:
                call(game_events["mobilization_increased"])
        if value < self._lvl:
            self.decrease += self._lvl - value
            self._lvl = value
def _toggle_mute(self):
    if self.muted:
        call('amixer', '-c 0 set Master unmute')
        self.launchpad.note_on(self.position + MUTE_PAD, GREEN)
    else:
        call('amixer', '-c 0 set Master mute')
        self.launchpad.note_on(self.position + MUTE_PAD, RED)
    self.muted = not self.muted
    # use 0 to force redrawing
    self._update_leds()
def apply_planned(self):
    """Apply the planned changes to poverty."""
    callback = False
    if self._planned > 0:
        callback = True
    if self._value + self._planned >= 0:
        self._value += self._planned
    else:
        self._value = 0
    self._planned = 0
    if callback and game_events["poverty_increased"] is not None:
        call(game_events["poverty_increased"])
def fetch_task(self):
    '''Fetch a new grading task.'''
    js = utils.call(action='fetch_task', judge_id=self.judge_id,
                    contest_id=self.contest_id)
    if not js['success']:
        raise Exception('Task not successfully fetched.')
    return js
def surecall(cmd):
    for i in xrange(10):
        ret = call(cmd)
        if ret == 0:
            return
        time.sleep(10)
    raise Exception('Error running command: %s' % cmd)
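# Hedged usage sketch for surecall: `call` is assumed here to behave like a
# subprocess-style invocation returning a shell exit status (0 == success),
# so surecall retries a flaky command up to 10 times, 10 seconds apart,
# before giving up. The command below is hypothetical:
#
#   surecall('rsync -a build/ deploy-host:/srv/app/')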
def version(self, binary=None, webdriver_binary=None):
    command = "(Get-AppxPackage Microsoft.MicrosoftEdge).Version"
    try:
        return call("powershell.exe", command).strip()
    except (subprocess.CalledProcessError, OSError):
        self.logger.warning("Failed to call %s in PowerShell" % command)
        return None
def version(self, binary=None, webdriver_binary=None): """Retrieve the release version of the installed browser.""" version_string = call(binary, "--version").strip() m = re.match(r"Mozilla Firefox (.*)", version_string) if not m: return None return m.group(1)
def _command_jenkinsrun(args):
    '''Run task as part of a jenkins job.'''
    p = _make_task_argparser('jenkinsrun')
    p.add_argument('-s', '--submit', action='store_true', default=False,
                   help='Submit results to artifact storage at end of task')
    argconfig = p.parse_args(args)
    argconfig.existing = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-jenkinsrun-build', buildconfig,
                                         keep_buildconfig=True)
    if argconfig.submit:
        build += _mk_submit_results(buildname)
    with open("run.sh", "w") as runfile:
        runfile.write(build)
    retcode = utils.call(['/bin/sh', 'run.sh'])
    if retcode != 0:
        sys.stdout.write("*Build failed!* (return code %s)\n" % retcode)
        sys.stdout.flush()
        taskdir = os.path.dirname(buildconfig.taskfilename)
        repro_script = os.path.join(taskdir, 'repro_message.sh')
        if os.access(repro_script, os.X_OK):
            utils.check_call([repro_script, _userdir, buildconfig.taskfilename])
        sys.exit(retcode)
def setup(self):
    logger.info("News_Report: In setup")
    # Start caller/messager list
    r = redis.StrictRedis(host='localhost', port=6379, db=0)
    r.set(self.caller_list, [])
    # From now on, get the list as:
    #   numbers = ast.literal_eval(r.get(self.caller_list))
    #   numbers.append(incoming)
    #   r.set(self.caller_list, numbers)

    # Check that the sound file is reachable.
    import requests
    response = requests.head(self.sound_url)
    if response.status_code != 200:
        logger.error('No sound file available at url: {}'.format(self.sound_url))

    # Allocate an outgoing line.
    print "%d free phone lines available" % r.llen('outgoing_unused')
    number = r.rpoplpush('outgoing_unused', 'outgoing_busy')

    # Place the call.
    call_result = call(
        to_number=self.station.cloud_number,
        from_number=number,
        gateway='sofia/gateway/utl/',
        answered='http://127.0.0.1:5000' + '/confer/' + episode_id + '/',
        extra_dial_string="bridge_early_media=true,hangup_after_bridge=true,"
                          "origination_caller_id_name=rootio,"
                          "origination_caller_id_number=" + number,
    )
    logger.info(str(call_result))
def submit_judgment(self, judgment_id, correct, metadata, **kwargs):
    '''Submits the result of the grading task.'''
    js = utils.call(action='submit_judgment', judgment_id=judgment_id,
                    judge_id=self.judge_id, contest_id=self.contest_id,
                    correct=correct,
                    metadata=json.dumps(metadata, separators=(',', ':')),
                    **kwargs)
    if not js['success']:
        raise Exception('Judgment not successfully submitted.')
    return js
def updateProject(name):
    projectDirectory = DevEnv.projectDirectory(name)
    basename = os.path.basename(projectDirectory)
    assert os.path.exists(projectDirectory), projectDirectory
    assert os.path.isdir(projectDirectory), projectDirectory
    # Update project.
    os.chdir(projectDirectory)
    result = None
    if os.path.exists(".svn"):
        result = utils.call("svn update")
    elif os.path.exists(".git"):
        result = utils.call("git pull")
    return result
def version(self, binary=None, channel=None):
    """Retrieve the release version of the installed browser."""
    binary = binary or self.find_binary(channel)
    version_string = call(binary, "--version").strip()
    m = re.match(r"Mozilla Firefox (.*)", version_string)
    if not m:
        return None
    return m.group(1)
def get_version_and_channel(self, binary):
    version_string = call(binary, "--version").strip()
    m = re.match(r"Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?", version_string)
    if not m:
        return None, "nightly"
    version, status = m.groups()
    channel = {"a": "nightly", "b": "beta"}
    return version, channel.get(status, "stable")
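# Hedged examples of the mapping above (the version strings are illustrative;
# the regex captures the numeric version plus an optional "a"/"b" suffix):
#   "Mozilla Firefox 71.0a1"  -> ("71.0", "nightly")
#   "Mozilla Firefox 70.0b5"  -> ("70.0", "beta")
#   "Mozilla Firefox 69.0.1"  -> ("69.0.1", "stable")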
def statusOfProject(name):
    projectDirectory = DevEnv.projectDirectory(name)
    assert os.path.exists(projectDirectory), projectDirectory
    assert os.path.isdir(projectDirectory), projectDirectory
    # Report project status.
    os.chdir(projectDirectory)
    sys.stdout.write("$%s\n" % (name.upper()))
    sys.stdout.flush()
    result = None
    if os.path.exists(".svn"):
        result = utils.call("svn status")
    elif os.path.exists(".git"):
        result = utils.call("git status")
    return result
def main(argv=None):
    args = docopt(__doc__, argv=argv)
    rules = yaml.load(open(args['--config']).read())
    existing_rules = get_existing_rules()
    for rule in rules:
        if rule in existing_rules:
            print('skipping rule', format(rule))
            continue
        print('enforcing rule', format(rule))
        command = ['upnpc', '-r', str(rule['port'])]
        if 'external_port' in rule:
            command += [str(rule['external_port'])]
        command += [rule['protocol']]
        utils.call(*command)
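# A hedged sketch of the YAML rule format main() appears to expect (field
# names inferred from the dict lookups above; the values are hypothetical):
#
#   - port: 8080
#     external_port: 80
#     protocol: TCP
#   - port: 22000
#     protocol: TCP
#
# The first rule would run: upnpc -r 8080 80 TCP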
def version(self, binary=None, webdriver_binary=None):
    if binary is None:
        return None
    output = call(binary, "--version")
    if output:
        # Stable release output looks like: "Web 3.30.2"
        # Tech Preview output looks like "Web 3.31.3-88-g97db4f40f"
        return output.split()[1]
    return None
def version(self, binary):
    """Retrieve the release version of the installed browser."""
    binary = binary or self.binary
    try:
        output = call(binary, "--version")
    except subprocess.CalledProcessError:
        logger.warn("Failed to call %s", binary)
        return None
    # Guard against a non-matching version string instead of raising
    # AttributeError on .group(0).
    m = re.search(r"[0-9\.]+( [a-z]+)?$", output.strip())
    if not m:
        return None
    return m.group(0)
def __init__(self):
    '''Initialize the auto grader.'''
    js = utils.call(action='initialize_judge')
    if not js['success']:
        raise Exception('Failed to initialize judge.')
    self.judge_id = int(js['judge_id'])
    self.contest_id = int(js['contest_id'])
    self.contest_type = js['contest_type']
    self.cache = {}
def _extract_debs(self, directory):
    """extract files from all debian packages in a directory"""
    os.chdir(directory)
    for file_name in os.listdir(directory):
        if fnmatch.fnmatch(file_name, '*.deb'):
            result = utils.call(['dpkg', '-x', file_name, 'temp'])
            if result:
                err = 'failed to extract package "%s"' % file_name
                raise utils.FatalException(err)
            logging.info('extracted "%s"', file_name)
def buildProject2(name):
    os.chdir(DevEnv.objectsBuildTypeRoot())
    baseName = DevEnv.projectBaseName(name)
    assert os.path.exists(baseName), baseName
    assert os.path.isdir(baseName), baseName
    os.chdir(baseName)
    return utils.call("%s" % (_nativeMake()))
def calcMr(Shp, B, T, D, W, A, Ix, Sx, Zx, Iy, J, Cw,
           E=200000., G=77000., Cf=0., Fy=350., phi=phi, omega2=1.0, L=0.):
    if Shp not in [Shp_W, Shp_WWF]:
        raise Exception("Only W and WWF shapes are implemented.")
    fclass, wclass, sclass = call(section_class, locals())
    Se = Sx
    if fclass == 4:
        if wclass < 4:
            bop = (B/2.0) - 200.0*T/sqrt(Fy)
            Se = (Ix - 4.0*bop*T*(D/2.0 - T/2.0)**2)/(D/2.0)
            add_note('Class 4 flange: Se = %.3g by Clause 13.5(c)(iii).' % (Se,))
        else:
            raise Exception("Flange and web are both class 4.")
    Mlim = Fy * (Zx if sclass in [1, 2] else Se)
    P = E*Iy*G*J
    Q = Iy*Cw*(pi*E)**2
    # the following was generated by Maxima:
    w2 = 1.0
    Lu = 0.067416*w2**0.5*((103684.0*Mlim**2*Q + 55516.5*w2**2*P**2)**0.5
                           + 235.619*w2*P)**0.5/Mlim
    if L <= 0.0:
        if sclass in [1, 2]:
            Mp = Zx * Fy * 1.0E-6
            Mr = phi * Mp
        elif sclass in [3] or (fclass == 4 and wclass <= 3):
            My = Se * Fy * 1.0E-6
            Mr = phi * My
        else:
            add_note('Class 4 webs not implemented.')
            Mr = 0.0
    else:
        Mu = (omega2*pi/L) * sqrt(P + Q/L**2) * 1.0E-6
        if sclass in [1, 2]:
            Mp = Fy * Zx * 1.0E-6
            if Mu > 0.67*Mp:
                Mr = 1.15*phi*Mp*(1.0 - 0.28*Mp/Mu)
                Mr = min(Mr, phi*Mp)
            else:
                Mr = phi * Mu
        elif sclass in [3] or (fclass == 4 and wclass <= 3):
            My = Fy * Se * 1.0E-6
            if Mu > 0.67*My:
                Mr = 1.15*phi*My*(1.0 - 0.28*My/Mu)
                Mr = min(Mr, phi*My)
            else:
                Mr = phi * Mu
        else:
            add_note('Class 4 webs not implemented.')
            Mr = 0.0
    return Lu, Mr
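# A lightly hedged reading of the code above, for reference: with
# P = E*Iy*G*J and Q = Iy*Cw*(pi*E)**2, the elastic lateral-torsional
# buckling moment computed in the L > 0 branch is the classic
#
#   Mu(L) = (omega2*pi/L) * sqrt(E*Iy*G*J + (pi*E/L)**2 * Iy*Cw)
#
# Expanding the Maxima-generated constants suggests Lu is the unbraced
# length at which 1.15*(1 - 0.28*Mlim/Mu) reaches 1.0 (i.e. Mu ~= 2.147*Mlim),
# so for L <= Lu the full resistance phi*Mlim would govern.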
def version(self, binary=None, webdriver_binary=None): """Retrieve the release version of the installed browser.""" binary = binary or self.binary try: output = call(binary, "--version") except subprocess.CalledProcessError: self.logger.warning("Failed to call %s" % binary) return None m = re.search(r"[0-9\.]+( [a-z]+)?$", output.strip()) if m: return m.group(0)
def listAllPackages():
    res = set()
    try:
        all, err = utils.call(".", "pkg-config", "--list-all")
        lines = all.splitlines()
        for line in lines:
            name = (line.split(" "))[0]
            res.add(name)
    except OSError:
        pass
    return sorted(list(res))
def fillStdIncludes():
    (out, err) = utils.call('.', '/usr/bin/env', 'g++', '-E', '-x', 'c++', '-', '-v')
    found = False
    for line in err.split('\n'):
        line = line.strip()
        if line == 'End of search list.':
            found = False
        if line == '#include <...> search starts here:':
            found = True
        else:
            if found:
                stdIncludes.append(line)
def version(self, binary=None):
    binary = binary or self.binary
    try:
        version_string = call(binary, "--version").strip()
    except subprocess.CalledProcessError:
        logger.warn("Failed to call %s", binary)
        return None
    m = re.match(r"Google Chrome (.*)", version_string)
    if not m:
        logger.warn("Failed to extract version from: %s", version_string)
        return None
    return m.group(1)
def find_binary(self, venv_path=None, channel=None):
    # Just get the current package name of the WebView provider.
    # For WebView, it is not trivial to change the WebView provider, so
    # we will just grab whatever is available.
    # https://chromium.googlesource.com/chromium/src/+/HEAD/android_webview/docs/channels.md
    command = ['adb', 'shell', 'dumpsys', 'webviewupdate']
    try:
        output = call(*command)
    except (subprocess.CalledProcessError, OSError):
        self.logger.warning("Failed to call %s" % " ".join(command))
        return None
    m = re.search(
        r'^\s*Current WebView package \(name, version\): \((.*), ([0-9.]*)\)$',
        output, re.M)
    if m is None:
        self.logger.warning(
            "Unable to find current WebView package in dumpsys output")
        return None
    self.logger.warning("Final package name: " + m.group(1))
    return m.group(1)
def version(self, binary=None, webdriver_binary=None):
    if binary is None:
        binary = self.find_binary()
    if self.platform != "win":
        try:
            version_string = call(binary, "--version").strip()
        except subprocess.CalledProcessError:
            self.logger.warning("Failed to call %s" % binary)
            return None
        m = re.match(r"Microsoft Edge (.*) ", version_string)
        if not m:
            self.logger.warning("Failed to extract version from: %s" % version_string)
            return None
        return m.group(1)
    else:
        if binary is not None:
            return _get_fileversion(binary, self.logger)
        self.logger.warning("Failed to find Edge binary.")
        return None
def scan(root):
    if not os.path.isdir(os.path.join(root, '.git')):
        return None
    patterns = {
        'Changes to be committed:': 'Staged',
        'Changes not staged for commit:': 'Modified',
        'Untracked files:': 'Untracked'
    }
    (out, err) = utils.call(root, 'git', 'status', '-u')
    lines = out.split('\n')
    n = len(lines)
    i = 0
    status = 'UNKNOWN'
    scanStage = -1
    rows = []
    while i < n:
        line = lines[i]
        for pat in patterns:
            if line.startswith(pat):
                status = patterns.get(pat)
                scanStage = 0
        if len(line.strip()) == 0:
            if scanStage >= 0:
                scanStage = scanStage + 1
        elif scanStage == 1:
            modifiers = []
            filename = line.strip()
            if filename.startswith('new file:'):
                modifiers.append("New")
                filename = (filename[9:]).strip()
            if filename.startswith('modified:'):
                filename = (filename[9:]).strip()
            if filename.startswith('deleted:'):
                filename = (filename[8:]).strip()
                modifiers.append("Deleted")
            fileStatus = status
            if len(modifiers) > 0:
                fileStatus = status + " (" + ','.join(modifiers) + ")"
            rows.append([filename, fileStatus])
        i = i + 1
    return rows
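# Hedged walk-through of the state machine above: a section header such as
# "Changes to be committed:" sets status and scanStage = 0; the next blank
# line bumps scanStage to 1; every non-blank line seen while scanStage == 1
# is then parsed as a file entry, e.g. "new file:   docs/notes.txt"
# (hypothetical file name) -> ['docs/notes.txt', 'Staged (New)'].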
def version(self, binary=None, webdriver_binary=None):
    if webdriver_binary is None:
        self.logger.warning("Cannot find Safari version without safaridriver")
        return None
    # Use `safaridriver --version` to get the version. Example output:
    # "Included with Safari 12.1 (14607.1.11)"
    # "Included with Safari Technology Preview (Release 67, 13607.1.9.0.1)"
    # The `--version` flag was added in STP 67, so allow the call to fail.
    try:
        version_string = call(webdriver_binary, "--version").strip()
    except subprocess.CalledProcessError:
        self.logger.warning("Failed to call %s --version" % webdriver_binary)
        return None
    m = re.match(r"Included with Safari (.*)", version_string)
    if not m:
        self.logger.warning("Failed to extract version from: %s" % version_string)
        return None
    return m.group(1)
def replay(accountid, locId):
    sl = utils.get_provider(accountid)
    if not sl:
        return
    info = utils.call(sl, lambda: sl.replay_info(locId))
    if info:
        is_helper = inputstreamhelper.Helper(info['protocol'], drm=info['drm'])
        if is_helper.check_inputstream():
            playitem = xbmcgui.ListItem(path=info['path'])
            playitem.setProperty('inputstreamaddon', is_helper.inputstream_addon)
            playitem.setProperty('inputstream.adaptive.manifest_type', info['protocol'])
            playitem.setProperty('inputstream.adaptive.license_type', info['drm'])
            playitem.setProperty('inputstream.adaptive.license_key', info['key'])
            xbmcplugin.setResolvedUrl(_handle, True, playitem)
def get_assignment_submission_status(self, assignment):
    '''
    params:
        assignment: assignment dictionary, must contain an id attribute
    returns:
        results: {
            "lastattempt": {
                "submission": {
                    "id": ,
                    "userid": ,
                    "attemptnumber": 0,
                    "timecreated": ,
                    "timemodified": ,
                    "status": "new",
                    ...
                }
    '''
    results = call(self.user_details['token'],
                   'mod_assign_get_submission_status',
                   assignid=assignment['id'],
                   userid=self.user_details['id'])
    return results
def create_party(client_type, client_level, legal_name, legal_representative,
                 address, contact, warrantor, warrantor_address, trade_phone,
                 trade_email, subsidiary, branch, sales, master_agreement_id,
                 host, token):
    return utils.call(
        'refPartySave', {
            'clientType': client_type,
            'clientLevel': client_level,
            'legalName': legal_name,
            'legalRepresentative': legal_representative,
            'address': address,
            'contact': contact,
            'warrantor': warrantor,
            'warrantorAddress': warrantor_address,
            'tradePhone': trade_phone,
            'tradeEmail': trade_email,
            'subsidiaryName': subsidiary,
            'branchName': branch,
            'salesName': sales,
            'masterAgreementId': master_agreement_id
        }, 'reference-data-service', host, token)
def test_gamma12(self):
    gamma = utils.call('qlBasketGammaCalc', self.params)
    price = utils.call(self.method, self.params)
    spot = self.params['spot']
    h0 = spot[0] / 100.0
    h1 = spot[1] / 100.0
    self.params['spot'] = [spot[0] + h0, spot[1] + h1]
    price1 = utils.call(self.method, self.params)
    self.params['spot'] = [spot[0] + h0, spot[1] - h1]
    price2 = utils.call(self.method, self.params)
    self.params['spot'] = [spot[0] - h0, spot[1] + h1]
    price3 = utils.call(self.method, self.params)
    self.params['spot'] = [spot[0] - h0, spot[1] - h1]
    price4 = utils.call(self.method, self.params)
    gamma12 = (price1 - price2 - price3 + price4) / 4.0 / h0 / h1
    self.assertAlmostEqual(gamma[0][1], gamma12, delta=h0 * h1)
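# Hedged sketch (not part of the test suite above): the stencil used in
# test_gamma12 is the standard second-order central difference for the
# cross gamma d2V/dS1dS2:
#   [V(+h0,+h1) - V(+h0,-h1) - V(-h0,+h1) + V(-h0,-h1)] / (4*h0*h1)
# A quick self-contained check on f(x, y) = x*y, whose mixed partial is 1:
def _cross_diff(f, x, y, h0, h1):
    return (f(x + h0, y + h1) - f(x + h0, y - h1)
            - f(x - h0, y + h1) + f(x - h0, y - h1)) / (4.0 * h0 * h1)

assert abs(_cross_diff(lambda x, y: x * y, 2.0, 3.0, 0.01, 0.01) - 1.0) < 1e-9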
def replay(sl, locId):
    try:
        info = utils.call(sl, lambda: sl.replay_info(locId))
    except StreamNotResolvedException as e:
        xbmcgui.Dialog().ok(heading=_addon.getAddonInfo('name'),
                            line1=_addon.getLocalizedString(e.id))
        xbmcplugin.setResolvedUrl(_handle, False, xbmcgui.ListItem())
        return
    if info:
        is_helper = inputstreamhelper.Helper(info['protocol'], drm=info['drm'])
        if is_helper.check_inputstream():
            playitem = xbmcgui.ListItem(path=info['path'])
            playitem.setProperty('inputstreamaddon', is_helper.inputstream_addon)
            playitem.setProperty('inputstream.adaptive.manifest_type', info['protocol'])
            playitem.setProperty('inputstream.adaptive.license_type', info['drm'])
            playitem.setProperty('inputstream.adaptive.license_key', info['key'])
            xbmcplugin.setResolvedUrl(_handle, True, playitem)
def create_run_as_root(self):
    if os.path.exists("/usr/bin/gksudo"):
        return GkSudoRunAsRoot()
    elif os.path.exists("/usr/bin/kdesudo"):
        return KdeSudoRunAsRoot()
    elif os.path.exists("/usr/bin/beesu"):
        return BeesuRunAsRoot()
    else:
        if os.isatty(0):
            os.environ["SUDO_ASKPASS"] = path.join(conf.SCRIPT_DIR, "bin",
                                                   "ask-password")
            version = None
            try:
                version = utils.call(["sudo", "-V"],
                                     output=True)[1].split("\n")[0].split()[2]
            except:
                pass
            if version >= "1.7.1":
                return SudoRunAsRoot()
            return XtermRunAsRoot()
        # A bare string cannot be raised; use a proper exception.
        raise Exception("No 'run as root' command available")
def parseDynamic(self, path, symbols, f):
    (out, err) = utils.call('.', 'objdump', '-T', '-C', path)
    refs = set()
    if f not in self.libraryMap:
        refs.add(baseLibName(f))
    else:
        refs = self.libraryMap.get(f)
    dump = out.split('\n')
    for line in dump:
        parts = line.split()
        if len(parts) >= 7:
            sym = parts[-1]
            par = sym.find('(')
            if par > 0:
                sym = sym[0:par]
            if sym not in symbols:
                s = set()
                s.update(refs)
                symbols[sym] = s
            else:
                s = symbols.get(sym)
                s.update(refs)
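# Hedged sample of the `objdump -T -C` lines the loop above expects (the
# columns are illustrative; only lines with >= 7 whitespace-separated fields
# are parsed, and the demangled symbol name is taken from the last column):
#
#   0000000000001040 g    DF .text  0000000000000000  GLIBC_2.2.5 printf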
def main():
    logdbg("Starting seafile container ...")
    if not exists(shared_seafiledir):
        os.mkdir(shared_seafiledir)
    if not exists(generated_dir):
        os.makedirs(generated_dir)

    if is_https():
        logdbg("Initializing letsencrypt ...")
        init_letsencrypt()

    logdbg("Generating nginx config ...")
    generate_local_nginx_conf()
    logdbg("Reloading nginx ...")
    call('nginx -s reload')

    logdbg("Waiting for mysql server ...")
    wait_for_mysql()
    init_seafile_server()
    check_upgrade()
    os.chdir(installdir)

    admin_pw = {
        'email': get_conf('SEAFILE_ADMIN_EMAIL', '*****@*****.**'),
        'password': get_conf('SEAFILE_ADMIN_PASSWORD', 'asecret'),
    }
    password_file = join(topdir, 'conf', 'admin.txt')
    with open(password_file, 'w') as fp:
        json.dump(admin_pw, fp)

    try:
        call('{} start'.format(get_script('seafile.sh')))
        call('{} start'.format(get_script('seahub.sh')))
    finally:
        if exists(password_file):
            os.unlink(password_file)

    loginfo("Seafile server is running now.")
    try:
        watch_controller()
    except KeyboardInterrupt:
        loginfo("Stopping seafile server.")
        sys.exit(0)
def look_for_virtualbox(self):
    logging.debug("Checking VirtualBox binaries")
    vbox_path = utils.call(["which", "VirtualBox"], output=True)[1].strip()
    if not os.path.lexists(vbox_path):
        open("/etc/apt/sources.list", "a").write(
            "deb http://download.virtualbox.org/virtualbox/debian %s non-free\n"
            % (self.codename.lower(),))
        os.system(
            "wget -q http://download.virtualbox.org/virtualbox/debian/oracle_vbox.asc -O- | sudo apt-key add -"
        )
        gui.wait_command(
            ["apt-get", "update"],
            msg="Your system is being updated")
        gui.wait_command(
            ["apt-get", "-y", "install", "virtualbox-3.1"],
            **self.get_generic_installation_messages("VirtualBox 3.1"))
    lsmod = self.call([["lsmod"], ["grep", "vboxdrv"]], output=True)[1]
    if not lsmod:
        gui.wait_command(["/etc/init.d/vboxdrv", "setup"],
                         msg='Configuring "VirtualBox 3".')
    LinuxBackend.look_for_virtualbox(self)
def create_vol(name, underlyer, instance, spot, tenors, strike_percent,
               labels, vols, val, host, token):
    params = {
        'save': True,
        'modelName': name,
        'valuationDate': val.strftime(_date_fmt),
        'instance': instance,
        'underlyer': {
            'instrumentId': underlyer,
            'instance': instance,
            'field': 'close' if instance.upper() == 'CLOSE' else 'last',
            'quote': spot
        },
        'instruments': create_vol_instruments(tenors, strike_percent, labels, vols),
        'daysInYear': 245,
        'useCalendarForTenor': True,
        'calendars': [calendar_name],
        'useVolCalendar': True,
        'volCalendar': vol_calendar_name
    }
    return utils.call('mdlVolSurfaceInterpolatedStrikeCreate', params,
                      'model-service', host, token)
def play_archive(station_id, catchup_id, askpin):
    logger.log.info('play archive: ' + station_id + ' catchup_id: ' + str(catchup_id))
    sl = skylink.Skylink(_user_name, _password, _profile, _provider)
    if askpin != 'False':
        pin_ok = utils.ask_for_pin(sl)
        if not pin_ok:
            xbmcplugin.setResolvedUrl(_id, False, xbmcgui.ListItem())
            return
    try:
        info = utils.call(sl, lambda: sl.replay_info(catchup_id))
    except skylink.StreamNotResolvedException as e:
        xbmcgui.Dialog().ok(_addon.getAddonInfo('name'), _addon.getLocalizedString(e.id))
        xbmcplugin.setResolvedUrl(_id, False, xbmcgui.ListItem())
        return
    if info:
        is_helper = inputstreamhelper.Helper(info['protocol'], drm=info['drm'])
        if is_helper.check_inputstream():
            playitem = xbmcgui.ListItem(path=info['path'])
            if _python3:
                # Python 3.x
                playitem.setProperty('inputstream', is_helper.inputstream_addon)
            else:
                # Python 2.5+
                playitem.setProperty('inputstreamaddon', is_helper.inputstream_addon)
            playitem.setProperty('inputstream.adaptive.manifest_type', info['protocol'])
            playitem.setProperty('inputstream.adaptive.license_type', info['drm'])
            playitem.setProperty('inputstream.adaptive.license_key', info['key'])
            playitem.setProperty('inputstream.adaptive.stream_headers', info['headers'])
            xbmcplugin.setResolvedUrl(_id, True, playitem)
def _buildCMakeTarget(objectsDirectory, target):
    if len(_devenvSettings["callInsteadOfCMakeTarget"]):
        status = utils.call(_devenvSettings["callInsteadOfCMakeTarget"])
        return status
    cwd = os.getcwd()
    os.chdir(objectsDirectory)
    if "MAKEFLAGS" in os.environ:
        # Gnu make controls most makefiles in project directories. On windows we
        # use nmake to build targets for C++ code. When nmake is (indirectly)
        # called from gnu make it complains about the MAKEFLAGS set by gnu make.
        # See it in action by running 'make all' in one of the C++ targets
        # directories.
        del os.environ["MAKEFLAGS"]
    # WARNING, not all cmake generated targets support fast.
    if _devenvSettings["cmakeFast"]:
        target += _devenvSettings['_makeTargetSlash'] + "fast"
    status = CMake.build(target, _devenvSettings["makeNrOfJobs"])
    os.chdir(cwd)
    # return status, stdout, stderr
    return status
def LAPS(img, lines, size=10):
    print(utils.call("LAPS(img, lines)"))
    __points, points = laps_intersections(lines), []
    debug.image(img).points(__points, size=3).save("laps_in_queue")
    for pt in __points:
        # pixels are in integers
        pt = list(map(int, pt))
        # size of our analysis area
        lx1 = max(0, int(pt[0] - size - 1))
        lx2 = max(0, int(pt[0] + size))
        ly1 = max(0, int(pt[1] - size))
        ly2 = max(0, int(pt[1] + size + 1))
        # cropping for detector
        dimg = img[ly1:ly2, lx1:lx2]
        dimg_shape = np.shape(dimg)
        # not valid
        if dimg_shape[0] <= 0 or dimg_shape[1] <= 0:
            continue
        # use neural network
        re_laps = laps_detector(dimg)
        if not re_laps[0]:
            continue
        # add if okay
        if pt[0] < 0 or pt[1] < 0:
            continue
        points += [pt]
    points = laps_cluster(points)
    debug.image(img).points(points, size=5,
                            color=debug.color()).save("laps_good_points")
    return points
def main():
    if not exists(shared_seafiledir):
        os.mkdir(shared_seafiledir)
    if not exists(generated_dir):
        os.makedirs(generated_dir)

    if is_https():
        init_letsencrypt()
    # generate_local_nginx_conf()
    # call('nginx -s reload')

    wait_for_mysql()
    init_seafile_server()

    call('/scripts/create_data_links.sh')
    check_upgrade()
    os.chdir(installdir)

    admin_pw = {
        'email': get_conf('SEAFILE_ADMIN_EMAIL', '*****@*****.**'),
        'password': get_conf('SEAFILE_ADMIN_PASSWORD', 'asecret'),
    }
    password_file = join(topdir, 'conf', 'admin.txt')
    with open(password_file, 'w') as fp:
        json.dump(admin_pw, fp)

    try:
        call('{} start'.format(get_script('seafile.sh')))
        call('{} start'.format(get_script('seahub.sh')))
    finally:
        if exists(password_file):
            os.unlink(password_file)

    print 'seafile server is running now.'
    try:
        watch_controller()
    except KeyboardInterrupt:
        print 'Stopping seafile server.'
        sys.exit(0)
def init_letsencrypt():
    loginfo('Preparing for letsencrypt ...')
    utils.nginx.wait_for_nginx()
    if not exists(ssl_dir):
        os.mkdir(ssl_dir)

    domain = get_conf('SEAFILE_SERVER_HOSTNAME', 'seafile.example.com')
    context = {
        'ssl_dir': ssl_dir,
        'domain': domain,
    }
    render_template('/templates/letsencrypt.cron.template',
                    join(generated_dir, 'letsencrypt.cron'), context)

    ssl_crt = '/shared/ssl/{}.crt'.format(domain)
    if exists(ssl_crt):
        loginfo('Found existing cert file {}'.format(ssl_crt))
        if cert_has_valid_days(ssl_crt, 30):
            loginfo('Skip letsencrypt verification since we have a valid certificate')
            if exists(join(ssl_dir, 'letsencrypt')):
                # Create a crontab to auto renew the cert for letsencrypt.
                call('/scripts/auto_renew_crt.sh {0} {1}'.format(ssl_dir, domain))
            return

    loginfo('Starting letsencrypt verification')
    # Create a temporary nginx conf to start a server, which would be
    # accessed by letsencrypt.
    utils.nginx.change_nginx_config(False)

    call('/scripts/ssl.sh {0} {1}'.format(ssl_dir, domain))
    # if call('/scripts/ssl.sh {0} {1}'.format(ssl_dir, domain), check_call=False) != 0:
    #     eprint('Now waiting 1000s for postmortem')
    #     time.sleep(1000)
    #     sys.exit(1)

    call('/scripts/auto_renew_crt.sh {0} {1}'.format(ssl_dir, domain))
def create_client_cash_flow(account_id, trade_id, cash_flow, margin_flow, host, token):
    trade = utils.call('trdTradeSearch', {'tradeId': trade_id},
                       'trade-service', host, token)[0]
    position = trade['positions'][0]
    direction = position['asset']['direction']
    client = position['counterPartyCode']
    task = utils.call('cliTasksGenerateByTradeId', {
        'legalName': client,
        'tradeId': trade_id
    }, 'reference-data-service', host, token)[0]
    utils.call(
        'clientChangePremium', {
            'tradeId': trade_id,
            'accountId': task['accountId'],
            'premium': task['premium'],
            'information': None
        }, 'reference-data-service', host, token)
    res = utils.call(
        'clientSaveAccountOpRecord', {
            'accountOpRecord': {
                'accountId': task['accountId'],
                'cashChange': task['premium'] * -1,
                'counterPartyCreditBalanceChange': 0,
                'counterPartyFundChange': 0,
                'creditBalanceChange': 0,
                'debtChange': 0,
                'event': 'CHANGE_PREMIUM',
                'legalName': client,
                'premiumChange': task['premium'],
                'tradeId': trade_id
            }
        }, 'reference-data-service', host, token)
    return utils.call('cliMmarkTradeTaskProcessed', {'uuidList': [task['uuid']]},
                      'reference-data-service', host, token)
def version(self, binary=None, webdriver_binary=None): """Retrieve the release version of the installed browser.""" output = call(binary, "--version") m = re.search(r"Servo ([0-9\.]+-[a-f0-9]+)?(-dirty)?$", output.strip()) if m: return m.group(0)
def SLID(img, segments):
    # FIXME: do 2 kinds of hashing (angles + bands [error - delta])
    print(utils.call("SLID(img, segments)"))
    global all_points
    all_points = []
    pregroup, group, hashmap, raw_lines = [[], []], {}, {}, []

    __cache = {}
    def __dis(a, b):
        idx = hash("__dis" + str(a) + str(b))
        if idx in __cache:
            return __cache[idx]
        __cache[idx] = np.linalg.norm(na(a) - na(b))
        return __cache[idx]

    X = {}
    def __fi(x):
        if x not in X:
            X[x] = 0
        if (X[x] == x or X[x] == 0):
            X[x] = x
        else:
            X[x] = __fi(X[x])
        return X[x]

    def __un(a, b):
        ia, ib = __fi(a), __fi(b)
        X[ia] = ib
        group[ib] |= group[ia]

    # shortest path // height
    nln = lambda l1, x, dx: \
        np.linalg.norm(np.cross(na(l1[1]) - na(l1[0]),
                                na(l1[0]) - na(x))) / dx

    def __similar(l1, l2):
        da, db = __dis(l1[0], l1[1]), __dis(l2[0], l2[1])
        # if da > db: l1, l2, da, db = l2, l1, db, da
        d1a, d2a = nln(l1, l2[0], da), nln(l1, l2[1], da)
        d1b, d2b = nln(l2, l1[0], db), nln(l2, l1[1], db)
        ds = 0.25 * (d1a + d1b + d2a + d2b) + 0.00001
        alfa = 0.0625 * (da + db)  # FIXME: difference???
        # if d1 + d2 == 0: d1 += 0.00001  # [FIXME]: divide by 0
        t1 = (da/ds > alfa and db/ds > alfa)
        if not t1:
            return False  # [FIXME]: dist???
        return True

    def __generate(a, b, n):
        points = []
        t = 1 / n
        for i in range(n):
            x = a[0] + (b[0] - a[0]) * (i * t)
            y = a[1] + (b[1] - a[1]) * (i * t)
            points += [[int(x), int(y)]]
        return points

    def __analyze(group):
        global all_points
        points = []
        for idx in group:
            points += __generate(*hashmap[idx], 10)
        _, radius = cv2.minEnclosingCircle(na(points))
        w = radius * (math.pi / 2)
        vx, vy, cx, cy = cv2.fitLine(na(points), cv2.DIST_L2, 0, 0.01, 0.01)
        all_points += points
        return [[int(cx - vx*w), int(cy - vy*w)],
                [int(cx + vx*w), int(cy + vy*w)]]

    for l in segments:
        h = hash(str(l))
        t1 = l[0][0] - l[1][0]
        t2 = l[0][1] - l[1][1]
        hashmap[h] = l
        group[h] = set([h])
        X[h] = h
        if abs(t1) < abs(t2):
            pregroup[0].append(l)
        else:
            pregroup[1].append(l)

    debug.image(img.shape) \
        .lines(pregroup[0], color=debug.color()) \
        .lines(pregroup[1], color=debug.color()) \
        .save("slid_pre_groups")

    for lines in pregroup:
        for i in range(len(lines)):
            l1 = lines[i]
            h1 = hash(str(l1))
            if (X[h1] != h1):
                continue
            for j in range(i + 1, len(lines)):
                l2 = lines[j]
                h2 = hash(str(l2))
                if (X[h2] != h2):
                    continue
                if not __similar(l1, l2):
                    continue
                __un(h1, h2)  # union & find
                # break  # FIXME

    __d = debug.image(img.shape)
    for i in group:
        if (X[i] != i):
            continue
        ls = [hashmap[h] for h in group[i]]
        __d.lines(ls, color=debug.color())
    __d.save("slid_all_groups")

    for i in group:
        if (X[i] != i):
            continue
        raw_lines += [__analyze(group[i])]

    debug.image(img.shape).lines(raw_lines).save("slid_final")
    debug.image(img.shape) \
        .points(all_points, color=(0, 255, 0), size=2) \
        .lines(raw_lines).save("slid_final2")
    return raw_lines
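# The __fi/__un pair above is a small union-find (disjoint set) keyed on line
# hashes, with group member-sets merged on union. A minimal standalone sketch
# of the same structure (with path halving in find), for reference:
def _uf_find(parent, x):
    while parent[x] != x:
        parent[x] = parent[parent[x]]  # path halving keeps trees shallow
        x = parent[x]
    return x

def _uf_union(parent, groups, a, b):
    ra, rb = _uf_find(parent, a), _uf_find(parent, b)
    if ra != rb:
        parent[ra] = rb
        groups[rb] |= groups[ra]  # merge member sets, as __un does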
def mock_processing_start(self):
    self.gpio_interrupt_high()
    self.log.info("Start PLD measurement")
    # Note: call(...) runs immediately and its return value is used as the
    # Timer delay; mock_processing_stop fires after that many seconds.
    t = Timer(call(self.timeout_callback, 0), self.mock_processing_stop)
    t.start()
def delete_pricing_environment(name, host, token):
    return utils.call('prcPricingEnvironmentDelete',
                      {'pricingEnvironmentId': name},
                      'pricing-service', host, token)
def list_pricing_environment(host, token):
    return utils.call('prcPricingEnvironmentsList', {},
                      'pricing-service', host, token)
def version(self, root): """Retrieve the release version of the installed browser.""" output = call(self.binary, "--version") return re.search(r"[0-9\.]+( [a-z]+)?$", output.strip()).group(0)
def version(self, root):
    output = call(self.binary, "--version")
    return re.search(r"[0-9\.]+( [a-z]+)?$", output.strip()).group(0)
def _buildWithMake(name, nrJobs):
    return utils.call("make -j%d %s" % (nrJobs, name))
def create_calendar(calendar_id, name, holidays, host, token):
    utils.call('refTradingCalendarCreate', {
        'calendarId': calendar_id,
        'calendarName': name,
        'holidays': holidays
    }, 'reference-data-service', host, token)
def init_seafile_server():
    version_stamp_file = get_version_stamp_file()
    if exists(join(shared_seafiledir, 'seafile-data')):
        if not exists(version_stamp_file):
            update_version_stamp(os.environ['SEAFILE_VERSION'])
        # The symlink is removed when the docker container finishes, so
        # recreate it if needed.
        latest_version_dir = '/opt/seafile/seafile-server-latest'
        current_version_dir = ('/opt/seafile/'
                               + get_conf('SEAFILE_SERVER', 'seafile-server')
                               + '-' + read_version_stamp())
        if not exists(latest_version_dir):
            call('ln -sf ' + current_version_dir + ' ' + latest_version_dir)
        loginfo('Skip running setup-seafile-mysql.py because there is existing seafile-data folder.')
        return

    loginfo('Now running setup-seafile-mysql.py in auto mode.')
    env = {
        'SERVER_NAME': 'seafile',
        'SERVER_IP': get_conf('SEAFILE_SERVER_HOSTNAME', 'seafile.example.com'),
        'MYSQL_USER': '******',
        'MYSQL_USER_PASSWD': str(uuid.uuid4()),
        'MYSQL_USER_HOST': '127.0.0.1',
        # Default MariaDB root user has empty password and can only connect from localhost.
        'MYSQL_ROOT_PASSWD': '',
    }

    # Change the script to allow mysql root password to be empty
    call('''sed -i -e 's/if not mysql_root_passwd/if not mysql_root_passwd and "MYSQL_ROOT_PASSWD" not in os.environ/g' {}'''
         .format(get_script('setup-seafile-mysql.py')))

    setup_script = get_script('setup-seafile-mysql.sh')
    call('{} auto -n seafile'.format(setup_script), env=env)

    domain = get_conf('SEAFILE_SERVER_HOSTNAME', 'seafile.example.com')
    proto = 'https' if is_https() else 'http'
    with open(join(topdir, 'conf', 'seahub_settings.py'), 'a+') as fp:
        fp.write('\n')
        fp.write("""CACHES = {
    'default': {
        'BACKEND': 'django_pylibmc.memcached.PyLibMCCache',
        'LOCATION': '127.0.0.1:11211',
    },
    'locmem': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}
COMPRESS_CACHE_BACKEND = 'locmem'""")
        fp.write('\n')
        fp.write('FILE_SERVER_ROOT = "{proto}://{domain}/seafhttp"'.format(proto=proto, domain=domain))
        fp.write('\n')

    # By default ccnet-server binds to the unix socket file
    # "/opt/seafile/ccnet/ccnet.sock", but /opt/seafile/ccnet/ is a mounted
    # volume from the docker host, and on windows and some linux environments
    # it's not possible to create unix sockets in external-mounted
    # directories. So we change the unix socket file path to
    # "/opt/seafile/ccnet.sock" to avoid this problem.
    with open(join(topdir, 'conf', 'ccnet.conf'), 'a+') as fp:
        fp.write('\n')
        fp.write('[Client]\n')
        fp.write('UNIX_SOCKET = /opt/seafile/ccnet.sock\n')
        fp.write('\n')

    # After the setup script creates all the files inside the
    # container, we need to move them to the shared volume
    #
    # e.g move "/opt/seafile/seafile-data" to "/shared/seafile/seafile-data"
    files_to_copy = ['conf', 'ccnet', 'seafile-data', 'seahub-data', 'pro-data']
    for fn in files_to_copy:
        src = join(topdir, fn)
        dst = join(shared_seafiledir, fn)
        if not exists(dst) and exists(src):
            shutil.move(src, shared_seafiledir)
            call('ln -sf ' + join(shared_seafiledir, fn) + ' ' + src)

    loginfo('Updating version stamp')
    update_version_stamp(os.environ['SEAFILE_VERSION'])
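# Hedged sketch of the layout the move-and-symlink loop above produces,
# assuming topdir == /opt/seafile and shared_seafiledir == /shared/seafile
# (as the hard-coded latest_version_dir and the "e.g" comment suggest):
#
#   /opt/seafile/conf         -> /shared/seafile/conf
#   /opt/seafile/ccnet        -> /shared/seafile/ccnet
#   /opt/seafile/seafile-data -> /shared/seafile/seafile-data
#   /opt/seafile/seahub-data  -> /shared/seafile/seahub-data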