def display_msg(msg, status=None, color=None):
    """Print msg left-justified to the widest width seen so far; optionally
    follow it with a colored status string."""
    # Track the widest message so later lines stay column-aligned.
    Configure.g_maxlen = max(Configure.g_maxlen, len(msg))
    if status:
        # Trailing comma keeps the cursor on the line for the colored status.
        print "%s :" % msg.ljust(Configure.g_maxlen),
        Params.pprint(color, status)
    else:
        print "%s" % msg.ljust(Configure.g_maxlen)
def __init__(self, path=None):
    """
    The path is an `wget -r` path. Meaning it has the parts:
    host/path?query. The cache implementation will determine
    the final local path name.
    """
    super(File, self).__init__()
    os.chdir(Params.ROOT)
    if path:
        # Map the URL-ish path to the backend's on-disk location.
        rpath = self.apply_rules(path)
        self.init(rpath)
        # symlink to rewritten path
        # if path != rpath and not os.path.exists(path):
        #     Params.log("Symlink: %s -> %s" %(path, rpath))
        #     os.makedirs(os.path.dirname(path))
        #     os.symlink(rpath, path)
        # check if target is symlink, must exist
        if os.path.islink(rpath):
            target = os.readlink(rpath)
            if not os.path.exists(target):
                Params.log("Warning: broken symlink, replacing: %s" % target)
                os.unlink(rpath)
        # check if target is partial, rename
        i = 1
        if os.path.exists(rpath + Params.PARTIAL):
            # Find the first free numbered backup slot for the stale partial.
            while os.path.exists("%s.%s%s" % (rpath, i, Params.PARTIAL)):
                i += 1
            shutil.copyfile(rpath + Params.PARTIAL,
                    "%s.%s%s" % (rpath, i, Params.PARTIAL))
            Params.log("Warning: backed up duplicate incomplete %s" % i)
            # XXX: todo: keep largest partial only
        # NOTE(review): assumes self.path was set by self.init() above.
        assert len(self.path) < 255, "LBYL, cache location path to long for Cache.File! "
def init(self, path): Params.log("FileTreeQH.init %r" % path, 5) # encode query if present sep = path.find( '?' ) # other encoding in query/fragment part if sep != -1: if '&' in path[sep:]: qsep='&' parts = path[sep:].split('&') elif ';' in path[sep:]: qsep=';' parts = path[sep:].split(';') else: qsep='' parts = [path[sep:]] parts.sort() path = path[ :sep ] + os.sep + '#' + md5(qsep.join(parts)).hexdigest() # optional removal of directories in entire path psep = Params.ENCODE_PATHSEP if psep: path = path.replace( '/', psep) # make archive path if Params.ARCHIVE: path = time.strftime( Params.ARCHIVE, time.gmtime() ) + path self.path = os.path.join(Params.ROOT, path) self.file = None
def densitySimulation(number=500, rRatio=10**4, p=1, q=0.625, mRatio=1,
                      qStar=.3, m=1, eta=0.1):
    """Run the disk-density eigenvalue simulation and print the eigenvalues.

    :param number: Number of cells to use. More means better resolution but
        slower run. Note cell size is on logarithmic scale.
    :param rRatio: Radius of the disk divided by the radius of the star
    :param p: Power law index of density. Between 1/2 and 2. Probably better
        modern estimates available
    :param q: Power law index of temperature/sound speed. Between 1/2 and 3/4.
        BUG FIX: was written `5 / 8`, which is integer division (0) under
        Python 2; 0.625 is the intended value and identical under Python 3.
    :param mRatio: Mass of the disk divided by the mass of the star
    :param qStar: Toomre Q at the radius of the star
    :param m: Disk wave number, or mode
    :param eta: Gravity softening parameter
    :return: None; eigenvalues are printed.
    """
    initParams = Params.InitialParameters(number, rRatio, p, q, mRatio,
                                          qStar, m, eta)
    derivedConstants = Params.DerivedConstants(initParams)
    analyticFunctions = Functions.AnalyticalFunctions(initParams,
                                                      derivedConstants)
    discreteFunctions = Functions.DiscreteFunctions(analyticFunctions)
    discreteFunctions.init()
    wMatrix = LinAl.WMatrix(discreteFunctions)
    wMatrix.init()
    eigenSolver = LinAl.EigenvalueSolver(wMatrix)
    eigenSolver.initEigen()
    print(eigenSolver.eigenvalues)
def load_data():
    """Collate per-iteration experiment outputs into a (k, n) matrix of
    median normalized final values.

    FIX: file handles were opened and never closed (leak); the broad
    `except Exception` around open() is narrowed to IOError so real parse
    errors are no longer silently skipped at the open step.
    """
    x = np.arange(100, Params.T + 1, 100)
    nvals = Params.lock_dl_n
    kvals = range(2, 11, 1)
    output = np.matrix(np.zeros((len(kvals), len(nvals))))
    Params.reset_params()
    for arg_list in Params.SensitivityParameters['Lock-v0']['decoding']:
        P = Params.Params(arg_list)
        collated = None
        for i in range(1, MAX_ITERS + 1):
            P.iteration = i
            fname = P.get_output_file_name()
            try:
                f = open(fname)
            except IOError:
                # Missing iteration output: skip it.
                continue
            with f:
                tmp = np.loadtxt(f, delimiter=',', dtype=float)
            if collated is None:
                collated = np.matrix(tmp)
            else:
                collated = np.vstack((collated, tmp))
        if collated is None:
            continue
        # Normalize reward curves by step count; take the median's last point.
        normalized = collated / x
        val = np.percentile(normalized, 50, axis=0)[-1]
        output[kvals.index(P.num_cluster), nvals.index(P.n)] = val
    return (output)
def get_backend(main=True):
    """Return the descriptor storage backend.

    With main=True a single shared writable instance is lazily created and
    reused; otherwise a fresh read-only instance is returned every call.
    """
    global backend
    if not main:
        return Params.descriptor_storage_type(Params.RESOURCES, 'r')
    if not backend:
        backend = Params.descriptor_storage_type(Params.RESOURCES)
    return backend
def display_msg(msg, status = None, color = None): Configure.g_maxlen = max(Configure.g_maxlen, len(msg)) if status: print "%s :" % msg.ljust(Configure.g_maxlen), Params.pprint(color, status) else: print "%s" % msg.ljust(Configure.g_maxlen)
def __init__(self, path=None):
    """
    The path is an `wget -r` path. Meaning it has the parts:
    host/path?query. The cache implementation will determine
    the final local path name.
    """
    super( File, self ).__init__()
    os.chdir(Params.ROOT)
    if path:
        # Translate the request path into the backend's storage path.
        rpath = self.apply_rules(path)
        self.init(rpath)
        # symlink to rewritten path
        #if path != rpath and not os.path.exists(path):
        #    Params.log("Symlink: %s -> %s" %(path, rpath))
        #    os.makedirs(os.path.dirname(path))
        #    os.symlink(rpath, path)
        # check if target is symlink, must exist
        if os.path.islink(rpath):
            target = os.readlink(rpath)
            if not os.path.exists(target):
                Params.log("Warning: broken symlink, replacing: %s" % target)
                os.unlink(rpath)
        # check if target is partial, rename
        i = 1
        if os.path.exists(rpath + Params.PARTIAL):
            # Pick the first unused numbered suffix for the backup copy.
            while os.path.exists('%s.%s%s' % (rpath, i, Params.PARTIAL)):
                i+=1
            shutil.copyfile(rpath+Params.PARTIAL,
                    '%s.%s%s' %(rpath,i,Params.PARTIAL))
            Params.log("Warning: backed up duplicate incomplete %s" % i)
            # XXX: todo: keep largest partial only
        # NOTE(review): relies on self.init() having set self.path.
        assert len(self.path) < 255, \
            "LBYL, cache location path to long for Cache.File! "
def send(self, sock):
    """Send buffered header data, or read and forward the next body chunk
    to the client; sets self.Done when the transfer is complete.

    FIX: the bare `except:` around sock.send also swallowed
    KeyboardInterrupt/SystemExit; narrowed to `except Exception`.
    """
    assert not self.Done
    if self.__sendbuf:
        # Flush pending (header) bytes first.
        bytecnt = sock.send(self.__sendbuf)
        self.__sendbuf = self.__sendbuf[bytecnt:]
    else:
        bytecnt = Params.MAXCHUNK
        # Clamp to the requested range end, when one is set (>= 0).
        if 0 <= self.__end < self.__pos + bytecnt:
            bytecnt = self.__end - self.__pos
        chunk = self.__protocol.read(self.__pos, bytecnt)
        if self.__protocol.rewrite:
            # Content rewriting may change the size; track the delta.
            delta, chunk = Rules.Rewrite.run(chunk)
            self.__protocol.size += delta
        try:
            self.__pos += sock.send(chunk)
        except Exception:
            Params.log("Error writing to client, aborted!")
            self.Done = True
            # Unittest 2: keep partial file
            # if not self.__protocol.cache.full():
            #     self.__protocol.cache.remove_partial()
            return
    # Done once nothing is buffered and we reached the known end or size.
    self.Done = not self.__sendbuf and (
        self.__pos >= self.__protocol.size >= 0
        or self.__pos >= self.__end >= 0)
def __init__(self, path):
    """Cache entry addressed by the MD5 hex digest of its path."""
    Params.log("RefHash.__init__ %r" % path, 5)
    super(RefHash, self).__init__(path)
    self.refhash = md5(path).hexdigest()
    self.path = Params.ROOT + self.refhash
    self.file = None
    # Incomplete downloads are kept in a separate directory under the root.
    if not os.path.exists(Params.ROOT + Params.PARTIAL):
        os.mkdir(Params.ROOT + Params.PARTIAL)
def display_msg(msg, status = None, color = None): sr = msg global g_maxlen g_maxlen = max(g_maxlen, len(msg)) if status: print "%s :" % msg.ljust(g_maxlen), Params.pprint(color, status) else: print "%s" % msg.ljust(g_maxlen)
def connect( addr ):
    """Resolve (host, port) via getaddrinfo, memoizing results in DNSCache."""
    assert Params.ONLINE, 'operating in off-line mode'
    if addr not in DNSCache:
        Params.log('Requesting address info for %s:%i' % addr, 2)
        try:
            DNSCache[ addr ] = socket.getaddrinfo(
                addr[ 0 ], addr[ 1 ], Params.FAMILY, socket.SOCK_STREAM )
        except Exception, e:
            # Wrap resolver failures in the proxy's own exception type.
            raise DNSLookupException(addr, e)
def prepare_nocache_response(self):
    "Blindly respond for NoCache rule matches. "
    # Strip the 'scheme://' prefix once; requri does not change in the loop.
    scheme_end = self.requri.find(':')
    bare_uri = self.requri[scheme_end + 3:]
    for pattern, compiled in Params.NOCACHE:
        if not compiled.match(bare_uri):
            continue
        Params.log('Not caching request, matches pattern: %r.' % pattern)
        self.Response = Response.BlindResponse
        return True
def rewrite(klass, pathref):
    """Apply every matching Params.JOIN (pattern, regex, repl) rule to
    pathref and return the possibly-rewritten path.

    FIX: removed the dead local `capture = True`, which was never read.
    """
    if Params.JOIN:
        for pattern, regex, repl in Params.JOIN:
            if regex.match(pathref):
                pathref = regex.sub(repl, pathref)
                Params.log("Joined URL matching rule %r" % pattern,
                           threshold=1)
    return pathref
def open_partial(self, offset=-1):
    """Reopen the incomplete download for appending; optionally truncate
    back to `offset` so a resumed range lines up with what is on disk."""
    self.path = Params.ROOT + Params.PARTIAL + os.sep + self.refhash
    self.mtime = os.stat( self.path ).st_mtime
    self.file = open( self.path, 'a+' )
    if offset >= 0:
        # The server's resume offset may not exceed what we already have.
        assert offset <= self.tell(), 'range does not match file in cache'
        self.file.seek( offset )
        self.file.truncate()
    Params.log('Resuming partial file in cache at byte %i' % self.tell(), 2)
def close( self ):
    """Close the partial file; restore its mtime and, when complete,
    move it to its final (non-partial) location."""
    self.path = Params.ROOT + Params.PARTIAL + os.sep + self.refhash
    size = self.tell()
    self.file.close()
    if self.mtime >= 0:
        os.utime( self.path, ( self.mtime, self.mtime ) )
    # Only a download that reached the announced size is finalized.
    if self.size == size:
        os.rename( self.path, Params.ROOT + self.refhash )
        Params.log('Finalized %s' % self.path, 2)
def display_msg(msg, status=None, color=None): sr = msg global g_maxlen g_maxlen = max(g_maxlen, len(msg)) if status: print "%s :" % msg.ljust(g_maxlen), Params.pprint(color, status) else: print "%s" % msg.ljust(g_maxlen)
def prepare_nocache_response(self):
    "Blindly respond for NoCache rule matches. "
    # Drop the scheme ('http://', etc.) before matching; the URI is fixed
    # for the duration of the loop so compute it once.
    p = self.requri.find(':')
    uri_sans_scheme = self.requri[p + 3:]
    for pattern, compiled in Params.NOCACHE:
        matched = compiled.match(uri_sans_scheme)
        if matched:
            Params.log('Not caching request, matches pattern: %r.' % pattern)
            self.Response = Response.BlindResponse
            return True
def __handle_size( self, code, line ):
    """Handle the reply to SIZE: record the file size, then ask for MDTM."""
    # 550 = file unavailable on the server.
    if code == 550:
        self.Response = Response.NotFoundResponse
        return
    assert code == 213,\
        'server sends %i; expected 213 (file status)' % code
    self.size = int( line )
    Params.log('File size: %s' % self.size)
    # Queue the modification-time request and advance the state machine.
    self.__sendbuf = 'MDTM %s\r\n' % self.__path
    self.__handle = FtpProtocol.__handle_mtime
def init(self, path):
    """Derive the cache path; when it exceeds MAX_PATH_LENGTH, the tail is
    replaced by an MD5 digest bucket."""
    Params.log("PartialMD5Tree.init %r" % path, 5)
    if Params.ARCHIVE:
        path = time.strftime( Params.ARCHIVE, time.gmtime() ) + path
    path = os.path.join(Params.ROOT, path)
    # 34 = len(os.sep + '#' + 32-char hex digest) reserved for the suffix.
    s = Params.MAX_PATH_LENGTH - 34
    if len(path) > Params.MAX_PATH_LENGTH:
        path = path[:s] + os.sep + '#' + md5(path[s:]).hexdigest()
    self.path = path
def connect(addr):
    """Resolve (host, port) with getaddrinfo, caching results in DNSCache."""
    assert Params.ONLINE, 'operating in off-line mode'
    if addr not in DNSCache:
        Params.log('Requesting address info for %s:%i' % addr, 2)
        try:
            DNSCache[addr] = socket.getaddrinfo(
                addr[0], addr[1], Params.FAMILY, socket.SOCK_STREAM)
        except Exception, e:
            # Surface resolver errors as the proxy's DNS exception.
            raise DNSLookupException(addr, e)
def __handle_size(self, code, line):
    """Handle the SIZE reply: store the size, then request MDTM."""
    # 550 = file unavailable.
    if code == 550:
        self.Response = Response.NotFoundResponse
        return
    assert code == 213,\
        'server sends %i; expected 213 (file status)' % code
    self.size = int(line)
    Params.log('File size: %s' % self.size)
    # Next state: parse the modification-time reply.
    self.__sendbuf = 'MDTM %s\r\n' % self.__path
    self.__handle = FtpProtocol.__handle_mtime
def run(klass, chunk):
    """Apply every Params.REWRITE substitution to chunk.

    Returns (delta, chunk): the net change in length and the rewritten data.
    """
    Params.log("Trying to rewrite chunk. ", 3)
    delta = 0
    for regex, repl in Params.REWRITE:
        if not regex.search(chunk):
            Params.log("No match on chunk", 4)
            continue
        rewritten, _count = regex.subn(repl, chunk)
        delta += len(rewritten) - len(chunk)
        chunk = rewritten
    return delta, chunk
def step( self, throw=None ):
    """Step the underlying Fiber while redirecting stdout/stderr to self,
    so output produced during the step is captured; always restores them."""
    stdout = sys.stdout
    stderr = sys.stderr
    try:
        sys.stdout = sys.stderr = self
        Fiber.step( self, throw )
        if self.state:
            Params.log('Waiting at %s'% self, 1)
    finally:
        # Restore the real streams even if the step raised.
        sys.stdout = stdout
        sys.stderr = stderr
def run(klass, chunk):
    """Apply Params.REWRITE substitutions; return (length delta, new chunk)."""
    # delta lets the caller adjust the announced content size.
    delta = 0
    Params.log("Trying to rewrite chunk. ", 3)
    for regex, repl in Params.REWRITE:
        if regex.search(chunk):
            new_chunk, count = regex.subn(repl, chunk)
            delta += len(new_chunk) - len(chunk)
            chunk = new_chunk
        else:
            Params.log("No match on chunk", 4)
    return delta, chunk
def recv(self, sock):
    """Receive FTP control-channel data; dispatch each complete reply line."""
    assert not self.hasdata()
    chunk = sock.recv(Params.MAXCHUNK)
    assert chunk, 'server closed connection prematurely'
    self.__recvbuf += chunk
    while '\n' in self.__recvbuf:
        reply, self.__recvbuf = self.__recvbuf.split('\n', 1)
        Params.log('S: %s' % reply.rstrip(), 2)
        # Final reply lines look like "NNN text"; "NNN-" marks continuation.
        if reply[:3].isdigit() and reply[3] != '-':
            self.__handle(self, int(reply[:3]), reply[4:])
            # Log the command the handler queued in response.
            Params.log('C: %s' % self.__sendbuf.rstrip(), 2)
def recv( self, sock ):
    """Buffer FTP control-channel replies; hand complete lines to the
    current state handler."""
    assert not self.hasdata()
    chunk = sock.recv( Params.MAXCHUNK )
    assert chunk, 'server closed connection prematurely'
    self.__recvbuf += chunk
    while '\n' in self.__recvbuf:
        reply, self.__recvbuf = self.__recvbuf.split( '\n', 1 )
        Params.log('S: %s' % reply.rstrip(), 2)
        # "NNN-" is a multi-line continuation; only "NNN " is dispatched.
        if reply[ :3 ].isdigit() and reply[ 3 ] != '-':
            self.__handle( self, int( reply[ :3 ] ), reply[ 4: ] )
            Params.log('C: %s' % self.__sendbuf.rstrip(), 2)
def Start():
    """Entry point: build the Params instance, seed the RNG, then let the
    GUI widgets populate the parameters."""
    print("Starting...")
    p = Params()       # create Params instance
    random.seed(30)    # fixed seed for reproducible runs
    p.setParams()      # call Widgets to set Params using GUI
def get_cache(hostinfo, req_path): """ req_path is a URL path ref including query-part, the backend will determine real cache location """ # Prepare default cache location cache_location = '%s:%i/%s' % (hostinfo + (req_path, )) cache_location = cache_location.replace(':80', '') cache = Cache.load_backend_type(Params.CACHE)(cache_location) Params.log("Init cache: %s %s" % (Params.CACHE, cache), 3) Params.log('Prepped cache, position: %s' % cache.path, 2) # XXX: use unrewritten path as descriptor key, need unique descriptor per resource cache.descriptor_key = cache_location return cache
def get_cache(hostinfo, req_path):
    """Instantiate the configured cache backend for host:port/path
    (query-part included); the backend decides the real disk location."""
    host, port = hostinfo
    location = '%s:%i/%s' % (host, port, req_path)
    # The default HTTP port is omitted from the cache key.
    location = location.replace(':80', '')
    backend_type = Cache.load_backend_type(Params.CACHE)
    cache = backend_type(location)
    Params.log("Init cache: %s %s" % (Params.CACHE, cache), 3)
    Params.log('Prepped cache, position: %s' % cache.path, 2)
    # XXX: use unrewritten path as descriptor key, need unique descriptor per resource
    cache.descriptor_key = location
    return cache
def control_proxy(self, status, protocol, request):
    """Answer a proxy-control request: echo the parsed JSON body (or the
    request headers) back as application/json."""
    head, body = request.recvbuf().split( '\r\n\r\n', 1 )
    req = { 'args': request.headers }
    if body:
        try:
            req = Params.json_read(body)
        except:
            # Malformed JSON body: re-raise after the debug hook below.
            #print "JSON: ",request.recvbuf()
            raise
    # TODO: echos only
    self.prepare_response(status, Params.json_write(req),
            mime="application/json")
def init(self, path):
    """Dispatch to the cache layout that fits the (possibly archived)
    path length: FileTreeQ for short paths, FileTreeQH when hashing the
    query suffices, PartialMD5Tree otherwise."""
    Params.log("FileTree.init %r" % path, 5)
    path2 = path
    if Params.ARCHIVE:
        path2 = time.strftime( Params.ARCHIVE, time.gmtime() ) + path2
    path2 = os.path.join(Params.ROOT, path2)
    if len(path2) >= Params.MAX_PATH_LENGTH:
        # Locate the query/fragment start; 34 = room for the hash suffix.
        sep = Cache.min_pos(path2.find('#'), path2.find( '?' ))
        if sep != -1:
            if (len(path2[:sep])+34) < Params.MAX_PATH_LENGTH:
                FileTreeQH.init(self, path)
            else:
                PartialMD5Tree.init(self, path)
    else:
        FileTreeQ.init(self, path)
def __init__( self, request ):
    """Start an FTP fetch; in static mode a complete cache entry is served
    directly without touching the network."""
    super(FtpProtocol, self).__init__( request )
    if Params.STATIC and self.cache.full():
        self.__socket = None
        Params.log("Static FTP cache : %s" % self.requri)
        self.cache.open_full()
        self.Response = Response.DataResponse
        return
    # Open the control connection and initialize the FTP state machine.
    self.__socket = connect(request.hostinfo)
    self.__path = request.Resource.ref.path
    self.__path_old = request.envelope[1] # XXX
    self.__sendbuf = ''
    self.__recvbuf = ''
    self.__handle = FtpProtocol.__handle_serviceready
def set(self, path, srcrefs, headers):
    """Store a descriptor record for `path`: source refs, media type,
    charset, languages and selected response headers, JSON-encoded."""
    assert path and srcrefs and headers, \
        (path, srcrefs, headers)
    assert isinstance(path, basestring) and \
        isinstance(srcrefs, list) and \
        isinstance(headers, dict)
    mt = headers.get('Content-Type', None)
    cs = None
    if mt:
        # Split "type/subtype; charset=..." into media type and charset.
        p = mt.find(';')
        if p > -1:
            match = re.search("charset=([^;]+)", mt[p:].lower())
            mt = mt[:p].strip()
            if match:
                cs = match.group(1).strip()
    ln = headers.get('Content-Language', [])
    if ln:
        ln = ln.split(',')
    # NOTE(review): srcref is currently unused; see commented merge below.
    srcref = headers.get('Content-Location', None)
    #if srcref and srcref not in srcrefs:
    #    srcrefs += [srcref]
    features = {}
    metadata = {}
    # Keep only the cache-relevant response headers.
    for hd in ('Content-Type', 'Content-Language', 'Content-MD5',
            'Content-Location', 'Content-Length', 'Content-Encoding',
            'ETag', 'Last-Modified', 'Date', 'Vary', 'TCN',
            'Cache', 'Expires'):
        if hd in headers:
            metadata[hd] = headers[hd]
    self.__be[path] = Params.json_write(
        (srcrefs, mt, cs, ln, metadata, features))
    self.__be.sync()
def __init__(self, request):
    """Begin an FTP transfer; a full cache entry in static mode is served
    without any network activity."""
    super(FtpProtocol, self).__init__(request)
    if Params.STATIC and self.cache.full():
        self.__socket = None
        Params.log("Static FTP cache : %s" % self.requri)
        self.cache.open_full()
        self.Response = Response.DataResponse
        return
    # Connect the control channel and seed the reply-handler state machine.
    self.__socket = connect(request.hostinfo)
    self.__path = request.Resource.ref.path
    self.__path_old = request.envelope[1]  # XXX
    self.__sendbuf = ''
    self.__recvbuf = ''
    self.__handle = FtpProtocol.__handle_serviceready
def paramObjects(fixed_params, vP_):
    """Build, initialize and package a params.Params object from the first
    fixed-parameter record and the variable parameters vP_.

    FIX: the LaTeX dictionary keys were plain strings containing '\\m' and
    '\\s', which are invalid escape sequences (DeprecationWarning today, a
    SyntaxError in future Python). Raw strings preserve the exact same byte
    values, so lookups are unchanged.
    """
    fp = fixed_params[0]
    p = params.Params()
    p.initializer(vP_, n=fp['n'], Dx=fp['$D_{x}$'],
                  mux1=fp[r'$\mu_{x_{1}}$'], mux2=fp[r'$\mu_{x_{2}}$'],
                  sx1=fp[r'$\sigma_{x_{1}}$'], sx2=fp[r'$\sigma_{x_{2}}$'],
                  dmux=vP_.delta_mu_x)
    p.packaging(vP_)
    return p
def main(args):
    """Run one experiment iteration: seed all RNGs, train the algorithm on
    the environment, and write the reward trace to the output file.

    Skips the run (returning None) when the output file already exists.
    """
    base_seed = args.seed + args.iteration * 29
    random.seed(base_seed)
    np.random.seed(base_seed)
    import torch
    torch.manual_seed(args.seed + args.iteration * 37)
    env = get_env(args.env, args)
    alg = get_alg(args.alg, args, env)
    P = Params.Params(vars(args))
    fname = P.get_output_file_name()
    if os.path.isfile(fname):
        print("[EXPERIMENT] Already completed")
        return None
    reward_vec = train(env, alg, args)
    print("[EXPERIMENT] Learning completed")
    with open(fname, 'w') as out:
        out.write(",".join(str(z) for z in reward_vec))
        out.write("\n")
    print("[EXPERIMENT] Done")
    return None
def set(self, path, srcrefs, headers):
    """Persist the descriptor for `path`: refs, media type/charset,
    languages and a whitelist of response headers, JSON-encoded."""
    assert path and srcrefs and headers, \
        (path, srcrefs, headers)
    assert isinstance(path, basestring) and \
        isinstance(srcrefs, list) and \
        isinstance(headers, dict)
    mt = headers.get('Content-Type', None)
    cs = None
    if mt:
        # Separate the charset parameter from the bare media type.
        p = mt.find(';')
        if p > -1:
            match = re.search("charset=([^;]+)", mt[p:].lower())
            mt = mt[:p].strip()
            if match:
                cs = match.group(1).strip()
    ln = headers.get('Content-Language',[])
    if ln:
        ln = ln.split(',')
    # NOTE(review): srcref is unused until the commented merge is enabled.
    srcref = headers.get('Content-Location', None)
    #if srcref and srcref not in srcrefs:
    #    srcrefs += [srcref]
    features = {}
    metadata = {}
    # Copy only the cache-relevant headers into the stored metadata.
    for hd in ('Content-Type', 'Content-Language', 'Content-MD5',
            'Content-Location', 'Content-Length', 'Content-Encoding',
            'ETag', 'Last-Modified', 'Date', 'Vary', 'TCN',
            'Cache', 'Expires'):
        if hd in headers:
            metadata[hd] = headers[hd]
    self.__be[path] = Params.json_write((srcrefs, mt, cs, ln,
        metadata, features))
    self.__be.sync()
def __init__(self):
    """Game server: per-team player rosters plus a background battle checker."""
    WebSocketServer.__init__(self,clientClass = Player)
    # Rosters keyed by team name; 'admin' is kept separately.
    self.teams = {'tizef':[],'tidu':[],'admin':[]}
    self.params = Params()
    # Daemon thread so the checker does not keep the process alive.
    self.threadCheckBattle = threading.Thread(target=self.checkBattle)
    self.threadCheckBattle.daemon = True
    self.threadCheckBattle.start()
def GetNuFluxes(E_th,Nuc):
    """Load the neutrino fluxes relevant above threshold E_th for target Nuc
    and return them packaged as a Params.Neutrinos background object."""
    # Reads each neutrino flux data file
    # the energies are stored in E_nu_all, fluxes in Flux_all
    # Figure out which backgrounds give recoils above E_th
    E_r_max = MaxNuRecoilEnergies(Nuc) # Max recoil energy for neutrino
    # Build 0-based indices of species whose max recoil exceeds threshold:
    # multiply 1..n by the boolean mask, drop zeros, shift back by one.
    sel = range(1,n_nu_tot+1)*(E_r_max>E_th)
    sel = sel[sel!=0]-1
    n_nu = count_nonzero(E_r_max>E_th)
    E_nu_all = zeros(shape=(n_Enu_vals,n_nu))
    Flux_all = zeros(shape=(n_Enu_vals,n_nu))
    Flux_err = zeros(shape=(n_nu))
    Flux_norm = zeros(shape=(n_nu))
    Solar = zeros(n_nu,dtype=bool)
    ii = 0
    for s in sel:
        if mono[s]:
            # Monochromatic line: a single (energy, flux) point.
            E_nu_all[0,ii] = NuMaxEnergy[s]
            Flux_all[0,ii] = NuFlux[s]
        else:
            # Tabulated spectrum, scaled by the species' total flux.
            data = loadtxt(nufile_dir+nuname[s]+nufile_root,delimiter=',')
            E_nu_all[:,ii],Flux_all[:,ii] = data[:,0],data[:,1]
            Flux_all[:,ii] = Flux_all[:,ii]*NuFlux[s]
        Flux_norm[ii] = NuFlux[s]
        Flux_err[ii] = NuUnc[s] # Select rate normalisation uncertainties
        Solar[ii] = whichsolar[s]
        ii = ii+1
    NuBG = Params.Neutrinos(n_nu,Solar,E_nu_all,Flux_all,Flux_norm,Flux_err)
    return NuBG
def __parse_head(self, chunk):
    """Parse the HTTP status line from chunk; return the number of bytes
    consumed, or 0 when no complete line is buffered yet."""
    eol = chunk.find('\n') + 1
    if eol == 0:
        return 0
    line = chunk[:eol]
    Params.log('Server responds ' + line.rstrip(), threshold=1)
    fields = line.split()
    assert (2 <= len( fields )) \
        and fields[ 0 ].startswith( 'HTTP/' ) \
        and fields[ 1 ].isdigit(), 'invalid header line: %r' % line
    self.__status = int(fields[1])
    self.__message = ' '.join(fields[2:])
    self.__args = {}
    # Header fields follow the status line; switch parser state.
    self.__parse = HttpProtocol.__parse_args
    return eol
def __parse_head(self, chunk):
    """Consume the HTTP response status line; return bytes consumed
    (0 when the buffered data does not yet contain a full line)."""
    newline_at = chunk.find('\n')
    if newline_at < 0:
        return 0
    consumed = newline_at + 1
    line = chunk[:consumed]
    Params.log('Server responds '+ line.rstrip(), threshold=1)
    fields = line.split()
    # A valid status line is "HTTP/x.y NNN [message...]".
    well_formed = (len(fields) >= 2
                   and fields[0].startswith('HTTP/')
                   and fields[1].isdigit())
    assert well_formed, 'invalid header line: %r' % line
    self.__status = int(fields[1])
    self.__message = ' '.join(fields[2:])
    self.__args = {}
    # Next state: the header fields.
    self.__parse = HttpProtocol.__parse_args
    return consumed
def testAll(self):
    """Place each design several times (5x GPU, 2x CPU) and report the
    spread of HPWL at global placement, legalization and detailed placement.

    FIX: `params.detaield_place_flag = 1` was a typo; it silently created an
    unused attribute and left the real `detailed_place_flag` at its default,
    so detailed placement was never explicitly enabled.
    """
    # Per-design relative-tolerance summary, reused in both loops below.
    def log_stats(design, m):
        gp, lg, dp = m[:, 0], m[:, 1], m[:, 2]
        gp_mean, lg_mean, dp_mean = np.mean(gp), np.mean(lg), np.mean(dp)
        rtol = lambda x, avg: max(avg - np.min(x), np.max(x) - avg) / avg
        gp_rtol, lg_rtol, dp_rtol = rtol(gp, gp_mean), rtol(lg, lg_mean), rtol(
            dp, dp_mean)
        logging.info(
            f"Avg metrics for {design}\n{m}\nGP={gp_mean:g} ({gp_rtol}), LG={lg_mean:g} ({lg_rtol}), DP={dp_mean:g} ({dp_rtol})"
        )

    metrics_list = {}
    for design in designs:
        json_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "%s.json" % (design))
        params = Params.Params()
        params.load(json_file)
        # control numpy multithreading
        os.environ["OMP_NUM_THREADS"] = "%d" % (params.num_threads)
        metrics_list[design] = []
        for device_name in ["gpu"] * 5 + ["cpu"] * 2:
            for deterministic_name in ["indeterministic"]:
                params.gpu = 0 if device_name == "cpu" else 1
                params.deterministic_flag = 0 if deterministic_name == "indeterministic" else 1
                params.global_place_flag = 1
                params.legalize_flag = 1
                # Was "detaield_place_flag" (typo) — see docstring.
                params.detailed_place_flag = 1
                params.detailed_place_engine = ""
                logging.info("%s, %s, %s" %
                             (design, device_name, deterministic_name))
                logging.info("parameters = %s" % (params))
                # run placement
                tt = time.time()
                metrics = Placer.place(params)
                logging.info("placement takes %.3f seconds" %
                             (time.time() - tt))
                # verify global placement results
                metrics_list[design].append((
                    metrics[-3][-1][-1].hpwl.cpu().numpy(),
                    metrics[-2].hpwl.cpu().numpy(),
                    metrics[-1].hpwl.cpu().numpy(),
                ))
        m = np.array(metrics_list[design])
        metrics_list[design] = m
        log_stats(design, m)

    logging.info("Overall Summary")
    for design in designs:
        log_stats(design, metrics_list[design])
def install_lv2(self):
    """Install an LV2 plugin bundle (.ttl files plus the compiled .so)
    into the configured LV2 plugins directory."""
    # Not an LV2 task generator: nothing to do.
    if not getattr(self, 'lv2', None):
        return
    # Replace the default install step with the bundle install below.
    self.meths.remove('install_target')
    if not Params.g_install:
        return
    if not self.env['LV2_INSTALL_DIR']:
        self.env['LV2_INSTALL_DIR'] = get_lv2_install_dir()
    if not self.env['LV2_INSTALL_DIR']:
        Params.fatal('Cannot locate LV2 plugins directory')
    display_msg('LV2 installation directory',
            self.env['LV2_INSTALL_DIR'], 'GREEN')
    # The bundle is the metadata files plus the built shared object.
    bundle_files = self.ttl
    bundle_files.append(self.target + '.so')
    install_files('LV2_INSTALL_DIR', self.target + '.lv2',
            bundle_files, self.env)
def step( self, throw=None ):
    """Advance the wrapped generator one step, recording the waiting state
    it yields; assertion failures are logged, StopIteration ends the fiber."""
    self.state = None
    try:
        if throw:
            # Inject the error into the generator if it supports throw().
            assert hasattr( self.__generator, 'throw' ), throw
            self.__generator.throw( AssertionError, throw )
        state = self.__generator.next()
        assert isinstance( state, (SEND, RECV, WAIT) ), \
            'invalid waiting state %r' % state
        self.state = state
    except Restart:
        raise
    except KeyboardInterrupt:
        raise
    except StopIteration:
        # Generator finished normally; release it.
        del self.__generator
        pass
    except AssertionError, msg:
        # Empty assertion message: fall back to the full traceback.
        if not str(msg):
            msg = traceback.format_exc()
        Params.log('Assertion failure: %s'% msg)
def recv( self, sock ):
    """Decode HTTP chunked transfer coding from the server and write the
    de-chunked payload to the protocol."""
    assert not self.Done
    chunk = sock.recv( Params.MAXCHUNK )
    assert chunk, 'chunked data error: connection closed prematurely'
    self.__recvbuf += chunk
    while '\r\n' in self.__recvbuf:
        head, tail = self.__recvbuf.split( '\r\n', 1 )
        # Chunk size is hex and may carry ";ext" parameters after it.
        chunksize = int( head.split( ';' )[ 0 ], 16 )
        if chunksize == 0:
            # Zero-size chunk terminates the body.
            self.__protocol.size = self.__protocol.tell()
            Params.log('Connection closed at byte %i' % self.__protocol.size,
                    threshold=2)
            self.Done = not self.hasdata()
            return
        if len( tail ) < chunksize + 2:
            # Wait for the rest of the chunk (payload + trailing CRLF).
            return
        assert tail[ chunksize:chunksize+2 ] == '\r\n', \
            'chunked data error: chunk does not match announced size'
        Params.log('Received %i byte chunk' % chunksize, threshold=1)
        self.__protocol.write( tail[ :chunksize ] )
        self.__recvbuf = tail[ chunksize+2: ]
def recv( self, sock ):
    """ Read chunk from server response. Hash or rewrite if needed. """
    assert not self.Done
    chunk = sock.recv( Params.MAXCHUNK )
    if chunk:
        self.__protocol.write( chunk )
        #if self.__protocol.capture:
        #    self.__hash.update( chunk )
        if Params.LIMIT:
            # Crude bandwidth limit: postpone the next read in proportion
            # to how much was just received.
            self.__nextrecv = time.time() + len( chunk ) / Params.LIMIT
    else:
        # Empty read = connection closed by the server.
        if self.__protocol.size >= 0:
            assert self.__protocol.size == self.__protocol.tell(), \
                'connection closed prematurely'
        else:
            self.__protocol.size = self.__protocol.tell()
            Params.log('Connection closed at byte %i' % self.__protocol.size,
                    threshold=2)
        self.Done = not self.hasdata()
def prepare_direct_response(self, request):
    """
    Serve either a proxy page, a replacement for blocked content,
    of static content. All directly from local storage.

    Returns true on direct-response ready.
    """
    host, port = request.hostinfo
    verb, path, proto = request.envelope
    # A request addressed at the proxy's own port is a management request.
    if port == Params.PORT:
        Params.log("Direct request: %s" % path)
        assert host in LOCALHOSTS, "Cannot service for %s" % host
        self.Response = Response.DirectResponse
        return True
    # XXX: Respond by writing message as plain text, e.g echo/debug it:
    #self.Response = Response.DirectResponse
    # Filter request by regex from patterns.drop
    filtered_path = "%s/%s" % (host, path)
    m = Rules.Drop.match(filtered_path)
    if m:
        self.set_blocked_response(path)
        Params.log('Dropping connection, '
                'request matches pattern: %r.' % m, 1)
        return True
    # Static mode with a complete cache entry: serve it without contact.
    if Params.STATIC and self.cache.full():
        Params.log('Static mode; serving file directly from cache')
        self.cache.open_full()
        self.Response = Response.DataResponse
        return True
def __parse_head( self, chunk ):
    """ Start parsing request by splitting the envelope or request line,
    defer to __parse_args once first line has been received.  """
    eol = chunk.find( '\n' ) + 1
    if eol == 0:
        # No complete line buffered yet.
        return 0
    line = chunk[ :eol ]
    Params.log('Client sends %r'%Params.print_str(line, 96), threshold=1)
    fields = line.split()
    # Request line must be exactly "VERB PATH PROTO".
    assert len( fields ) == 3, 'invalid header line: %r' % line
    self.__verb, self.__reqpath, self.__prototag = fields
    assert self.__reqpath, fields
    self.__headers = {}
    self.__parse = self.__parse_args
    return eol