def run():
    text = "cagrUFSC2calendar transforma sua grade de horarios em um formato para calendario (.ics)\n" \
           "Por padrão, as repetições dos eventos das matérias estão ligadas à data do fim do semestre\n" \
           "da graduação na UFSC. Entretanto, você pode definir um numero personalizado de repetições\n" \
           "com --repeat NUM ou uma data de fim --end Y-m-d"
    parser = argparse.ArgumentParser(
        description=text, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("file", help="grade de horarios em HTML")
    parser.add_argument("output", help="nome do arquivo de saida")
    parser.add_argument("--repeat", help="quantidade de repetições dos eventos")
    parser.add_argument("--end", help="data final para computar repetições")
    args = parser.parse_args()

    FILE = args.file
    OUTPUT = args.output
    REPEAT = args.repeat
    END_DATE = args.end
    if not END_DATE:
        END_DATE = "2020-7-14"

    print("Starting to extract information from", FILE, "...")
    daily_events, day2abrev, day2key, code2name = extract(FILE)
    print("Building calendar...")
    build(OUTPUT, END_DATE, REPEAT, daily_events, day2abrev, day2key, code2name)
def test_literal(self):
    self.assertEqual(build('1'), node.ProgramNode(subs=[
        node.ValueNode(Token('1', TokenType.INTEGER))
    ]))
    self.assertEqual(build('3.1415'), node.ProgramNode(subs=[
        node.ValueNode(Token('3.1415', TokenType.REAL))
    ]))
def build(self, _btn):
    if builder.is_building():
        builder.build_cancel()
    else:
        self.build_btn.set_label('Cancel')
        self.build_btn.get_style_context().add_class('destructive-action')
        builder.build(GLib.idle_add, self.on_build_stop)
def build( _ctx ):
    sources = { 'main', }

    libraries = set()
    if _ctx.osName == common.WINDOWS:
        libraries |= { 'opengl32', }
    libraries |= {
        common.generateCoreLibraryName( 'font' ),
        common.generateCppLibraryName( 'window' ),
        common.generateDpCoreLibraryName( 'opengl' ),
        common.generateDpCoreLibraryName( 'common' ),
        common.generateDpCoreLibraryName( 'file' ),
    }

    builder.build( _ctx, 'font', sources, libraries = libraries, )
def build( binding, site_package_path = None ):
    if site_package_path is not None:
        import journal
        warning = journal.warning( 'binding_builder.mm' )
        warning.log( 'mm can only export python modules to predefined $EXPORT_ROOT/modules' )
        pass

    from builder import build
    build( binding )
    return
def build_stuff():
    if "onlypackage" in argv:
        builder.build()
        check_output(package_command)
    else:
        check_output(rm_command)
        builder.build()
        check_output(package_command)
        check_output(rm_command2)
        check_output(scp_command)
def build(binding, site_package_path=None):
    if site_package_path is not None:
        import journal
        warning = journal.warning('binding_builder.mm')
        warning.log(
            'mm can only export python modules to predefined $EXPORT_ROOT/modules'
        )
        pass

    from builder import build
    build(binding)
    return
def run(data):
    '''run a job.'''
    try:
        git_url = data['git_url']
        git_treeish = data['git_treeish']
        web_root = data['web_root']
    except KeyError as e:
        print 'Invalid job: %s' % e
        return

    builder.build(git_url, git_treeish, web_root)
def start():
    print_title()
    delay_print(
        "\n\n\tHello, welcome to the spigot plugin builder. Select an option: \n\n", 0.03)
    delay_print("\t\t1. Build a plugin\n\n", 0.03)
    delay_print("\t\t2. Edit plugin configuration settings\n\n", 0.03)
    delay_print("\t\t3. Create custom plugin using blank settings\n\n", 0.03)

    answer = input("")
    if int(answer) == 1:
        build()
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateCoreLibraryName( 'file' ),
    }

    builder.build( _ctx, 'file', sources, libraries = libraries, )
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateLibraryName( 'common' ),
    }

    builder.build( _ctx, 'args', sources, libraries = libraries, )
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateCppLibraryName( 'window' ),
    }

    builder.build( _ctx, 'window', sources, libraries = libraries, )
def test_parse_known_values_clean_with_unicode(self):
    """parse should give known result with known input"""
    self.maxDiff = None
    encoding = 'utf-8' if sys.version_info[0] == 2 else None
    for dic in self.knownValuesClean + self.knownValuesCleanWithUnicode:
        result = parse(build(dic, encoding=encoding), unquote=True,
                       encoding=encoding)
        self.assertEqual(dic, result)
def test(lab):
    print "\n\nUNIT TESTS"
    pathString = path.buildPath(lab)
    f = open(pathString + 'config.cfg')
    # ignore variant and program aim
    f.readline()
    f.readline()

    # run unit testing
    line = f.readline().rstrip('\n')
    flag = 0
    while line:
        print("Test: " + line)
        makeStatus = builder.build(pathString, line)
        string = "make status: " + successOrFail(makeStatus)
        testStatus = 0
        # Print make status
        print(string)
        if makeStatus == 0:
            testStatus = starter.executeAim(pathString + line)
            string = "test status: " + successOrFail(testStatus)
            # Print test status
            print(string)
        flag = flag | makeStatus | testStatus
        line = f.readline().rstrip('\n')
    f.close()
    return flag
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateCoreLibraryName( 'common' ),
        common.generateCoreLibraryName( 'time' ),
    }

    builder.build( _ctx, 'absolutetime', sources, libraries = libraries, )
def buildHexFile(req):
    hash = makeConfigHash(req.form)
    ret = 0
    stderr = ""
    output = ""
    if not hasCache(hash):
        conf = None
        try:
            conf = BuildConfig(defaultconfig)
            conf.importForm(req.form)
            import builder
            ret, output, stderr = builder.build(basedir, os.path.join(cachedir, hash), conf)
        except Exception as e:
            ret = -1
            stderr = "Failed to parse config!\n" + str(e)

    res = str(ret) + "\n"
    if ret == 0:
        res += hash
    else:
        res += output + stderr
    return res
def solve():
    try:
        jsdata = request.form['problem_input']
        lines = str(jsdata).split('\n')
        args = DEFAULTS
        args['lines'] = lines
        args['n_models'] = int(request.form['n_models'])
        figs = build(args, show_plot=False, encode_fig=True)
        urls = list()
        for fig in figs:
            img = BytesIO()
            fig.savefig(img, format='png')
            fig.close()
            img.seek(0)
            plot_url = base64.b64encode(img.getvalue()).decode()
            urls.append(f"data:image/png;base64,{plot_url}")
        return jsonify(srcs=urls)
        # return f"data:image/png;base64,{plot_url}"
    except Exception as e:
        return Response("Invalid input", status=400)
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateLibraryName( 'common' ),
        common.generateLibraryName( 'input' ),
    }

    builder.build( _ctx, 'gamepad', sources, libraries = libraries, )
def build_deps(self, targets=["all"], arguments=[]):
    if sys.platform.startswith("win"):
        has_nmake = False
        try:
            has_nmake = subprocess.call(['nmake', '/?'],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE) == 0
        except:
            pass
        if not has_nmake:
            logging.error('Cannot run nmake, have you run "%VS90COMNTOOLS%vsvars32.bat"?')
            sys.exit(1)

    for dep in self.deps:
        builder = Dependency(self, dep)
        args = []
        action = "Getting"
        target_name = builder.name + '-' + builder.props['version']
        if dep["name"] in targets or "all" in targets:
            if 'clean' in arguments:
                args.append('clean')
                action = "Cleaning"
            logging.info(action + " %s" % target_name)
            if not builder.build(args=args):
                logging.error("Build failed for %s. Exiting..." % builder.name)
                sys.exit(1)
        else:
            logging.info("Skipping %s" % target_name)
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateLibraryName( 'common' ),
        common.generateLibraryName( 'window' ),
    }

    builder.build( _ctx, 'keyboard', sources, libraries = libraries, )
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateLibraryName( 'common' ),
        common.generateLibraryName( 'file' ),
    }

    builder.build( _ctx, 'truncatefile_simple', sources, libraries = libraries, )
def build( _ctx ):
    sources = { 'main', }

    libraries = {
        common.generateLibraryName( 'common' ),
        common.generateLibraryName( 'window' ),
        common.generateLibraryName( 'opengl' ),
    }

    builder.build( _ctx, 'opengl_simple', sources, libraries = libraries, )
def Main():
    if len(sys.argv) == 1:
        print "Usage: %s file.js" % sys.argv[0]
        exit()

    try:
        data = file(sys.argv[1]).read()
        out = build(data)
        print out
    except SyntaxError_ as e:
        print e
def test(lab):
    global flag
    print("\nPROGRAM TESTS")

    # get variant and program aim from student's config
    programPath = path.buildPath(lab)
    f = open(programPath + 'config.cfg')
    variant = f.readline().rstrip('\n')
    programAim = f.readline().rstrip('\n')
    f.close()
    testsPath = path.buildTestPath(int(lab[-1]))

    builder.build(programPath, programAim)
    os.chmod(programPath, 0o777)
    os.chmod(programPath + programAim, 0o777)

    # Common tests
    print('Common tests:')
    execute(testsPath, 'config.xml', programPath, programAim)

    # Variant tests
    print('\nVariant tests:')
    #execute(testsPath, 'config' + str(variant) + '.xml', programPath, programAim)

    return flag
def compare(self, js, cs, name=''):
    out = build(js)
    if out.strip() == cs.strip():
        self.assertEqual(out.strip(), cs.strip())
    else:
        print ""
        print "----------------------------------------------------------------------"
        print "%s" % name
        print ""
        print "Output:"
        print "> " + out.strip().replace(' ', '_').replace("\n", "\n> ")
        print ""
        print "Expected:"
        print "> " + cs.strip().replace(' ', '_').replace("\n", "\n> ")
        self.assertTrue(False)
def build( _ctx ):
    sources = { 'main', }

    libraries = None
    if _ctx.osName == common.LINUX:
        libraries = set()
    elif _ctx.osName == common.WINDOWS:
        libraries = { 'opengl32', }
    libraries |= {
        common.generateCoreLibraryName( 'opengl' ),
        common.generateCppLibraryName( 'window' ),
    }

    builder.build( _ctx, 'opengl', sources, libraries = libraries, )
def pack(gameID, channelID, packID, tempFilePath):
    log_utils.debug("开始打包: gameID:" + gameID + ";channelID:" + channelID +
                    ";packID:" + packID + ";temp file path:" + tempFilePath)

    # set temporary environment variables
    os.environ['JAVA_HOME'] = file_utils.getJavaDir()
    os.environ['CLASSPATH'] = ''

    config_utils.init_config(tempFilePath, gameID)

    game = db_utils.get_game_by_id(gameID)
    if game == None:
        log_utils.error("game not exists in db. gameID:" + gameID)
        return 1

    channel = db_utils.get_channel_by_id(channelID)
    if channel == None:
        log_utils.error("channel not exists in db. channel id:" + channelID)
        return 1

    packlog = db_utils.get_packlog_by_id(packID)
    if packlog == None:
        log_utils.error("channel packlog not exists in db. packID:" + packID)
        return 1

    keystore = db_utils.get_keystore_by_id(channel["keystoreID"])
    if channel["signApk"] == 1 and keystore == None:
        log_utils.error(
            "curr config need sign apk but keystore not exists in db. keystore id:" +
            channel["keystoreID"])
        return 1

    log_utils.info("now to package %s...", channel["channelName"])

    # load channel config.xml
    ret = config_utils.load_channel_config(game, channel, packlog)
    if ret:
        log_utils.error("load channel config failed. " + channel['channelName'])
        return 1

    ret = builder.build(game, channel, packID + ".apk", keystore)
    if ret:
        return 0
    return 1
def pack(gameID, channelID, packID, tempFilePath):
    log_utils.debug("now to pack. gameID:" + gameID + ";channelID:" + channelID +
                    ";packID:" + packID + ";temp file path:" + tempFilePath)
    print("begin to pack....")
    sys.stdout.flush()

    config_utils.init_config(tempFilePath, gameID)
    print("begin to pack 2....")
    sys.stdout.flush()

    game = db_utils.get_game_by_id(gameID)
    if game == None:
        log_utils.error("game not exists in db. gameID:" + gameID)
        return 1

    channel = db_utils.get_channel_by_id(channelID)
    if channel == None:
        log_utils.error("channel not exists in db. channel id:" + channelID)
        return 1

    packlog = db_utils.get_packlog_by_id(packID)
    if packlog == None:
        log_utils.error("channel packlog not exists in db. packID:" + packID)
        return 1

    keystore = db_utils.get_keystore_by_id(channel["keystoreID"])
    if channel["signApk"] == 1 and keystore == None:
        log_utils.error("curr config need sign apk but keystore not exists in db. keystore id:" +
                        channel["keystoreID"])
        return 1

    log_utils.info("now to package %s...", channel["channelName"])

    # load channel config.xml
    ret = config_utils.load_channel_config(game, channel, packlog)
    if ret:
        log_utils.error("load channel config failed. " + channel['channelName'])
        return 1

    ret = builder.build(game, channel, packID + ".apk", keystore)
    if ret:
        return 0
    return 1
def main(target="help"):
    try:
        ok = builder.build(target, load_config())
        if ok:
            builder.report("everything is okay")
        else:
            builder.report("something went wrong")
            sys.exit(1)
    except Exception as e:
        import traceback
        builder.report("something went wrong [%s]", e.__class__.__name__)
        for earg in e.args:
            print(earg)
        builder.report("stack trace")
        trace = sys.exc_info()[2]
        traceback.print_tb(trace, None, sys.stdout)
        sys.exit(1)
def _run_fight(problem, solutions, tests):
    calls = []
    files = []
    for id, solution in enumerate(solutions):
        filename = f"solution_{id}.{extensions[solution['lang']]}"
        filepath = f'sources/{filename}'
        with open(filepath, "w") as sol:
            sol.write(solution['source'])
        calls.append(build(filename, filepath, solution['lang']))
        files.append('')

    problem_folder = os.path.join(config.PROBLEM_FOLDER, problem)
    problem_config = None
    with open(os.path.join(problem_folder, 'config/problem.json')) as file:
        problem_config = json.load(file)

    test_files = [f'{problem_folder}/tests/{x["filename"]}'
                  for x in filter(lambda x: x['id'] in tests, problem_config['tests'])]

    for file in test_files:
        return_code = sp.call(["bash", os.path.join(problem_folder, "scripts/check.sh"),
                               '--players_cmd'] + calls +
                              ['--players_file'] + files +
                              ['--test_file'] + [file])
        if return_code != 0:
            logger.critical(f'Checker failed with exit code {return_code}.')
def test_build(self):
    result = build(self.request_data)
    self.assertEquals(result, self.known_result)
#!/usr/bin/env python
import builder
from encoder import *

enc = Encoder()
listing = [Print(PAGE()), Print("~", PAGE())]
program = enc.encode(listing, start=10, step=10)
files = [("CODE", 0xe00, 0xe00, program)]
builder.build(files)
# Example: python markov.py books/aladdin.txt 25
# *Larger text sources may take longer to build
if __name__ == '__main__':
    args = sys.argv
    usage = 'Usage: %s ([input-file-name] <num-words>)' % (args[0], )  # Usage string
    if len(args) != 3 and len(args) != 2:
        raise ValueError(usage)  # If the number of arguments doesn't match, raise an error

    if len(args) == 3:  # If there is a file name and word count
        num_words = int(args[2])  # Set word count
        if not path.exists("chain.p"):  # Check for existing chain
            start = time.clock()
            chain = builder.build(args[1])  # If no existing chain, build a new one
        else:
            chain = restrictive_pickle.restricted_loads(open("chain.p", 'rb').read())
            start = time.clock()
            chain = builder.build(args[1], chain)  # If chain exists, load it and build on top of it
        pickle.dump(chain, open("chain.p", 'wb'))  # Dump the chain to a file
        print("Time to run: {}".format(time.clock() - start))
    elif len(args) == 2:  # If there is just word count
        num_words = int(args[1])  # Set word count
        chain = restrictive_pickle.restricted_loads(
            open("chain.p", 'rb').read())  # Use restricted loading
def test_parse_known_values_with_unicode(self):
    """parse should give known result with known input (quoted)"""
    self.maxDiff = None
    for dic in self.knownValues + self.knownValuesWithUnicode:
        result = parse(build(dic, encoding='utf-8'), encoding='utf-8')
        self.assertEqual(dic, result)
def test_parse_known_values_clean(self):
    """parse should give known result with known input"""
    self.maxDiff = None
    for dic in self.knownValuesClean:
        result = parse(build(dic), unquote=True)
        self.assertEqual(dic, result)
def test_end_to_end(self):
    parsed = parse('a[]=1&a[]=2')
    result = build(parsed)
    self.assertEquals(result, "a[]=1&a[]=2")
def __init__(self, **kw):
    update(self, world=build('morse'))
    home_pos = self.world.robot1_instance.pos
def __init__(self, **kw):
    update(self, world=build('mock'))
def test_end_to_end(self):
    self.maxDiff = None
    querystring = build(self.request_data)
    result = parse(querystring)
    self.assertEquals(result, self.request_data)
import builder
import os
import sys
import webbrowser

usage = "script_build_chat.py <number of msgs in chat>"

if __name__ == "__main__":
    if len(sys.argv) != 2:
        print usage
        sys.exit()
    builder.build(int(sys.argv[1]))
    print "Saved new bot_chat.html"
    webbrowser.open_new_tab('file://{0}'.format(
        os.path.realpath('bot_chat.html')))
else:
    print "don't import this"
def test_parse_known_values(self):
    """parse should give known result with known input (quoted)"""
    self.maxDiff = None
    for dic in self.knownValues:
        result = parse(build(dic))
        self.assertEqual(dic, result)
# for i in to_add: addRelease(i)

# Start setting up builder here.
builder = builder.Builder()
failed = []
succeeded = []
errored = []

print(color(f"\nBuilding {len(to_build)} things").bold)
for plugin in to_build:
    print(f"\nBuilding {color(plugin['plugin']['Name']).bold}")
    try:
        started = datetime.datetime.now()
        files = None
        files = builder.build(plugin["plugin"], commithash=plugin["commit"]["sha"])
    except Exception as error:
        duration = datetime.datetime.now() - started
        print("An error occurred!")
        print(error)
        traceback.print_tb(error.__traceback__)
        if files:
            print(f"Files: {files}")
        print(
            f"{color('Building of').red} {color(plugin['plugin']['Name']).red.bold} {color('errored').red}"
        )
        errored.append(plugin)
        print(f"Took {humanize.naturaldelta(duration)}")
        continue
    duration = datetime.datetime.now() - started
def test_build(self):
    result = build(self.request_data)
    self.assertEquals(parse(result), self.request_data)
def main(n, wavelength, width, beamsize, regen=False):
    config_path = open('config/vars.txt', 'rb')
    test = config_path.readline()
    config_path.close()
    if not (abs(float(test) - width) < 0.1 / 25400):
        regen = True

    if os.path.exists('config/geometry.pickle') and regen == False:
        geometry_path = open('config/geometry.pickle', 'rb')
        world = cPickle.load(geometry_path)
        geometry_path.close()
    else:
        world = build(width)
    world.flatten()
    world.bvh = load_bvh(world)  # Create bounding volume hierarchy
    sim = Simulation(world)

    def init_source(n, wavelength, width, beamsize):
        """Generates laser profile of n photons, with wavelength, emanating from
        position pos_offset with direction 'direction'."""
        pos, direction = mfunctions.get_source(n, width, beamsize, wavelength / 25400000.0)
        # Note: Chroma only natively handles integer wavelengths, so we convert to
        # inches here instead of earlier in the code.
        source_center = mfunctions.get_center('161') + np.array((0, -0.1, 0))  # Position source just in front of slits
        pos = pos + np.tile(source_center, (n, 1))
        pol = np.cross(direction, (0, 0, 1))  # Polarisation
        wavelengths = np.repeat(wavelength, n)
        return Photons(pos, direction, pol, wavelengths)

    start = []
    end = []
    print 'Simulating photons...'
    for ev in sim.simulate([init_source(n, wavelength, width, beamsize)],
                           keep_photons_beg=True, keep_photons_end=True,
                           run_daq=False, max_steps=100):
        #print ev.photons_end.flags
        start.append(ev.photons_beg.pos)
        end.append(ev.photons_end.pos)

    print 'Saving data to file...'
    photon_id = list(range(n))
    flags = ev.photons_end.flags
    wavs = ev.photons_end.wavelengths

    ##### Saving data to file #####
    current = dt.datetime.now()
    filename = 'results/' + current.strftime('%Y-%m-%d_%H:%M') + \
        '_%d:%d:%.2f:%.2f.dat' % (n, wavelength, width * 25400, beamsize * 25400)
    out_file = open(filename, 'w')
    out_file.write('\t'.join(['ID', 'xi', 'yi', 'zi', 'xf', 'yf', 'zf',
                              'wavelength (nm)', 'flag\n']))
    for i in range(n):
        output = [photon_id[i]] + [item for item in start[0][i]] + \
                 [item for item in end[0][i]] + [int(wavs[i]), flags[i]]
        out_file.write('\t'.join([str(item) for item in output]) + '\n')
    out_file.close()

    print 'Generating seed file...'
    seed = np.random.uniform(size=n)  # This is the seed file for the analysis script
    np.savetxt('config/seed.dat', seed, delimiter=',')
    print 'Done.'
import sys
import builder
import generator
import random


def randomizer(bound):
    return random.randint(1, bound)


if __name__ == '__main__':
    args = sys.argv
    usage = 'Usage: %s (<text-file-name> <num-words>)' % (args[0], )
    if (len(args) < 3):
        raise ValueError(usage)
    file_name = args[1]
    num_words = int(args[2])
    chain = builder.build(file_name)
    outstr = generator.generate(chain, randomizer, num_words, builder.NONWORD)
    print(outstr)
def main(suppress=False):
    ##### Loading in Results #####
    print '-'*90
    print 'ANALYSIS SCRIPT FOR VUV SIMULATION'.center(90, '=')
    print '-'*90
    print 'Available files:\n'
    filenames = []
    display = ['#', 'Date:', 'Time:', 'n:', u'\u03bb (nm)'.encode('utf-8'),
               u'd (\u03bcm)'.encode('utf-8'), u's (\u03bcm)'.encode('utf-8')]
    print '{:<3}{:<12}{:<9}{:<9}{:<9}{:<9}{:<9}'.format(*display)
    for pos, filename in enumerate(os.listdir('results/')):
        filenames.append(filename)
        date = re.findall('\d+\-\d+\-\d+', filename)[0]
        time = re.findall('\d{2}\:\d{2}', filename)[0]
        args = re.findall('\d+\:\d+:\d+\.\d+\:\d+\.\d+', filename)[0]
        display = [str(pos), date, time] + args.split(':')
        print '{:<3}{:<12}{:<9}{:<9}{:<8}{:<8}{:<9}'.format(*display)
    print '(d - Slit Width, s - Beam Width)'
    print '='*90

    proceed = False
    while proceed == False:
        usrin = raw_input('Please select a file: ').strip()
        if usrin not in [str(i) for i in list(range(len(filenames)))]:
            print 'Bad input!'
        else:
            proceed = True
    selection = filenames[int(usrin)]
    n, wavelength, width, beamsize = [float(i) for i in
                                      re.findall('\d+\:\d+:\d+\.\d+\:\d+\.\d+', selection)[0].split(':')]
    #print '\nSelected: ' + selection + '\n'

    ##### Data Loading and Results #####
    itm = 25.4  # Inches to millimetre conversion
    tol = 0.05  # Tolerance for selecting photons
    res = 10    # Seed resolution
    seed = np.genfromtxt('config/seed.dat', delimiter=',')  # display seed
    x = 0.01    # marker params
    h = x*np.sqrt(3)/2
    pyramid = make.linear_extrude([-x/2, 0, x/2], [-h/2, h/2, -h/2], h, [0]*3, [0]*3)
    beg_marker = Solid(pyramid, vacuum, vacuum, color=0x32a8a2)  # photon start marker
    end_marker = Solid(pyramid, vacuum, vacuum, color=0xfc0303)  # photon end marker
    dir_marker = Solid(pyramid, vacuum, vacuum, color=0x00ff00)  # Direction

    data = p.read_csv('results/' + selection, sep='\t').values[:]
    photon_ids = data[:, 0].astype(int)
    beg_pos = data[:, 1:4]
    end_pos = data[:, 4:7]
    wavelengths = data[:, 7].astype(int)
    flags = data[:, 8].astype(int)

    if suppress == False:
        if os.path.exists('config/geometry.pickle'):
            print 'Loading geometry from file...'
            geometry_path = open('config/geometry.pickle', 'rb')
            world = cPickle.load(geometry_path)
            world.add_solid(dir_marker, displacement=mfunctions.get_center('161') + np.array((0, 1, 0)))
            geometry_path.close()
        else:
            world = builder.build()  # Regenerate geometry

    sipm_ids = []
    pmt_ids = []
    for p_id in photon_ids:
        if (flags[p_id] & 0x1 << 2) == 4:
            if abs(end_pos[p_id, 2] - 9.08933) < tol:  # Photons detected at SiPM
                sipm_ids.append(p_id)
            elif abs(end_pos[p_id, 0] - 23.6555999) < tol:  # Photons detected at PMT
                pmt_ids.append(p_id)
            if suppress == False and seed[p_id] <= 1.0/res:
                world.add_solid(beg_marker, displacement=beg_pos[p_id])
                world.add_solid(end_marker, displacement=end_pos[p_id])

    pcount = len(pmt_ids)
    scount = len(sipm_ids)
    print 'Total photons:\t\t', n
    print 'Detections (PMT):\t', pcount
    print 'Detections (SiPM):\t', scount
    print 'Relative detection rate: {:.2f}%'.format(100*float(scount + pcount)/len(photon_ids))

    if suppress == False:
        view(world)
    elif suppress == True and scount == 0:
        print 'No photons detected!'
        exit()
    else:
        detectedx = np.asarray(list(end_pos[i, 0]*itm for i in sipm_ids))  # Compiling points to plot and converting to mm
        detectedy = np.asarray(list(end_pos[i, 1]*itm for i in sipm_ids))
        detectedwavs = np.array(list(wavelengths[i] for i in pmt_ids))
        meanx = np.mean(detectedx)  # Mean detected positions
        meany = np.mean(detectedy)
        detectedx -= meanx  # Centering plot about detected photons
        detectedy -= meany

        sampleset = np.sqrt(detectedx**2 + detectedy**2)  # Heatmap info
        binwidth = 2*ss.iqr(sampleset)/np.cbrt(len(sampleset))  # Freedman-Diaconis rule
        bins = (max(sampleset) - min(sampleset))/binwidth
        heatmap, xedges, yedges = np.histogram2d(detectedx, detectedy, bins=bins)
        extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]

        fig = plt.figure()
        ax1 = fig.add_subplot(121)
        ax2 = fig.add_subplot(122)
        ax1.axis('equal')
        ax2.axis('equal')
        fig.suptitle(u'n: {} | \u03bb: {}nm | d: {}\u03bcm | s: {}\u03bcm'.format(
            int(n), int(wavelength), width, beamsize))

        r = 0.059813*itm  # Iris radius
        theta = np.linspace(0, 2*np.pi, 1000)
        iris_center = np.array((18.4429, -21.8872))*itm
        iris_x = r*np.cos(theta) + iris_center[0] - meanx
        iris_y = r*np.sin(theta) + iris_center[1] - meany

        ax1.scatter(detectedx, detectedy, s=0.3, c='k', label='Photon Hits')
        #ax1.plot(iris_x, iris_y, c='r', label='Iris Overlay')
        ax1.set_xlabel('x (mm)')
        ax1.set_ylabel('y (mm)')
        ax1.set_title('SiPM Hits')
        ax1.legend()

        im = ax2.imshow(heatmap.T, extent=extent, origin='lower', cmap='hot_r',
                        interpolation='gaussian')
        ax2.set_xlabel('x (mm)')
        ax2.set_ylabel('y (mm)')
        ax2.set_title('SiPM Heat Map')
        fig.colorbar(im, ax=ax2)
        plt.show()
def test_parse_normalized(self):
    result = parse(build(self.knownValues), normalized=True)
    self.assertEqual(self.knownValuesNormalized, result)
def get_tags(session):
    return (session.query(HashTagNetwork).join(Network).filter(
        sa.or_(
            HashTagNetwork.last_scraped == None,
            sa.func.trunc(
                sa.extract('epoch', unow_tz()) -
                sa.extract('epoch', HashTagNetwork.last_scraped)) >
            Network.parsing_frequency)))


def main(scheduler):
    with session_scope(Session) as session:
        tags = get_tags(session)
        scheduler.schedule_many(list(tags))


def make_scheduler():
    return ScrapyScheduler('hashtag', settings.SCRAPYD_NODES)


if __name__ == '__main__':
    scheduler = make_scheduler()
    egg = build()
    scheduler.load_egg(egg)
    try:
        while 1:
            main(scheduler)
            dog_sleep(10)
    except KeyboardInterrupt:
        sys.exit()