def render(self):
    elapsed_time = 0
    print "Starting", self.jobs, "tracer workers."
    jobs = []
    start_time = time.time()
    for i in range(self.jobs):
        t = Tracer(self.width, self.height, i, self.jobs, self.canvas,
                   self.world, self.camera, self.recursion_depth,
                   self.multisampling, self.x_samples, self.y_samples)
        t.name = "Tracer-" + str(i)
        t.start()
        jobs.append(t)
    for j in jobs:
        j.join()
    end_time = time.time()
    elapsed_time += end_time - start_time
    print "Time usage:"
    print "Rendering\t\t", end_time - start_time
    start_time = time.time()
    self.canvas.write()
    end_time = time.time()
    print "Writing\t\t\t", end_time - start_time
    elapsed_time += end_time - start_time
    print "Total time\t\t", elapsed_time

def setUp(self):
    tracer = Tracer(16)
    tracer.get_command_bytes = Mock(return_value=fixture_data)
    tracer.add_crc = Mock(return_value=fixture_data)
    tracer.verify_crc = Mock(return_value=True)
    tracer.get_result = Mock(return_value=Command(0x12, fixture_data))
    self.ts = TracerSerial(tracer, None)

def main():
    sys.stdout = Tracer(sys.stdout)
    sys.stderr = Tracer(sys.stderr)
    app = QtGui.QApplication(sys.argv)
    application = Application(app)
    application.start()
    timer = QtCore.QTimer()
    timer.singleShot(1, application.applyTheme)
    sys.exit(app.exec_())

def __init__(self, debug=True, console=True):
    Tracer.__init__(self)
    self.console = console
    self.debug = debug
    self.logs = OrderedDict()
    self.log_count = 0
    self.relays = dict()
    self.graph = dict()
    self.buffer = list()

def __init__(self, c, julia_itr, center=(0, 0, 0), scale=1.0, **kwargs):
    Tracer.__init__(self, position=center, scaling=scale)
    self.c = c
    self.julia_itr = julia_itr
    self.bndR = 4.0
    self.max_itr = 100
    self.precision = 0.001
    self.max_step = 0.1
    for (k, v) in kwargs.items():
        setattr(self, k, v)

def execute(self, function, *args, **kwargs):
    output = None
    tracer = Tracer(output, self.threaded)
    with tracer:
        ret = function(*args, **kwargs)
    self.graph = dict()
    for node in tracer.nodes():
        if node.name != '__main__' and not node.name.startswith('tracer'):
            self.graph[node.name] = node.hash
    return ret

class TestTracer(TestCase):
    def setUp(self):
        self.t = Tracer(16)

    def test_get_command_bytes(self):
        result = self.t.get_command_bytes(Command(0x12, fixture_data))
        self.assertEqual(bytearray(b'\x10\x12\x03' + fixture_data), result)

    def test_get_result(self):
        result = self.t.get_result(bytearray(b'\x00\xA0\x18') + query_result)
        self.assertEqual(QueryResult, type(result))

def __init__(self, triangle_mesh, max_depth=3, max_faces_per_leaf=5):
    original_coordinates = triangle_mesh.coordinates
    self.triangle_mesh = triangle_mesh
    Tracer.__init__(self)
    self.coordinates = original_coordinates
    self.max_depth = max_depth
    self.max_faces_per_leaf = max_faces_per_leaf
    if self.max_depth >= self.__class__.MAX_DEPTH:
        raise RuntimeError("max_depth >= MAX_DEPTH")
    self.build()

def __init__(self, tracer_code, normal_code=None, center=(0, 0, 0), bndR=None,
             max_itr=1000, precision=1e-5, self_intersection=True):
    Tracer.__init__(self, position=center)
    self.max_itr = max_itr
    self.precision = precision
    self.no_self_intersection = not self_intersection
    self.tracer_code = tracer_code.strip()
    self.normal_code = normal_code
    self.unique_tracer_id = str(id(self))

def execute_task(self):
    tracer = Tracer()
    span = tracer.create_continuation_span(
        span_name='w2-{}'.format(self.id), context_id=self.id)
    span.set_tag('job-id', self.id)
    span.log_kv({
        'event': 'debug',
        'message': 'Start execution for job id: {}'.format(self.id)
    })
    try:
        childSpan = tracer.create_span(span_name='load-data', parent_span=span)
        self.load_data()
        childSpan.finish()
        childSpan = tracer.create_span(span_name='process-data', parent_span=span)
        self.process_data()
        childSpan.finish()
        childSpan = tracer.create_span(span_name='save-data', parent_span=span)
        self.save_data()
        childSpan.finish()
    except Exception as e:
        span.set_tag('error', e)
    finally:
        span.log_kv({
            'event': 'debug',
            'message': 'End execution for job id: {}'.format(self.id)
        })
        span.finish()
        tracer.flush_spans()

def sub_start_capture(self):
    # Start tracer thread, wait for console input to stop
    if self.login_required and not self.init_login():
        self.logger.debug("Could not login. Stop the capture thread.")
        self._stop.set()
        return
    self.set_data("tcaps", datetime.datetime.now())
    self.logger.debug("data_map:%s" % (self.data_map))
    self.init_capture_file()
    self.logger.info("Start capture (capture_file:'%s')." % (self.cap_file_path))
    url_start = (self.base_url + '/cgi-bin/capture_notimeout' + self.start_str
                 + "&ifaceorminor=" + self.cap_interface)
    if self.SID != '':
        url = url_start + "&sid=%s" % self.SID
    else:
        url = url_start
    self.logger.debug(
        "Send capture start request to the box (url:'%s', capture_file:'%s')."
        % (url, self.cap_file_path))
    Tracer(url, self.cap_file_path).start()
    self.logger.debug(
        "Send capture start request to the box finished (url:'%s', capture_file:'%s')."
        % (url, self.cap_file_path))

def solve_with_options(algorithm_to_run, seed, run_time, inst):
    print(
        f'''Running algorithm {algorithm_to_run} on file {inst} with a time limit of {run_time} seconds and a random seed of {seed}'''
    )
    np.random.seed(seed)
    random.seed(np.random.randint(999999))
    instance_name, city_data = load_data(inst)
    tracer = Tracer(method=algorithm_to_run, instance=instance_name, seed=seed,
                    cutoff=run_time)
    score, solution = None, None
    if algorithm_to_run == 'LS1':
        score, solution = genetic_algorithm.solve(
            data=city_data, timer=early_stop_checker(seconds=run_time),
            tracer=tracer)
    elif algorithm_to_run == 'BnB':
        score, solution = BnB.solve(
            data=city_data, timer=early_stop_checker(seconds=run_time),
            tracer=tracer)
    elif algorithm_to_run == 'LS2':
        score, solution = two_opt.solve(
            data=city_data, timer=early_stop_checker(seconds=run_time),
            tracer=tracer)
    elif algorithm_to_run == 'LS3':
        score, solution = genetic_algorithm_opt_2_hybrid.solve(
            data=city_data, timer=early_stop_checker(seconds=run_time),
            tracer=tracer)
    elif algorithm_to_run == 'Approx':
        score, solution = nearest_neighbor.solve(
            data=city_data, timer=early_stop_checker(seconds=run_time),
            tracer=tracer)
    if not os.path.exists('output'):
        os.makedirs('output')
    save_solution_file(score, solution, method=algorithm_to_run,
                       instance=instance_name, seed=seed, cutoff=run_time)
    tracer.write_to('output/')

def begin_trace(args=None, pid=None):
    # Load the functions defined in the autogenerated
    # ltraceosx/data/funcs.json and overridden by special.json in the same
    # directory.
    data_dir = os.path.join(os.path.dirname(__file__), 'data')
    funcs_filename = os.path.join(data_dir, 'funcs.json')
    special_filename = os.path.join(data_dir, 'special.json')
    funcs = json.loads(open(funcs_filename).read())
    funcs.update(json.loads(open(special_filename).read()))

    # Attach to, or launch, program
    tracer = Tracer()
    if pid is not None:
        path = debug.path_for_pid(pid)
        if not os.path.exists(path):
            raise IOError("could not find file %r for pid %r." % (path, pid))
        tracer.attach(pid)
        attached = True
    else:
        path = args[0]
        if not os.path.exists(path):
            raise IOError("could not find file %r." % (path,))
        tracer.start(args)

    # Get information about the stubs section
    stubs = load_stubs(path, 0x1000007 if tracer.is64 else 0x7)
    stubs_content = stubs['stubs_content']

    # Find stubs in memory
    stubs_address = tracer.search_memory(stubs_content[:24])
    if stubs_address is None:
        raise LtraceException("couldn't find stubs in memory.")

    # Add breakpoints to every entry in the stubs section.
    calling_convention = SystemV64ABI if tracer.is64 else Cdecl
    for i in range(0, len(stubs_content), 6):
        name = stubs['stubs_names'][i / 6]
        if name not in funcs:
            print 'Warning: no prototype for function: %r' % name
        data = funcs.get(name, {})
        breakpoint = LibraryCallBreakpoint(name, data, calling_convention)
        tracer.add_breakpoint(stubs_address + i, breakpoint)
    del funcs

    # Go!
    tracer.run()
    return 0

def __init__(self, loggingServer, loggingDb, loggingUsername, loggingPassword,
             instanceUsername, instancePassword, storageAccountKey, environment):
    self.loggingServerName = loggingServer
    self.loggingDatabaseName = loggingDb
    self.loggingUsername = loggingUsername
    self.loggingPassword = loggingPassword
    self.instanceUsername = instanceUsername
    self.instancePassword = instancePassword
    self.storageAccountKey = storageAccountKey
    self.environment = environment
    self.tracer = Tracer(loggingServer, loggingDb, loggingUsername,
                         loggingPassword, environment)
    self.orch = Orchestrator(loggingServer, loggingDb, loggingUsername,
                             loggingPassword, environment, self.tracer)

def _start_tracer(self):
    """Start a new Tracer object, and store it in self.tracers."""
    tracer = Tracer()
    tracer.data = self.data
    tracer.should_trace = self.should_trace
    tracer.should_trace_cache = self.should_trace_cache
    tracer.start()
    self.tracers.append(tracer)

def marzullo_stream(threshold):
    global start
    global end
    start = time.perf_counter()
    # Initialize our cache
    cache = IntervalCache(100000, threshold)
    trace = Tracer()

    def generateTransaction(min, max):
        cache.add(min, max)

    trace.start(generateTransaction)
    end = time.perf_counter()
    return cache

def __init__(
    self,
    vertices,
    faces,
    normals=None,
    center=(0, 0, 0),
    scale=1.0,
    auto_scale=False,
    auto_flip_normal=False,
    shading="flat",
    auto_smooth_normals=False,
):
    center = numpy.reshape(numpy.array(center), (1, 3))
    self.vertices = to_vector_array(vertices)
    self.faces = to_vector_array(faces, dtype=numpy.int32)
    if auto_scale:
        current_center, current_size = self.get_bounding_cube()
        self.vertices = self.vertices - current_center[numpy.newaxis, :]
        if current_size > 0.0:
            scale = scale / float(current_size * 0.5)
        print "auto-scaling to %.1f%%" % (scale * 100)
    Tracer.__init__(self, position=center, scaling=scale)
    self._auto_flip_normal = auto_flip_normal
    self.shading = shading
    self.unique_tracer_id = "_" + self.shading + "_autoflip_%s" % self.auto_flip_normal
    if self.shading != "flat":
        if auto_smooth_normals:
            normals = generate_smooth_normals(self.vertices, self.faces)
        self.normals = to_vector_array(normals)
        # Each vertex needs a normal (the original asserted on a tuple,
        # which is always true; compare the counts instead).
        assert self.normals.shape[0] == self.vertices.shape[0]
    else:
        self.normals = None
        assert normals is None

def optimize(data, timer, tracer: Tracer):
    remaining_cities = list(data)
    first_city = random.choice(remaining_cities)
    greedy_path = [first_city]
    current_cost = utilities.tour_cost(remaining_cities)
    while len(remaining_cities) > 0 and timer(q=current_cost):
        best_city_index = None
        best_score = float('inf')
        for i, city in enumerate(remaining_cities):
            score = utilities.distance(greedy_path[-1], city)
            if score < best_score:
                best_city_index = i
                best_score = score
        greedy_path.append(remaining_cities.pop(best_city_index))
        current_cost = utilities.tour_cost(greedy_path + remaining_cities)
        tracer.next_result(current_cost)
    final_cost = utilities.tour_cost(greedy_path)
    return final_cost, greedy_path

def load_on():
    try:
        port = FakePort(fake_load_on)
        tracer = Tracer(0x16)
        t_ser = TracerSerial(tracer, port)
        t_ser.send_command(0xAA, 0x01, 0x01)
        # data = t_ser.receive_result(13)
        data = t_ser.receive_result()
        load_state = data.load_state
        return render_template('load_on.html', load_state=load_state)
    except (IndexError, IOError) as e:
        return jsonify({'error': str(e)}), 503

def setUp(self):
    tracer = Tracer(16)
    tracer.get_command_bytes = Mock(return_value=fixture_data)
    tracer.add_crc = Mock(return_value=fixture_data)
    tracer.verify_crc = Mock(return_value=True)
    tracer.get_result = Mock(return_value=Command(0x12, fixture_data))
    self.ts = TracerSerial(tracer, None)

def start():
    global project, tracer, inspector
    try:
        project = Project.from_directory(find_project_directory(os.getcwd()))
        execution = Execution(project)
        inspector = Inspector(execution)
        tracer = Tracer(inspector)
        tracer.btracer.setup()
        sys.settrace(tracer.tracer)
    except PythoscopeDirectoryMissing:
        print "Can't find .pythoscope/ directory for this project. " \
              "Initialize the project with the '--init' option first. " \
              "Pythoscope tracing disabled for this run."

def render(self, scene, camera, width, height, super_sampling=1,
           logging=True, gpu=False):
    """Renders a scene"""
    if gpu:
        tracer = Tracer_gpu()
    else:
        tracer = Tracer()
    self.__tiles = Queue()
    self.__rendered_tiles = Queue()
    self.__logging = logging
    # setup rendering threads
    if logging:
        print("render using {0} threads with {1}x{1} tiles".format(
            self.__threads, self.__tilesize))
    workers = []
    for i in range(self.__threads):
        thread = threading.Thread(target=self.__render_portion,
                                  args=(tracer, scene, camera, width, height,
                                        super_sampling))
        thread.start()
        workers.append(thread)
    # split image into tiles -> threads will begin computing
    self.__total_tiles = 0
    for y in range(0, height, self.__tilesize):
        for x in range(0, width, self.__tilesize):
            self.__tiles.put((x, y, x + self.__tilesize, y + self.__tilesize))
            self.__total_tiles += 1
    # wait and stop workers
    for i in range(self.__threads):
        self.__tiles.put(None)
    for thread in workers:
        thread.join()
    # merge results
    image = {}
    while not self.__rendered_tiles.empty():
        for tile in self.__rendered_tiles.get():
            for k, v in tile.items():
                image[k] = v
    return image

def solve(randsol, fitness, solver=None, budget=None, effort=1.5,
          fine_ops=True, str_trace=True):
    ##### PARAMETERS
    ### create a tracer object
    tr = Tracer(rs=randsol, tt=str_trace)
    ### associate wrapper and tracer
    tr.acquire_wrapper(wr)
    ### create an instance of the search operators
    ops = OPS(fitness, fine_ops, tr)

    ##### BUDGET AND EFFORT
    if not budget:
        # If budget is given, use that to determine the iteration
        # budget. This allows us to specify the number of iterations
        # directly, which is useful for fair benchmarking. Otherwise,
        # estimate genotype size and calculate budget.
        budget = int(avg_size_trace(tr)**effort)
    # print(" budget = ", budget)

    ##### SOLVER
    if solver in solver_name:
        #### import solver module
        solver_module = __import__(solver_name[solver])
        #### get solver class
        Solver_Class = getattr(
            solver_module, solver)  # assumes class name same as solver acronym
        #### create an instance of the solver
        search_algorithm = Solver_Class(ops, budget)
        #### run the solver
        (sol, fit) = search_algorithm.run()
        #### collect data
        solve.data = search_algorithm.data
        #### release wrapper
        tr.release_wrapper(wr)
        #### return solution
        return (sol.pheno, fit)
    else:
        print("solver %s not available!" % solver)
        print("solver = RS | HC | LA | EA | PS")

def test_sanity(self):
    class A(object):
        def __init__(self, x):
            self.x = x

        def foo(self, y):
            return self.x + y

        def bar(self):
            return 'bar!'

    tracer = Tracer(A(10), self.logger)
    x = tracer.foo(3)
    self.assertEqual(x, 13)
    self.assertEqual(self.logger.lines, [
        "foo called with a=(3,), kw={}",
        "foo returned 13",
    ])
    x = tracer.bar()
    self.assertEqual(x, 'bar!')
    self.assertEqual(self.logger.lines[2:], [
        "bar called with a=(), kw={}",
        "bar returned bar!",
    ])

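# A minimal sketch (an assumption, not the project's actual Tracer class) of a
# delegating proxy that would satisfy the expectations in test_sanity above:
# it wraps an object and appends a "called"/"returned" line to logger.lines
# around every method call. The name LoggingTracer is illustrative only.
class LoggingTracer(object):
    def __init__(self, wrapped, logger):
        self._wrapped = wrapped
        self._logger = logger

    def __getattr__(self, name):
        attr = getattr(self._wrapped, name)
        if not callable(attr):
            return attr

        def traced(*a, **kw):
            # e.g. "foo called with a=(3,), kw={}" / "foo returned 13"
            self._logger.lines.append("%s called with a=%r, kw=%r" % (name, a, kw))
            result = attr(*a, **kw)
            self._logger.lines.append("%s returned %s" % (name, result))
            return result

        return traced
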
def load_on():
    try:
        port = Serial('/dev/ttyAMA0', 9600, timeout=1)
        tracer = Tracer(0x16)
        t_ser = TracerSerial(tracer, port)
        t_ser.send_command(0xAA, 0x01, 0x01)
        # data = t_ser.receive_result(13)
        data = t_ser.receive_result()
        port.close()
        load_state = data.load_state
        return render_template('load_on.html', load_state=load_state)
    except (IndexError, IOError) as e:
        port.reset_input_buffer()
        port.reset_output_buffer()
        return jsonify({'error': str(e)}), 503

def get_data():
    try:
        port = Serial('/dev/ttyAMA0', 9600, timeout=1)
        port.reset_input_buffer()
        port.reset_output_buffer()
        tracer = Tracer(0x16)
        t_ser = TracerSerial(tracer, port)
        t_ser.send_command(0xA0)
        # sleep(1)
        # data = t_ser.receive_result(36)
        data = t_ser.receive_result()
        port.close()
        # operating parameters
        batt_voltage = data.batt_voltage
        batt_full_voltage = data.batt_full_voltage
        batt_overdischarge_voltage = data.batt_overdischarge_voltage
        batt_temp = data.batt_temp
        pv_voltage = data.pv_voltage
        charge_current = data.charge_current
        load_on = data.load_on
        load_amps = data.load_amps
        load_overload = data.load_overload
        load_short = data.load_short
        batt_overdischarge = data.batt_overdischarge
        batt_full = data.batt_full
        batt_overload = data.batt_overload
        batt_charging = data.batt_charging
        return render_template('data.html',
                               batt_voltage=batt_voltage,
                               batt_full_voltage=batt_full_voltage,
                               batt_overdischarge_voltage=batt_overdischarge_voltage,
                               batt_temp=batt_temp,
                               pv_voltage=pv_voltage,
                               charge_current=charge_current,
                               load_on=load_on,
                               load_amps=load_amps,
                               load_overload=load_overload,
                               load_short=load_short,
                               batt_overdischarge=batt_overdischarge,
                               batt_full=batt_full,
                               batt_overload=batt_overload,
                               batt_charging=batt_charging)
    except (IndexError, IOError) as e:
        port.reset_input_buffer()
        port.reset_output_buffer()
        return jsonify({'error': str(e)}), 503

def Create(cls, plugin_config, **kwargs):
    tracing_config = plugin_config.TRACING_CONFIG
    span_decorator = kwargs.pop('span_decorator', None)
    service_name = plugin_config.TRACING_SERVICE_NAME
    validate = plugin_config.TRACING_VALIDATE
    config = Config(config=tracing_config, service_name=service_name,
                    validate=validate)
    tracer = config.initialize_tracer()
    tracer_interceptor = open_tracing_server_interceptor(
        tracer,
        log_payloads=plugin_config.TRACING_LOG_PAYLOAD,
        span_decorator=span_decorator)
    return Tracer(tracer, tracer_interceptor, intercept_server)

def update_tracers(tracers, program_names):
    existing = set(tracers.keys())
    running = set(p.pid for p in process_iter()
                  if process_matches(p, program_names))
    forbidden = set(t.gdb.process.pid
                    for t in tracers.itervalues()).union(set([getpid()]))
    running -= forbidden
    new = running - existing
    finished = existing - running
    for pid in finished:
        del tracers[pid]
    for pid in new:
        try:
            tracers[pid] = Tracer(pid)
        except (NoSuchProcess, AccessDenied):
            pass

def get_data():
    try:
        port = FakePort(fake)
        tracer = Tracer(0x16)
        t_ser = TracerSerial(tracer, port)
        t_ser.send_command(0xA0)
        # data = t_ser.receive_result(36)
        data = t_ser.receive_result()
        # operating parameters
        batt_voltage = data.batt_voltage
        batt_full_voltage = data.batt_full_voltage
        batt_overdischarge_voltage = data.batt_overdischarge_voltage
        batt_temp = data.batt_temp
        pv_voltage = data.pv_voltage
        charge_current = data.charge_current
        load_on = data.load_on
        load_amps = data.load_amps
        load_overload = data.load_overload
        load_short = data.load_short
        batt_overdischarge = data.batt_overdischarge
        batt_full = data.batt_full
        batt_overload = data.batt_overload
        batt_charging = data.batt_charging
        return render_template(
            'data.html',
            batt_voltage=batt_voltage,
            batt_full_voltage=batt_full_voltage,
            batt_overdischarge_voltage=batt_overdischarge_voltage,
            batt_temp=batt_temp,
            pv_voltage=pv_voltage,
            charge_current=charge_current,
            load_on=load_on,
            load_amps=load_amps,
            load_overload=load_overload,
            load_short=load_short,
            batt_overdischarge=batt_overdischarge,
            batt_full=batt_full,
            batt_overload=batt_overload,
            batt_charging=batt_charging)
    except (IndexError, IOError) as e:
        return jsonify({'error': str(e)}), 503

intruder_type = intruder.NonFlocker
FPR = np.zeros((3, 2))
TPR = np.zeros((3, 2))
for i in range(0, len(layouts)):
    layout = layouts[i]
    for j in range(0, len(intruders)):
        intruder_type = intruders[j]
        TPRs = []
        FPRs = []
        for k in range(0, 3):
            w = layout(width, height, good_count, bad_count, p_std, v_std,
                       intruder_type)
            w.positions = [pygame.Vector2(0, 0)] * bird_count
            charter = Tracer(width, 10, 100, good_count, bad_count)
            for step in range(0, layout_time[i]):
                w.update(1)
                charter.track(w)
            P = charter.TP + charter.FN
            N = charter.TN + charter.FP
            FPRs.append(charter.FP / N)
            TPRs.append(charter.TP / P)
        TPR[i][j] = sum(TPRs) / len(TPRs)
        FPR[i][j] = sum(FPRs) / len(FPRs)
        print(i, j)
for j in range(0, len(intruders)):
    print('\\multirow{2}{5em}{Nonflocker} & TPR &', end='')
    for i in range(0, len(layouts)):

    ], Camera(np.array([3, 2, 4]), np.array([-1, 0.5, 0])))


def create_bonus():
    return Scene([
        Plan(np.array([0, 1, 0]), 0, CheckSurface()),
        Sphere(MirrorSurface(), np.array([0, 1, -0.25]), 1),
        Sphere(RedSurface(), np.array([-1, 0.5, 1.5]), 0.5)
    ], [
        Light(np.array([-2, 2.5, 0]), np.array([0.8, 0.8, 0.8])),
        Light(np.array([1.5, 2.5, 1.5]), np.array([0.8, 0.8, 0.8]))
    ], Camera(np.array([3, 2, 4]), np.array([-1, 0.5, 0])))


width = 250
height = 250
img = Image.new('RGB', (width, height), "black")
pixels = img.load()
start = time.time()
tracer = Tracer(5)
tracer.render(create_def(), pixels, width, height)
end = time.time()
print("Time spent : {}".format(end - start))
img.show()
img.save("result.bmp")

def __init__(self, pos, R):
    Tracer.__init__(self, position=pos)
    self.R = R

print(sue.pay())
print(bob.name)
print(bob.pay())
print([bob.fetches, sue.fetches])

##################################################

from tracer import Tracer            # In module tracer.py


@Tracer
class MyList(list): pass             # MyList = Tracer(MyList)


x = MyList([1, 2, 3])                # calls Tracer.__call__
x.append(4)                          # calls __getattr__, then append
x.wrapped

WrapList = Tracer(list)              # Handmade decoration
x = WrapList([4, 5, 6])
x.append(7)
x.wrapped

##################################################


class Tracer:
    def __init__(self, aClass):               # At the @ decoration stage
        self.aClass = aClass                  # Uses attributes of the decorator instance

    def __call__(self, *args, **kwargs):      # When an instance is created
        self.wrapped = self.aClass(*args, **kwargs)   # Retains only one (the most recent) instance per class!
        return self

    def __getattr__(self, attrname):
        print('Trace: ' + attrname)
        return getattr(self.wrapped, attrname)

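# Hedged sketch (not part of the original module): one way to avoid the
# "only the most recent instance is retained" pitfall noted above is to have
# the decorator return a fresh wrapper object per created instance instead of
# storing the wrapped object on the decorator itself. The name
# TracerPerInstance is illustrative only.
def TracerPerInstance(aClass):
    class Wrapper:
        def __init__(self, *args, **kwargs):
            # One wrapped object per Wrapper instance
            self.wrapped = aClass(*args, **kwargs)

        def __getattr__(self, attrname):
            print('Trace: ' + attrname)
            return getattr(self.wrapped, attrname)

    return Wrapper
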
def __init__(self, normal, h):
    Tracer.__init__(self)
    self.normal = normalize_tuple(normal)
    self.h = h

def setUp(self):
    self.t = Tracer(16)

def make_functions(self, template_env):
    funcs = Tracer.make_functions(self, template_env)
    for component in self.components:
        subfuncs = component.make_functions(template_env)
        funcs = dict(funcs.items() + subfuncs.items())
    return funcs

        grid.addWidget(buttons, 4, 1)
        self.setLayout(grid)

    @staticmethod
    def get_band_n_rate(oldband, oldrate, parent=None):
        popup = QTracerWindow.BandAndRateWindow(oldband, oldrate, parent)
        accepted = popup.exec_()
        newband, newrate = popup.band_entry.text(), popup.rate_entry.text()
        if accepted == QtGui.QDialog.Accepted:
            try:
                if len(newband) != 0:
                    int(newband)
                float(newrate)
            except ValueError:
                QtGui.QMessageBox.critical(parent, 'Error', 'Bad entry !')
        return accepted == QtGui.QDialog.Accepted, newband, newrate


if __name__ == '__main__':
    main_tracer = Tracer()
    main_tracer.run(show=False)
    app = QtGui.QApplication(sys.argv)
    main_window = QTracerWindow(main_tracer)
    sys.exit(app.exec_())

def __init__(self, origin, components):
    Tracer.__init__(self, position=origin)
    self.components = components
    self.unique_tracer_id = '_' + '_'.join(
        [c.__class__.__name__ for c in self.components])

post_parser.add_argument('lng', type=float, help='No lng given')
post_parser.add_argument('day', type=int, help='No Day given')
post_parser.add_argument('month', type=int, help='No Month Given')
post_parser.add_argument('year', type=int, help='No Year Given')
post_parser.add_argument('list', type=str, help='No help given')
post_parser.add_argument('covid', type=bool, help='No covid boolean given')

resource_fields = {
    'address': fields.String,
    'name': fields.String,
    'adjList': fields.String,
    'lat': fields.Float,
    'lng': fields.Float,
    'covid': fields.Boolean
}

tracer = Tracer(len(User.query.all()))


def sendWarning(ad):
    result = User.query.filter_by(address=ad).first()
    result.covid = True
    db.session.commit()


class DataBase(Resource):
    @marshal_with(resource_fields)
    def get(self, a):
        result = User.query.filter_by(address=a).first()
        if not result:
            abort(404, message='Address does not exist')
        return result

def processAlgorithm(self, parameters, context, feedback):
    try:
        dir_path = os.path.dirname(os.path.realpath(__file__))
        sys.path.insert(0, dir_path)
        import porepy as pp
        from flow import Flow_Model1
        from tracer import Tracer
        from fcts import read_network, bc_flag
        from math import ceil
        import numpy as np
    except Exception as e:
        feedback.reportError(QCoreApplication.translate('Error', '%s' % (e)))
        feedback.reportError(QCoreApplication.translate('Error', ''))
        feedback.reportError(QCoreApplication.translate('Error', 'Please install porepy dependencies'))
        return {}

    # Parameters
    layer = self.parameterAsLayer(parameters, self.Network, context)
    h = self.parameterAsDouble(parameters, self.boundSize, context) * pp.METER
    steps = self.parameterAsInt(parameters, self.steps, context)
    endv = self.parameterAsInt(parameters, self.end, context) * pp.SECOND
    tol = 1e-8 * pp.METER
    d = self.parameterAsInt(parameters, self.Direction, context)
    lP = parameters[self.lowPressure] * pp.PASCAL
    hP = parameters[self.highPressure] * pp.PASCAL
    mu = parameters[self.mu] * pp.PASCAL * pp.SECOND

    if d == 0:
        direction = "left_to_right"
    elif d == 1:
        direction = "right_to_left"
    elif d == 2:
        direction = "bottom_to_top"
    else:
        direction = "top_to_bottom"

    if lP > hP:
        feedback.reportError(QCoreApplication.translate('Error', 'Low pressure value is higher than high pressure value.'))
        return {}

    params = {'INPUT': layer, 'OUTPUT': 'memory:'}
    explode = st.run("native:explodelines", params, context=context, feedback=feedback)
    layer2 = explode['OUTPUT']

    # Create the new fields for the fracture lines
    newFields = ['ID', 'Pressure', 'Flux', 'Azimuth', 'Tracer', 'StartTime', 'EndTime']

    if layer.fields().indexFromName('Transmisiv') == -1:
        feedback.reportError(QCoreApplication.translate('Error', 'Please calculate the transmissivity using the Aperture tool or define a new transmissivity field labelled "Transmisiv" in mD.m'))
        return {}

    fields = QgsFields()
    for field in layer.fields():
        if field.name() not in newFields:
            fields.append(QgsField(field.name(), field.type()))
    for field in newFields[:-2]:
        fields.append(QgsField(field, QVariant.Double))
    fields.append(QgsField('StartTime', QVariant.DateTime))
    fields.append(QgsField('EndTime', QVariant.DateTime))

    (writer, dest_id) = self.parameterAsSink(parameters, self.outLine, context,
                                             fields, QgsWkbTypes.LineString,
                                             layer.sourceCrs())

    # Define fracture geometries
    outDir = os.path.join(tempfile.gettempdir(), 'PorePy')
    if not os.path.exists(outDir):
        os.mkdir(outDir)
    fname = ''.join(random.choice(string.ascii_lowercase) for i in range(10))
    outName = os.path.join(outDir, '%s.txt' % (fname))

    k, l = {}, {}  # k is the permeability in m2, l is the fracture length in m
    data = {}
    P = 1000000  # Tolerance for the point precision

    feedback.pushInfo(QCoreApplication.translate('Info', 'Reading Fracture Network'))

    field_check = layer2.fields().indexFromName('origLen')
    if field_check != -1:
        feedback.reportError(QCoreApplication.translate('Info', 'Warning: Applying the origLen field to calculate fracture length'))

    W = False
    with open(outName, 'w') as f:
        f.write('ID,startx,starty,endx,endy')
        f.write('\n')
        for enum, feature in enumerate(layer2.getFeatures()):
            try:
                geom = feature.geometry().asPolyline()
            except Exception:
                geom = feature.geometry().asMultiPolyline()[0]
            start, end = geom[0], geom[-1]
            startx, endx = ceil(start.x() * P) / P, ceil(end.x() * P) / P
            starty, endy = ceil(start.y() * P) / P, ceil(end.y() * P) / P
            t = feature['Transmisiv']
            if t == 0:
                W = True
            if field_check != -1:
                lValue = feature['origLen']
                if type(lValue) != float:
                    feedback.reportError(QCoreApplication.translate('Info', 'Warning: origLen field contains non-float values'))
                    return {}
                l[feature.id()] = lValue / feature.geometry().length()
            else:
                l[feature.id()] = feature.geometry().length()
            if type(t) != float:
                feedback.reportError(QCoreApplication.translate('Info', 'Warning: Transmisivity field contains non-float values'))
                return {}
            k[feature.id()] = t * pp.MILLIDARCY * pp.METER
            row = '%s,%s,%s,%s,%s' % (feature.id(), startx, starty, endx, endy)
            f.write(row)  # ID,startx,starty,endx,endy
            f.write('\n')
            rows = []
            for field in layer.fields():
                if field.name() not in newFields:
                    rows.append(feature[field.name()])
            data[feature.id()] = rows

    if len(data) == 0:
        feedback.reportError(QCoreApplication.translate('Info', 'No fractures found in the input dataset'))
        return {}
    elif enum > 2000:
        feedback.reportError(QCoreApplication.translate('Info', 'Warning - Fracture network exceeds 2000 branches. To improve performance consider subsampling and/or simplifying the Fracture Network using the "Simplify Network" tool.'))

    if W:
        feedback.reportError(QCoreApplication.translate('Info', 'Warning - Transmisivity value(s) of 0 in the fracture network will not produce flow'))

    network, mask, pts_shift = read_network(outName, tol=tol)
    mesh_args = {"mesh_size_frac": h, "mesh_size_bound": h, 'file_name': outDir}

    feedback.pushInfo(QCoreApplication.translate('Info', 'Creating Mesh from %s' % (network)))
    try:
        gb = network.mesh(mesh_args, dfn=True, tol=tol)
    except Exception as e:
        feedback.reportError(QCoreApplication.translate('Info', str(e)))
        feedback.reportError(QCoreApplication.translate('Info', ''))
        feedback.reportError(QCoreApplication.translate('Info', 'Failure creating the fracture network mesh. Please check that NetworkGT was properly configured to use gmsh meshing according to the installation guidelines'))

    flow = Flow_Model1(gb)

    param_flow = {
        "tol": tol,
        "k": np.array([k[m] for m in mask]) / mu,
        "length_ratio": np.array([l[m] for m in mask]),
        "flow_direction": direction,
        "low_value": lP,
        "high_value": hP,
        "north": np.array([0, 1, 0])
    }

    param_tracer = {
        "tol": tol,
        "num_steps": steps,
        "end_time": endv,
        "flow_direction": direction,
        "low_value": lP,
        "high_value": hP
    }

    flow.set_data(param_flow, bc_flag)

    feedback.pushInfo(QCoreApplication.translate('Info', 'Solving Fluid Flow'))
    flow.solve()

    # get the results for qgis
    if steps > 1:
        feedback.pushInfo(QCoreApplication.translate('Info', 'Solving Tracer'))
        tracer = Tracer(gb)
        tracer.set_data(param_tracer, bc_flag)
        tracer.solve()

    feedback.pushInfo(QCoreApplication.translate('Info', 'Creating Feature Layer'))

    fet = QgsFeature()
    for g, d in gb:
        # skip the 0d grids
        if g.dim == 0:
            continue
        # get the data for the current grid
        p = d[pp.STATE][flow.pressure]
        norm_flux = d[pp.STATE][flow.norm_flux]
        azimuth = d[pp.STATE][flow.azimuth]
        # get the cell to nodes map
        cell_nodes = g.cell_nodes()
        indptr = cell_nodes.indptr
        indices = cell_nodes.indices
        # all the data that need to be exported are given as cell_SOMETHING
        for c in np.arange(g.num_cells):
            nodes_loc = indices[indptr[c]:indptr[c + 1]]
            # each column gives a node for the segment
            # NOTE: the nodes are translated compared to the original network
            pnt = g.nodes[:2, nodes_loc] + pts_shift
            # value of the computed fields
            cell_pressure = p[c]
            # flux norm
            cell_norm_flux = norm_flux[c]
            # the fracture id and data
            cell_frac_id = mask[g.frac_num]
            rows = data[cell_frac_id].copy()
            # value of the azimuth
            # NOTE: velocity zero gives nan as azimuth angle
            cell_azimuth = math.degrees(azimuth[c])
            if cell_norm_flux == 0:
                rows.extend([
                    float(cell_frac_id),
                    float(cell_pressure),
                    float(cell_norm_flux), NULL
                ])
            else:
                # cell_azimuth %= 360
                rows.extend([
                    float(cell_frac_id),
                    float(cell_pressure),
                    float(cell_norm_flux),
                    float(cell_azimuth)
                ])
            points = [
                QgsPointXY(pnt[0][0], pnt[1][0]),
                QgsPointXY(pnt[0][1], pnt[1][1])
            ]
            geom = QgsGeometry.fromPolylineXY(points)
            fet.setGeometry(geom)
            if steps > 1:
                time = datetime.datetime(1, 1, 1, 0, 0, 0)
                deltaTime = datetime.timedelta(seconds=endv / steps)
                for time_step, current_time in enumerate(tracer.all_time):
                    var_name = tracer.variable + "_" + str(time_step)
                    tr = d[pp.STATE][var_name]
                    cell_tracer = tr[c]
                    newRows = rows.copy()
                    newRows.append(float(round(cell_tracer, 6)))
                    newRows.append(str(time))
                    time += deltaTime
                    newRows.append(str(time))
                    fet.setAttributes(newRows)
                    writer.addFeature(fet, QgsFeatureSink.FastInsert)
            else:
                fet.setAttributes(rows)
                writer.addFeature(fet, QgsFeatureSink.FastInsert)

    try:
        os.remove(outName)  # Delete temp csv file
    except Exception:
        pass

    return {self.outLine: dest_id}

def __init__(self, pos=(0, 0, 0)):
    Tracer.__init__(self, position=pos)

def __init__(self, eq, center=(0, 0, 0), scale=1.0, bndR=None,
             max_itr=1500, precision=0.001, self_intersection=True):
    Tracer.__init__(self, position=center, scaling=scale)
    self.unique_tracer_id = str(id(self))
    self.center = tuple(center)
    self.no_self_intersection = not self_intersection
    self.precision = precision
    self.max_itr = max_itr
    self.bndR = bndR

    import sympy
    import sympy.core.numbers

    xyz = sympy.symbols('x y z')
    t = sympy.symbols('t')
    ray = sympy.symbols('ray_x ray_y ray_z')
    origin = sympy.symbols('origin_x origin_y origin_z')
    pos = sympy.symbols('pos.x pos.y pos.z')

    self.eq = sympy.sympify(eq)
    self.ray_paramd = self.eq.subs([
        (xyz[i], ray[i] * t + origin[i]) for i in range(3)
    ])
    pos_eq = self.eq.subs([(xyz[i], pos[i]) for i in range(3)])

    self.gradient = [sympy.diff(pos_eq, pos[i]) for i in range(3)]
    self.derivative = sympy.diff(self.ray_paramd, t)

    # Must replace some expressions to make them OpenCL
    class Printer(sympy.printing.str.StrPrinter):
        def _print_Pow(self, expr):
            return ImplicitSurface.print_pow(expr)

    old_ptr = sympy.Basic.__str__
    sympy.Basic.__str__ = Printer().doprint

    # Print as an interval arithmetic macro expression
    class IAPrinter(sympy.printing.str.StrPrinter):
        def _print_Float(self, expr):
            return "%gf" % expr

        def _print_Pow(self, expr):
            base = expr.args[0]
            exponent = expr.args[1]
            return "ia_pow%d(%s)" % (int(exponent), base)

        def _print_mul_rec(self, args):
            a = args[0]
            b = args[1]
            if b.is_number:
                a, b = b, a
            if len(args) > 2:
                bstr = self._print_mul_rec(args[1:])
            else:
                bstr = str(b)
            if a.is_number:
                if b.is_number:
                    return "((%s)*(%s))" % (a, b)
                if a >= 0:
                    return "ia_mul_pos_exact(%s,%s)" % (bstr, a)
                else:
                    return "ia_mul_neg_exact(%s,%s)" % (bstr, a)
            else:
                return "ia_mul(%s,%s)" % (a, bstr)

        def _print_Mul(self, expr):
            return self._print_mul_rec(expr.args)

    sympy.Basic.__str__ = lambda self: IAPrinter().doprint(self)
    self.f_code = self.compute_f_code()
    self.df_code = self.compute_df_code()

    sympy.Basic.__str__ = lambda self: Printer().doprint(self)
    self.gradient_code = [str(self.gradient[i]) for i in range(3)]

    sympy.Basic.__str__ = old_ptr

    def accept(self, event=None):
        self.accepted = True
        self.close()

    @staticmethod
    def get_band_n_rate(oldband, oldrate, parent=None):
        popup = TkTracerWindow.BandAndRateWindow(oldband, oldrate, parent)
        popup.window.grab_set()
        popup.window.wait_window(popup.window)
        if popup.accepted:
            try:
                if len(popup.band_value) != 0:
                    int(popup.band_value)
                float(popup.rate_value)
            except ValueError:
                messagebox.showerror('Error', 'Bad entry !')
        return popup.accepted, popup.band_value, popup.rate_value


if __name__ == '__main__':
    main_tracer = Tracer()
    main_tracer.run(show=False)
    app = tk.Tk()
    main_window = TkTracerWindow(app, main_tracer)
    app.mainloop()

from flask import Flask, jsonify
from time import sleep
from serial import Serial
import sys

sys.path.append('/app/tracer/python')
from tracer import Tracer, TracerSerial, QueryCommand

port = Serial('/dev/ttyAMA0', 9600, timeout=1)
port.flushInput()
port.flushOutput()

tracer = Tracer(0x16)
t_ser = TracerSerial(tracer, port)
query = QueryCommand()

# Rest API
app = Flask(__name__)


@app.route('/', methods=['GET'])
def get_data():
    try:
        t_ser.send_command(query)
        data = t_ser.receive_result()
        return jsonify(batt_voltage=data.batt_voltage,
                       pv_voltage=data.pv_voltage,
                       charge_current=data.charge_current,
                       load_amps=data.load_amps)
    except (IndexError, IOError) as e:
        port.flushInput()

            good_count, bad_count, 1.0, 0.01, intruder.NonFlocker,
            y_max_distance=1000 + (200 * 9))
# w = layouts.Formation(width, height, 100)
# w = layouts.EmptyWorld(width, height, 0)
w.positions = [pygame.Vector2(0, 0)] * bird_count
w.equilibrium = [0.0] * 100

screen = pygame.display.set_mode((width, height + data_height))
data_screen = screen.subsurface(pygame.Rect(0, height, width, data_height))

charter = Tracer(width, 200, 100, good_count, bad_count)

pygame.init()
bird_width = 40
fixed_wing_img = pygame.transform.scale(
    pygame.image.load('images/fixed_wing.png'), (bird_width, bird_width))
bad_img = pygame.transform.scale(pygame.image.load('images/bad.png'),
                                 (bird_width, bird_width))
target_img = pygame.transform.scale(pygame.image.load('images/flag.png'),
                                    (20, 20))
font = pygame.font.SysFont(None, 72)


def main():
    iteration_count = 0