def __init__(self, name="default", send_to_trio=None, receive_from_trio=None):
    """Set up goTenna device state, radio settings and worker threads.

    The trio send/receive channels are handed to the Events helper so
    device callbacks can be forwarded into the async world.
    """
    # Device / connection state.
    self.api_thread = None
    self.status = {}
    self.in_flight_events = {}

    # Flags recording which radio parameters have been pushed to the device.
    self._set_frequencies = False
    self._set_tx_power = False
    self._set_bandwidth = False
    self._set_geo_region = False

    self._settings = goTenna.settings.GoTennaSettings(
        rf_settings=goTenna.settings.RFSettings(),
        geo_settings=goTenna.settings.GeoSettings(),
    )
    self._do_encryption = True
    # Single-element list so callbacks can mutate it in place.
    self._awaiting_disconnect_after_fw_update = [False]

    self.gid = (None,)
    self.geo_region = None
    self.events = events.Events(send_to_trio, receive_from_trio)
    self.gateway = 0
    self.handle_message_thread = threading.Thread(
        target=messages.handle_message, args=[self, self.events.msg])
    self.jumbo_thread = threading.Thread()
    self.cli = False

    # Traffic counters.
    self.bytes_sent = 0
    self.bytes_received = 0
    self.name = name
def __init__(self, parent=None, name=None, grand=None, fl=0):
    """Build the printer-list frame and wire it to the shared Events helper."""
    QFrame.__init__(self, parent)
    self.grand = grand
    if not name:
        self.setName("PrinterWidget")
    self.setPaletteBackgroundColor(QColor(255, 255, 255))
    # These two lines exist only to force KDE icon resources (KIcons) to load.
    empty = KCModule(self, "empty")
    empty.setGeometry(-20, -20, 5, 5)
    self.p = QPainter(self)
    self.languageChange()
    self.Events = events.Events()
    self.connection = self.Events.connection
    self.buttonlist = []
    self.refreshPrinters()
    self.resize(QSize(380, self.height()).expandedTo(self.minimumSizeHint()))
    self.clearWState(Qt.WState_Polished)
def onModuleLoad(self):
    """Create the tab panel and populate the Raphael demo tabs."""
    self.tabs = TabPanel()

    tab_overview = TabContainer()
    self.tabs.add(tab_overview, 'Overview')

    self.tab_events = TabRaphaelContainer()
    self.tab_events.set_headline('Events Example')
    self.tab_events.set_raphael(events.Events(width=600, height=300))
    self.tab_events.set_status('Execute events on Raphael Elemnts')
    self.tabs.add(self.tab_events, 'Events')

    self.tab_graffle = TabRaphaelContainer()
    self.tab_graffle.set_headline(
        'This is a simple example of the Raphael Graffle')
    self.tab_graffle.set_raphael(graffle.Graffle(width=600, height=300))
    self.tabs.add(self.tab_graffle, 'Graffle')

    self.tab_spinner = TabRaphaelContainer()
    self.tab_spinner.set_headline('This Raphael Spinner Example')
    self.tab_spinner.set_raphael(spinner.Spinner(width=600, height=300))
    self.tabs.add(self.tab_spinner, 'Spinner')

    # Show the first tab and let the panel fill the root.
    self.tabs.selectTab(0)
    self.tabs.setWidth("100%")
    self.tabs.setHeight("100%")
    RootPanel().add(self.tabs)
def process(mfloat_path, mfloat, begin, end):
    """Run the full per-float pipeline and return the list of dives.

    Concatenates/decrypts raw files, builds events and profiles, then
    generates per-dive products (logs, environment files, plots) plus
    float-level vitals plots and a KML track.
    """
    absFilePath = os.path.abspath(__file__)
    scriptpath, scriptfilename = os.path.split(absFilePath)
    statisticspath = os.path.join(scriptpath, "statistics")
    if not os.path.exists(statisticspath):
        os.mkdir(statisticspath)

    # Concatenate LOG and BIN files that need it, then decrypt all BIN files.
    utils.concatenate_files(mfloat_path)
    decrypt.decrypt_all(mfloat_path)

    # Build the lists of all mermaid events and profiles recorded by the float.
    mevents = events.Events(mfloat_path)
    ms41s = profile.Profiles(mfloat_path)

    # Group the data into dives.
    mdives = dives.get_dives(mfloat_path, mevents, ms41s)

    # Per-dive products.
    for dive in mdives:
        if not os.path.exists(dive.export_path):
            os.mkdir(dive.export_path)
        dive.generate_datetime_log()
        dive.generate_mermaid_environment_file()
        dive.generate_s41_environment_file()
        dive.generate_dive_plotly(generate_csv_file)

    # Clock-drift correction for every event.
    for dive in mdives:
        dive.correct_events_clock_drift()

    # The float drifts at the surface, so each dive's event locations are
    # estimated using GPS information from the following dive.
    for i in range(len(mdives) - 1):
        mdives[i].compute_events_station_location(mdives[i + 1])

    # Event/profile plots.
    for dive in mdives:
        dive.generate_events_plot()
        dive.generate_profile_plotly(generate_csv_file)
        #if events_plotly:
        #dive.generate_events_plotly()
        #dive.generate_events_sac()
        #dive.generate_statistics(statisticspath)

    # Float-level outputs: KML track and vital-sign plots.
    kml.generate(mfloat_path, mfloat, mdives)
    vitals.plot_battery_voltage(mfloat_path, mfloat + ".vit", begin, end)
    vitals.plot_internal_pressure(mfloat_path, mfloat + ".vit", begin, end)
    vitals.plot_pressure_offset(mfloat_path, mfloat + ".vit", begin, end)

    return mdives
def convertEvents(self):
    """Build the events generator for this universe and run it."""
    print("Creating events...")
    # NOTE(review): these Config() values are fetched but never used below;
    # the calls are kept in case the getters have side effects — confirm
    # against the Config implementation before removing.
    savefile = Config().getSaveData()
    parser = Config().getParser()
    hoi4path = Config().getHoi4Path()
    self.events = events.Events(self.universe)
    self.events.makeEvents()
def __init__(self):
    """Initialise top-level game state (jobs, locations, events, notices)."""
    self.jobs = JOBS.Jobs()
    self.current_day = None  # TODO: Necessary?
    # self.opening_menu = MENU.OpeningMenu()
    # Character('random') would create a character on the start menu,
    # which is an error — so start with no character.
    self.character = None
    self.locations = LOCATIONS.Locations()
    self.events = EVENTS.Events()
    self.notices = Notices()
def __init__(self, x, y, current_name):  # (x, y): where the battle started
    """Set up pygame and the battle state for the cell at (x, y)."""
    pygame.init()
    self.w_event = events.Events()
    self.fraction = attacker  # attackers move first
    battle_cell = core.load_cell(x, y, current_name)
    self.b_map_name = battle_cell[2]
    self.battle = battle.Battle()
    self.resources = resources.Resources()
    self.file = current_name
    # Drawing offsets for the battle map.
    self.indent_x = 100
    self.indent_y = 50
def __init__(self, parent):
    """Build the main calendar frame inside the given Tk root."""
    super().__init__(parent)
    self.parent = parent
    self.parent.title('Terminarz')
    self.parent.iconphoto(self.parent, tk.PhotoImage(file='icon.png'))
    self.grid()
    self.events = events.Events()
    # Route window close through our own handler.
    self.parent.protocol('WM_DELETE_WINDOW', self.close)
    self.addWindow = None
    self.modifyWindow = None
    # Remember the last two selected columns.
    self.prevColumn = ''
    self.prevPrevColumn = ''
    self.gui()
def __init__(self, grand, parent=None, name=None, modal=0, fl=0):
    """Build the CUPS server-settings dialog and hook up its buttons."""
    QDialog.__init__(self, parent, name, modal, fl)
    if not name:
        self.setName("cupsDialog")
    self.Events = events.Events()
    self.grand = grand

    # Bottom row of buttons.
    self.applybutton = QPushButton(self, "applybutton")
    self.applybutton.setGeometry(QRect(220, 220, 90, 30))
    self.cancelbutton = QPushButton(self, "cancelbutton")
    self.cancelbutton.setGeometry(QRect(320, 220, 90, 30))
    self.okbutton = QPushButton(self, "okbutton")
    self.okbutton.setGeometry(QRect(120, 220, 90, 30))

    # Group box holding the server option checkboxes.
    self.cupsbox = QGroupBox(self, "cupsbox")
    self.cupsbox.setGeometry(QRect(20, 20, 400, 190))
    self.remoteany = QCheckBox(self.cupsbox, "remoteany")
    self.remoteany.setGeometry(QRect(20, 20, 371, 31))
    self.shareprinters = QCheckBox(self.cupsbox, "shareprinters")
    self.shareprinters.setGeometry(QRect(20, 50, 371, 31))
    self.remoteadmin = QCheckBox(self.cupsbox, "remoteadmin")
    self.remoteadmin.setGeometry(QRect(20, 80, 371, 31))
    self.usercancel = QCheckBox(self.cupsbox, "usercancel")
    self.usercancel.setGeometry(QRect(20, 110, 371, 31))
    self.debuglog = QCheckBox(self.cupsbox, "debuglog")
    self.debuglog.setGeometry(QRect(20, 140, 371, 31))

    self.languageChange()
    self.loadServerSettings()
    self.resize(QSize(436, 272).expandedTo(self.minimumSizeHint()))
    self.clearWState(Qt.WState_Polished)

    # Apply writes the settings; OK applies then closes; Cancel just closes.
    self.connect(self.applybutton, SIGNAL("clicked()"), self.applyServerSettings)
    self.connect(self.okbutton, SIGNAL("clicked()"), self.applyServerSettings)
    self.connect(self.okbutton, SIGNAL("clicked()"), self, SLOT("close()"))
    self.connect(self.cancelbutton, SIGNAL("clicked()"), self, SLOT("close()"))
def invert_main(): mer_file_path = "tool_invert_mer/" mevents = events.Events(mer_file_path) for event in mevents.events: print event.file_name with open(mer_file_path + event.file_name, 'r') as f: content = f.read() environment = re.findall("<ENVIRONMENT>.+</PARAMETERS>", content, re.DOTALL)[0] event.set_environment(environment) event.find_measured_sampling_frequency() event.correct_date() event.invert_transform() event.plotly(mer_file_path) event.plot(mer_file_path) event.to_sac_and_mseed(mer_file_path, station_number="00", force_without_loc=True)
def __init__(self):
    """Construct the undecorated main window, its menu and desktop tabs."""
    super(Create, self).__init__()
    self.events = events.Events()
    self.set_name('MyWindow')
    self.set_resizable(False)

    # Application menu.
    self.menu = menu.Menu()
    self.menu.set_window(self)
    self.menu.create_menu()

    # Settings handler.
    self.settings = settings.Handler()
    self.settings.reloaded()

    # Window appearance: borderless, positioned, initially invisible.
    self.set_decorated(False)
    self.set_position(1)
    self.set_title(globals.APP_NAME)
    self.set_border_width(0)
    self.set_opacity(0)
    #self.resize(800,600)
    #self.fullscreen()

    table = Gtk.Table(3, 6, False)
    self.new_desktop_list = desktops.Create().get_desktops()

    # Create a new notebook and place the position of the tabs.
    self.notebook = Gtk.Notebook()
    self.notebook.set_show_border(False)
    self.notebook.set_tab_pos(2)
    self.add_desktops()
    table.attach(self.notebook, 0, 6, 0, 1)
    transparency.Transparent.makeTransparent(table)
    self.add(table)

    # Wire window-level signals to the events helper.
    self.connect("destroy", Gtk.main_quit)
    self.connect("button_press_event", self.events.menu_popup)
    self.connect("visibility_notify_event", self.events.window_visibility_event)
    self.connect("key-release-event", self.events.on_key_release)
    #self.show_menu()
    self.show_all()
def main():
    """Wire up the world, snake, food and event threads, then run Tk."""
    # Create the shared queue and the world instance.
    q_instance = queue.Queue()
    world = World()

    # Create snake, food and event-handler instances; run each as a daemon
    # thread so they die with the main loop.
    snake = Snake(world.canvas, q_instance)
    snake.setDaemon(True)
    snake.start()

    food = Food(world.canvas, q_instance)
    food.setDaemon(True)
    food.start()

    event = events.Events(q_instance, snake, food, world.canvas)
    event.setDaemon(True)
    event.start()

    # Start the tkinter main loop.
    tkinter.mainloop()
def main(): # Set working directory in "scripts" if "scripts" in os.listdir("."): os.chdir("scripts") # Set the path for the float datapath = "../server/" object_json_array = list() # Search Mermaid floats mfloats = [p.split("/")[-1][:-4] for p in glob.glob(datapath)] for mfloat in mfloats: # Build list of all mermaid events recorded by the float mevents = events.Events(datapath) # Process data for each dive mdives = dives.get_dives(datapath, mevents) # Compute files for each dive for dive in mdives: object_json_array.append(dive.generateJSON()) #listbox = Listbox(root, listvariable=choices, selectmode="multiple") print json.dumps(object_json_array, indent=4)
def main_script(self):
    """Main polling loop: watch screen state and (re)start/end games.

    Polls once per second. ``self.timer`` counts seconds since the last
    state transition; when it reaches ``settings.timeout`` the game is
    restarted.

    Bugfix: the loop previously did ``self.pre_event_num = self.event_num``,
    reading an attribute this loop never assigns (stale or missing); it now
    persists the *local* ``event_num`` detected this iteration, so the
    transition checks compare against the previous poll as intended.
    """
    eve = events.Events()
    print("start main script")
    while True:
        if self.timer >= settings.timeout:
            print("Time out, restart game!")
            self.start_game()
        screen.screen_monitor()
        event_num = eve.find_case_num(path=self.path2)
        # Act only on state *transitions*, and reset the timeout clock.
        if event_num == 1:
            if self.pre_event_num != 1:
                self.timer = 0
                self.start_game()
        elif event_num == 2:
            if self.pre_event_num != 2:
                self.timer = 0
                self.end_game()
        elif event_num == 3:
            if self.pre_event_num != 3:
                self.timer = 0
                self.end_game()
        self.pre_event_num = event_num  # was: self.event_num (never set)
        time.sleep(1)
        self.timer += 1
def getEvents(self):
    """Compute gait events and return them packaged for the UI.

    The force threshold (in Newton) is normalised by body weight, events
    are calculated once, and the measurement with the *most* event labels
    supplies the label set used to build the events dictionary.

    Bugfixes / cleanups vs. the original:
    - ``maxNumEvents`` is now updated inside the loop; previously it stayed
      0, so the *last* non-empty measurement won rather than the largest.
    - ``calculateEvents`` is called once instead of twice (same inputs).
    - the local name ``set`` no longer shadows the builtin.
    """
    eventsObj = events.Events(self.workingDirectory)
    forceThresholdNewton = get_force_threshold_in_newton(
        self.extra_settings)
    # X Newton / BW kg
    forceThresholdNormalised = forceThresholdNewton / \
        float(self.subjectMetadata["bodyWeight"])

    # One call; index 0 is the event data, index 1 the labels per measurement.
    calcResult = eventsObj.calculateEvents(forceThresholdNormalised)
    eventData = calcResult[0]
    eventLabels = calcResult[1]

    # Pick the label list of the measurement with the most events.
    maxNumEvents = 0
    maxEventList = []
    for measurementName in self.measurementNames:
        if measurementName in eventLabels.keys():
            numEvents = len(eventLabels[measurementName])
            if numEvents > maxNumEvents:
                maxNumEvents = numEvents  # bugfix: track the running maximum
                maxEventList = eventLabels[measurementName]

    eventsDict = {}
    for label in maxEventList:
        # Labels starting with "l"/"L" belong to the left side.
        side = "left" if label.lower().startswith("l") else "right"
        eventsDict[label] = {
            "id": label,
            "set": side,
            "data": qtools.setEventData(eventData, self.measurementNames,
                                        label)
        }
    ev = qtools.loadEvents(maxEventList, eventsDict)
    return ev
def convertEvents(self):
    """Build the events generator from save/game paths and run it."""
    print("Creating events...")
    self.events = events.Events(self.savefile, self.hoi4path,
                                self.parser, self.universe)
    self.events.makeEvents()
def main(): # Set the path for the float mfloat_path = "../processed/" + mfloat + "/" # Get float number mfloat_nb = re.findall("(\d+)$", mfloat)[0] # Copy appropriate files in the directory for f in glob.glob("../processed/"+mfloat+"/*/*.LOG.h"): shutil.copy(f, mfloat_path) for f in glob.glob("../processed/"+mfloat+"/*/*.MER.env"): shutil.copy(f, mfloat_path) for f in glob.glob("../processed/"+mfloat+"/*.LOG.h"): shutil.move(f, f[0:len(f)-2]) for f in glob.glob("../processed/"+mfloat+"/*.MER.env"): shutil.move(f, "../processed/"+mfloat+"/"+f[len(f)-21:len(f)-4]) # Build list of all mermaid events recorded by the float mevents = events.Events(mfloat_path) # Process data for each dive mdives = dives.get_dives(mfloat_path, mevents) # Filter dives between begin and end date for fd in filterDate: fname = fd[0] begin = fd[1] end = fd[2] if fname == mfloat: mdives = [dive for dive in mdives if begin <= dive.date <= end] # Software version print "" print "Software version" for dive in mdives: if dive.is_init: formatted_log = dive.log_content #utils.format_log(dive.log_content) print re.findall(".+soft.+", formatted_log)[0] # Find errors and warnings print "" print "List of errors" for dive in mdives: if dive.is_complete_dive: formatted_log = dive.log_content #utils.format_log(dive.log_content) for err in re.findall(".+<ERR>.+", formatted_log): print err print "" print "List of warnings" for dive in mdives: if dive.is_complete_dive: formatted_log = dive.log_content #utils.format_log(dive.log_content) for wrn in re.findall(".+<WRN>.+", formatted_log): print wrn # Synchronisations GPS print "" print "Synchronisations GPS" pps_detect_list = list() gpsack_list = list() gpsoff_list = list() position_list = list() for dive in mdives: if dive.is_complete_dive: formatted_log = dive.log_content #utils.format_log(dive.log_content) pps_detect_list += re.findall(".+PPS.+", formatted_log) gpsack_list += re.findall(".+GPSACK.+", formatted_log) gpsoff_list += re.findall(".+GPSOFF.+", 
formatted_log) position_list += re.findall(".+N\d+deg\d+\.\d+mn, E\d+deg\d+\.\d+mn.+", formatted_log) if len(pps_detect_list) != len(gpsack_list) and len(gpsack_list) != len(gpsoff_list) \ and len(gpsoff_list) != len(position_list): print "LENGTH ERROR !!!!" else: for pps_detect in pps_detect_list: print pps_detect for gpsack in gpsack_list: print gpsack for gpsoff in gpsoff_list: print gpsoff for position in position_list: print position # Get dive number dive_nb = 0 for dive in mdives: if dive.is_complete_dive: dive_nb += 1 # Temps de pompe pour le bladder full print "" print "Temps de pompe pour le bladder full (s):" temps_bladder_full = list() for dive in mdives: if dive.is_complete_dive: start_filling_date = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)[0][1] bladder_full_date = utils.find_timestampedUTC_values("external bladder full", dive.log_content)[0][1] bdf_time = int(UTCDateTime(bladder_full_date) - UTCDateTime(start_filling_date)) temps_bladder_full.append(bdf_time) for bdft in temps_bladder_full: print bdft temps_bladder_full_moyen = int(float(sum(temps_bladder_full)) / dive_nb) print "Temps moyen (s): " + str(temps_bladder_full_moyen) print "Temps moyen (h:min:s): 00:" + str(temps_bladder_full_moyen/60) + ":" + str(temps_bladder_full_moyen % 60) # Consommation de la pompe pendant le bladder full print "" print "Consommation de la pompe pendant le bladder full (amperes):" amp_val_list = list() for dive in mdives: if dive.is_complete_dive: start_filling_date = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)[0][1] bladder_full_date = utils.find_timestampedUTC_values("external bladder full", dive.log_content)[0][1] bladder_full_power = utils.find_timestampedUTC_values("battery.+", dive.log_content) max_pwr = "Aucune mesure dans " + dive.log_name max_amp = 0 for bfp in bladder_full_power: bfp_date = bfp[1] if bladder_full_date > bfp_date > start_filling_date: amp_val = 
int(re.findall("(\d+)uA", bfp[0])[0]) if amp_val > max_amp: # On cherche la valeur la plus elevee de la plongee max_amp = amp_val max_pwr = str(bfp_date) + ": " + str(round(float(amp_val) / 1000000., 2)) + "A" # Pour chaque plongee on affiche la valeur de courant max et on enrigstre sa valeur dans une liste print max_pwr amp_val_list += [max_amp] print "Consommation moyenne: " + str(round(float(sum(amp_val_list)) / len(amp_val_list) / 1000000., 2)) + "A" # Temps de bypass print "" print "Temps de bypass (s):" temps_bypass = [] nb_ouverture_secondaire_bypass = [] for dive in mdives: if dive.is_complete_dive: bypass_all_str = re.findall("BYPASS,\d+\]opening (\d+)", dive.log_content) if len(bypass_all_str) == 0: break bypass_first = int(bypass_all_str[0]) bypass_second = [int(x) for x in bypass_all_str[1:]] temps_bypass += [bypass_first + sum(bypass_second)] nb_ouverture_secondaire_bypass += [len(bypass_second)] for tb in temps_bypass: print tb / 1000 print "Nombre d'ouvreture secondaires: " + str(nb_ouverture_secondaire_bypass) # print "Temps total (s): " + str(sum(temps_bypass)/1000) temps_bypass_moyen = int(float(sum(temps_bypass)) / dive_nb)/1000 print "Temps moyen (s): " + str(temps_bypass_moyen) # Rapport (temps pour le bladder full) / (temps de bypass) print "" print "Rapport (temps pour le bladder full) / (temps de bypass):" print str(round(float(temps_bladder_full_moyen) / float(temps_bypass_moyen), 1)) # Temps de pompe en plongee print "" print "Temps de pompe en plongee (s):" temps_pompe = [] for dive in mdives: if dive.is_complete_dive: start_filling_date = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)[0][1] temps_pompe_timestamp_str = utils.find_timestampedUTC_values("PUMP ,\d+\]during (\d+)", dive.log_content) liste_activation_pompe = [int(tp[0]) for tp in temps_pompe_timestamp_str if tp[1] < start_filling_date] temps_total_pompe_par_plongee = sum(liste_activation_pompe) temps_pompe += [temps_total_pompe_par_plongee] # 
print "Temps total (s): " + str(sum(temps_pompe)/1000) for tp in temps_pompe: print round(float(tp) / 1000, 3) temps_pompe_moyen = int(float(sum(temps_pompe)) / dive_nb)/1000 print "Temps moyen (s): " + str(temps_pompe_moyen) # Temps de valve print "" print "Temps de valve (s):" temps_valve = [] for dive in mdives: if dive.is_complete_dive: temps_valve_str = re.findall("VALVE ,\d+\]opening for (\d+)", dive.log_content) liste_activation_valve = [int(tv) for tv in temps_valve_str] temps_total_valve_par_plongee = sum(liste_activation_valve) temps_valve += [temps_total_valve_par_plongee] # print "Temps total (ms): " + str(sum(temps_valve)) for tv in temps_valve: print round(float(tv) / 1000, 3) temps_valve_moyen = float(sum(temps_valve)) / dive_nb / 1000 print "Temps moyen (s): " + str(round(temps_valve_moyen, 3)) # Rapport (temps pour le bladder full) / (temps de bypass) print "" print "Rapport (temps de pompe en plongee) / (temps de valve):" print str(round(float(temps_pompe_moyen) / float(temps_valve_moyen), 1)) # Clean directories for f in glob.glob(mfloat_path + "/" + mfloat + "*"): os.remove(f) for f in glob.glob(mfloat_path + "/" + mfloat_nb + "_*.LOG"): os.remove(f) for f in glob.glob(mfloat_path + "/" + mfloat_nb + "_*.MER"): os.remove(f)
def main():
    """Process every MERMAID float found on the server (automaid pipeline).

    For each float: stage its raw files, parse .MER events and .LOG dives,
    stitch fragmented logs into complete dives, correct clock drift,
    interpolate station locations, and write all requested data products.
    """
    # Run from the "scripts" directory.
    os.chdir(os.path.join(automaid_path, "scripts", ""))

    # Create processed directory if it doesn't exist.
    if not os.path.exists(processed_path):
        os.mkdir(processed_path)

    # Search MERMAID floats via their .vit files.
    vitfile_path = os.path.join(server_path, "*.vit")
    mfloats = [p.split("/")[-1][:-4] for p in glob.glob(vitfile_path)]

    # Initialize empty dict to hold the instance of every last complete
    # dive for every MERMAID.
    lastdive = dict()

    for mfloat in sorted(mfloats):
        print("Processing {:s} .LOG & .MER files...".format(mfloat))

        # Per-float working directory and float number.
        mfloat_path = os.path.join(processed_path, mfloat, "")
        mfloat_nb = re.findall("(\d+)$", mfloat)[0]

        # Start fresh when redoing; (re)create the directory either way.
        if redo and os.path.exists(mfloat_path):
            shutil.rmtree(mfloat_path)
        if not os.path.exists(mfloat_path):
            os.mkdir(mfloat_path)

        # Remove leftovers from a previously failed run.
        for f in glob.glob(mfloat_path + "*.*"):
            os.remove(f)

        # Stage raw files: numbered fragments, .LOG, .MER, plus the
        # float's .cmd/.out/.vit files.
        files_to_copy = list()
        extensions = ["000", "001", "002", "003", "004", "005", "LOG", "MER"]
        for extension in extensions:
            files_to_copy += glob.glob(os.path.join(server_path, mfloat_nb + "*." + extension))
        files_to_copy += glob.glob(os.path.join(server_path, mfloat + "*"))
        for f in files_to_copy:
            shutil.copy(f, mfloat_path)

        # Collect all .MER events (next we correlate their environments
        # to .LOG files).
        print(" ...compiling a list of events from {:s} .MER files (GPS & seismic data)..." \
              .format(mfloat))
        mevents = events.Events(mfloat_path)

        # Time range of analysis (generally; birth to death of a MERMAID).
        if mfloat in filterDate.keys():
            begin = filterDate[mfloat][0]
            end = filterDate[mfloat][1]
        else:
            begin = datetime.datetime(1000, 1, 1)
            end = datetime.datetime(3000, 1, 1)

        # Collect all .LOG files in order (generally 1 .LOG == 1 dive).
        # .LOG files can fragment due to ERR, EMERGENCY, REBOOT etc.; a
        # single-.LOG complete dive runs '[DIVING]' -> '[SURFIN]', and
        # fragmented .LOGs in-between complete dives are concatenated later.
        print(" ...matching those events to {:s} .LOG ('dive') files (GPS & dive metadata)..." \
              .format(mfloat))
        dive_logs = dives.get_dives(mfloat_path, mevents, begin, end)

        # Sanity: dive logs sorted by start date.
        if dive_logs != sorted(dive_logs, key=lambda x: x.start_date):
            raise ValueError('`dive_logs` (single .LOG files) improperly sorted')

        # Sanity: event sublists sorted by corrected start time.
        events_list = [e for d in dive_logs for e in d.events]
        if events_list != sorted(events_list, key=lambda x: x.corrected_starttime):
            raise ValueError('`dive_logs[*].events` improperly sorted')

        fragmented_dive = list()
        complete_dives = list()
        for i, dive_log in enumerate(dive_logs):
            # Attach reference to previous / next .LOG and .MER files.
            prev_dive = dive_logs[i-1] if i > 0 else None
            next_dive = dive_logs[i+1] if i < len(dive_logs)-1 else None
            dive_log.attach_prev_next_dive(prev_dive, next_dive)

            if not os.path.exists(dive_log.processed_path):
                os.mkdir(dive_log.processed_path)

            # Reformat .LOG and write .MER environment per dive directory.
            dive_log.generate_datetime_log()
            dive_log.generate_mermaid_environment_file()
            dive_log.generate_dive_plotly()  # <-- timestamps not corrected for clockdrift

            # GPS list is None outside of requested begin/end dates, within
            # which it defaults to an empty list if it is truly empty.
            if dive_log.gps_list is None:
                continue

            if dive_log.is_complete_dive:
                # A single .LOG defining '[DIVING]' --> '[SURFIN]'.
                complete_dives.append(dives.Complete_Dive([dive_log]))
            else:
                fragmented_dive.append(dive_log)
                # If the next .LOG is a complete dive, this is the last
                # filler .LOG in-between complete dives: group the
                # fragments as one "complete dive" (it may not contain an
                # actual dive, but its GPS can interpolate neighbours).
                if i < len(dive_logs)-1 and dive_logs[i+1].is_complete_dive:
                    complete_dives.append(dives.Complete_Dive(fragmented_dive))
                    fragmented_dive = list()

        # Sanity: complete dives sorted by both start and end date.
        if complete_dives != sorted(complete_dives, key=lambda x: x.start_date) \
                or complete_dives != sorted(complete_dives, key=lambda x: x.end_date):
            raise ValueError('`complete_dives` (potentially concatenated .LOG files) improperly sorted')

        # Plot vital data.
        kml.generate(mfloat_path, mfloat, complete_dives)
        vitals.plot_battery_voltage(mfloat_path, mfloat + ".vit", begin, end)
        vitals.plot_internal_pressure(mfloat_path, mfloat + ".vit", begin, end)
        vitals.plot_pressure_offset(mfloat_path, mfloat + ".vit", begin, end)
        if len(dive_logs) > 1:
            vitals.plot_corrected_pressure_offset(mfloat_path, complete_dives, begin, end)

        # Use completed (stitched together) dives to generate event
        # metadata and output data files.
        for i, complete_dive in enumerate(complete_dives):
            # Extend this dive's GPS list using its neighbours.
            prev_dive = complete_dives[i-1] if i > 0 else None
            next_dive = complete_dives[i+1] if i < len(complete_dives)-1 else None
            complete_dive.set_incl_prev_next_dive_gps(prev_dive, next_dive)

            # Validate GPS, correct clocks, name outputs, locate station.
            complete_dive.validate_gps(min_gps_fix, max_gps_time)
            complete_dive.correct_clockdrifts()
            complete_dive.set_processed_file_names()
            complete_dive.compute_station_locations(mixed_layer_depth_m, preliminary_location_ok)
            complete_dive.attach_events_metadata()

            # Write requested output files.
            if not os.path.exists(complete_dive.processed_path):
                os.mkdir(complete_dive.processed_path)
            if events_png:
                complete_dive.generate_events_png()
            if events_plotly:
                complete_dive.generate_events_plotly()
            if events_sac:
                complete_dive.generate_events_sac()
            if events_mseed:
                complete_dive.generate_events_mseed()

        # NB, the event lists of `dive_logs` and `complete_dives` may differ:
        # the latter is winnowed to unique events, so the existence of
        # `event.station_loc` marks which events were actually retained.
        gps.write_gps(dive_logs, creation_datestr, processed_path, mfloat_path)
        gps.write_gps_interpolation_txt(complete_dives, creation_datestr, processed_path, mfloat_path)
        dives.write_dives_txt(dive_logs, creation_datestr, processed_path, mfloat_path)
        dives.write_complete_dives_txt(complete_dives, creation_datestr, processed_path, mfloat_path, mfloat)
        events.write_traces_txt(dive_logs, creation_datestr, processed_path, mfloat_path)
        events.write_loc_txt(dive_logs, creation_datestr, processed_path, mfloat_path)
        events.write_metadata(complete_dives, creation_datestr, processed_path, mfloat_path)

        # Write GeoCSV files.
        geocsv_meta = geocsv.GeoCSV(complete_dives, creation_datestr)
        geocsv_meta.write(os.path.join(processed_path, mfloat_path, 'geo.csv'))

        # Clean directories.
        for f in glob.glob(mfloat_path + "/" + mfloat_nb + "_*.LOG"):
            os.remove(f)
        for f in glob.glob(mfloat_path + "/" + mfloat_nb + "_*.MER"):
            os.remove(f)

        # Add dives to growing dict.
        dives_dict[mfloat] = dive_logs

    # Done looping through all dives for each float.
    # Print corrected external pressures measured on the final dive, and
    # warn when any approach the 300 mbar adjustment limit.
    vitals.write_corrected_pressure_offset(dives_dict, processed_path)
def __init__(self, parent=None, name=None, fl=0):
    """Build the printer main window: properties box, printer frame,
    Windows-sharing controls and their signal connections."""
    QMainWindow.__init__(self, parent, name, fl)
    self.statusBar()
    if not name:
        self.setName("MainWindow")

    # Essential properties.
    self.Events = events.Events()
    self.printerName = ""
    self.activePrinter = QPushButton(None)
    self.setCentralWidget(QWidget(self, "qt_central_widget"))

    # Properties box: name/status/location/URI labels and their values.
    self.propertiesbox = QGroupBox(self.centralWidget(), "propertiesbox")
    self.propertiesbox.setGeometry(QRect(10, 280, 590, 130))
    self.namel = QLabel(self.propertiesbox, "namel")
    self.namel.setGeometry(QRect(10, 20, 110, 21))
    self.statusl = QLabel(self.propertiesbox, "statusl")
    self.statusl.setGeometry(QRect(10, 42, 110, 21))
    self.locationl = QLabel(self.propertiesbox, "locationl")
    self.locationl.setGeometry(QRect(10, 64, 110, 21))
    self.uril = QLabel(self.propertiesbox, "uril")
    self.uril.setGeometry(QRect(10, 86, 110, 21))
    self.namel2 = QLabel(self.propertiesbox, "namel2")
    self.namel2.setGeometry(QRect(130, 20, 440, 21))
    self.statusl2 = QLabel(self.propertiesbox, "statusl2")
    self.statusl2.setGeometry(QRect(130, 42, 440, 21))
    self.locationl2 = QLabel(self.propertiesbox, "locationl2")
    self.locationl2.setGeometry(QRect(130, 64, 440, 21))
    self.uril2 = QLabel(self.propertiesbox, "uril2")
    self.uril2.setGeometry(QRect(130, 86, 440, 21))

    # Embedded printer list widget.
    self.printerframe = printerwidget2.PrinterWidget2(self.centralWidget(), "printerframe", self)
    self.printerframe.setGeometry(QRect(10, 10, 390, 260))
    self.printerframe.setFrameShape(QFrame.StyledPanel)
    self.printerframe.setFrameShadow(QFrame.Raised)

    # Action buttons.
    self.helpbutton = KPushButton(self.centralWidget(), "helpbutton")
    #self.helpbutton = QPushButton(self.centralWidget(),"helpbutton")
    self.helpbutton.setGeometry(QRect(540, 250, 60, 21))
    self.helpbutton.setFocusPolicy(QPushButton.NoFocus)
    self.advanceButton = QPushButton(self.centralWidget(), "advanceButton")
    self.advanceButton.setGeometry(QRect(410, 240, 110, 30))
    self.advanceButton.setFocusPolicy(QPushButton.NoFocus)
    self.sharebutton = QPushButton(self.centralWidget(), "sharebutton")
    self.sharebutton.setGeometry(QRect(430, 15, 150, 40))
    self.sharebutton.setFocusPolicy(QPushButton.NoFocus)

    # Windows-sharing group box.
    self.windowsbox = QGroupBox(self.centralWidget(), "windowsbox")
    self.windowsbox.setGeometry(QRect(410, 60, 190, 170))
    self.iplabel = QLabel(self.windowsbox, "iplabel")
    self.iplabel.setGeometry(QRect(10, 110, 150, 21))
    self.customedit = QLineEdit(self.windowsbox, "customedit")
    self.customedit.setGeometry(QRect(10, 131, 150, 30))
    self.customedit.setEnabled(False)
    self.allowbox = QCheckBox(self.windowsbox, "allowbox")
    self.allowbox.setGeometry(QRect(10, 26, 170, 20))
    self.allowbox.setFocusPolicy(QCheckBox.NoFocus)
    self.allowcombo = QComboBox(0, self.windowsbox, "allowcombo")
    self.allowcombo.setGeometry(QRect(10, 70, 151, 30))
    self.allowcombo.setFocusPolicy(QComboBox.NoFocus)
    self.allowlabel = QLabel(self.windowsbox, "allowlabel")
    self.allowlabel.setGeometry(QRect(10, 48, 170, 21))

    self.languageChange()
    self.insertItems()
    self.resize(QSize(616, 442).expandedTo(self.minimumSizeHint()))
    self.clearWState(Qt.WState_Polished)

    # Connection adjustments.
    self.connect(self.sharebutton, SIGNAL("clicked()"), self.sharefunction)
    self.connect(self.sharebutton, SIGNAL("clicked()"), self.replace)
    self.connect(self.allowcombo, SIGNAL("activated(const QString&)"), self.adjustedit)
    self.connect(self.advanceButton, SIGNAL("clicked()"), self.advanceFunction)
    self.connect(self.helpbutton, SIGNAL("clicked()"), self.helpDialog)
def __init__(self):
    """Initialise the events helper and the counter ceiling."""
    self.event = events.Events()
    # Upper bound for the counter; semantics defined by the methods that
    # read it (not visible here).
    self.max_counter = 2
def __init__(self, actor, node):
    """Bind the Calvin runtime subsystems (events, io, network) to an actor."""
    super(CalvinSys, self).__init__()
    self._node = node
    # Each subsystem is scoped to this actor/node pair.
    self.events = calvin_events.Events(actor, node)
    self.io = calvin_io.Io(actor, node)
    self.network = calvin_network.Network(actor, node)
class StateReader(InteropService):
    """Interop service exposing blockchain state to executing smart contracts.

    Registers the "Neo.*" syscall namespace (plus the legacy "AntShares.*"
    aliases) and implements each syscall by popping arguments from the
    execution engine's evaluation stack and pushing the result back.
    Every handler returns True on success and False to fault the engine.
    """

    # Event hooks fired from the Runtime syscalls so external observers
    # (loggers, notification pipelines) can react to contract output.
    NotifyEvent = events.Events()
    LogEvent = events.Events()

    # Lazily-created process-wide singleton; see Instance().
    __Instance = None

    # Cache of script hashes allowed to witness the current container.
    # NOTE(review): never invalidated once populated — presumably one
    # StateReader lives per verification run; confirm before reusing.
    _hashes_for_verifying = None

    @staticmethod
    def Instance():
        """Return the singleton StateReader, creating it on first use."""
        if StateReader.__Instance is None:
            StateReader.__Instance = StateReader()
        return StateReader.__Instance

    def __init__(self):
        super(StateReader, self).__init__()

        # --- Runtime ---
        self.Register("Neo.Runtime.GetTrigger", self.Runtime_GetTrigger)
        self.Register("Neo.Runtime.CheckWitness", self.Runtime_CheckWitness)
        self.Register("Neo.Runtime.Notify", self.Runtime_Notify)
        self.Register("Neo.Runtime.Log", self.Runtime_Log)
        # --- Blockchain ---
        self.Register("Neo.Blockchain.GetHeight", self.Blockchain_GetHeight)
        self.Register("Neo.Blockchain.GetHeader", self.Blockchain_GetHeader)
        self.Register("Neo.Blockchain.GetBlock", self.Blockchain_GetBlock)
        self.Register("Neo.Blockchain.GetTransaction", self.Blockchain_GetTransaction)
        self.Register("Neo.Blockchain.GetAccount", self.Blockchain_GetAccount)
        self.Register("Neo.Blockchain.GetValidators", self.Blockchain_GetValidators)
        self.Register("Neo.Blockchain.GetAsset", self.Blockchain_GetAsset)
        self.Register("Neo.Blockchain.GetContract", self.Blockchain_GetContract)
        # --- Header ---
        self.Register("Neo.Header.GetHash", self.Header_GetHash)
        self.Register("Neo.Header.GetVersion", self.Header_GetVersion)
        self.Register("Neo.Header.GetPrevHash", self.Header_GetPrevHash)
        self.Register("Neo.Header.GetMerkleRoot", self.Header_GetMerkleRoot)
        self.Register("Neo.Header.GetTimestamp", self.Header_GetTimestamp)
        self.Register("Neo.Header.GetConsensusData", self.Header_GetConsensusData)
        self.Register("Neo.Header.GetNextConsensus", self.Header_GetNextConsensus)
        # --- Block ---
        self.Register("Neo.Block.GetTransactionCount", self.Block_GetTransactionCount)
        self.Register("Neo.Block.GetTransactions", self.Block_GetTransactions)
        self.Register("Neo.Block.GetTransaction", self.Block_GetTransaction)
        # --- Transaction ---
        self.Register("Neo.Transaction.GetHash", self.Transaction_GetHash)
        self.Register("Neo.Transaction.GetType", self.Transaction_GetType)
        self.Register("Neo.Transaction.GetAttributes", self.Transaction_GetAttributes)
        self.Register("Neo.Transaction.GetInputs", self.Transaction_GetInputs)
        self.Register("Neo.Transaction.GetOutputs", self.Transaction_GetOutputs)
        self.Register("Neo.Transaction.GetReferences", self.Transaction_GetReferences)
        # --- Attribute / Input / Output ---
        self.Register("Neo.Attribute.GetData", self.Attribute_GetData)
        self.Register("Neo.Attribute.GetUsage", self.Attribute_GetUsage)
        self.Register("Neo.Input.GetHash", self.Input_GetHash)
        self.Register("Neo.Input.GetIndex", self.Input_GetIndex)
        self.Register("Neo.Output.GetAssetId", self.Output_GetAssetId)
        self.Register("Neo.Output.GetValue", self.Output_GetValue)
        self.Register("Neo.Output.GetScriptHash", self.Output_GetScriptHash)
        # --- Account / Asset / Contract ---
        self.Register("Neo.Account.GetVotes", self.Account_GetVotes)
        self.Register("Neo.Account.GetBalance", self.Account_GetBalance)
        self.Register("Neo.Account.GetScriptHash", self.Account_GetScriptHash)
        self.Register("Neo.Asset.GetAssetId", self.Asset_GetAssetId)
        self.Register("Neo.Asset.GetAssetType", self.Asset_GetAssetType)
        self.Register("Neo.Asset.GetAmount", self.Asset_GetAmount)
        self.Register("Neo.Asset.GetAvailable", self.Asset_GetAvailable)
        self.Register("Neo.Asset.GetPrecision", self.Asset_GetPrecision)
        self.Register("Neo.Asset.GetOwner", self.Asset_GetOwner)
        self.Register("Neo.Asset.GetAdmin", self.Asset_GetAdmin)
        self.Register("Neo.Asset.GetIssuer", self.Asset_GetIssuer)
        self.Register("Neo.Contract.GetScript", self.Contract_GetScript)
        # --- Storage ---
        self.Register("Neo.Storage.GetContext", self.Storage_GetContext)
        self.Register("Neo.Storage.Get", self.Storage_Get)

        # OLD API: legacy "AntShares.*" aliases map to the same handlers.
        self.Register("AntShares.Runtime.GetTrigger", self.Runtime_GetTrigger)
        self.Register("AntShares.Runtime.CheckWitness", self.Runtime_CheckWitness)
        self.Register("AntShares.Runtime.Notify", self.Runtime_Notify)
        self.Register("AntShares.Runtime.Log", self.Runtime_Log)
        self.Register("AntShares.Blockchain.GetHeight", self.Blockchain_GetHeight)
        self.Register("AntShares.Blockchain.GetHeader", self.Blockchain_GetHeader)
        self.Register("AntShares.Blockchain.GetBlock", self.Blockchain_GetBlock)
        self.Register("AntShares.Blockchain.GetTransaction", self.Blockchain_GetTransaction)
        self.Register("AntShares.Blockchain.GetAccount", self.Blockchain_GetAccount)
        self.Register("AntShares.Blockchain.GetValidators", self.Blockchain_GetValidators)
        self.Register("AntShares.Blockchain.GetAsset", self.Blockchain_GetAsset)
        self.Register("AntShares.Blockchain.GetContract", self.Blockchain_GetContract)
        self.Register("AntShares.Header.GetHash", self.Header_GetHash)
        self.Register("AntShares.Header.GetVersion", self.Header_GetVersion)
        self.Register("AntShares.Header.GetPrevHash", self.Header_GetPrevHash)
        self.Register("AntShares.Header.GetMerkleRoot", self.Header_GetMerkleRoot)
        self.Register("AntShares.Header.GetTimestamp", self.Header_GetTimestamp)
        self.Register("AntShares.Header.GetConsensusData", self.Header_GetConsensusData)
        self.Register("AntShares.Header.GetNextConsensus", self.Header_GetNextConsensus)
        self.Register("AntShares.Block.GetTransactionCount", self.Block_GetTransactionCount)
        self.Register("AntShares.Block.GetTransactions", self.Block_GetTransactions)
        self.Register("AntShares.Block.GetTransaction", self.Block_GetTransaction)
        self.Register("AntShares.Transaction.GetHash", self.Transaction_GetHash)
        self.Register("AntShares.Transaction.GetType", self.Transaction_GetType)
        self.Register("AntShares.Transaction.GetAttributes", self.Transaction_GetAttributes)
        self.Register("AntShares.Transaction.GetInputs", self.Transaction_GetInputs)
        # "GetOutpus" typo is preserved deliberately: it is the historical
        # legacy syscall name old contracts were compiled against.
        self.Register("AntShares.Transaction.GetOutpus", self.Transaction_GetOutputs)
        self.Register("AntShares.Transaction.GetReferences", self.Transaction_GetReferences)
        self.Register("AntShares.Attribute.GetData", self.Attribute_GetData)
        self.Register("AntShares.Attribute.GetUsage", self.Attribute_GetUsage)
        self.Register("AntShares.Input.GetHash", self.Input_GetHash)
        self.Register("AntShares.Input.GetIndex", self.Input_GetIndex)
        self.Register("AntShares.Output.GetAssetId", self.Output_GetAssetId)
        self.Register("AntShares.Output.GetValue", self.Output_GetValue)
        self.Register("AntShares.Output.GetScriptHash", self.Output_GetScriptHash)
        self.Register("AntShares.Account.GetVotes", self.Account_GetVotes)
        self.Register("AntShares.Account.GetBalance", self.Account_GetBalance)
        self.Register("AntShares.Account.GetScriptHash", self.Account_GetScriptHash)
        self.Register("AntShares.Asset.GetAssetId", self.Asset_GetAssetId)
        self.Register("AntShares.Asset.GetAssetType", self.Asset_GetAssetType)
        self.Register("AntShares.Asset.GetAmount", self.Asset_GetAmount)
        self.Register("AntShares.Asset.GetAvailable", self.Asset_GetAvailable)
        self.Register("AntShares.Asset.GetPrecision", self.Asset_GetPrecision)
        self.Register("AntShares.Asset.GetOwner", self.Asset_GetOwner)
        self.Register("AntShares.Asset.GetAdmin", self.Asset_GetAdmin)
        self.Register("AntShares.Asset.GetIssuer", self.Asset_GetIssuer)
        self.Register("AntShares.Contract.GetScript", self.Contract_GetScript)
        self.Register("AntShares.Storage.GetContext", self.Storage_GetContext)
        self.Register("AntShares.Storage.Get", self.Storage_Get)

    def Runtime_GetTrigger(self, engine):
        """Push the engine's trigger type onto the evaluation stack."""
        engine.EvaluationStack.PushT(engine.Trigger)
        return True

    def CheckWitnessHash(self, engine, hash):
        """Return True if `hash` is among the container's verifying hashes."""
        # Lazily collect (and cache) the verifying hashes from the container.
        if self._hashes_for_verifying is None:
            container = engine.ScriptContainer
            self._hashes_for_verifying = container.GetScriptHashesForVerifying()
        return True if hash in self._hashes_for_verifying else False

    def CheckWitnessPubkey(self, engine, pubkey):
        """Check a public key by hashing its signature redeem script."""
        scripthash = Contract.CreateSignatureRedeemScript(pubkey)
        return self.CheckWitnessHash(engine, Crypto.ToScriptHash(scripthash))

    def Runtime_CheckWitness(self, engine):
        """Pop a script hash or public key and push whether it witnesses."""
        hashOrPubkey = engine.EvaluationStack.Pop().GetByteArray()
        # A 40- or 66-char value arrived hex-encoded; normalize to raw bytes.
        if len(hashOrPubkey) == 66 or len(hashOrPubkey) == 40:
            hashOrPubkey = binascii.unhexlify(hashOrPubkey)
        result = False
        if len(hashOrPubkey) == 20:
            # 20 raw bytes: a UInt160 script hash.
            result = self.CheckWitnessHash(engine, UInt160(data=hashOrPubkey))
        elif len(hashOrPubkey) == 33:
            # 33 raw bytes: a compressed secp256r1 public key.
            point = ECDSA.decode_secp256r1(hashOrPubkey, unhex=False).G
            result = self.CheckWitnessPubkey(engine, point)
        else:
            result = False
        engine.EvaluationStack.PushT(result)
        return True

    def Runtime_Notify(self, engine):
        """Fire NotifyEvent with the popped state and the calling script."""
        state = engine.EvaluationStack.Pop()
        args = NotifyEventArgs(
            engine.ScriptContainer,
            UInt160(engine.CurrentContext.ScriptHash()),
            state
        )
        self.NotifyEvent.on_change(args)
        return True

    def Runtime_Log(self, engine):
        """Pop a message and print it.

        NOTE(review): LogEvent is declared on the class but not fired
        here; confirm whether subscribers expect it.
        """
        message = engine.EvaluationStack.Pop().GetByteArray()
        print("[neo.SmartContract.StateReader] -> RUNTIME.Log: %s " % message)
        return True

    def Blockchain_GetHeight(self, engine):
        """Push the chain height, or 0 when no blockchain is loaded."""
        if Blockchain.Default() is None:
            engine.EvaluationStack.PushT(0)
        else:
            engine.EvaluationStack.PushT(Blockchain.Default().Height)
        return True

    def Blockchain_GetHeader(self, engine):
        """Pop a height (<=5 bytes) or 32-byte hash and push the header."""
        data = engine.EvaluationStack.Pop().GetByteArray()
        header = None
        if len(data) <= 5:
            height = BigInteger.FromBytes(data)
            if Blockchain.Default() is not None:
                header = Blockchain.Default().GetHeaderBy(height_or_hash=height)
            elif height == 0:
                # No chain loaded: only the genesis header is available.
                header = Blockchain.GenesisBlock().Header
        elif len(data) == 32:
            hash = UInt256(data=data)
            if Blockchain.Default() is not None:
                header = Blockchain.Default().GetHeaderBy(height_or_hash=hash)
            elif hash == Blockchain.GenesisBlock().Hash:
                header = Blockchain.GenesisBlock().Header
        engine.EvaluationStack.PushT(StackItem.FromInterface(header))
        return True

    def Blockchain_GetBlock(self, engine):
        """Pop a height (<=5 bytes) or 32-byte hash and push the block."""
        data = engine.EvaluationStack.Pop()
        if data:
            data = data.GetByteArray()
        else:
            return False
        block = None
        if len(data) <= 5:
            height = BigInteger.FromBytes(data)
            if Blockchain.Default() is not None:
                block = Blockchain.Default().GetBlockByHeight(height)
            elif height == 0:
                block = Blockchain.GenesisBlock()
        elif len(data) == 32:
            hash = UInt256(data=data).ToBytes()
            if Blockchain.Default() is not None:
                block = Blockchain.Default().GetBlockByHash(hash=hash)
            elif hash == Blockchain.GenesisBlock().Hash:
                # NOTE(review): this pushes the genesis *Header* where a
                # Block is expected elsewhere in this handler — confirm.
                block = Blockchain.GenesisBlock().Header
        engine.EvaluationStack.PushT(StackItem.FromInterface(block))
        return True

    def Blockchain_GetTransaction(self, engine):
        """Pop a tx hash and push the transaction (None when unknown)."""
        data = engine.EvaluationStack.Pop().GetByteArray()
        tx = None
        if Blockchain.Default() is not None:
            tx, height = Blockchain.Default().GetTransaction(UInt256(data=data))
        engine.EvaluationStack.PushT(StackItem.FromInterface(tx))
        return True

    def Blockchain_GetAccount(self, engine):
        """Pop a script hash and push its account state; fault if absent."""
        hash = UInt160(data=engine.EvaluationStack.Pop().GetByteArray())
        address = Crypto.ToAddress(hash).encode('utf-8')
        # NOTE(review): self._accounts is not defined in this class —
        # presumably provided by a subclass; confirm.
        account = self._accounts.TryGet(address)
        if account:
            engine.EvaluationStack.PushT(StackItem.FromInterface(account))
            return True
        return False

    def Blockchain_GetValidators(self, engine):
        """Push the validator public keys, then fault as unimplemented.

        NOTE(review): the raise makes the preceding push moot; this
        syscall is effectively still unimplemented upstream.
        """
        validators = Blockchain.Default().GetValidators()
        items = [StackItem(validator.encode_point(compressed=True)) for validator in validators]
        engine.EvaluationStack.PushT(items)
        raise NotImplementedError()

    def Blockchain_GetAsset(self, engine):
        """Pop an asset id and push the asset state (None when unknown)."""
        data = engine.EvaluationStack.Pop().GetByteArray()
        asset = None
        if Blockchain.Default() is not None:
            asset = Blockchain.Default().GetAssetState(UInt256(data=data))
        engine.EvaluationStack.PushT(StackItem.FromInterface(asset))
        return True

    def Blockchain_GetContract(self, engine):
        """Pop a script hash and push the contract state (None if unknown)."""
        hash = UInt160(data=engine.EvaluationStack.Pop().GetByteArray())
        contract = None
        if Blockchain.Default() is not None:
            contract = Blockchain.Default().GetContract(hash)
        engine.EvaluationStack.PushT(StackItem.FromInterface(contract))
        return True

    def Header_GetHash(self, engine):
        """Pop a header and push its hash bytes."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.Hash.ToArray())
        return True

    def Header_GetVersion(self, engine):
        """Pop a header and push its version."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.Version)
        return True

    def Header_GetPrevHash(self, engine):
        """Pop a header and push its previous-block hash bytes."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.PrevHash.ToArray())
        return True

    def Header_GetMerkleRoot(self, engine):
        """Pop a header and push its merkle root bytes."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.MerkleRoot.ToArray())
        return True

    def Header_GetTimestamp(self, engine):
        """Pop a header and push its timestamp."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.Timestamp)
        return True

    def Header_GetConsensusData(self, engine):
        """Pop a header and push its consensus data."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.ConsensusData)
        return True

    def Header_GetNextConsensus(self, engine):
        """Pop a header and push its next-consensus address bytes."""
        header = engine.EvaluationStack.Pop().GetInterface('neo.Core.BlockBase.BlockBase')
        if header is None:
            return False
        engine.EvaluationStack.PushT(header.NextConsensus.ToArray())
        return True

    def Block_GetTransactionCount(self, engine):
        """Pop a block and push its transaction count."""
        block = engine.EvaluationStack.Pop().GetInterface('neo.Core.Block.Block')
        if block is None:
            return False
        engine.EvaluationStack.PushT(len(block.Transactions))
        return True

    def Block_GetTransactions(self, engine):
        """Pop a block and push its full transactions as stack items."""
        block = engine.EvaluationStack.Pop().GetInterface('neo.Core.Block.Block')
        if block is None:
            return False
        txlist = [StackItem.FromInterface(tx) for tx in block.FullTransactions]
        engine.EvaluationStack.PushT(txlist)
        return True

    def Block_GetTransaction(self, engine):
        """Pop a block and an index and push the index-th transaction."""
        block = engine.EvaluationStack.Pop().GetInterface('neo.Core.Block.Block')
        index = engine.EvaluationStack.Pop().GetBigInteger()
        # FIX: the original check used `index > len(...)`, which let
        # index == len through and raised IndexError below; use >=.
        if block is None or index < 0 or index >= len(block.Transactions):
            return False
        tx = StackItem.FromInterface(block.FullTransactions[index])
        engine.EvaluationStack.PushT(tx)
        return True

    def Transaction_GetHash(self, engine):
        """Pop a transaction and push its hash bytes."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        engine.EvaluationStack.PushT(tx.Hash.ToArray())
        return True

    def Transaction_GetType(self, engine):
        """Pop a transaction and push its type."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        engine.EvaluationStack.PushT(tx.Type)
        return True

    def Transaction_GetAttributes(self, engine):
        """Pop a transaction and push its attributes as stack items."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        attr = [StackItem.FromInterface(attr) for attr in tx.Attributes]
        engine.EvaluationStack.PushT(attr)
        return True

    def Transaction_GetInputs(self, engine):
        """Pop a transaction and push its inputs as stack items."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        # Renamed loop variable from `input` to avoid shadowing the builtin.
        inputs = [StackItem.FromInterface(tx_input) for tx_input in tx.inputs]
        engine.EvaluationStack.PushT(inputs)
        return True

    def Transaction_GetOutputs(self, engine):
        """Pop a transaction and push its outputs as stack items."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        outputs = []
        for output in tx.outputs:
            stackoutput = StackItem.FromInterface(output)
            outputs.append(stackoutput)
        engine.EvaluationStack.PushT(outputs)
        return True

    def Transaction_GetReferences(self, engine):
        """Pop a transaction and push the outputs its inputs reference."""
        tx = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.Transaction')
        if tx is None:
            return False
        refs = [StackItem.FromInterface(tx.References[tx_input]) for tx_input in tx.inputs]
        engine.EvaluationStack.PushT(refs)
        return True

    def Attribute_GetUsage(self, engine):
        """Pop a transaction attribute and push its usage byte."""
        attr = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.TransactionAttribute.TransactionAttribute')
        if attr is None:
            return False
        engine.EvaluationStack.PushT(attr.Usage)
        return True

    def Attribute_GetData(self, engine):
        """Pop a transaction attribute and push its data."""
        attr = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.TransactionAttribute.TransactionAttribute')
        if attr is None:
            return False
        engine.EvaluationStack.PushT(attr.Data)
        return True

    def Input_GetHash(self, engine):
        """Pop a transaction input and push the referenced tx hash."""
        tx_input = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.TransactionInput')
        if tx_input is None:
            return False
        engine.EvaluationStack.PushT(tx_input.PrevHash.ToArray())
        return True

    def Input_GetIndex(self, engine):
        """Pop a transaction input and push the referenced output index."""
        tx_input = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.TransactionInput')
        if tx_input is None:
            return False
        engine.EvaluationStack.PushT(tx_input.PrevIndex)
        return True

    def Output_GetAssetId(self, engine):
        """Pop a transaction output and push its asset id bytes."""
        output = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.TransactionOutput')
        if output is None:
            return False
        engine.EvaluationStack.PushT(output.AssetId.ToArray())
        return True

    def Output_GetValue(self, engine):
        """Pop a transaction output and push its raw fixed-point value."""
        output = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.TransactionOutput')
        if output is None:
            return False
        engine.EvaluationStack.PushT(output.Value.GetData())
        return True

    def Output_GetScriptHash(self, engine):
        """Pop a transaction output and push its script hash bytes."""
        output = engine.EvaluationStack.Pop().GetInterface('neo.Core.TX.Transaction.TransactionOutput')
        if output is None:
            return False
        engine.EvaluationStack.PushT(output.ScriptHash.ToArray())
        return True

    def Account_GetScriptHash(self, engine):
        """Pop an account state and push its script hash bytes."""
        account = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AccountState.AccountState')
        if account is None:
            return False
        engine.EvaluationStack.PushT(account.ScriptHash.ToArray())
        return True

    def Account_GetVotes(self, engine):
        """Pop an account state and push its votes as encoded points."""
        account = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AccountState.AccountState')
        if account is None:
            return False
        votes = [StackItem.FromInterface(v.EncodePoint(True)) for v in account.Votes]
        engine.EvaluationStack.PushT(votes)
        return True

    def Account_GetBalance(self, engine):
        """Pop an account state and an asset id; push the raw balance."""
        account = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AccountState.AccountState')
        assetId = UInt256(data=engine.EvaluationStack.Pop().GetByteArray())
        if account is None:
            return False
        balance = account.BalanceFor(assetId)
        engine.EvaluationStack.PushT(balance.GetData())
        return True

    def Asset_GetAssetId(self, engine):
        """Pop an asset state and push its asset id bytes."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.AssetId.ToArray())
        return True

    def Asset_GetAssetType(self, engine):
        """Pop an asset state and push its type."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.AssetType)
        return True

    def Asset_GetAmount(self, engine):
        """Pop an asset state and push its raw total amount."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Amount.GetData())
        return True

    def Asset_GetAvailable(self, engine):
        """Pop an asset state and push its raw available amount."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Available.GetData())
        return True

    def Asset_GetPrecision(self, engine):
        """Pop an asset state and push its precision."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Precision)
        return True

    def Asset_GetOwner(self, engine):
        """Pop an asset state and push its owner as an encoded point."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Owner.EncodePoint(True))
        return True

    def Asset_GetAdmin(self, engine):
        """Pop an asset state and push its admin script hash bytes."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Admin.ToArray())
        return True

    def Asset_GetIssuer(self, engine):
        """Pop an asset state and push its issuer script hash bytes."""
        asset = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.AssetState.AssetState')
        if asset is None:
            return False
        engine.EvaluationStack.PushT(asset.Issuer.ToArray())
        return True

    def Contract_GetScript(self, engine):
        """Pop a contract state and push its script bytes."""
        contract = engine.EvaluationStack.Pop().GetInterface('neo.Core.State.ContractState.ContractState')
        if contract is None:
            return False
        engine.EvaluationStack.PushT(contract.Code.Script)
        return True

    def Storage_GetContext(self, engine):
        """Push a StorageContext bound to the currently executing script."""
        hash = UInt160(data=engine.CurrentContext.ScriptHash())
        context = StorageContext(script_hash=hash)
        engine.EvaluationStack.PushT(StackItem.FromInterface(context))
        return True

    def Storage_Get(self, engine):
        """Pop a storage context and key; push the stored value (or empty).

        NOTE(review): CheckStorageContext and self._storages are not
        defined in this class — presumably supplied by a subclass; confirm.
        """
        context = None
        try:
            item = engine.EvaluationStack.Pop()
            context = item.GetInterface('neo.SmartContract.StorageContext.StorageContext')
            shash = context.ScriptHash
        except Exception as e:
            print("could not get storage context %s " % e)
            return False
        if not self.CheckStorageContext(context):
            return False
        key = engine.EvaluationStack.Pop().GetByteArray()
        storage_key = StorageKey(script_hash=context.ScriptHash, key=key)
        item = self._storages.TryGet(storage_key.GetHashCodeBytes())
        # Pretty-print the key/value for the debug log below.
        keystr = key
        valStr = bytearray(0)
        if item is not None:
            valStr = bytearray(item.Value)
        if len(key) == 20:
            # 20-byte keys are script hashes; show them as addresses.
            keystr = Crypto.ToAddress(UInt160(data=key))
            try:
                valStr = int.from_bytes(valStr, 'little')
            except Exception as e:
                print("couldnt convert %s to number: %s " % (valStr, e))
        if item is not None:
            print("[Neo.Storage.Get] [Script:%s] [%s] -> %s " % (context.ScriptHash, keystr, valStr))
            engine.EvaluationStack.PushT(bytearray(item.Value))
        else:
            print("[Neo.Storage.Get] [Script:%s] [%s] -> 0 " % (context.ScriptHash, keystr))
            engine.EvaluationStack.PushT(bytearray(0))
        return True
def __init__(self, path):
    """Wire an event bus, a script loaded from `path`, and a capture unit.

    NOTE(review): the Script is bound only to local `s` and the event bus
    only to local `e`; presumably Capture (and Script's own registrations
    on `e`) keep them alive — confirm they are not garbage-collected.
    """
    e = events.Events()
    s = Script(e, path)
    self._capture = capture.Capture(e)