def test_datetime():
    """Test loading the :class:`datetime.datetime` from a string"""
    # Check the type
    o = Observation("Scratch", "Starbucks", "2016-01-11 12:04:02")
    assert isinstance(o.time, datetime.datetime)

    # Bad time
    with pytest.raises(ValueError):
        Observation("Scratch", "Starbucks", "2016-01-11 12:00:")

    # Bad date
    with pytest.raises(ValueError):
        Observation("Scratch", "Starbucks", "2016-01- 12:00:00")

    # Good date/time
    o = Observation("Scratch", "Starbucks", "2016-01-11 12:04:02")

    # Check that fields are assigned properly
    assert o.time.day == 11
    assert o.time.month == 1
    assert o.time.year == 2016
    assert o.time.hour == 12
    assert o.time.minute == 4
    assert o.time.second == 2
    assert o.time.microsecond == 0
def test_load_timeline():
    """Test the return values of load_timeline."""
    # Call load_timeline
    time_tuple = load_timeline('test.csv')

    # Build the dictionary that time_tuple[0] should equal
    diction = {"Bob": "shoes", "Jane": "Mits", "Dalton": "One Billion Dollars"}

    # Build the ObservationTimeline that time_tuple[1] should equal
    timeline = ObservationTimeline()
    timeline.add(Observation("Bob", "Starbucks", "1970-01-02 02:53:00"))
    timeline.add(Observation("Jane", "Starbucks", "1970-01-02 03:53:00"))
    timeline.add(Observation("Dalton", "Starbucks", "1970-01-02 04:53:00"))
    timeline.add(Observation("Aaron", "My house", "1970-01-02 05:53:00"))
    timeline.add(Observation("Mike", "His House", "1970-01-02 06:53:00"))

    # Check that the two are equal
    assert diction == time_tuple[0]
    for x in range(len(timeline.observations)):
        assert (timeline.observations[x].name ==
                time_tuple[1].observations[x].name)
        assert (timeline.observations[x].location ==
                time_tuple[1].observations[x].location)
        assert (timeline.observations[x].time ==
                time_tuple[1].observations[x].time)
def observe_world(self):
    positions = self.flappybird.getWorldPositionObjets()
    self.current_observation = Observation(self.flappybird.counter,
                                           self.flappybird.dead)
    self.current_observation.set_relative_positions(positions)
    self.just_restarted = self.current_observation.just_restarted(positions)
def test_ge():
    """Test the >= operator."""
    o1 = Observation("Scratch", "Starbucks", "2016-01-11 12:00:00")
    o2 = Observation("Grounder", "Starbucks", "2016-01-11 12:30:00")
    assert not (o1 >= o2)
    assert o1 >= o1
    assert o2 >= o2
    assert o2 >= o1
def from_hash(cls, json):
    observation_before = Observation.from_hash(json['observation_before'])
    jump = json['jump']
    new_theory = cls(observation_before, jump)
    observation_after = Observation.from_hash(json['observation_after'])
    new_theory.set_observation_after(observation_after)
    new_theory.set_utility(json['utility'])
    new_theory.set_uses(json['times_used'])
    return new_theory
def mongoUpload():
    while True:
        try:
            result = sensor.read()
            if result.is_valid():
                Observation.upload(toFaren(result.temperature), result.humidity)
            else:
                logging.warning(f"Invalid result at {datetime.datetime.now()}")
        except Exception:
            # Avoid a bare except so KeyboardInterrupt/SystemExit still propagate
            logging.error(f"Upload error at {datetime.datetime.now()}")
def addObservation():
    try:
        print("Please type details of Observation\n")
        animalNo = input("AnimalNo: ")
        staffID = input("StaffId: ")
        animals = appliaction.getAllAnimal()
        staffs = appliaction.getAllStaff()
        flagAnimal = 1
        flagStaff = 1
        now = datetime.now()
        year = now.strftime("%Y")
        month = now.strftime("%m")
        day = now.strftime("%d")
        time = now.strftime("%H:%M")
        date = day + "/" + month + "/" + year

        for animal in animals:
            if animalNo == animal.animalNo:
                flagAnimal = 0
                animalObject = animal
        if flagAnimal == 1:
            print("\nThere is no such animal, try again!!")

        for staff in staffs:
            if staffID == staff.staff_id:
                staffObject = staff
                flagStaff = 0
        if flagStaff == 1:
            print("\nThere is no such staff, try again!!")

        if flagStaff == 0 and flagAnimal == 0:
            count = 0
            observationDetails = appliaction.getAllObservationDetails()
            for observed in observationDetails:
                if (str(animalNo) == str(observed.animal.animalNo)
                        and str(date) == str(observed.date)):
                    count += 1
            if count == 3:
                print("Uh-oh, you cannot observe an animal more than 3 times a day!!")
            else:
                weight = input("Animal Weight (kg): ")
                temperature = input("Temperature (C*): ")
                note = input("Note: ")
                newObservation = Observation()
                newObservation.set_observation(animalObject, weight,
                                               temperature, note, staffObject)
                appliaction.addObservationingToList(newObservation)
                print("\nAn observation record added successfully\n")
    except Exception:
        print("\nSome error occurred, try again.\n")
        main()
def _poll(self):
    """Start the PWS monitor service."""
    while True:
        # TODO: Apply filters to observation data
        # TODO: Backfill and resume after connection failure
        if self._obs is None:
            ts = datetime.datetime.utcnow()
            obs = self.console.measure()
            self._obs = Observation(ts, obs, maxes=['wind_speed'])
        else:
            self._obs.update(self.console.measure())
        gevent.sleep(self.poll_interval)
def make_ad_hoc_observation(self):
    """Make an ad hoc Observation, save it, and return it."""
    rightnow = datetime.now()
    newobs = Observation(
        status=-1,
        dyad=self,
        due=rightnow,
        due_original=rightnow,
        label="Ad hoc observation"
    )
    newobs.save()
    return newobs
def __init__(self, talent=0.0, historyLength=3, dtype=np.float64):
    # np.float was an alias for the builtin float and was removed in NumPy 1.24
    self.dtype = dtype
    self.ObservSpace = Observation()
    self.ActionSpace = Action()
    self.age = 0
    self.entropy = config.entropyBorn
    self.belief = self.initBelief()
    self.beliefGrad = np.zeros_like(self.belief)
    self.history = self.initHistory(historyLength)
    self.talent = talent
def set_patient(self, bundle, prefix=None):
    """Generates and appends the Patient entry to the transaction"""
    if GENERATION_MAP["patient"]:
        patient = Patient.mpi[self.pid]
        if prefix:
            patient.pid = prefix + "-" + patient.pid

        # look up patient photos
        if self.pid in Document.documents:
            for d in [doc for doc in Document.documents[self.pid]
                      if doc.type == 'photograph']:
                data = fetch_document(self.pid, d.file_name)
                binary_id = uid(None, "%s-photo" % d.id, prefix)
                self.appendEntry(
                    bundle,
                    Binary({
                        "mime_type": d.mime_type,
                        "content": data['base64_content'],
                        "id": binary_id
                    }))
                patient.photo_title = d.title
                patient.photo_code = d.mime_type
                patient.photo_binary_id = binary_id
                patient.photo_hash = data["hash"]
                patient.photo_size = data["size"]

        patientJSON = patient.toJSON(prefix)
        bundle = self.set_documents(bundle, prefix)
        self.appendEntry(bundle, Entry(patientJSON))

        if patient.gestage:
            self.appendEntry(
                bundle,
                Entry(
                    Observation(
                        {
                            "id": uid(None, "%s-gestage" % self.pid, prefix),
                            "pid": self.pid,
                            "date": patient.dob,
                            "code": "18185-9",
                            "name": "Gestational age at birth",
                            "scale": "Qn",
                            "value": patient.gestage,
                            "units": "weeks",
                            "unitsCode": "wk",
                            "categoryCode": "exam",
                            "categoryDisplay": "Exam"
                        }, prefix)))
    return bundle
def test_nirc2_img(psf_grid_raw, psf_grid_pos, outname='tmp.fits'):
    time_start = time.time()

    nirc2 = NIRC2()

    print('Reading GC Label.dat: {0} sec'.format(time.time() - time_start))
    stars = GCstars()

    psfgrid = PSF_grid_NIRC2_Kp(psf_grid_raw, psf_grid_pos)

    print('Making Image: {0} sec'.format(time.time() - time_start))
    wave_index = 0
    background = 3.0  # electrons / sec
    obs = Observation(nirc2, stars, psfgrid, wave_index, background,
                      origin=np.array([512, 512]))

    print('Saving Image: {0} sec'.format(time.time() - time_start))
    obs.save_to_fits(outname, clobber=True)

    return
def work_with_data():
    files = work_with_files.find_files(os.getcwd(), ".txt")
    lasers_data = []
    tmp = []
    T = [70 + 273.2] * 3 + [80 + 273.2] * 5 + [90 + 273.2] * 5
    for file in files:
        current_sample = work_with_files.get_sample(file)
        for value in current_sample:
            tmp.append(Observation(time=value.split("\t")[0],
                                   value=value.split("\t")[1]))
        lasers_data.append(work_with_files.centring_value(tmp))
        tmp = []
    return lasers_data, T
def test_my_rendezvous_more():
    """Test our rendezvous generator."""
    timeline = ObservationTimeline()
    timeline.add(Observation("Skeletor", "Starbucks", "1970-01-02 02:53:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 03:05:00"))
    timeline.add(Observation("D", "Starbucks", "1970-01-02 03:55:00"))
    timeline.add(Observation("E", "Starbucks", "1970-01-02 04:50:00"))
    timeline.add(Observation("F", "Starbucks", "1970-01-02 05:40:00"))
    timeline.add(Observation("G", "Starbucks", "1970-01-02 06:30:00"))
    for i, suspect_pair in enumerate(timeline.rendezvous()):
        # Gotta be a tuple of length 2
        assert isinstance(suspect_pair, tuple)
        assert len(suspect_pair) == 2

        # Unpack 'em
        s1, s2 = suspect_pair

        # Check the types
        assert isinstance(s1, Observation)
        assert isinstance(s2, Observation)

    # Six observations yield five consecutive pairs, so 'i' ends at 4.
    assert i == 4
def getObs(self, type="LR"):
    obs = []
    obs.append("_empty_")
    #obs.append("_dummy_")
    for word in self.wordList:
        obs.append(word)
        #obs.append("_dummy_")
    obs.append("_empty_")
    return Observation(obs, self.id)
def set_observations(self, dat):
    '''Set the observations list'''
    if dat is not None:
        self.observations = []
        dat.sort(key=itemgetter(self.key - 1))
        for e in dat:
            author = User(e[2], e[3], e[11], e[12], e[13])
            location = Location(e[7], e[8], e[9], e[10])
            subject = e[0] + ' ' + e[1]
            observation = Observation(subject, author, location, e[5], e[6])
            self.observations.append(observation)
def format(metric_value, messageId):
    """Format the message to send."""
    # Message to send to the server API (follows the JSON schema)
    message = Message(probeId=201, resourceId=202, messageId=messageId,
                      sentTime=int(time.time()), data=None)

    # Add the CPU metric
    dt = Data(type="measurement", descriptionId=203, metricId=10,
              observations=None)
    obs = Observation(time=int(time.time()), value=metric_value)
    dt.add_observation(observation=obs)
    message.add_data(data=dt)

    # Return the message formatted as JSON
    return json.dumps(message.reprJSON(), cls=ComplexEncoder)
def set_labs(self, bundle, prefix=None):
    """Attaches the patient's lab results to the bundle"""
    if GENERATION_MAP["LabResults"] and self.pid in Lab.results:
        for o in Lab.results[self.pid]:
            pid = self.pid
            # if prefix:
            #     pid = prefix + "-" + pid
            _json = o.toJSON()
            _json["id"] = uid(None, "%s-lab" % o.id, prefix)
            _json["pid"] = pid
            _json["categoryCode"] = "laboratory"
            _json["categoryDisplay"] = "Laboratory"
            bundle["entry"].append(Entry(Observation(_json, prefix)))
    return bundle
def eachAircraft():
    groundstation = Groundstation()
    aircraft_seen = {}
    conn = sqlite3.connect('flightevents.db')
    nmea = open('data.nmea', 'r')

    for line in nmea:
        try:
            commas = line.count(',')
            sentence = pynmea2.parse(line, check=True)
        except pynmea2.ChecksumError:
            # ignore sentences that produce a checksum error
            continue
        except pynmea2.ParseError:
            # ignore sentences that can't be parsed
            continue

        # ignore Flarm PFLAU sentences
        if Observation.is_pflau_sentence(sentence):
            continue

        # The groundstation must have received the UTC time from the GPS
        # before we permit any processing of Flarm PFLAA observations.
        if (groundstation.valid_time()
                and Observation.is_pflaa_sentence(sentence)):
            observation = Observation()
            if observation.set(conn, groundstation, sentence):
                aircraft_id = observation.get_aircraft_id()
                if aircraft_id not in aircraft_seen:
                    aircraft_seen[aircraft_id] = Aircraft(aircraft_id)
                aircraft_seen[aircraft_id].append_observations(observation)
        elif sentence.sentence_type == 'RMC':
            # this sentence contains the current date
            groundstation.set_date(sentence.datestamp)
            groundstation.set(sentence)
        elif (sentence.sentence_type == 'GGA' and groundstation.valid_date()
              and commas == 14):
            # this sentence has the groundstation timestamp, lat, lon, elevation
            groundstation.set(sentence)

    conn.commit()
    conn.close()
    print("%s" % list(aircraft_seen.keys()))
    groundstation.report()
    return
def generateHO(self, file_ids, extraExt='', obsColumn=0):
    """Yields tuples (positive, negative)."""
    for file_id in file_ids:
        ho_fn = file_id + '.hddn'
        ho = HiddenObservation().read(ho_fn)
        ho_map = self.mapHiddenObservation(ho, self.conceptMap)
        o_fn = file_id + extraExt + '.obs'
        # open() replaces the removed Python 2 file() builtin
        o_file = open(o_fn)
        obs = []
        try:
            for line in o_file:
                line = line.split()
                obs.append(int(line[obsColumn]))
        finally:
            o_file.close()
        o_map = Observation(obs, ho.id)
        yield ho_map, o_map
def load_raws_observations(obs_file, glat, glon, grid_dist_km):
    """
    Loads all of the RAWS observations valid at the time in question
    and converts them to Observation objects.
    """
    # load observations & register them to grid
    orig_obs = []
    if os.path.exists(obs_file):
        # np.object was removed in NumPy 1.24; np.object_ is the supported spelling
        orig_obs = np.loadtxt(obs_file, dtype=np.object_, delimiter=',')
    else:
        print('WARN: no observation file found.')
    obss = []
    omin, omax = 0.6, 0.0

    # format of file
    # 0    1  2  3  4  5  6   7   8         9      10    11
    # yyyy,mm,dd,hh,MM,ss,lat,lon,elevation,var_id,value,variance
    for oo in orig_obs:
        ts = datetime(int(oo[0]), int(oo[1]), int(oo[2]), int(oo[3]),
                      int(oo[4]), int(oo[5]), tzinfo=pytz.timezone('GMT'))
        lat, lon, elev = float(oo[6]), float(oo[7]), float(oo[8])
        obs, ovar = float(oo[10]), float(oo[11])
        i, j = find_closest_grid_point(lat, lon, glat, glon)

        # compute distance to grid points
        dist_grid_pt = great_circle_distance(lon, lat, glon[i, j], glat[i, j])

        # check & remove nonsense zero-variance (or negative variance) observations
        if ovar > 0 and dist_grid_pt < grid_dist_km / 2.0:
            obss.append(Observation(ts, lat, lon, elev, oo[9], obs, ovar, (i, j)))
            omin = min(omin, obs)
            omax = max(omax, obs)

    print('INFO: loaded %d observations in range %g to %g [%d available]' %
          (len(obss), omin, omax, len(orig_obs)))
    return obss
def random_timed_observations(count=100):
    """A helper function that returns Observations with random times.

    Note that the names and locations will all be the same.

    :param int count: The number of Observations to return
    :return: a list of Observation instances in a random order.
    """
    # Generate a list of random, unique Observations
    observations = []
    time = datetime.fromtimestamp(0)
    for _ in range(count):
        time += timedelta(minutes=random.randint(1, 59))
        observations.append(Observation("Skeletor", "Starbucks", str(time)))

    # Shuffle up the Observations
    random.shuffle(observations)
    return observations
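# --- Illustrative usage (not from the original source) ---
# A minimal sketch of how random_timed_observations() might be consumed in a
# test, assuming the same ObservationTimeline.add() ordering behaviour that the
# other tests in this collection rely on. The test name is hypothetical.
def test_add_keeps_order_with_random_times():
    timeline = ObservationTimeline()
    for obs in random_timed_observations(count=25):
        timeline.add(obs)
    # add() should keep .observations sorted by time regardless of insert order
    for prev, current in zip(timeline.observations, timeline.observations[1:]):
        assert prev.time <= current.time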
def test_my_add():
    """Test that adding more Observations keeps them in order."""
    # Add observations out of chronological order
    timeline = ObservationTimeline()
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 03:49:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 02:51:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 03:52:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 04:52:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 05:52:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 03:50:00"))

    # Check that our observations are in order
    # (Go look up zip())
    for prev, current in zip(timeline.observations, timeline.observations[1:]):
        assert prev.time < current.time
def test_rendezvous():
    """Test our rendezvous generator."""
    timeline = ObservationTimeline()
    timeline.add(Observation("Skeletor", "Starbucks", "1970-01-02 02:53:00"))
    timeline.add(Observation("Doc Oc", "Starbucks", "1970-01-02 03:52:00"))
    for i, suspect_pair in enumerate(timeline.rendezvous()):
        # Gotta be a tuple of length 2
        assert isinstance(suspect_pair, tuple)
        assert len(suspect_pair) == 2

        # Unpack 'em
        s1, s2 = suspect_pair

        # Check the types
        assert isinstance(s1, Observation)
        assert isinstance(s2, Observation)

        # Check that we've got our suspects
        assert "Skeletor" in (s1.name, s2.name)
        assert "Doc Oc" in (s1.name, s2.name)

    # We only looped one time, so 'i' was set to zero, and that's it.
    assert i == 0
def __setitem__(self, key, value):
    """Apply the receiver-specific P1/C1 bias correction, then store the observation."""
    if self.receiver_type == 1:
        # C1 -> C1 + b
        # P2 -> P2 + b
        if value[0] != 0.0:
            value[0] += self.p1c1_bias
        if value[2] != 0.0:
            value[2] += self.p1c1_bias
    elif self.receiver_type == 2:
        # C1 -> C1 + b
        if value[0] != 0.0:
            value[0] += self.p1c1_bias
    elif self.receiver_type == 3:
        pass
    else:
        raise ValueError('unknown receiver type {}'.format(self.receiver_type))
    if value[1] == 0.0 and self.replace_p1_with_c1:
        # replace P1 with C1 (with bias correction if necessary)
        value[1] = value[0]
    # replace empty values (==0.0) with None
    value = [None if x == 0.0 else x for x in value]
    super(P1C1ObsTimeSeries, self).__setitem__(key, Observation(*value))
    type=int,
    help='The number of bins to show the dedispersed profile with. '
         'Higher means higher time resolution, but noisier folds.')
parser.add_argument('-p', '--pulsarcat',
                    default='./small-data-files/pulsarcat.csv',
                    help='The csv file containing pulsar data')
parser.add_argument('--skiprfi', action='store_true',
                    help='Use this to skip time-based rfi-peak removal.')
args = parser.parse_args()

# Create an object containing all useful pulsar properties
print("Loading data")
obs = Observation(args.datafile)
pulsar = obs.pulsar
twodarray = obs.data

# read the literature value of the period and the dispersion measure
period = pulsar.period
DM = pulsar.DM

# Time resolution of the telescope
dt = (512 * 64) / (70e6)

# Array with the bandwidth
frequencyarray = obs.freq

# this part should be RFI flagging, something like:
# noflag = flagging(twodarray)
async def observe(self, server):
    async for connection in Observation(server):
        self.connections.append(connection)
    '--nbins',
    default=500,
    type=int,
    help='The number of phase bins to fold with. '
         'Higher means higher time resolution, but noisier folds.')
parser.add_argument('-p', '--pulsarcat',
                    default='./small-data-files/pulsarcat.csv',
                    help='The csv file containing pulsar data')
args = parser.parse_args()

DMplay = True

# Create an object containing all useful pulsar properties
obs = Observation(args.datafile)
pulsar = obs.pulsar
twodarray = obs.data

# read the literature value of the period and the dispersion measure
period = pulsar.period
DM = pulsar.DM

# Time resolution of the telescope
dt = (512 * 64) / (70e6)

tens = 10 * int(np.round(1 / dt))
thirs = int(3 * tens)
sixts = int(6 * tens)

# Array with the bandwidth
class Observer(object):
    def __init__(self, console_path=CNSL_PLUGIN_PATH, find_cnsl=True,
                 emitter_path=EMIT_PLUGIN_PATH, find_emitters=True,
                 poll_interval=5, emit_interval=60):
        """Iterate over available PWS console plugins.

        Once a plugin is found that returns a connection object from its
        discover method, create an instance of the discovered console.
        """
        # TODO: Load config file
        self.poll_interval = poll_interval
        self.emit_interval = emit_interval
        self._obs = None
        self.db = Database()
        self.console = None
        self._console_path = console_path
        self.emitters = []
        self._emitter_path = emitter_path
        if find_cnsl:
            self.find_console()
        if find_emitters:
            self.find_emitters()

    def find_console(self):
        """Look for an available console."""
        plugin_manager = ObsPluginManager()
        plugin_manager.setPluginPlaces([self._console_path])
        plugin_manager.collectPlugins()
        for plugin in plugin_manager.getAllPlugins():
            logging.debug('Found potential console plugin: {0}'.format(plugin.plugin_object))
            if hasattr(plugin.plugin_object, 'discover'):
                logging.debug('Class {0} has discover method'.format(plugin.plugin_object))
                self.console = plugin.plugin_object.discover()
                if self.console is not None:
                    break
        if not self.console:
            logging.warning('No consoles found.')

    def find_emitters(self):
        """Look for available emitter plugins."""
        plugin_manager = ObsPluginManager()
        plugin_manager.setPluginPlaces([self._emitter_path])
        plugin_manager.collectPlugins()
        for plugin in plugin_manager.getAllPlugins():
            logging.debug('Found potential emitter plugin: {0}'.format(plugin.plugin_object))
            if hasattr(plugin.plugin_object, 'connect'):
                logging.debug('Class {0} has connect method'.format(plugin.plugin_object))
                emitter = plugin.plugin_object.connect()
                if emitter is not None:
                    self.emitters.append(emitter)
        if not self.emitters:
            logging.warning('No emitters found.')

    def _emit(self):
        while True:
            if self._obs is not None:
                obs = self._obs.as_dict()
                self.db.save(obs)
                for emitter in self.emitters:
                    emitter.send(obs)
                self._obs = None
            gevent.sleep(self.emit_interval)

    def _poll(self):
        """Start the PWS monitor service."""
        while True:
            # TODO: Apply filters to observation data
            # TODO: Backfill and resume after connection failure
            if self._obs is None:
                ts = datetime.datetime.utcnow()
                obs = self.console.measure()
                self._obs = Observation(ts, obs, maxes=['wind_speed'])
            else:
                self._obs.update(self.console.measure())
            gevent.sleep(self.poll_interval)

    def start(self):
        threads = [gevent.spawn(self._poll), gevent.spawn(self._emit)]
        print(threads)
        gevent.joinall(threads)
def calc_worker(DM, period, live_x, live_y, folded_x, folded_y, live=False,
                nbins=500, countmod=10, mixfreq=405):
    """
    nbins: number of bins to plot
    countmod: after how many new data points to update the plot-array
    """
    data = np.zeros(512, dtype=int)
    if live:
        # define data type unsigned int
        unsignint = np.dtype(np.uint32)

        # construct the socket
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # get socket info
        sinfo = socket.getaddrinfo('0.0.0.0', 22102)
        # bind with the backend
        s.bind(('0.0.0.0', 22102))
        #s.connect(('10.1.2.3',22102))

        # receive one package
        a = s.recv(2048)
    else:
        obs = Observation(
            "data/obs-10-04-2018/B0329+54_10-04-2018-withP.fits.gz")
        DM = obs.pulsar.DM
        period = 0.71458

    # define a counter
    counter = 0
    # how many modulus of counters do we want.

    freq = calc_central_freqs(mixfreq,
                              throwhighestfreqaway=(False if live else True))
    num_freqs = len(freq)

    shift = 4.148e3 * DM * (1 / freq[0]**2 - 1 / freq**2)
    #binshifts = np.zeros(len(shift),dtype=int)
    binshifts = np.rint(shift / dt).astype(int)
    sizet = binshifts[-1]
    dmdata = np.zeros((sizet, len(shift)))
    maxshift = binshifts[-1]

    plotarray = np.ones(1000)
    xpoints = np.arange(len(plotarray))
    newdatapoint = 0

    normdata = np.zeros((20000, num_freqs))
    for j in range(0, 20000):
        if live:
            # get the package of the current time
            a = s.recv(2048)
            # save the data in the array
            for i in range(1, 512):
                data[i - 1] = int.from_bytes(a[4 * (i - 1):4 * i],
                                             byteorder='big')
            #print(len(normdata[j]),len(data[256:]))
            normdata[j] = data[256:]
        else:
            normdata[j] = obs.data[j]
    norm = np.sum(normdata, axis=0) / 20000

    # construct the most ugly while loop construction
    foldedarray = np.zeros(nbins)
    normalarray = np.zeros(nbins)
    while True:
        if live:
            # get the package of the current time
            a = s.recv(2048)
            # save the data in the array
            for i in range(1, 512):
                data[i - 1] = int.from_bytes(a[4 * (i - 1):4 * i],
                                             byteorder='big')
            localdata = data[256:]
        else:
            localdata = obs.data[counter]

        time = counter * dt
        delay_dispersion = -4.148e3 * DM * freq**(-2)
        time += delay_dispersion
        whichbin = time * nbins / period % nbins
        lowernorm = np.ceil(whichbin) - whichbin
        highernorm = 1 - lowernorm
        indexlow = np.array(np.floor(whichbin), dtype=int)
        indexhigh = (indexlow + 1) % nbins
        np.add.at(normalarray, indexlow, lowernorm)
        np.add.at(normalarray, indexhigh, highernorm)
        np.add.at(foldedarray, indexlow, lowernorm * localdata / norm)
        np.add.at(foldedarray, indexhigh, highernorm * localdata / norm)

        for i in range(0, len(shift) - 1):
            dmdata[(counter + binshifts[i]) % maxshift, i] = localdata[i] / norm[i]
            if norm[i] == 0:
                print('Hellppp!', i)
        newdatapoint += np.sum(dmdata[counter % maxshift, 70:200])

        # if the current time is a plot time, plot
        if counter % countmod == 0:
            plotarray = np.roll(plotarray, -1)
            plotarray[-1] = newdatapoint / countmod
            to_plot = foldedarray / normalarray

            live_x[:] = np.linspace(-countmod * 1000 * dt, 0, 1000)
            live_y[:] = plotarray
            folded_x[:] = np.linspace(0, period, nbins, endpoint=False)
            folded_y[:] = to_plot / to_plot.sum()

            newdatapoint = 0
        counter += 1
]

# Make final table
EPIC_obs_table = Table(
    names=('ObsId', 'RevolutionId', 'ExposureID', 'Start', 'End',
           'Duration_Obs', 'EPIC_rate_soft', 'EPIC_erate_soft',
           'EPIC_rate_hard', 'EPIC_erate_hard', 'hr', 'fvar_soft',
           'efvar_soft', 'xs_soft', 'exs_soft', 'nxs_soft', 'enxs_soft',
           'VA_soft', 'eVA_soft', 'fvar_hard', 'efvar_hard', 'xs_hard',
           'exs_hard', 'nxs_hard', 'enxs_hard', 'VA_hard', 'eVA_hard'),
    dtype=('i', 'i', 'U9', 'U30', 'U30', 'd', 'd', 'd', 'd', 'd', 'd', 'd',
           'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd', 'd',
           'd', 'd'))

i = 0
for obsid in epic_observations:
    obs = Observation(obsid=obsid, target_dir=target_dir)
    print('-----------------------------------------------------------')

    # Process each observation
    obs.cifbuild()
    obs.odfingest()
    obs.epproc()
    obs.filter_epic(pileup=True)
    obs.epiclccorr(pileup=True)  # always correct for pile-up
    obs.epic_lightcurve(mjdref=mjdref)
    obs.fracvartest(instrument='epic')
    obs.pn_spectrum(pileup=True)
    obs.pn_xspec(target_REDSHIFT)

    # Save attributes of observation into the EPIC_table (to edit)
    EPIC_obs_table.add_row(
async def communicate(self, client, connection_id):
    # TODO: implement client slice observation and put the entire state
    # tree into the store
    # state = {'connections': {[id]: {'id'}}, ...}
    async for snapshot in Observation(self.store):
        await client.send(json.dumps({**snapshot, 'id': connection_id}))
def __init__(self, iface, point, observation):
    precision = MySettings().value("obsDefaultPrecisionDistance")
    Observation.__init__(self, iface, "distance", point, observation,
                         precision)
def __init__(self, iface, point, observation):
    settings = MySettings()
    self.length = settings.value("obsOrientationLength")
    precision = settings.value("obsDefaultPrecisionOrientation")
    Observation.__init__(self, iface, "orientation", point, observation,
                         precision)