def download_series(series_url, download_path, file_format):
    ''' given a series url, write all works in a series to files '''

    print("Downloading series...")
    series_metadata = get_series_metadata(series_url)
    print(
        "Downloaded series metadata.\nSeries: {title}\nAuthors: {authors}\nWorks: {numworks}"
        .format(title=series_metadata.title,
                authors=series_metadata.authors,
                numworks=len(series_metadata.work_urls)))

    # create series subfolder from series name and use as download path
    sanitized_series = utils.sanitize_filename(series_metadata.title)
    sanitized_authors = "_".join([
        utils.sanitize_filename(author)
        for author in series_metadata.authors
    ])
    series_subfolder_path = download_path / "{}_by_{}".format(
        sanitized_series, sanitized_authors)

    if not series_subfolder_path.exists():
        try:
            series_subfolder_path.mkdir()
        except OSError as e:
            # Path.mkdir raises OSError (IOError is only its alias in Python 3)
            print(e)
            sys.exit(1)

    for work_url in series_metadata.work_urls:
        # TODO: break into metadata and download?
        download_work(work_url, series_subfolder_path, file_format, True)
        sleep(1)
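# A minimal usage sketch (all values are hypothetical). The `/` join above
# implies download_path is a pathlib.Path; "epub" is assumed to be one of the
# supported file_format strings.
#
#   from pathlib import Path
#
#   download_series("https://example.org/series/12345",
#                   Path("downloads"),
#                   "epub")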
def __init__(self,
             ft2,
             tstart,
             simulation_time,
             irfs="P8R2_SOURCE_V6",
             emin=100,
             emax=100000,
             ltfrac=0.9):
    """
    :param ft2: path to the FT2 (spacecraft) file to use for the simulation
    :param tstart: start time of the simulation (MET seconds)
    :param simulation_time: duration of the simulation (seconds)
    :param irfs: instrument response functions to simulate
    :param emin: minimum energy (MeV)
    :param emax: maximum energy (MeV)
    :param ltfrac: livetime fraction to be passed to gtobssim
    """

    # At the moment we only support source class
    # assert irfs == "P8R2_SOURCE_V6", "At the moment we only support P8R2_SOURCE_V6"

    self._ft2 = sanitize_filename(ft2)
    self._tstart = tstart
    self._simulation_time = simulation_time
    self._irfs = irfs
    self._emin = emin
    self._emax = emax
    self._ltfrac = ltfrac

    # This will contain the list of temporary files produced
    self._temp_files = []

    self._simulated_ft1 = None
def eval(self, state):
    name = self.name
    if name.startswith(u"_"):
        name = u"~" + name[1:]
    values = state.context.get(name, u"")
    return utils.sanitize_filename(values)
def start(self, title):
    """Start recording streaming data into audio file.

    Codec and format of file can be specified in config.py script.

    :param title: Song title - used as filename
    :type title: str
    """
    self.title = title
    filename = utils.sanitize_filename(self.title) + "." + \
        config.recording["file_ext"].lstrip(".")
    dir = os.path.join(userdata.get_data_dir(), "recorded/")
    os.makedirs(dir, mode=0o777, exist_ok=True)
    path = os.path.join(dir, filename)

    if self._is_recording:
        raise ValueError("Recording has already been started")
    if not path.strip():
        raise ValueError("Filename has not been specified")

    # Start recording
    self._is_recording = True
    self._should_create_segment = config.recording["create_segments"]

    # Create codec and link to pipeline
    self._codec = Gst.ElementFactory.make(config.recording["codec_name"])
    for key in config.recording["codec_props"]:
        value = config.recording["codec_props"][key]
        self._codec.set_property(key, value)
    self._audio_sink.add(self._codec)
    self._resample.link(self._codec)

    # Create file muxer
    self._mux = Gst.ElementFactory.make(config.recording["mux_name"])
    for key in config.recording["mux_props"]:
        value = config.recording["mux_props"][key]
        self._mux.set_property(key, value)
    self._audio_sink.add(self._mux)
    self._codec.link(self._mux)

    # Create filesink - the last element in pipeline that writes data to file
    self._filesink = Gst.ElementFactory.make('filesink')
    self._filesink.set_property('location', path)
    self._filesink.set_property('async', 0)
    self._audio_sink.add(self._filesink)
    self._mux.link(self._filesink)

    # Open valve, so stream can start flowing to the filesink
    self._valve.set_property('drop', 0)

    # Fire events
    dispatchers.recorder.recording_started(self.title)
    dispatchers.recorder.recording_state_changed(self._is_recording)
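# Usage sketch (assumes an already-constructed recorder instance wired to a
# playing GStreamer pipeline; the title is hypothetical):
#
#   recorder.start("Artist - Song Title")
#   # data now flows _resample -> _codec -> _mux -> _filesink
#   # until the valve is closed again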
def make_data_package_files(self,
                            trigger_name,
                            ra=0.0,
                            dec=0.0,
                            trigger_time=None,
                            destination_dir='.'):
    """
    Make data package for gtburst

    :return:
    """

    if trigger_time is None:
        trigger_time = self._tstart

    destination_dir = sanitize_filename(destination_dir)

    with within_directory(destination_dir, create=True):

        # Rename ft1 and ft2
        new_ft1 = 'gll_ft1_tr_bn%s_v00.fit' % trigger_name.replace("bn", "")
        new_ft2 = 'gll_ft2_tr_bn%s_v00.fit' % trigger_name.replace("bn", "")

        shutil.copy(self._simulated_ft1, new_ft1)
        shutil.copy(self._ft2, new_ft2)

        _makeDatasetsOutOfLATdata(new_ft1,
                                  new_ft2,
                                  trigger_name,
                                  self._tstart,
                                  self._tstart + self._simulation_time,
                                  ra,
                                  dec,
                                  trigger_time,
                                  localRepository=".",
                                  cspecstart=0,
                                  cspecstop=10)

    # Remove all other files
    self._cleanup()

    os.remove(self._simulated_ft1)
    self._simulated_ft1 = None
async def download_telegram_media(self, message):
    local_path = await message.download_media(self.telegram_media_dir)
    if not local_path:
        return ''
    if message.document:
        # Documents keep their original (sanitized) filename
        new_file = sanitize_filename(os.path.basename(local_path))
    else:
        # Other media types get a sequential name, preserving the extension
        filetype = os.path.splitext(local_path)[1]
        new_file = str(self.media_cn) + filetype
        self.media_cn += 1
    new_path = os.path.join(self.telegram_media_dir, new_file)
    if local_path != new_path:
        os.replace(local_path, new_path)
    if self.media_url[-1:] != '/':
        self.media_url += '/'
    return self.media_url + new_file
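# Usage sketch (inside an async context; `bridge` is a hypothetical instance
# of the class owning download_telegram_media, and `message` an incoming
# Telegram message object exposing download_media):
#
#   media_url = await bridge.download_telegram_media(message)
#   if media_url:
#       # relay the public URL to the other side of the bridge
#       ...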
    type=str,
    help="Name for the simulated source. If None, the OBJECT name is used")

parser.add_argument(
    "--irf",
    required=False,
    default="P8R2_SOURCE_V6",
    type=str,
    help="Instrument response function to be used. Default: P8R2_SOURCE_V6")

args = parser.parse_args()

# Read from the data package the trigger time and trigger name
package_path = sanitize_filename(args.package_path)

assert os.path.exists(package_path) and os.path.isdir(package_path), \
    "Path %s does not exist or it is not a directory" % package_path

trigger_name = os.path.split(package_path)[-1]

# Find response files (.rsp) (there might be more than one because there
# could be more than one version)
response_file_pattern = os.path.join(package_path,
                                     "gll_cspec_tr_%s_v*.rsp" % trigger_name)

response_files = glob.glob(response_file_pattern)

assert len(response_files) > 0, \
    "Could not find response file %s" % response_file_pattern
os.makedirs(dir_path)

# Change directory permission to rw-rw-rw-
# os.chmod(dir_path, 0o666)

# Make sure all files and directories have the right permissions
try:

    execute_command(log, "chmod a+rwx --recursive %s" % this_trigger_input_dir)

except Exception:

    log.error("Could not change all permissions")

map_path = sanitize_filename(args.map)

assert os.path.exists(map_path), "The map %s does not exist" % map_path

# Finally run the stream
# P2_EXE is the path to the executable, likely
# /afs/slac.stanford.edu/u/gl/glast/pipeline-II/prod/pipeline
cmd_line = '%s createStream' % config.get('SLAC', 'P2_EXE')
cmd_line += ' %s' % config.get('SLAC', 'P2_TASK')
cmd_line += ' --define TRIGGERNAME=%s' % triggername
cmd_line += ' --define TRIGGERTIME=%s' % args.triggertime
def run_simulation(self, outfile='gwt_sim', seed=None, point_source=None):
    """
    :param outfile: name for the simulated FT1 file
    :param seed: seed for the random number generator (default: random)
    :param point_source: a tuple (name, ra, dec, index, energy_flux), where the
           energy flux is in erg/cm2/s between 100 MeV and 100 GeV
    :return:
    """

    if point_source is not None:

        # Need to add a point source
        pts_source_name, ra, dec, index, energy_flux = point_source

        emin = (self._emin * u.MeV).to(u.erg).value
        emax = (self._emax * u.MeV).to(u.erg).value

        if index != -2.0:

            conv = (1.0 + index) / (2.0 + index) * \
                   (pow(emax, index + 2) - pow(emin, index + 2)) / \
                   (pow(emax, index + 1) - pow(emin, index + 1))

        else:

            conv = emin * emax / (emax - emin) * np.log(emax / emin)

        photon_flux = energy_flux / conv * (1 / u.cm**2 / u.s)
        photon_flux_gtobsim = photon_flux.to(1 / u.m**2 / u.s).value

        log.info("Photon flux for gtobssim: %.2g m^(-2) s^(-1)" %
                 photon_flux_gtobsim)
        log.info("Conversion factor: %s" % conv)

        # Generate the point source XML
        temp_pts_xml = "my_point_source.xml"

        with open(temp_pts_xml, "w+") as f:

            src_def = '''
<source_library title="PointSource">
    <source name="%s" flux="%s">
        <spectrum escale="MeV">
            <particle name="gamma">
                <power_law emin="%s" emax="%s" gamma="%s"/>
            </particle>
            <celestial_dir ra="%s" dec="%s"/>
        </spectrum>
    </source>
</source_library>
''' % (pts_source_name, photon_flux_gtobsim, self._emin, self._emax,
       float(index) * (-1), ra, dec)

            f.write(src_def)

        # Now generate a txt file containing the list of XML files to use
        xml_list = "xml_list.txt"

        with open(xml_list, "w+") as f:

            with open(sanitize_filename(config.get("SLAC", "SIM_XML"))) as ff:

                lines = ff.readlines()

            f.writelines(lines)
            f.write("\n%s\n" % sanitize_filename(temp_pts_xml))

        # Add the new point source to the list of sources to simulate
        src_list = "srclist.txt"

        with open(src_list, "w+") as f:

            with open(sanitize_filename(config.get("SLAC",
                                                   "SIM_SRC_LIST"))) as ff:

                lines = ff.readlines()

            f.writelines(lines)
            f.write("\n%s\n" % pts_source_name)

    else:

        xml_list = sanitize_filename(config.get("SLAC", "SIM_XML"))
        src_list = sanitize_filename(config.get("SLAC", "SIM_SRC_LIST"))

    # We need to set up the environment variable SKYMODEL_DIR before
    # running gtobssim
    os.environ['SKYMODEL_DIR'] = config.get("SLAC", "SKYMODEL_DIR")

    # Gather arguments for gtobssim in a dictionary
    evroot = '__gw_sims'

    _gtobssim_args = {
        'emin': self._emin,
        'emax': self._emax,
        'edisp': 'no',
        'infile': xml_list,
        'srclist': src_list,
        'scfile': self._ft2,
        'evroot': evroot,
        'simtime': self._simulation_time,
        'ltfrac': self._ltfrac,
        'tstart': self._tstart,
        'use_ac': 'no',
        'irfs': self._irfs,
        'evtype': 'none',
        'seed': seed if seed is not None else np.random.randint(int(1e4),
                                                                int(1e9))
    }

    gtobssim_app = GtApp('gtobssim')

    log.info("About to start simulation")
    log.info("#### gtobssim output start #####")
    print("\n\n")

    gtobssim_app.run(**_gtobssim_args)

    print("\n\n")
    log.info("#### gtobssim output stop #####")

    # Now find out the FT1 files produced by the simulation
    event_files = glob.glob("%s_events_*.fits" % evroot)

    assert len(event_files) > 0, \
        "Simulation failed, there are no ft1 files produced."
    # Track them as temp files so we'll clean them up at the end
    # (list() is needed in Python 3 because map returns a one-shot iterator
    # and event_files is iterated again below)
    event_files = list(map(self._track_temp_file, event_files))

    # Merge the event files using gtselect

    # Make a text file with the list of ft1 files
    ft1_file_list = self._track_temp_file("__gw_sim_ft1_list.txt")

    with open(ft1_file_list, "w+") as f:

        for ft1 in event_files:

            f.write("%s\n" % ft1)

    gtselect_app = GtApp('gtselect')

    log.info("Merging simulated event files")
    log.info("#### gtselect output start #####")
    print("\n\n")

    gtselect_app.run(infile=ft1_file_list,
                     outfile=outfile,
                     ra=0.0,
                     dec=0.0,
                     rad=180.0,
                     tmin=self._tstart,
                     tmax=self._tstart + self._simulation_time,
                     emin=self._emin,
                     emax=self._emax,
                     zmax=180.0,
                     evclass=IRFS.IRFS[self._irfs].evclass,
                     evtype="INDEF",
                     convtype='-1')

    print("\n\n")
    log.info("#### gtselect output stop #####")

    # Now check how many events we had before gtselect
    n_simulated_events = 0

    for ft1 in event_files:

        with pyfits.open(ft1) as f:

            n_simulated_events += len(f['EVENTS'].data)

    # Now get the number of events which survived the cut
    n_simulated_events_after_cuts = 0

    with pyfits.open(outfile, mode='update') as f:

        n_simulated_events_after_cuts += len(f['EVENTS'].data)

        # Need to fix this because gtobssim writes "1", which is not an
        # acceptable reprocessing version for gtburst
        f[0].header['PROC_VER'] = 302

    assert n_simulated_events == n_simulated_events_after_cuts, \
        "Some events were lost when cutting with gtselect!"

    log.info("Generated %s events of class %s" %
             (n_simulated_events_after_cuts, self._irfs))

    self._cleanup()

    # Store for future use with its absolute path
    self._simulated_ft1 = sanitize_filename(outfile)
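# Standalone sanity-check sketch (not part of the class above) for the
# conversion factor `conv` computed in run_simulation: for a power law
# N(E) ~ E^index, conv is the mean photon energy
# <E> = Int E*N(E) dE / Int N(E) dE over [emin, emax], so
# photon_flux = energy_flux / <E>. The numeric values below are hypothetical;
# 1.602e-4 and 1.602e-1 erg correspond to 100 MeV and 100 GeV.
import numpy as np

def mean_photon_energy(emin, emax, index):
    # Closed form of <E>, mirroring the two branches in run_simulation
    if index == -2.0:
        return emin * emax / (emax - emin) * np.log(emax / emin)
    return ((1.0 + index) / (2.0 + index)
            * (emax ** (index + 2) - emin ** (index + 2))
            / (emax ** (index + 1) - emin ** (index + 1)))

# Compare against brute-force numerical integration on a log grid
emin, emax, index = 1.602e-4, 1.602e-1, -2.1
E = np.logspace(np.log10(emin), np.log10(emax), 200001)
numeric = np.trapz(E ** (index + 1), E) / np.trapz(E ** index, E)
assert np.isclose(mean_photon_energy(emin, emax, index), numeric, rtol=1e-5)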
def _track_temp_file(self, filename):

    self._temp_files.append(sanitize_filename(filename))

    return self._temp_files[-1]
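# End-to-end usage sketch of the simulator class whose methods appear above
# (the class name, file names, trigger name, and parameter values are all
# hypothetical; only the method signatures come from the snippets):
#
#   sim = LATSimulator("ft2.fit", tstart=488916609.0, simulation_time=10000.0)
#   sim.run_simulation(outfile="gwt_sim.fit",
#                      point_source=("my_src", 83.6, 22.0, -2.1, 1e-9))
#   sim.make_data_package_files("bn170817529", ra=83.6, dec=22.0)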