def main(self):
    """Entry point: load CVEs, run them through the selected source, export.

    Reads ``self.args.source`` to pick the backend ('circl', 'rfl' or
    'rfr') and writes the processed result to ``out.csv``. Prints an
    error message when no backend produced a result.
    """
    self.resolveArgs()
    cveList = self.load()
    result = None
    print("Cve(s) cargados: {}".format(len(cveList)))
    if self.args.source == 'circl':
        src = CirclSource()
        src.setCVEList(cveList)
        result = src.process()
    elif self.args.source in ('rfl', 'rfr'):
        # Backend-specific processing mode: 'rfl' -> 1, 'rfr' -> 2.
        mode = 1 if self.args.source == 'rfl' else 2
        src = RFSource()
        src.setCVEList(cveList)
        result = src.process({'mode': mode})
    # `is None`, not `== None` (PEP 8): result stays None when the source
    # was unknown or the backend failed to produce output.
    if result is None:
        print('Error desconocido my bro')
    else:
        exporter = Exporter()
        exporter.export(result, 'out.csv')
def main(argv):
    """Build a workspace from the configured input file, optionally plot it,
    print its summary, and export it to the configured output file."""
    # NOTE(review): the parameter is `argv` but it is never used; the body
    # reads `args`, which is not defined in this function — presumably a
    # module-level global populated by an argument parser. Verify against
    # the rest of the file (possible bug: `argv` vs `args`).
    ws = WorkSpace(args['infile'])
    # Plot only when explicitly requested; any other 'plot' value skips it.
    if args['plot'] == 'log':
        ws.plot2d_log()
    elif args['plot'] == 'linear':
        ws.plot2d()
    ws.info()
    exporter = Exporter(args['outfile'], ws)
    exporter.export()
def main(self):
    """Download every song in the configured playlist, tag it and export it.

    Uses ``flat_playlist_settings`` from the config (defaulting to the
    first 100 playlist items), downloads each entry, then exports each
    successful download to ``<export_dir>/<artist>/<album>/<title>``.
    """
    flat_playlist_settings = self.config.get('flat_playlist_settings')
    if not flat_playlist_settings:
        flat_playlist_settings = {'playlist_items': '0-100'}
    download_list = self.flat_playlist(options=flat_playlist_settings)

    # Download each playlist entry; resp is (success, song_dict).
    downloaded_songs = []
    for song_url in download_list:
        resp = self.download_url(song_url, download=True)
        if resp[0]:
            downloaded_songs.append(resp[1])

    for song_dict in downloaded_songs:
        artist = song_dict["artist"]
        # Whether to embed cover art in the exported file.
        coverwanted = True

        # Export directory (without the title):
        # <export_dir>/<artist>/<album>/
        export_dir = "{0}/{1}/{2}/".format(
            self.pretty_path(self.config["export_dir"]),
            self.pretty_path(artist),
            self.pretty_path(song_dict["album"]))
        # exist_ok avoids the check-then-create race of the original
        # `if not os.path.exists(...)` guard.
        os.makedirs(export_dir, exist_ok=True)
        export_path = export_dir + self.pretty_path(song_dict["title"])
        print("Export_Path: " + export_path)

        # Crop/convert the cover; fall back to no cover on failure.
        resp_cover = self.download_crop_cover(song_dict["cover"])
        if coverwanted and resp_cover[0]:
            song_dict.update({"cover": resp_cover[1]})
        else:
            song_dict.update({"cover": None})

        # The temp path is consumed by the exporter, not kept as a tag.
        ytdl_tmp_path = song_dict.pop("ytdl_tmp_path")
        try:
            exporter = Exporter(ytdl_tmp_path, song_dict)
            exporterlog = exporter.export(export_path)
        except Exception as e:
            # Best-effort: a single broken file must not abort the batch.
            print("skipped: {0}\nError: {1}".format(ytdl_tmp_path, e))
        else:
            print("successfully exported: {0}\n{1}".format(
                song_dict["title"], exporterlog))

    print("Downloaded {0}:songs from url: {1}".format(
        len(downloaded_songs), self.url))
def main():
    """CLI entry point for the gtfs-exporter: build a data provider from
    the docopt arguments, run the export pipeline and publish a release."""
    init_logging()
    init_filesystem()
    arguments = docopt(__doc__, version='gtfs-exporter %s' % version)
    provider_type = arguments['--provider']
    # Fallback when --provider is none of file/url/api.
    provider = DataProvider()
    if provider_type == "file":
        provider = FileDataProvider(
            arguments['--file'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
    elif provider_type == "url":
        provider = HttpDataProvider(
            arguments['--url'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
    elif provider_type == "api":
        builder = ApiBuilder(
            arguments['--url'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
        provider = builder.build()
    exporter = Exporter(arguments)
    # NOTE(review): the OSM extract URL is hard-coded to Romania and
    # `out_path` is a module-level global — confirm both are intended.
    sg = ShapeGenerator(
        "https://download.geofabrik.de/europe/romania-latest.osm.bz2",
        out_path)
    # flow needs to be different when receiving data from api
    #  - load
    #  - process
    #  - generate initial gtfs files
    #  - generate shapes for gtfs
    #  - generate bundle
    # for zip, url
    #  - generation of shapes
    #  - load all the feed to process & interpolate
    #  - generate feed (bundle)
    if provider.is_from_api():
        # API flow: export first (unbundled), generate shapes, then bundle.
        exporter.load(provider)
        exporter.process()
        exporter.export(bundle=False)
        sg.generate()
        from exporter.util.storage import generate_gtfs_bundle
        generate_gtfs_bundle(out_path, bundle=f"gtfs-{arguments['--id']}.zip")
    else:
        # File/URL flow: shapes first, then load/process/export bundled.
        sg.generate()
        exporter.load(provider)
        exporter.process()
        exporter.export(bundle=True)
    # Publish the bundle plus any JSON artifacts as a GitHub release.
    rg = ReleaseGenerator(GH_REPO, GH_TOKEN)
    rg.generate([
        os.path.join(out_path, f"gtfs-{arguments['--id']}.zip"),
    ] + glob.glob(os.path.join(out_path, "*.json")))
from project_creator import ProjectCreator
from exporter import Exporter

if __name__ == '__main__':
    # Command-line interface: two actions (-create / -export) plus options.
    parser = argparse.ArgumentParser()
    parser.add_argument('-export',
                        help='Export definitions into common header file',
                        action='store_true')
    parser.add_argument('-create',
                        help='Create a project file',
                        action='store_true')
    parser.add_argument('--build-system',
                        help='Build system that is used for project',
                        dest='build_system',
                        default=None)
    parser.add_argument('--project-dir',
                        help='A project directory for analysis',
                        dest='project_dir',
                        default=os.getcwd())
    args = parser.parse_args()

    # Dispatch on the requested action; -create takes precedence.
    if args.create:
        ProjectCreator(args.project_dir, args.build_system).create_project_file()
    elif args.export:
        Exporter(args.project_dir).export()
for thread_id in threads.keys(): cons = get_inters_and_cons(threads[thread_id], edges) # Update cons for uid in cons: if uid in radius: radius[uid] = radius[uid] + 1 else: radius[uid] = 1 interactions[group_id] = edges contributions[group_id] = radius # OVERALL all_id = 0 all_name = 'All' all_edges = Edge.combine(interactions.values()) all_conts = User.combine(contributions.values()) interactions[all_id] = all_edges contributions[all_id] = all_conts group_hash[all_id] = all_name outs = Exporter.export(contributions, user_hash, interactions) #print outs success = sql_handler.insert_plots(sys.argv[1], outs, group_hash) if success == False: print False exit() sql_handler.cleanup() print True #doc = xmltodict.parse(fd.read())