Example #1
    def main(self):
        self.resolveArgs()

        cveList = self.load()
        result = None

        print("Cve(s) cargados: {}".format(len(cveList)))

        if (self.args.source == 'circl'):
            src = CirclSource()
            src.setCVEList(cveList)
            result = src.process()
        elif (self.args.source == 'rfl' or self.args.source == 'rfr'):
            mode = None
            if self.args.source == 'rfl':
                mode = 1
            elif self.args.source == 'rfr':
                mode = 2

            opts = {'mode': mode}

            src = RFSource()
            src.setCVEList(cveList)
            result = src.process(opts)

        if result is None:
            print('Unknown error')
        else:
            exporter = Exporter()
            exporter.export(result, 'out.csv')
Example #2
    def test_export_range(
        self,
        mocked_upload,
        import_test_data,
        start_date_str,
        end_date_str,
        num_expected_files,
        total_expected_lines,
    ):
        exporter = Exporter()
        start_date = datetime.strptime(start_date_str, "%Y%m%d").date()
        end_date = datetime.strptime(end_date_str, "%Y%m%d").date()
        exporter.export_range(start_date, end_date)

        num_days = (end_date - start_date).days + 1
        batch_names = {(start_date + timedelta(days=x)).strftime("%Y%m%d")
                       for x in range(num_days)}
        batch_names = list(batch_names.intersection(BATCHES_IN_TEST_FILE))

        total_lines = 0

        for batch_name in batch_names:
            filename = f"{batch_name}_NPR_BACKUP.csv"
            path = os.path.join(settings.TMP_DATA_DIR, filename)
            mocked_upload.assert_any_call(path, filename)

            assert exists(path)
            with open(path) as f:
                lines = f.readlines()
                total_lines += len(lines) - 1  # don't count the header

                # cleanup
                os.remove(path)

        assert total_lines == total_expected_lines
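
Note: the parametrize decorator, the patched upload call, and the import_test_data fixture sit outside this excerpt. A minimal sketch of how such a test could be wired up, assuming pytest and unittest.mock; the patch target and sample values below are illustrative assumptions, not taken from the project.

import pytest
from unittest import mock


@pytest.mark.parametrize(
    "start_date_str,end_date_str,num_expected_files,total_expected_lines",
    [("20200101", "20200107", 7, 42)],  # made-up sample values
)
@mock.patch("exporter.Exporter.upload")  # hypothetical patch target
def test_export_range_sketch(mocked_upload, start_date_str, end_date_str,
                             num_expected_files, total_expected_lines):
    ...  # body would follow the shape of the test above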
Example #3
def main():

    # Grab file_path argument from initial run
    file_path = sys.argv[1]

    # Instantiate the REPL that interacts with the user
    repl = REPL()
    repl.greet()

    # Instantiate ip populator that generates all the ips
    ip_populator = IpPopulator(file_path=file_path)

    # Save the file as JSON if it doesn't already exist
    json_file_path = file_path.replace("txt", "json")
    if path.exists(json_file_path) is False:
        exporter = Exporter()
        exporter.export_as_json(
            file_path=json_file_path,
            modeled_ips_dict=ip_populator.dict_of_ip_objects)

    # Set queryable_options for repl
    repl.queryable_options = ip_populator.list_of_ip_geo_attributes
    repl.ip_populator = ip_populator

    # Get into query mode
    response = repl.intro()
    repl.handle_response(response)

Example #4
    def main(self):

        flat_playlist_settings = self.config.get('flat_playlist_settings')
        if not flat_playlist_settings:
            flat_playlist_settings = {'playlist_items': '0-100'}

        download_list = self.flat_playlist(options=flat_playlist_settings)
        downloaded_songs = list()
        for song_url in download_list:
            resp = self.download_url(song_url, download=True)
            if resp[0]:
                downloaded_songs.append(resp[1])

        for song_dict in downloaded_songs:

            #Manual Tags
            #song_dict["album"] = "Ultimate Christmas Hits"
            artist = song_dict["artist"]
            #artist = ""
            #Is a cover wanted?
            coverwanted = True

            # define export dir without title
            export_dir = "{0}/{1}/{2}/".format(
                self.pretty_path(self.config["export_dir"]),
                self.pretty_path(artist),
                self.pretty_path(song_dict["album"]))

            #check if path exists
            if not os.path.exists(export_dir):
                os.makedirs(export_dir)

            #append title
            export_path = export_dir + self.pretty_path(song_dict["title"])

            #debug
            print("Export_Path: " + export_path)

            #convert cover
            resp_cover = self.download_crop_cover(song_dict["cover"])

            if coverwanted and resp_cover[0]:
                song_dict.update({"cover": resp_cover[1]})
            else:
                song_dict.update({"cover": None})

            ytdl_tmp_path = song_dict.pop("ytdl_tmp_path")

            try:
                exporter = Exporter(ytdl_tmp_path, song_dict)
                exporterlog = exporter.export(export_path)
            except Exception as e:
                print("skipped: {0}\nError: {1}".format(ytdl_tmp_path, e))
            else:
                print("successfully exported: {0}\n{1}".format(
                    song_dict["title"], exporterlog))

        print("Downloaded {0}:songs from url: {1}".format(
            len(downloaded_songs), self.url))
Example #5
    def test_log_error(self):
        """don't show a HTTPServer error message"""
        e = Exporter
        r = MockRequest("")
        with Exporter(r, ("", 65536), e) as handler:
            with captured_stdout() as stdout, captured_stderr() as stderr:
                e.log_error(handler, "test")
            # capture
        #
        self.assertEqual(stdout.getvalue(), "")
        self.assertEqual(stderr.getvalue(), "")
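
Note: the captured_stdout / captured_stderr helpers used above are not shown in this excerpt. A minimal sketch of equivalent helpers, assuming behaviour along the lines of contextlib.redirect_stdout; the real helpers may differ.

import contextlib
import io


@contextlib.contextmanager
def captured_stdout():
    # Redirect sys.stdout into an in-memory buffer for the duration of the block.
    buf = io.StringIO()
    with contextlib.redirect_stdout(buf):
        yield buf


@contextlib.contextmanager
def captured_stderr():
    # Same idea for sys.stderr; buf.getvalue() returns whatever was written.
    buf = io.StringIO()
    with contextlib.redirect_stderr(buf):
        yield buf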
Example #6
    def initialize(self, json_export_path=None):
        print("Initialising PoissonSolver...")
        self.metal_params = helpers.getMetalParams(self.sqconn)
        self.elec_list = helpers.getElectrodeCollections(self.sqconn)
        self.db_list = helpers.getDBCollections(self.sqconn)
        self.initParameters(self.sqconn.getAllParameters())
        self.sim_params = self.sqconn.getAllParameters()
        self.createBoundaries()
        self.exporter = Exporter(in_path=self.sqconn.inputPath(),
                                 out_path=self.sqconn.outputPath(),
                                 json_path=json_export_path,
                                 sqconn=self.sqconn)
Example #7
    def stopRecorder(self):

        self.recorder.stop()
        filename = QtGui.QFileDialog.getSaveFileName(None, 'Save As:', '',
                                                     '*.mov ;; *.*')

        if filename:
            if not str(filename).lower().endswith('.mov'):
                filename += '.mov'

            self.exporter = Exporter(self.workdir, filename)
            self.exporter.start()

            print('\nFile saved {f}\n'.format(f=filename))
Example #8
def process(input_file_name, output_file_name):
    exporter = Exporter()

    try:
        decks = exporter.read_and_convert_decks(input_file_name)
    except Exception:
        print("Error while reading CSV file:")
        raise

    try:
        exporter.output_decks(decks, output_file_name)
    except Exception:
        print("Error while writing CSV file:")
        raise

    return len(decks)
Example #9
def main():
    logging.basicConfig(level=logging.INFO)

    parser = argparse.ArgumentParser(description='Create TF Records')

    parser.add_argument('--image_dir',
                        dest='image_dir',
                        action='store',
                        required=True,
                        help='the directory containing the input images')
    parser.add_argument('--output_dir',
                        dest='output_dir',
                        action='store',
                        required=True,
                        default="records",
                        help='the directory to store the records')
    parser.add_argument('--name',
                        dest='name',
                        action='store',
                        required=True,
                        default="images",
                        help='the file name to use for the record sets')
    parser.add_argument(
        '--force',
        dest='force',
        action='store_true',
        help='Force overwriting existing files without warning')
    parser.add_argument('--single',
                        dest='single',
                        action='store_true',
                        help='Use single class to classify objects')
    parser.add_argument('export_type', help="Specify the type of data to export.\n" \
                        "One of 'tensorflow', 'yolo', 'icdar' or 'keras_ssd'",
                        choices=("tensorflow", "yolo", "icdar", "keras_ssd"))

    args = parser.parse_args()
    exporter = Exporter(args)

    exporter.prepare()
    if args.export_type == "tensorflow":
        exporter.create_tensorflow_records()
    elif args.export_type == "yolo":
        exporter.create_yolo_files()
    elif args.export_type == "keras_ssd":
        exporter.create_keras_ssd_files()
    elif args.export_type == "icdar":
        exporter.create_icdar_files()
Example #10
def main():
    """
    Placeholder
    """

    # parse arguments
    args = parseArguments()
    obj = Exporter()

    # for each scene
    scenes = getSceneList(args)
    for scene in scenes:

        # export hdf sub-datasets to geotiff
        obj.process(scene)

    return
Example #11
    def test_export_orphans(self, mocked_upload, local_db, import_test_data):
        with local_db.session() as session:
            num = session.query(NPRTable).count()
            assert num > 0

        exporter = Exporter()
        exporter.export_orphans()

        filename = "Leeg_NPR_BACKUP.csv"
        path = os.path.join(settings.TMP_DATA_DIR, filename)
        mocked_upload.assert_called_with(path, filename)
        with open(path) as f:
            lines = f.readlines()
            assert len(lines) == 17

            # cleanup
            os.remove(path)
Example #12
    def __init__(self, model, view, defaultPath):

        self._view = view
        self._model = model
        self._path = defaultPath
        self._appPath = str(Path(__file__).parent.absolute())
        self._exporter = Exporter()

        self.appPath = str(Path(__file__).parent.absolute())
        self._view.setWindowIcon(QtGui.QIcon(path.join(self._appPath, "NotingLogo.svg")))

        # Choose if you want to open or create a new session.
        method = self._view.selectSessionDialog()

        self._initSession(method)

        self._view.listNotes.setModel(self._model)
        self._connectSignals()
Example #13
def main():
    """
    Placeholder
    """

    # parse arguments
    args = parseArguments()
    obj = Exporter()

    # for each scene
    scenes = getSceneList(args)
    for scene in scenes:

        # generate cloud-masked geotiffs
        if os.path.exists(scene):
            obj.getImages(scene)

    return
Example #14
    def convert(
        self,
        input_file,
        output_dir=None,
        output_format='json',
        export_with_errors=False,
        errors_file='./files/output/errors.json'
    ):

        #  Register custom importers
        c_importers = {
            'json': json_importer
        }

        #  Register custom exporters
        c_exporters = {
            'csv': csv_exporter
        }

        importer = Importer(custom_importers=c_importers)
        validator = HotelValidator()
        exporter = Exporter(custom_exporters=c_exporters)

        try:
            data = importer.load(input_file)
            data, data_backup = tee(data)

            # Validate data
            for row in data:
                validator.validate(row)

            error_count = len(validator.errors)
            LOG.warning(f'Validated {validator.count} records. Found {error_count} errors.')
            if error_count:
                with open(errors_file, 'w') as error_file:
                    error_file.write(json.dumps(validator.errors))
                if export_with_errors:
                    LOG.debug(f'Export to {output_format} started.')
                    output = exporter.write(data_backup, output_dir, output_format)
                    LOG.warning(f'Exported successfully. View file: {os.path.abspath(output)}')
                else:
                    LOG.error(f'Errors identified. Export process stopped. You can view the error logs at {os.path.abspath(errors_file)}')
        except Exception as e:
            LOG.error(str(e))
Example #15
    def do_conversion(self):
        exporter = Exporter()
        decks = None
        try:
            exporter.verify_input_file(self.input_file_name.get())
        except Exception as ex:
            messagebox.showwarning("Invalid File", ex)
            raise

        try:
            decks = exporter.read_and_convert_decks(self.input_file_name.get())
        except Exception as ex:
            messagebox.showerror("Could not convert",
                                 "Error while reading CSV file.")
            raise

        try:
            exporter.output_decks(decks, self.output_file_name.get())
        except Exception as ex:
            messagebox.showerror("Could not convert",
                                 "Error while writing CSV file.")
            raise

        messagebox.showinfo("Success", f'{len(decks)} decks converted.')
Example #16
    def __init__(self, args):

        self.input_shape = None

        model, cfg = model_with_cfg(args.network, args.pretrained)

        # Init arguments
        self.args = args
        self.weight_bit_width = cfg.getint('QUANT', 'WEIGHT_BIT_WIDTH')
        self.act_bit_width = cfg.getint('QUANT', 'ACT_BIT_WIDTH')
        self.input_bit_width = cfg.getint('QUANT', 'IN_BIT_WIDTH')
        self.in_channels = cfg.getint('MODEL', 'IN_CHANNELS')
        prec_name = "_{}W{}A".format(self.weight_bit_width, self.act_bit_width)
        experiment_name = '{}{}_{}'.format(
            args.network, prec_name,
            datetime.now().strftime('%Y%m%d_%H%M%S'))
        self.output_dir_path = os.path.join(args.experiments, experiment_name)

        if self.args.resume:
            self.output_dir_path, _ = os.path.split(args.resume)
            self.output_dir_path, _ = os.path.split(self.output_dir_path)

        if not args.dry_run:
            self.checkpoints_dir_path = os.path.join(self.output_dir_path,
                                                     'checkpoints')
            if not args.resume:
                os.mkdir(self.output_dir_path)
                os.mkdir(self.checkpoints_dir_path)
        self.logger = Logger(self.output_dir_path, args.dry_run)

        self.exporter = Exporter()

        # Randomness
        random.seed(args.random_seed)
        torch.manual_seed(args.random_seed)
        torch.cuda.manual_seed_all(args.random_seed)

        # Datasets
        transform_to_tensor = transforms.Compose([transforms.ToTensor()])

        dataset = cfg.get('MODEL', 'DATASET')
        self.num_classes = cfg.getint('MODEL', 'NUM_CLASSES')
        if dataset == 'CIFAR10':
            train_transforms_list = [
                transforms.RandomCrop(32, padding=4),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor()
            ]
            transform_train = transforms.Compose(train_transforms_list)
            builder = CIFAR10

        elif dataset == 'MNIST':
            transform_train = transform_to_tensor
            builder = MNIST
        else:
            raise Exception("Dataset not supported: {}".format(args.dataset))

        train_set = builder(root=args.datadir,
                            train=True,
                            download=True,
                            transform=transform_train)
        test_set = builder(root=args.datadir,
                           train=False,
                           download=True,
                           transform=transform_to_tensor)
        self.train_loader = DataLoader(train_set,
                                       batch_size=args.batch_size,
                                       shuffle=True,
                                       num_workers=args.num_workers)
        self.test_loader = DataLoader(test_set,
                                      batch_size=args.batch_size,
                                      shuffle=False,
                                      num_workers=args.num_workers)

        imgs, _ = next(iter(self.train_loader))
        self.input_shape = list(imgs.shape)
        self.input_shape[0] = 1

        # Init starting values
        self.starting_epoch = 1
        self.best_val_acc = 0

        # Setup device
        if args.gpus is not None:
            args.gpus = [int(i) for i in args.gpus.split(',')]
            self.device = 'cuda:' + str(args.gpus[0])
            torch.backends.cudnn.benchmark = True
        else:
            self.device = 'cpu'
        self.device = torch.device(self.device)

        # Resume checkpoint, if any
        if args.resume:
            print('Loading model checkpoint at: {}'.format(args.resume))
            package = torch.load(args.resume, map_location='cpu')
            model_state_dict = package['state_dict']
            model.load_state_dict(model_state_dict, strict=False)

        if args.gpus is not None and len(args.gpus) == 1:
            model = model.to(device=self.device)
        if args.gpus is not None and len(args.gpus) > 1:
            model = nn.DataParallel(model, args.gpus)
        self.model = model

        # Loss function
        if args.loss == 'SqrHinge':
            self.criterion = SqrHingeLoss()
        else:
            self.criterion = nn.CrossEntropyLoss()
        self.criterion = self.criterion.to(device=self.device)

        # Init optimizer
        if args.optim == 'ADAM':
            self.optimizer = optim.Adam(self.model.parameters(),
                                        lr=args.lr,
                                        weight_decay=args.weight_decay)
        elif args.optim == 'SGD':
            self.optimizer = optim.SGD(self.model.parameters(),
                                       lr=self.args.lr,
                                       momentum=self.args.momentum,
                                       weight_decay=self.args.weight_decay)

        # Resume optimizer, if any
        if args.resume and not args.evaluate:
            self.logger.log.info("Loading optimizer checkpoint")
            if 'optim_dict' in package.keys():
                self.optimizer.load_state_dict(package['optim_dict'])
            if 'epoch' in package.keys():
                self.starting_epoch = package['epoch']
            if 'best_val_acc' in package.keys():
                self.best_val_acc = package['best_val_acc']

        # LR scheduler
        if args.scheduler == 'STEP':
            milestones = [int(i) for i in args.milestones.split(',')]
            self.scheduler = MultiStepLR(optimizer=self.optimizer,
                                         milestones=milestones,
                                         gamma=0.1)
        elif args.scheduler == 'FIXED':
            self.scheduler = None
        else:
            raise Exception("Unrecognized scheduler {}".format(
                self.args.scheduler))

        # Resume scheduler, if any
        if args.resume and not args.evaluate and self.scheduler is not None:
            self.scheduler.last_epoch = package['epoch'] - 1
Example #17
# filter_row/ filter_row_segment is used to filter the data based on the ETL start_date and end_date
if param.reset_time == param.reset_value:
    filter_row = " where updated_at >='" + str(
        param.reset_start_date) + "' and updated_at<'" + str(
            param.reset_end_date) + "'"
else:
    filter_row = " where updated_at >='" + str(
        param.start_date) + "' and updated_at<'" + str(param.end_date) + "'"
#filter_row = " "
filter_row_segment = " where updated_at::date >= current_date::date -1 and updated_at::date < current_date::date "

if (host == "nwsl"):
    param.counter = 2
    for i in param.tbl_nwsl:
        print('extraction of ' + i + ' started')
        runner = Exporter("select * from " + i + filter_row, i + "_nwsl")
        runner.start()

    runner_employee = Exporter("select * from newsletter_customers",
                               'newsletter_customers')
    runner_employee.start()

elif (host == "core"):
    param.counter = 26
    for i in param.tbl_core:
        print('extraction of ' + i + ' started')
        runner = Exporter("select * from " + i + filter_row, i)
        runner.start()

    runner_employee = Exporter("select * from employees", 'employees')
    runner_employee.start()
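
Note: building these statements by concatenating dates and table names into the SQL string is fragile and injection-prone. A hedged alternative, not the project's code, that composes the same kind of query with psycopg2's sql module; the table name and dates below are placeholders.

from psycopg2 import sql

table_name = "newsletter_customers"  # stands in for the loop variable above
query = sql.SQL(
    "select * from {table} where updated_at >= {start} and updated_at < {end}"
).format(
    table=sql.Identifier(table_name),
    start=sql.Literal("2024-01-01"),
    end=sql.Literal("2024-01-02"),
)
# query.as_string(conn) yields the final SQL once a live connection is available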
Example #18
def update(c):
    asyncio.run(Exporter().generate_achievement_export())
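
Note: the single c argument suggests an Invoke task context, although the decorator is not shown. A hedged sketch of how the function might be registered, assuming the invoke library and an importable Exporter (both assumptions); it would then run as "invoke update".

import asyncio

from invoke import task

from exporter import Exporter  # assumed import path


@task
def update(c):
    # Run the async export to completion from the synchronous task entry point.
    asyncio.run(Exporter().generate_achievement_export())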
Example #19
    parser.add_argument(
        '-e',
        '--enddate',
        type=int,
        default=time.strftime('%Y%m%d', time.localtime(time.time())),
        help='end date (YYYYMMDD); defaults to today')
    parser.add_argument(
        '-t',
        '--threads',
        type=int,
        default=multiprocessing.cpu_count(),
        help='number of threads to use; defaults to the number of CPU cores')
    parser.add_argument('-p',
                        '--province',
                        help='the province to crawl; if not given, crawl all provinces')
    args = parser.parse_args()
    logger = init_logger()

    db = DBIO(args.database)
    ex = Exporter(db, args.threads, args.startdate, args.enddate)
    if args.province:
        ex.get_province(args.province)
    else:
        for prov in provinces:
            ex.get_province(prov)
    db.close()
    e_time = time.time()
    logger.info('Used %s seconds' % (e_time - s_time))
Example #20
				for j,o in enumerate(candidate):
					if o.jo in [r.jo for r in bestsubperm]:
						candidate[j] = bestsubperm[i]
						i+=1
				candidates.append(candidate)
		return candidates

	def neighbours_idle(self, size):
		candidates = []
		fororders = ForSchedule()
		self.UpdateTHpanel()
		if not self.SignalCheck():
			return []
		return [self.orders]

if __name__ == '__main__':
	from kivymd.button import *
	from kivymd.snackbar import Snackbar
	from kivymd.theming import ThemeManager
	from kivymd.progressbar import MDProgressBar
	from kivymd.selectioncontrols import MDCheckbox
	from kivymd.navigationdrawer import NavigationDrawerIconButton
	from kivymd.date_picker import MDDatePicker
	from uix.jscolor import *
	from uix.jslabel import ContentLabel, TitleLabel
	from allkv import *

	multiprocessing.freeze_support()
	ex = Exporter()
	app = MDScheduler()
	app.run()
Example #21
import sys
import os

# argument passed to runner program is stored in host variable
host = sys.argv[1]
#host = "cs"
param.dbconn(host)

if not os.path.exists(param.newpath):
    os.makedirs(param.newpath)

if sys.argv[1] in ('cs', 'abasence.io'):
    print("Extracting Json for " + str(param.start_date) + " - " +
          str(param.end_date))

else:
    print("No other ETL defiend yet")

if (host == "cs"):
    param.counter = 2
    for i in param.tbl_cs:
        print('Extracting Json for ' + i + ' started')
        runner = Exporter(i)
        runner.start()
else:
    print("no other mongo source is defined yet")

# run the ETL process until all the mentioned collections in the param file are exported.
while param.counter != 0:
    importer.import_data()
Example #22
async def home(user: str):
    exporter = Exporter(user)
    if not await exporter.valid_user():
        return {"error": "invalid user"}

    return {"hello": user}
Example #23
def main(argv):
    global exporters
    opt_parser = argparse.ArgumentParser(description='Parse discogs release',
                                         epilog='''
You must specify either -d DATE or some files.
JSON output prints to stdout, any other output requires
that --params is used, e.g.:
--output pgsql
--params "host=localhost dbname=discogs user=pguser"

--output couchdb
--params "http://localhost:5353/"
''')
    opt_parser.add_argument('-n', type=int, help='Number of records to parse')
    opt_parser.add_argument('-d',
                            '--date',
                            help='Date of release. For example 20110301')
    opt_parser.add_argument('-o',
                            '--output',
                            choices=exporters.keys(),
                            default='json',
                            help='What to output to')
    opt_parser.add_argument(
        '-p', '--params', help='Parameters for output, e.g. connection string')
    opt_parser.add_argument(
        '-i',
        '--ignore-unknown-tags',
        action='store_true',
        dest='ignore_unknown_tags',
        help='Do not error out when encountering unknown tags')
    opt_parser.add_argument(
        '-q',
        '--quality',
        dest='data_quality',
        help='Comma-separated list of permissible data_quality values.')
    opt_parser.add_argument('-t',
                            type=int,
                            dest='number_of_workers',
                            default=1,
                            help='Number of exporter worker threads')
    opt_parser.add_argument('-v',
                            '--verbose',
                            action='store_true',
                            default=False,
                            help='If defined output is verbose')
    opt_parser.add_argument(
        'file',
        nargs='*',
        help='Specific file(s) to import. Default is to parse artists, labels, releases matching -d')
    global options
    options = opt_parser.parse_args(argv)
    print(options)

    if options.date is None and len(options.file) == 0:
        opt_parser.print_help()
        sys.exit(1)

    exporter = Exporter(options, jobs, exporters)
    parser = xml.sax.make_parser()
    timer = time.time()
    try:
        parse(parser, exporter, 'artists', artistHandler, options)
        parse(parser, exporter, 'labels', labelHandler, options)
        parse(parser, exporter, 'releases', releaseHandler, options)
        parse(parser, exporter, 'masters', masterHandler, options)
    finally:
        exporter.finish(completely_done=True)
        if options.verbose:
            print('Running time was: %.2fs' % (time.time() - timer))
Example #24
@app.route("/metrics", methods=["GET"])
def get_data():
    return Response(generate_latest(), mimetype=CONTENT_TYPE_LATEST)


def load_config():
    with open(CONF_LOCATION) as f:
        data = yaml.load(f, Loader=yaml.FullLoader)

    if "debug" not in data or "refresh_rate" not in data or "hosts" not in data:
        logger.fatal(
            'FATAL: /src/config.yml must contain "debug", "refresh_rate" and "hosts"'
        )
        exit(-1)
    return data


if __name__ == "__main__":
    conf = load_config()
    metrics = {
        "ping": Gauge("ping", "Ping duration in ms", ["host"]),
        "ports": Gauge("ports", "Open ports on host", ["host", "port", "protocol"]),
    }
    for host in conf["hosts"]:
        exporter = Exporter(host, conf["refresh_rate"], metrics)
        exporter.start()

    app.run(debug=conf["debug"], host="0.0.0.0", port=9100)
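
Note: the Exporter class wired up here is not shown. A hypothetical shape for it, assuming a per-host polling thread that refreshes the Prometheus gauges at the configured interval; the measurement itself is a placeholder.

import threading
import time


class Exporter(threading.Thread):
    """Hypothetical per-host worker matching the wiring above."""

    def __init__(self, host, refresh_rate, metrics):
        super().__init__(daemon=True)
        self.host = host
        self.refresh_rate = refresh_rate
        self.metrics = metrics

    def measure_ping(self):
        # Placeholder: the real exporter presumably pings the host and scans its ports.
        return 0.0

    def run(self):
        while True:
            self.metrics["ping"].labels(host=str(self.host)).set(self.measure_ping())
            time.sleep(self.refresh_rate)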
Example #25
        # print route
        if is_same_line(boarded_line, cur_line):
            # print("Same Line")
            correct_route(route, boarded_line, remaining_route, counter)

        elif is_line_in_edge(boarded_line, cur_edge.source(),
                             cur_edge.target()):
            route[counter]._line = cur_line
            # print("Line Corrected")
            correct_route(route, boarded_line, remaining_route, counter)
        else:
            # print("Changed Bus")
            boarded_line = cur_edge.line()
            correct_route(route, boarded_line, remaining_route, counter)

    e = Exporter()
    db_edges = e.get_edges()
    db_vertices = e.get_vertices()
    # Initialize graph and populate it
    g = Grapher()
    g.populate_vertices(db_vertices)
    g.populate_edges(db_edges)
    g.populate_graph()
    d = Dijkstra(g.graph)
    i = Isochroner(g, 3)
    route = d.min_path(444, 728)
    route = route[1]
    geoms_to_lines = i.geoms_to_lines
    init_route = route
    init_line = route[0].line()
    init_size = len(route)
Example #26
    param.exported_file = dict((el, 0) for el in tbl_all)
    param.truncate_tbl = copy.copy(truncate_table)


# if host in param.list_of_available_sources
if (host in param.sources):
    fetch_table(host)
    param.counter = len(param.tbl_source) + len(param.tbl_source_truncate)

    for i in param.tbl_source:
        #print('extraction of ' + i + ' started')
        # handle the table renaming while importing the table

        if i == 'appointment_occurrences':  # use another key in hash for these kind of special cases
            runner = Exporter(
                "select * from " + i +
                " where appointment_series_id in (select id from appointment_series "
                + filter_occurrences + ")", i)
            runner.start()
            #print("select * from "+ i + " where appointment_series_id in (select id from appointment_series "+filter_row+")", i)

        elif i in param.tbl_source_rename:
            runner = Exporter(
                "select * from " + i + filter_row,
                param.tbl_source_rename[i])  #need to tackle the renamed tables
            runner.start()
            #print("select * from " + i + filter_row, param.tbl_source_rename[i])

        else:
            runner = Exporter("select * from " + i + filter_row,
                              i)  #need to tackle the renamed tables
            runner.start()
Example #27
def main():
    init_logging()
    init_filesystem()

    arguments = docopt(__doc__, version='gtfs-exporter %s' % version)
    provider_type = arguments['--provider']
    provider = DataProvider()
    if provider_type == "file":
        provider = FileDataProvider(
            arguments['--file'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
    elif provider_type == "url":
        provider = HttpDataProvider(
            arguments['--url'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
    elif provider_type == "api":
        builder = ApiBuilder(
            arguments['--url'],
            feed_id=arguments['--id'],
            lenient=arguments['--lenient'],
            disable_normalization=arguments['--disablenormalize'])
        provider = builder.build()

    exporter = Exporter(arguments)
    sg = ShapeGenerator(
        "https://download.geofabrik.de/europe/romania-latest.osm.bz2",
        out_path)

    # flow needs to be different when receiving data from api
    #  - load
    #  - process
    #  - generate initial gtfs files
    #  - generate shapes for gtfs
    #  - generate bundle
    # for zip, url
    #  - generation of shapes
    #  - load all the feed to process & interpolate
    #  - generate feed (bundle)
    if provider.is_from_api():
        exporter.load(provider)
        exporter.process()
        exporter.export(bundle=False)

        sg.generate()
        from exporter.util.storage import generate_gtfs_bundle
        generate_gtfs_bundle(out_path, bundle=f"gtfs-{arguments['--id']}.zip")
    else:
        sg.generate()
        exporter.load(provider)
        exporter.process()
        exporter.export(bundle=True)

    rg = ReleaseGenerator(GH_REPO, GH_TOKEN)

    rg.generate([
        os.path.join(out_path, f"gtfs-{arguments['--id']}.zip"),
    ] + glob.glob(os.path.join(out_path, "*.json")))
Example #28
    param.exported_file = dict((el, 0) for el in tbl_all)
    param.truncate_tbl = copy.copy(truncate_table)


if (host in param.sources):
    fetch_table(host)
    param.counter = len(param.tbl_source) + len(param.tbl_source_truncate)
    param.redshift_counter = len(param.tbl_source) + len(
        param.tbl_source_truncate)

    for i in param.tbl_source:

        if i in param.truncate_tbl:
            runner = Exporter(
                "select * from {0}.".format(param.schema) + i + filter_refresh,
                param.tbl_source_rename[i])
            runner.start()

        elif i in param.tbl_source_rename:
            runner = Exporter(
                "select * from {0}.".format(param.schema) + i + filter_row,
                param.tbl_source_rename[i])
            runner.start()

        else:
            runner = Exporter(
                "select * from {0}.".format(param.schema) + i + filter_row, i)
            runner.start()

    for j in param.tbl_source_truncate:
Example #29
import argparse
import os

from project_creator import ProjectCreator
from exporter import Exporter

if __name__ == '__main__':
    default_project_dir = os.getcwd()

    parser = argparse.ArgumentParser()
    parser.add_argument('-export',
                        help='Export definitions into common header file',
                        action='store_true')
    parser.add_argument('-create',
                        help='Create a project file',
                        action='store_true')
    parser.add_argument('--build-system',
                        help='Build system that is used for project',
                        dest='build_system',
                        default=None)
    parser.add_argument('--project-dir',
                        help='A project directory for analysis',
                        dest='project_dir',
                        default=default_project_dir)
    args = parser.parse_args()

    if args.create:
        pc = ProjectCreator(args.project_dir, args.build_system)
        pc.create_project_file()
    elif args.export:
        exporter = Exporter(args.project_dir)
        exporter.export()
Example #30
from datetime import datetime, timedelta

import settings
from exporter import Exporter
from importer import Importer

if __name__ == "__main__":
    """
    Import the last 7 days of data from the RDW database
    and export the data in CSV files to the objectstore.
    """
    end_date = datetime.now().date()
    start_date = end_date - timedelta(days=settings.NUM_DAYS_TO_IMPORT)

    importer = Importer()
    importer.import_range(start_date, end_date)

    exporter = Exporter()
    exporter.export_range(start_date, end_date)
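
Note: the export_range implementation exercised in Example #2 and driven here is not included in these excerpts. A hedged outline of what such a method might look like, consistent with what the test asserts (one {YYYYMMDD}_NPR_BACKUP.csv per day written to settings.TMP_DATA_DIR, then uploaded); the query and upload details are placeholders, not the project's code.

import csv
import os
from datetime import timedelta

import settings


class Exporter:
    # Hypothetical sketch only; the real class also handles orphans (Example #11).

    def export_range(self, start_date, end_date):
        for offset in range((end_date - start_date).days + 1):
            day = start_date + timedelta(days=offset)
            filename = f"{day.strftime('%Y%m%d')}_NPR_BACKUP.csv"
            path = os.path.join(settings.TMP_DATA_DIR, filename)
            with open(path, "w", newline="") as f:
                writer = csv.writer(f)
                writer.writerow(["column_a", "column_b"])  # placeholder header row
                writer.writerows(self.rows_for(day))       # placeholder query
            self.upload(path, filename)                    # placeholder upload

    def rows_for(self, day):
        return []

    def upload(self, path, filename):
        pass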