Example #1
def find_eligible_staging_pod(client: CoreV1Api, exclude_namespaces: list | None = None):
    pods = client.list_pod_for_all_namespaces()
    available_pods = pods.items
    if exclude_namespaces:
        available_pods = [
            pod
            for pod in available_pods
            if pod.metadata.namespace not in exclude_namespaces
        ]

    print(
        f"[*] found {len(available_pods)} pods - checking for eligible staging candidates (this may take a while)"
    )
    pod_capabilities = []
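    # Probe every pod and collect a capabilities record for each before filtering.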
    with FillingCirclesBar("[*] Processing...", max=len(available_pods)) as bar:
        for pod_info in available_pods:
            capabilities = run_checks(client, pod_info)
            pod_capabilities.append(capabilities)
            bar.next()

    eligible_pods = [pod for pod in pod_capabilities if pod.can_be_staging()]
    print(f"[+] valid pods for proxy staging: {len(eligible_pods)}")
    for pod in eligible_pods:
        print(f"\t{pod.namespace}/{pod.pod_name}")

    staging_pods = [pod for pod in eligible_pods if pod.has_dependencies_installed()]
    if len(staging_pods) == 0:
        install_dependencies(client, eligible_pods)
        # Re-check so staging_pods is not empty when one is selected below.
        staging_pods = [pod for pod in eligible_pods if pod.has_dependencies_installed()]

    print(f"[+] selecting {staging_pods[0]}")
    return staging_pods[0]
Example #2
def insert_source(source):
    '''
    Gets the connection and binding and inserts data.
    '''

    get_connection(source)

    if not isinstance(source, sc.CenPy):
        get_binding(source)

    if source.engine.dialect.has_table(source.engine, source.tbl_name):
        print()
        warnings.warn(("Destination table already exists. Current table " +
                       "will be dropped and replaced."))
        print()
        if not isinstance(source, sc.CenPy):
            source.binding.__table__.drop(source.engine)

    try:
        if not isinstance(source, sc.CenPy):
            source.binding.__table__.create(source.engine)
    except ProgrammingError as e:
        raise CLIError('Error creating destination table: %s' % str(e))

    circle_bar = FillingCirclesBar('  ▶ Loading from source',
                                   max=source.num_rows)

    source.insert(circle_bar)

    circle_bar.finish()

    ui.item('Committing rows (this can take a bit for large datasets).')
    source.session.commit()

    success = 'Successfully imported %s rows.' % (source.num_rows)
    ui.header(success, color='\033[92m')
    if source.name == "Socrata" and source.client:
        source.client.close()

    return
Example #3
    def format_data(self, data_format):
        """
            This method format the data to the required format for the database
        """
        print('')
        progress_bar = 'Formatting the data:'
        progress_bar = FillingCirclesBar(progress_bar, max=len(self.data))
        for i, dictionary in enumerate(self.data):
            for key_format in data_format:
                key = key_format['name']
                if key in dictionary:
                    data_type = key_format['type']
                    if data_type == str:
                        dictionary[key] = str(dictionary[key])
                        if 'length' in key_format:
                            length = key_format['length']
                            dictionary[key] = dictionary[key][:length]
                    elif data_type == int:
                        dictionary[key] = int(dictionary[key])
                    elif data_type == list:
                        dictionary[key] = self.string_to_list(dictionary[key])

            self.data[i] = dictionary
            progress_bar.next()
        progress_bar.finish()
Example #4
    def keep_required(self, data_required):
        """
            This method drop data with missing keys
        """
        index_list = list()
        print('')
        progress_bar = 'Removing products with missing data:'
        progress_bar = FillingCirclesBar(progress_bar, max=len(self.data))
        for i, dictionary in enumerate(self.data):
            try:
                for required in data_required:
                    key = required['name']
                    required = required['required']

                    # Check if the data have the required keys
                    if required and key not in dictionary:
                        raise KeyError
                    # Check if the required data are not null
                    if required and not dictionary[key]:
                        raise KeyError

            except KeyError:
                # Save the data's index if there is a key error
                index_list.append(i)
            progress_bar.next()
        progress_bar.finish()

        index_list.reverse()

        # Delete all entries with a key error
        for index in index_list:
            self.data.pop(index)
Example #5
def upload_data(es):
    # Create the index once up front; ignore=400 tolerates an index that already exists.
    es.indices.create(index='movies', ignore=400, body=es_settings)
    for csv_df in pd.read_csv(
            csv_file, index_col=False,
            chunksize=500000):  # Chunked reads keep memory usage bounded.
        # Transpose the chunk to a dict, whose structure is close to the JSON documents we index.
        csv_reader = csv_df.T.to_dict()
        with FillingCirclesBar("Uploading Data: ", max=len(csv_df)) as bar:
            for (key, docs_data) in csv_reader.items():
                res = es.index(index="movies", id=key + 1, body=docs_data)
                bar.next()
    print("Data uploaded successfully")
Example #6
    def download_products(self, categories, page_size, pages):
        """
            Download products in temp json files
        """

        self.categories = categories
        self.page_size = page_size
        self.pages = pages

        # Download each category
        for category in self.categories:
            try:
                dir_path = path.join(self.tmp_dir, category)
                mkdir(dir_path)
            except FileExistsError:
                print(f'The directory "{dir_path}" already exists')

            # Headers for the request see : https://en.wiki.openfoodfacts.org/API/Read/Search
            headers = {
                'User-agent':
                'Pur Beurre Substitute - Mac OS X 10.13 - Version 1.0'
            }

            # A progress bar to show that the application is working
            print('')
            progress_bar = f'Downloading category "{category}":'
            progress_bar = FillingCirclesBar(progress_bar, max=self.pages)
            for page in range(self.pages):
                # Parameters sent with the request
                parameters = {
                    'json': 1,
                    'page_size': self.page_size,
                    'page': (page + 1),
                    'tagtype_0': 'categories',
                    'tag_contains_0': 'contains',
                    'tag_0': category,
                    'action': 'process'
                }

                # File in which the data is saved
                file_name = f'{page}.json'
                file_path = path.join(dir_path, file_name)

                with open(file_path, 'w') as output_file:
                    try:
                        result = requests.get(self.url_base,
                                              params=parameters,
                                              headers=headers,
                                              stream=True)
                        result.raise_for_status()
                    except requests.HTTPError as err:
                        # Skip this page instead of dumping an error response to disk.
                        print(err)
                        progress_bar.next()
                        continue

                    # Write data in a json format
                    json.dump(result.json(), output_file, indent=4)
                progress_bar.next()
            progress_bar.finish()
Example #7
def main():
    arguments = docopt(__doc__, version=__version__)

    client = Socrata(arguments['<site>'], arguments['-a'])

    try:
        if arguments['ls']:
            datasets = list_datasets(client, arguments['<site>'])
            print(tabulate(datasets, headers='keys', tablefmt='psql'))
        elif arguments['insert']:
            dataset_id = arguments['<dataset_id>']
            metadata = client.get_metadata(dataset_id)

            engine, session, geo = get_connection(arguments['-d'], metadata)
            Binding = get_binding(client, dataset_id, metadata, geo,
                                  arguments['-t'])

            # Create the table
            try:
                Binding.__table__.create(engine)
            except ProgrammingError as e:
                # Catch these here because this is our first attempt to
                # actually use the DB
                if 'already exists' in str(e):
                    raise CLIError(
                        'Destination table already exists. Specify a new table'
                        ' name with -t.')
                raise CLIError('Error creating destination table: %s' % str(e))

            num_rows = get_row_count(client, dataset_id)
            bar = FillingCirclesBar('  ▶ Loading from API', max=num_rows)

            # Iterate the dataset and INSERT each page
            for page in get_dataset(client, dataset_id):
                to_insert = []
                for row in page:
                    to_insert.append(Binding(**parse_row(row, Binding)))

                session.add_all(to_insert)
                session.flush()
                bar.next(n=len(to_insert))

            bar.finish()

            ui.item(
                'Committing rows (this can take a bit for large datasets).')
            session.commit()

            success = 'Successfully imported %s rows from "%s".' % (
                num_rows, metadata['name'])
            ui.header(success, color='\033[92m')

        client.close()
    except CLIError as e:
        ui.header(str(e), color='\033[91m')
Example #8
def search(inputlist, protein_seqs, tsvsalida):
    try:
        numerodominios = 0  # Initialize the total number of matches
        lineaalinea = pd.read_csv(protein_seqs, sep='\t')
        bar = FillingCirclesBar('Searching for domains...',
                                max=len(inputlist['pattern']) *
                                (len(lineaalinea['qseqid']) + 1))
        with open(tsvsalida, 'a') as found:
            found.write('blast hit\tname\taccession\tdescription\tpattern\n')
            for j in range(len(lineaalinea['sseqid']) + 1):
                for k in range(len(inputlist['pattern'])):
                    # For the query, do this
                    if j == 0:
                        busca = inputlist.loc[k, 'pattern']
                        prosearch = lineaalinea.loc[1, 'qseq']
                        match = re.search(busca, prosearch, flags=re.I)
                        bar.next()
                        if match:
                            found.write( lineaalinea.loc[1,'qseqid']+'\t' \
                                        +inputlist.loc[k, 'name']+'\t' \
                                        +inputlist.loc[k, 'accession']+'\t' \
                                        +inputlist.loc[k, 'description']+'\t' \
                                               +inputlist.loc[k, 'pattern']+'\n')
                            numerodominios += 1

                    # And do the same for the multiple subjects
                    else:
                        busca = inputlist.loc[k, 'pattern']
                        prosearch = lineaalinea.loc[j - 1, 'sseq']
                        match = re.search(busca, prosearch, flags=re.I)
                        bar.next()
                        if match:
                            found.write( lineaalinea.loc[j-1,'sseqid']+'\t' \
                                        +inputlist.loc[k, 'name']+'\t' \
                                        +inputlist.loc[k, 'accession']+'\t' \
                                        +inputlist.loc[k, 'description']+'\t' \
                                               +inputlist.loc[k, 'pattern']+'\n')
                            numerodominios += 1

        bar.finish()
        return numerodominios
    except Exception as e:
        print('Failed to search for domains:', e)
Example #9
def Pb6():
    from progress.bar import FillingCirclesBar
    import time

    bar = FillingCirclesBar('Progress bar 6', max=100)  # max is set to 100 and can be adjusted

    for i in range(100):  # adjust this range to match max
        bar.next()
        time.sleep(0.1)  # per-step delay, adjustable; 0.1 to 1 s works best

    bar.finish()
Example #10
def generate_simple(data_name, count, klass, *args):
    """
        Generate data with a simple loop
    """
    progress_bar = f'Create {data_name}'
    progress_bar = FillingCirclesBar(progress_bar, max=count)
    i = 0
    while i < count:
        i += 1
        klass(LANG_CODE, *args)
        progress_bar.next()
    progress_bar.finish()
Example #11
def make_video(params_file='output/params.txt',
               phi_file='output/phi.txt',
               x_file='output/x.txt',
               gifname='movie.gif',
               duration=0.1,
               xkcd=False):
    params = get_params(params_file)
    phi_array = np.loadtxt(phi_file)
    x_array = np.loadtxt(x_file)
    solution = {'u': phi_array, 'x': x_array}
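    # Sample one animation frame every 0.01 s of simulated time.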
    step = int(0.01 / params['dt'])
    bar = FillingCirclesBar('Loading',
                            suffix='%(percent)d%%',
                            max=int((params['steps'] - 1) / step))
    images = []
    figsize = (6, 6)
    for subplot in range(1, int(params['steps']), step):
        if xkcd:
            plt.rcParams['text.usetex'] = False
            plt.xkcd()
        fig = plt.figure(figsize=figsize)
        ax = plt.subplot(1, 1, 1)
        plt.sca(ax)
        plt.plot(solution['x'],
                 solution['u'][subplot - 1, :],
                 c='#F61067',
                 lw=3.5)
        plt.ylim(-1.5 * params['eta'], 1.5 * params['eta'])
        if xkcd:
            plt.xlabel(r'x')
            plt.ylabel(r'u(x, t)')
        else:
            plt.xlabel(r'$x$')
            plt.ylabel(r'$\phi(x, t)$')
        plt.title('t = {:.2f}s'.format((subplot - 1) * params['dt']))
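        # Freeze the axis limits captured on the first frame so every frame shares the same scale.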
        if subplot > 1:
            plt.axis(axis)
        if subplot == 1:
            axis = plt.axis()
        filename = 'temp.png'
        plt.savefig(filename)
        plt.close()
        images.append(Image.open(filename))
        os.remove(filename)
        bar.next()
    bar.finish()
    print('', end='\r\r')
    if xkcd:
        imageio.mimsave('xkcd_' + gifname, images, duration=duration)
    else:
        imageio.mimsave(gifname, images, duration=duration)
Example #12
def generate_complex_while(data_name, count, parents, klass, *args):
    """
        Generate data with a for loop and a nested while loop
    """
    count_min, count_max = count
    progress_bar = f'Create {data_name}'
    progress_bar = FillingCirclesBar(progress_bar, max=len(parents))
    for parent in parents:
        child_count = randrange(count_min, count_max)
        i = 0
        while i < child_count:
            i += 1
            klass(LANG_CODE, parent, *args)
        progress_bar.next()
    progress_bar.finish()
Example #13
    def do_update(self, _):
        "***Updates the tools"
        data = dict()

        for x in [x.rstrip() for x in open("mirror.txt", "r").readlines()]:
            print(x)
            data.update(requests.get(x).json())

        # Dump the merged data to disk once, stepping the bar a single time.
        bar = FillingCirclesBar('Updating', max=1)
        with open('update.json', 'w') as upt:
            json.dump(data, upt)
            bar.next()
        bar.finish()
        self.requiere = json.loads(open("update.json", "r").read())
Example #14
def progressbar(title):
    # for i in range(21):
    #     sys.stdout.write('\r')
    #     # the exact output you're looking for:
    #     sys.stdout.write("[%-20s] %d%%" % ('='*i, 5*i))
    #     sys.stdout.flush()
    #     sleep(0.05)
    text = colored(str(title), 'red', attrs=['reverse', 'blink'])
    print(text)
    bar = FillingCirclesBar('Processing', max=100)
    for i in range(100):
        # Do some work
        sleep(0.025)
        bar.next()
    bar.finish()
Example #15
def process_items(items):
    processed_items = []
    number_of_items = len(items)
    progress_bar = FillingCirclesBar('Processing pages', max=number_of_items)

    for item in items:
        product_name = item.xpath('.//a[@class="productName product1Name"]/span')[0].text_content().strip()
        actual_price = item.xpath('.//div[@class="mm-price media__price"]')[0].text_content().strip()
        processed_item = {'name': product_name, 'price': Decimal(actual_price), 'reduced_price': None, 'discount': None}
        processed_items.append(processed_item)
        progress_bar.next()
    progress_bar.finish()

    return processed_items
Example #16
def random_status_history():
    """
        Create random history for status
    """
    progress_bar = 'Create status histories for the orders'
    progress_bar = FillingCirclesBar(progress_bar, max=len(Order.orders))
    for i, order in enumerate(Order.orders):
        history_count = randrange(0, (len(STATUS) - 1))
        j = 0
        while j < history_count:
            j += 1
            StatusHistory(order)
            Order.orders[i].random_date()
            Order.orders[i].random_status(Status.status)
        progress_bar.next()
    progress_bar.finish()
Example #17
def generate_complex(data_name, lists, klass, *args, random_choice=False):
    """
        Generate data with a double for loop
    """
    parents, children = lists
    progress_bar = f'Create {data_name}'
    progress_bar = FillingCirclesBar(progress_bar, max=len(parents))
    for parent in parents:
        for child in children:
            if random_choice:
                if choice([True, False]):
                    klass(parent, child, *args)
            else:
                klass(parent, child, *args)
        progress_bar.next()
    progress_bar.finish()
Example #18
    def collect_epic(self, epic_name):
        """
            Collects all jobs in epic
            epic_name: name of job series to submit this job to
        :raise
            KeyError if epic name not registered before
        :return
            list of worker results
        """
        if epic_name not in self.__epics.keys():
            raise KeyError("Cannot find named epic '%s'" % epic_name)

        if self.__repprog:
            bar = FillingCirclesBar("Processing epic '%s'" % epic_name,
                                    max=len(self.__epics[epic_name]))
            bar.start()
            results = []
            j = 0
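            # Poll the outstanding futures round-robin; the 1 s result timeout
            # keeps the progress bar responsive while jobs are still running.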
            while j < len(self.__epics[epic_name]):
                try:
                    results.append(
                        self.__epics[epic_name][j].result(timeout=1))
                    self.__epics[epic_name].remove(self.__epics[epic_name][j])
                    if len(self.__epics[epic_name]) > 0:
                        j %= len(self.__epics[epic_name])
                    else:
                        j = 0
                    bar.next()
                except TimeoutError:
                    j = (j + 1) % len(self.__epics[epic_name])
            bar.finish()
        else:
            results = []
            for f in self.__epics[epic_name]:
                results.append(f.result())
            self.__epics[epic_name] = []
        return results
Example #19
def make_video(solution, gifname='movie.gif', duration=0.1, xkcd=False):
    params = solution.params
    step = int(0.01 / params['dt'])
    bar = FillingCirclesBar('Loading',
                            suffix='%(percent)d%%',
                            max=int((solution.steps - 1) / step))
    images = []
    figsize = (6, 6)
    for subplot in range(1, solution.steps, step):
        if xkcd:
            plt.rcParams['text.usetex'] = False
            plt.xkcd()
        fig = plt.figure(figsize=figsize)
        ax = plt.subplot(1, 1, 1)
        plt.sca(ax)
        plt.plot(solution.x, solution.u[subplot - 1, :], c='#F61067', lw=3.5)
        if xkcd:
            plt.xlabel(r'x')
            plt.ylabel(r'u(x, t)')
        else:
            plt.xlabel(r'$x$')
            plt.ylabel(r'$u(x, t)$')
        plt.title('t = {:.2f}s'.format(params['t0'] +
                                       (subplot - 1) * params['dt']))
        if subplot > 1:
            plt.axis(axis)
        if subplot == 1:
            axis = plt.axis()
        filename = 'temp.png'
        plt.savefig(filename)
        plt.close()
        images.append(Image.open(filename))
        os.remove(filename)
        bar.next()
    bar.finish()
    print('', end='\r\r')
    if xkcd:
        imageio.mimsave('xkcd_' + gifname, images, duration=duration)
    else:
        imageio.mimsave(gifname, images, duration=duration)
Example #20
def retrieve_domain_address():
    """ Performs DNS lookup on each domain """
    global SUBDOMAIN_LIST

    resolver = dns.resolver.Resolver()
    pop_list = []

    bar = FillingCirclesBar('[*] Resolving Domains', max=len(SUBDOMAIN_LIST))

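    # Resolve each subdomain's A record; hosts that fail to resolve are collected
    # in pop_list for adjust_list() to handle.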
    for i in range(len(SUBDOMAIN_LIST)):
        try:
            answers = resolver.resolve("%s" % SUBDOMAIN_LIST[i].name, "A")
            for response in answers:
                SUBDOMAIN_LIST[i].resolved_addresses.append(response.to_text())
        except dns.resolver.NoAnswer:
            pop_list.append(SUBDOMAIN_LIST[i])
        except dns.resolver.NXDOMAIN:
            pop_list.append(SUBDOMAIN_LIST[i])
        bar.next()
    bar.finish()

    SUBDOMAIN_LIST = adjust_list(pop_list)
Example #21
    def read_json_with_key(self, key):
        """
            This method read json files and return only data on specific key
        """
        print('')
        progress_bar = 'Reading data:'
        progress_bar_count = len(self.categories) * self.pages
        progress_bar = FillingCirclesBar(progress_bar, max=progress_bar_count)
        for category in self.categories:
            for page in range(self.pages):
                # Create a path for the file
                file_name = f'{page}.json'
                file_path = path.join(self.tmp_dir, category, file_name)

                # Read the JSON file
                with open(file_path, 'r') as file:
                    json_data = json.load(file)

                    # Store data in list
                    for line in json_data[key]:
                        self.data.append(line)
                progress_bar.next()
        progress_bar.finish()
Example #22
def main():
    args = get_cli_args()
    validate_cli_args(args)
    alphas = np.array(args.alphas)
    beta = np.array(args.beta)**2

    mean_prior = np.array([180., 50., 0.])
    Sigma_prior = 1e-12 * np.eye(3, 3)
    initial_state = Gaussian(mean_prior, Sigma_prior)

    if args.input_data_file:
        data = load_data(args.input_data_file)
    elif args.num_steps:
        # Generate data, assuming `--num-steps` was present in the CL args.
        data = generate_input_data(initial_state.mu.T, args.num_steps,
                                   args.num_landmarks_per_side,
                                   args.max_obs_per_time_step, alphas, beta,
                                   args.dt)
    else:
        raise RuntimeError('Neither an input data file nor --num-steps was provided.')

    should_show_plots = bool(args.animate)
    should_write_movie = bool(args.movie_file)
    should_update_plots = should_show_plots or should_write_movie

    field_map = FieldMap(args.num_landmarks_per_side)

    fig = get_plots_figure(should_show_plots, should_write_movie)
    movie_writer = get_movie_writer(should_write_movie, 'Simulation SLAM',
                                    args.movie_fps, args.plot_pause_len)
    progress_bar = FillingCirclesBar('Simulation Progress', max=data.num_steps)

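    # Note: this reload replaces the data selected above with the fixed evaluation input.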
    data = load_data("slam-evaluation-input.npy")

    slam = SAM(beta, alphas, initial_state)

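    # Enter the movie writer's saving context only when recording; otherwise use a no-op context manager.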
    with movie_writer.saving(
            fig, args.movie_file,
            data.num_steps) if should_write_movie else get_dummy_context_mgr():
        for t in range(data.num_steps):
            # Used as means to include the t-th time-step while plotting.
            tp1 = t + 1

            # Control at the current step.
            u = data.filter.motion_commands[t]
            # Observation at the current step.
            z = data.filter.observations[t]
            # print(data.filter.observations.shape)

            slam.predict(u)
            trajectory, landmarks = slam.update(z)

            progress_bar.next()
            if not should_update_plots:
                continue

            plt.cla()
            plot_field(field_map, z, slam.lm_positions,
                       slam.lm_correspondences)
            plot_robot(data.debug.real_robot_path[t])
            plot_observations(data.debug.real_robot_path[t],
                              data.debug.noise_free_observations[t],
                              data.filter.observations[t])

            plt.plot(data.debug.real_robot_path[1:tp1, 0],
                     data.debug.real_robot_path[1:tp1, 1], 'm')
            plt.plot(data.debug.noise_free_robot_path[1:tp1, 0],
                     data.debug.noise_free_robot_path[1:tp1, 1], 'g')

            plt.plot([data.debug.real_robot_path[t, 0]],
                     [data.debug.real_robot_path[t, 1]], '*r')
            plt.plot([data.debug.noise_free_robot_path[t, 0]],
                     [data.debug.noise_free_robot_path[t, 1]], '*g')

            # TODO plot SLAM solution
            plt.plot(np.array(trajectory)[:, 0], np.array(trajectory)[:, 1])
            plt.scatter(np.array(landmarks)[:, 0], np.array(landmarks)[:, 1])

            # print(t)

            # for lm in slam.lm_positions:
            #     # print(len(lm))
            #     if len(lm)>5:
            #         lm_mu, lm_sigma = get_gaussian_statistics_xy(np.array(lm[-5:]))
            #         # print('lm_mu',lm_mu)
            #         # print('lm_sigma',lm_sigma)
            #         # print('plot lm')
            #         plot2dcov(lm_mu, lm_sigma, 3, 50)

            if should_show_plots:
                # Draw all the plots and pause to create an animation effect.
                plt.draw()
                plt.pause(args.plot_pause_len)

            if should_write_movie:
                movie_writer.grab_frame()

    progress_bar.finish()

    plt.show(block=True)
Example #23

def download(link):
    fileName = link.split("/")[-1]
    fileRequest = req.get(link, stream=True)
    with open(os.path.join(save_path, fileName), "wb") as musicFile:
        musicFile.write(fileRequest.content)
    bar.next()


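# download() relies on the module-level req session, save_path, and bar defined below.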
if os.path.isdir("download"):
    with requests.Session() as req:
        save_path = "download/"

        threads = []
        bar = FillingCirclesBar("Downloading ", max=len(cleaned_urls))
        for link in cleaned_urls:
            thread = threading.Thread(target=download, args=(link, ))
            threads.append(thread)
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        bar.finish()

    # rename song files
    for file in os.listdir("download"):
        tag = TinyTag.get(os.path.join("download", file))
        newName = tag.title
        filePath = os.path.join("download", file)
        if "/" in newName:
""" initialization """
animate = 1
random_obstacles = 1
num_random_obstacles = 8
num_robots = 1
moving_obstacles = 1
impedance = 0
formation_gradient = 1
draw_gradients = 0
postprocessing = 0

pos_coef = 3.0
initialized = False
max_its = 120
# movie writer
progress_bar = FillingCirclesBar('Number of Iterations', max=max_its)
should_write_movie = 0
movie_file_name = os.getcwd() + '/videos/output.avi'
movie_writer = get_movie_writer(should_write_movie,
                                'Simulation Potential Fields',
                                movie_fps=10.,
                                plot_pause_len=0.01)

R_obstacles = 0.05  # [m]
R_swarm = 0.3  # [m]
start = np.array([-1.8, 1.8])
goal = np.array([1.8, -1.8])
V0 = (goal - start) / norm(
    goal - start)  # initial movement direction, |V0| = 1
U0 = np.array([-V0[1], V0[0]]) / norm(
    V0)  # perpendicular to initial movement direction, |U0|=1
Example #25
def main():
    args = get_cli_args()
    validate_cli_args(args)

    # weights for covariance action noise R and observation noise Q
    alphas = np.array(args.alphas) ** 2  # variance of noise R proportional to alphas, see tools/tasks@get_motion_noise_covariance()
    beta = np.deg2rad(args.beta)  # see also filters/localization_filter.py

    mean_prior = np.array([180., 50., 0.])
    Sigma_prior = 1e-12 * np.eye(3, 3)
    initial_state = Gaussian(mean_prior, Sigma_prior)

    if args.input_data_file:
        data = load_data(args.input_data_file)
    elif args.num_steps:
        # Generate data, assuming `--num-steps` was present in the CL args.
        data = generate_input_data(initial_state.mu.T, args.num_steps, alphas, beta, args.dt)
    else:
        raise RuntimeError('Neither an input data file nor --num-steps was provided.')

    store_sim_data = bool(args.output_dir)
    show_plots = bool(args.animate)
    write_movie = bool(args.movie_file)
    show_trajectory = bool(args.animate and args.show_trajectory)
    show_particles = args.show_particles and args.animate and args.filter_name == 'pf'
    update_mean_trajectory = show_trajectory or store_sim_data
    update_plots = show_plots or write_movie
    one_trajectory_per_particle = show_particles and not store_sim_data

    if store_sim_data:
        if not os.path.exists(args.output_dir):
            os.makedirs(args.output_dir)
        save_input_data(data, os.path.join(args.output_dir, 'input_data.npy'))

    # ---------------------------------------------------------------------------------------------------
    # Student's task: You will fill in these functions inside 'filters/.py'
    # ---------------------------------------------------------------------------------------------------
    localization_filter = None
    if args.filter_name == 'ekf':
        localization_filter = EKF(initial_state, alphas, beta)
    elif args.filter_name == 'pf':
        localization_filter = PF(initial_state, alphas, beta, args.num_particles, args.global_localization)
    fig = None
    if show_plots or write_movie:
        fig = plt.figure(1)
    if show_plots:
        plt.ion()

    # Initialize the trajectory if user opted-in to display.
    sim_trajectory = None
    if update_mean_trajectory:
        if one_trajectory_per_particle:
            mean_trajectory = np.zeros((data.num_steps, localization_filter.state_dim, args.num_particles))
        else:
            mean_trajectory = np.zeros((data.num_steps, localization_filter.state_dim))

        sim_trajectory = FilterTrajectory(mean_trajectory)

    if store_sim_data:
        # Pre-allocate the memory to store the covariance matrix of the trajectory at each time step.
        sim_trajectory.covariance = np.zeros((localization_filter.state_dim,
                                              localization_filter.state_dim,
                                              data.num_steps))

    # Initialize the movie writer if `--movie-file` was present in the CL args.
    movie_writer = None
    if write_movie:
        get_ff_mpeg_writer = anim.writers['ffmpeg']
        metadata = dict(title='Localization Filter', artist='matplotlib', comment='PS2')
        movie_fps = min(args.movie_fps, float(1. / args.plot_pause_len))
        movie_writer = get_ff_mpeg_writer(fps=movie_fps, metadata=metadata)

    progress_bar = FillingCirclesBar('Simulation Progress', max=data.num_steps)

    with movie_writer.saving(fig, args.movie_file, data.num_steps) if write_movie else get_dummy_context_mgr():
        for t in range(data.num_steps):
            # Used as means to include the t-th time-step while plotting.
            tp1 = t + 1

            # Control at the current step.
            u = data.filter.motion_commands[t]
            # Observation at the current step.
            z = data.filter.observations[t]

            localization_filter.predict(u)
            localization_filter.update(z)

            if update_mean_trajectory:
                if one_trajectory_per_particle:
                    sim_trajectory.mean[t, :, :] = localization_filter.X.T
                else:
                    sim_trajectory.mean[t] = localization_filter.mu

            if store_sim_data:
                sim_trajectory.covariance[:, :, t] = localization_filter.Sigma

            progress_bar.next()

            if not update_plots:
                continue

            plt.cla()
            plot_field(z[1])
            plot_robot(data.debug.real_robot_path[t])
            plot_observation(data.debug.real_robot_path[t],
                             data.debug.noise_free_observations[t],
                             data.filter.observations[t])

            plt.plot(data.debug.real_robot_path[1:tp1, 0], data.debug.real_robot_path[1:tp1, 1], 'g')
            plt.plot(data.debug.noise_free_robot_path[1:tp1, 0], data.debug.noise_free_robot_path[1:tp1, 1], 'm')

            #plt.plot([data.debug.real_robot_path[t, 0]], [data.debug.real_robot_path[t, 1]], '*g')
            plt.plot([data.debug.noise_free_robot_path[t, 0]], [data.debug.noise_free_robot_path[t, 1]], '*m')

            if show_particles:
                samples = localization_filter.X.T
                plt.scatter(samples[0], samples[1], s=2)
            else:
                plot2dcov(localization_filter.mu_bar[:-1],
                          localization_filter.Sigma_bar[:-1, :-1],
                          'red', 3,
                          legend='{} -'.format(args.filter_name.upper()))
                plot2dcov(localization_filter.mu[:-1],
                          localization_filter.Sigma[:-1, :-1],
                          'blue', 3,
                          legend='{} +'.format(args.filter_name.upper()))
                plt.legend()

            if show_trajectory:
                if len(sim_trajectory.mean.shape) > 2:
                    # This means that we probably intend to show the trajectory for every particle.
                    x = np.squeeze(sim_trajectory.mean[0:t, 0, :])
                    y = np.squeeze(sim_trajectory.mean[0:t, 1, :])
                    plt.plot(x, y)
                else:
                    plt.plot(sim_trajectory.mean[0:t, 0], sim_trajectory.mean[0:t, 1], 'blue')

            if show_plots:
                # Draw all the plots and pause to create an animation effect.
                plt.draw()
                plt.pause(args.plot_pause_len)

            if write_movie:
                movie_writer.grab_frame()

    progress_bar.finish()

    if show_plots:
        plt.show(block=True)

    if store_sim_data:
        file_path = os.path.join(args.output_dir, 'output_data.npy')
        with open(file_path, 'wb') as data_file:
            np.savez(data_file,
                     mean_trajectory=sim_trajectory.mean,
                     covariance_trajectory=sim_trajectory.covariance)
Example #26
def main():
    arguments = docopt(__doc__)

    site = arguments['<site>']

    if arguments['--HUD']:
        source = "HUD"
        dataset_id = site
        client = None
    if arguments['--Socrata']:
        source = "Socrata"
        client = Socrata(site, arguments.get('-a'))

    try:
        if arguments.get('ls'):
            datasets = list_datasets(client, site)
            print(tabulate(datasets, headers='keys', tablefmt='psql'))
        elif arguments.get('insert'):
            if source == "Socrata":
                dataset_id = arguments['<dataset_id>']
                metadata = client.get_metadata(dataset_id)['columns']
            if source == "HUD":
                metadata = json.loads(
                    urllib.request.urlopen(site).read())['fields']

            engine, session, geo = \
                get_connection(arguments['-d'], metadata, source)

            if arguments['-t']:
                Binding = get_binding(
                    metadata, geo, arguments['-t'], source
                )
            else:
                Binding = get_binding(
                    metadata, geo, dataset_id, source
                )

            # Create the table
            try:
                Binding.__table__.create(engine)
            except ProgrammingError as e:
                # Catch these here because this is our first attempt to
                # actually use the DB
                if 'already exists' in str(e):
                    raise CLIError(
                        'Destination table already exists. Specify a new table'
                        ' name with -t.'
                    )
                raise CLIError('Error creating destination table: %s' % str(e))

            num_rows, data = get_data(source, dataset_id, client)
            bar = FillingCirclesBar('  ▶ Loading from source', max=num_rows)

            # Iterate the dataset and INSERT each page
            if source == "Socrata":
                for page in data:
                    insert_data(page, session, bar, Binding)

            if source == "HUD":
                insert_data(data, session, bar, Binding)

            bar.finish()

            ui.item(
                'Committing rows (this can take a bit for large datasets).'
            )
            session.commit()

            success = 'Successfully imported %s rows.' % (
                num_rows
            )
            ui.header(success, color='\033[92m')
        if client:
            client.close()
    except CLIError as e:
        ui.header(str(e), color='\033[91m')
Example #27

filename1 = 'csvs/drone1.csv'
filename2 = 'csvs/drone2.csv'

poses1, scans1 = file_read(filename1)
poses1, scans1 = preprocessing(poses1, scans1)

poses2, scans2 = file_read(filename2)
poses2, scans2 = preprocessing(poses2, scans2)

poses = np.vstack([poses1, poses2])
scans = scans1 + scans2

# Configuration parameters
progress_bar = FillingCirclesBar('Simulation Progress', max=len(poses))
animate = 0
save_frames = 0

# Clear occupancy grid
DIM_X = 4.0
DIM_Y = 4.0
RESOLUTION = 0.04
LOWEST_X = -2.0
LOWEST_Y = -2.0

grid = np.zeros((int(DIM_X / RESOLUTION), int(DIM_Y / RESOLUTION)))

robotX1 = poses1[:, 0]
robotX2 = poses2[:, 0]
robotY1 = poses1[:, 1]
Example #28
    def __init__(self, message, max, tail=None):
        FillingCirclesBar.__init__(self, message, max=max)
        self._solved = 0
        self.suffix = "%(percent)5.1f%% | +%(solved)5s @ %(elapsed_td)s | EST +%(eta_solved)5s @ %(eta_td)s"
        if tail:
            self.suffix += " | %s " % tail
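        # Note: the custom "solved" and "eta_solved" suffix fields assume matching
        # properties are defined on this subclass (not shown in this snippet).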
Example #29
def main():
    args = get_cli_args()
    validate_cli_args(args)
    alphas = np.array(args.alphas)**2
    beta = np.array(args.beta)
    beta[1] = np.deg2rad(beta[1])

    mean_prior = np.array([180., 50., 0.])
    Sigma_prior = 1e-12 * np.eye(3, 3)
    initial_state = Gaussian(mean_prior, Sigma_prior)

    if args.input_data_file:
        data = load_data(args.input_data_file)
    elif args.num_steps:
        # Generate data, assuming `--num-steps` was present in the CL args.
        data = generate_input_data(initial_state.mu.T, args.num_steps,
                                   args.num_landmarks_per_side,
                                   args.max_obs_per_time_step, alphas, beta,
                                   args.dt)
    else:
        raise RuntimeError('Neither an input data file nor --num-steps was provided.')

    should_show_plots = bool(args.animate)
    should_write_movie = bool(args.movie_file)
    should_update_plots = should_show_plots or should_write_movie

    field_map = FieldMap(args.num_landmarks_per_side)

    fig = get_plots_figure(should_show_plots, should_write_movie)
    movie_writer = get_movie_writer(should_write_movie, 'Simulation SLAM',
                                    args.movie_fps, args.plot_pause_len)
    progress_bar = FillingCirclesBar('Simulation Progress', max=data.num_steps)

    with movie_writer.saving(
            fig, args.movie_file,
            data.num_steps) if should_write_movie else get_dummy_context_mgr():
        for t in range(data.num_steps):
            # Used as means to include the t-th time-step while plotting.
            tp1 = t + 1

            # Control at the current step.
            u = data.filter.motion_commands[t]
            # Observation at the current step.
            z = data.filter.observations[t]

            # TODO SLAM predict(u)

            # TODO SLAM update

            progress_bar.next()
            if not should_update_plots:
                continue

            plt.cla()
            plot_field(field_map, z)
            plot_robot(data.debug.real_robot_path[t])
            plot_observations(data.debug.real_robot_path[t],
                              data.debug.noise_free_observations[t],
                              data.filter.observations[t])

            plt.plot(data.debug.real_robot_path[1:tp1, 0],
                     data.debug.real_robot_path[1:tp1, 1], 'm')
            plt.plot(data.debug.noise_free_robot_path[1:tp1, 0],
                     data.debug.noise_free_robot_path[1:tp1, 1], 'g')

            plt.plot([data.debug.real_robot_path[t, 0]],
                     [data.debug.real_robot_path[t, 1]], '*r')
            plt.plot([data.debug.noise_free_robot_path[t, 0]],
                     [data.debug.noise_free_robot_path[t, 1]], '*g')

            # TODO plot SLAM solution

            if should_show_plots:
                # Draw all the plots and pause to create an animation effect.
                plt.draw()
                plt.pause(args.plot_pause_len)

            if should_write_movie:
                movie_writer.grab_frame()

    progress_bar.finish()

    plt.show(block=True)
Example #30
def download_and_verify(path, destination=None):
    """
    Downloads the Apache file and verifies its hash
    :param path: the download file path, e.g. /nifi/nifi-registry/nifi-registry-0.5.0/nifi-registry-0.5.0-bin.tar.gz
    :param destination: the location to save the downloaded file or file object
    """
    destination = destination or "."
    if isinstance(destination, str):
        download_dir = dirname(destination)
        download_file = basename(destination) or basename(path)
        if isdir(download_file):
            download_dir = download_file
            download_file = basename(path)
        download_path = expanduser(os.path.join(download_dir, download_file))
        logging.info(
            "Downloading Apache project {path} to destination {dest}".format(
                path=path, dest=download_path))
        assert not os.path.exists(download_path), "File already exists"
    else:
        download_path = destination
        logging.info("Downloading Apache project {path}".format(path=path))

    site = "www"
    try:
        expected_hash = get_hash(path, site)
    except requests.exceptions.HTTPError:
        logging.debug("Not found, try from archive")
        site = "archive"
        expected_hash = get_hash(path, site)
        logging.info("Downloading from archive")

    with requests.get(get_mirror_url(path, site), stream=True) as r:
        r.raise_for_status()
        file_length = r.headers.get("content-length")
        if file_length:
            file_length = int(file_length)
            logging.info("File size: {size}".format(
                size=humanize.naturalsize(file_length)))
            progress_bar = FillingCirclesBar("Downloading",
                                             max=ceil(file_length /
                                                      DOWNLOAD_CHUNK_SIZE))
        else:
            progress_bar = Spinner("Downloading")

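        # Stream the response in chunks, hashing while writing so the whole
        # file never has to be held in memory.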
        def save_to_file(_f):
            m = hashlib.sha512()
            for chunk in r.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE):
                if chunk:
                    _f.write(chunk)
                    m.update(chunk)
                    progress_bar.next()
            actual_hash = m.hexdigest()
            assert actual_hash in expected_hash,\
                "Hash of downloaded file is invalid, expected {expected_hash} but got {actual_hash}.".format(
                    expected_hash=expected_hash,
                    actual_hash=actual_hash
                )

        if hasattr(download_path, "write"):
            save_to_file(download_path)
        else:
            with open(download_path, "wb") as f:
                save_to_file(f)
            assert os.path.exists(
                download_path), "File could not be downloaded."
    print(" Done.", file=sys.stderr)