Example #1
from progress.bar import FillingSquaresBar
import time


def run_bar():
    mylist = [1, 2, 3, 4, 5]
    bar = FillingSquaresBar('Bar', max=len(mylist))
    for item in mylist:
        bar.next()
        time.sleep(0.5)
    bar.finish()
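The same loop can be written more compactly with the `iter()` helper that recent versions of the progress package provide on every bar: it sets `max` from `len()` when the iterable supports it and calls `next()` and `finish()` for you. A minimal sketch:

from progress.bar import FillingSquaresBar
import time

# equivalent to Example #1, letting the bar drive the iteration
for item in FillingSquaresBar('Bar').iter([1, 2, 3, 4, 5]):
    time.sleep(0.5)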
Example #2
import os

import cv2
import numpy as np
from progress.bar import FillingSquaresBar


def load_images(img_path):
    '''
        Function to load images into the main memory

        img_path    : Relative path to the image directory
        return      : numpy array of images present in that directory
                      sorted in the numerical order
    '''
    image_files_names = [name.split('.')[0] for name in os.listdir(img_path)]
    image_files_names.sort(key=int)  # numerical order; assumes integer file stems
    image_files_names = [
        img_path + name + '.png' for name in image_files_names
    ]

    images = []
    bar = FillingSquaresBar('Loading Images from {}'.format(img_path),
                            max=len(image_files_names))
    for i in range(len(image_files_names)):
        image = cv2.imread(image_files_names[i])
        images.append(image)
        bar.next()
    bar.finish()

    images = np.array(images)
    return images
Example #3
 def extract_data(self):
     if not self.views:
         extract_data_views = [self.view]
     else:
         extract_data_views = self.views
     for extract_data_view in extract_data_views:
         folders = getattr(self.db, extract_data_view)
         if self.range:
             folders = folders[int(self.range.split(':')[0]):int(self.range.split(':')[1])]
         if self.limit:
             folders = folders.head(self.limit)
         if self.licence_id:
             folders = folders[folders.REFERENCE == self.licence_id]
         if self.id:
             folders = folders[folders.id == int(self.id)]
         bar = FillingSquaresBar('Processing licences for {}'.format(str(extract_data_view)), max=folders.shape[0])
         for id, licence in folders.iterrows():
             self.get_licence(id, licence)
             bar.next()
         bar.finish()
         export_error_csv([self.parcel_errors, self.street_errors])
         if self.iterate is True:
             try:
                 self.validate_data(self.data, 'GenericLicence')
             except Exception:
                 raise IterationError('Schema change during iterative process')
Example #4
 def validate_data(self, data, type):
     bar = FillingSquaresBar('Validating licences with : {}'.format(type),
                             max=len(data))
     for licence in data:
         self.validate_schema(licence, type)
         bar.next()
     bar.finish()
Example #5
 def _mine(self, progress=True):
     if progress:
         bar = FillingSquaresBar('Mining %s:' % self.grid.name,
                                 max=self.grid.dim)
         for i in range(self.grid.dim):
             p = {
                 'lat': self.grid.points[i][0],
                 'lng': self.grid.points[i][1]
             }
             query_result = self.searcher(lat_lng=p,
                                          radius=self.r,
                                          types=self.place_type)
             for place in self.get_places(query_result):
                 yield (place)
             bar.next()
         bar.finish()
     else:
         for i in range(self.grid.dim):
             p = {
                 'lat': self.grid.points[i][0],
                 'lng': self.grid.points[i][1]
             }
             query_result = self.searcher(lat_lng=p,
                                          radius=self.r,
                                          types=self.place_type)
             for place in self.get_places(query_result):
                 yield (place)
Example #6
class FacebookMiner(object):
    page_fields = '?fields=is_community_page,category,category_list,fan_count,hours,link,location,name,name_with_location_descriptor,overall_star_rating,parking,phone,rating_count,single_line_address,store_location_descriptor,website,were_here_count'

    def __init__(self,
                 mine_points,
                 API_KEY,
                 search_rayon=1000,
                 categories=['FOOD_BEVERAGE'],
                 _type='place'):
        self.points = mine_points
        self.graph = GraphAPI(API_KEY, version='2.9')
        self.categories = categories
        self.r = search_rayon
        self.dim = len(self.points)
        self._type = _type

    def _mine(self, progress=True):
        if progress:
            self.bar = FillingSquaresBar('Mining:', max=self.dim)
            for p in self.points:
                for pla in self.get_places(p):
                    yield pla
                self.bar.next()
            self.bar.finish()
        else:
            for p in self.points:
                for pla in self.get_places(p):
                    yield pla

    def get_places(self, p):
        c = str(p[0]) + ',' + str(p[1])
        nearby_ids = [
            l['id'] for l in self.graph.search(term='',
                                               categories=str(self.categories),
                                               type=self._type,
                                               center=c,
                                               distance=self.r)['data']
        ]
        for _id in nearby_ids:
            entity = self.graph.get(str(_id) + self.page_fields)
            entity['fb_id'] = entity.pop('id')
            try:
                entity['location']['latitude'] = float(
                    entity['location'].pop('latitude'))
                entity['location']['longitude'] = float(
                    entity['location'].pop('longitude'))
            except Exception:
                pass
            try:
                entity['overall_star_rating'] = float(
                    entity.pop('overall_star_rating'))
            except Exception:
                pass
            yield entity
Example #7
def Pb5():
    from progress.bar import FillingSquaresBar
    import time

    bar = FillingSquaresBar('Progress bar 5', max=100)  # max is 100 here; adjust as needed

    for i in range(100):  # keep the loop count in step with max
        bar.next()
        time.sleep(0.1)  # delay per step, adjustable; 0.1-1s works best

    bar.finish()
Example #8
def download_from_eoddata(start_date, end_date, market, driver):
    """Provide datetime.date arguments `start_date` and `end_date`, a string
    `market`, and Selenium driver `driver`.  The function will then download
    the EOD data for the appropriate market and dates from the eoddata
    """

    # navigate to the downloads page
    driver.get('http://www.eoddata.com/download.aspx')

    # get a list of all of the hyperlink tags in the page
    bs_obj = BeautifulSoup(driver.page_source, "lxml")
    url_list = bs_obj.find_all('a')

    # each iteration steps through the list of hyperlink tags in the page until
    # it finds the list of example downloads, and then extracts the `k` field
    k = ''
    for url in url_list:

        if not url.has_attr('href'):
            continue

        # looks for a link of the form
        # /data/filedownload.aspx?e=INDEX&sd=20180606&ed=20180606&d=4&k=ph72h4ynw2&o=d&ea=1&p=0
        # Once we find one, we need to extract the `k` field so that we can use
        # it when constructing our own HTML request.
        url_string = url.attrs['href']
        if re.match('/data/filedownload.aspx', url_string):
            k = re.search('k=([^&]*)', url_string).group(1)
            break
    if not k:
        raise Exception('could not find the k field in any download link')

    # construct the URL according to the dates and market that we want to
    # download
    url_template = '{url_base}?e={e}&sd={sd}&ed={ed}&d={d}&k={k}&o={o}&ea={ea}&p={p}'
    url_download = url_template.format(
        url_base='http://www.eoddata.com/data/filedownload.aspx',
        e=market,
        sd=start_date.strftime('%Y%m%d'),
        ed=end_date.strftime('%Y%m%d'),
        d='4',
        k=k,
        o='d',
        ea='1',
        p='0')
    # submit the download request
    driver.get(url_download)

    # wait for 10 seconds to ensure that the file has time to download
    bar = FillingSquaresBar('Downloading data ', max=100)
    for i in range(100):
        bar.next()
        time.sleep(0.1)
    bar.finish()
Example #9
def get_point_cloud(B_matrix,
                    parallax_map,
                    color_map,
                    poses,
                    mask_lower_bound=2):

    point_cloud = []
    point_cloud_colors = []

    bar = FillingSquaresBar('Generating Frame Point Cloud',
                            max=len(parallax_map))
    for i in range(len(parallax_map)):

        p_map = parallax_map[i]
        mask = (p_map[:, 2] > mask_lower_bound)
        p_map = p_map[mask, :]
        c_map = color_map[i]
        c_map = (c_map[mask, :] / 255.0).astype('float64')

        point_cloud_colors.append(c_map)

        point_cloud.append(B_matrix @ p_map.T)

        point_cloud[i] = point_cloud[i] / point_cloud[i][3]

        point_cloud[i] = poses[i] @ point_cloud[i]
        point_cloud[i] = point_cloud[i].T

        bar.next()
    bar.finish()

    registered_point_cloud = point_cloud[0]
    registered_point_cloud_colors = point_cloud_colors[0]

    bar = FillingSquaresBar('Registering Global Point Cloud',
                            max=len(point_cloud) - 1)
    for i in range(1, len(point_cloud)):
        registered_point_cloud = np.concatenate(
            (registered_point_cloud, point_cloud[i]), axis=0)
        registered_point_cloud_colors = np.concatenate(
            (registered_point_cloud_colors, point_cloud_colors[i]), axis=0)
        bar.next()
    bar.finish()

    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(registered_point_cloud)
    pcd.colors = o3d.utility.Vector3dVector(registered_point_cloud_colors)

    o3d.io.write_point_cloud(
        OP1_DIR + "/point_cloud_{}.ply".format(
            datetime.datetime.now().strftime("%d-%b-%Y %H:%M:%S.%f")), pcd)

    return registered_point_cloud, registered_point_cloud_colors
Example #10
 def _generate(self):
     self._initialize_content()
     self._parse_structure()
     start_time = default_timer()
     progress_bar = FillingSquaresBar(
         "Generate content", max=self._structure["number"]
     )
     loop = asyncio.get_event_loop()
     loop.run_until_complete(self._generate_async(progress_bar))
     progress_bar.finish()
     elapsed = default_timer() - start_time
     print("{:5.2f}s elapsed".format(elapsed))
Example #11
def image_point_cloud(point_cloud, point_cloud_colors, poses, image_width,
                      image_height):
    bar = FillingSquaresBar('Imaging the Point Cloud with the given poses',
                            max=len(poses))
    for i in range(len(poses)):
        P = poses[i]
        R = P[:, :3]
        T = -1 * (R.T @ P[:, 3])
        R = R.T
        # K is assumed to be a module-level camera intrinsics matrix
        im = get_image(i, point_cloud, point_cloud_colors, R, T, K,
                       image_width, image_height)
        bar.next()
    bar.finish()
Example #12
def download_video(link):
    yt = pytube.YouTube(link)
    stream = yt.streams.first()
    video_length = get_time(yt.length)
    video_size = get_size(stream.filesize)
    print("Downloading \"" + yt.title + "\" Length : " + video_length)
    print("\tFile Size : " + video_size)
    bar = FillingSquaresBar("Progress : ", suffix="%(percent)d%%",
                            max=stream.filesize)
    # drive the bar from pytube's progress callback (pytube v10+ signature)
    # instead of re-downloading the stream 100 times, as the original loop did
    yt.register_on_progress_callback(
        lambda s, chunk, bytes_remaining: bar.goto(s.filesize - bytes_remaining))
    stream.download(SAVE_PATH)
    bar.finish()
Example #13
def create_parallax_map(images_left, images_right):
    '''
        Return a parallax map given two stereo rectified images

        images_left : np array of the left stereo images
        images_right: np array of the right stereo images
        return      : parallax map for each frame and the last disparity map
    '''
    if len(images_left) != len(images_right):
        print("Error: #images_left must be equal to #images_right")
        return False

    window_size = 5
    minDisparity = -39
    numDisparities = 144
    stereo = cv2.StereoSGBM_create(minDisparity=minDisparity,
                                   numDisparities=numDisparities,
                                   blockSize=window_size,
                                   P1=8 * 3 * window_size**2,
                                   P2=64 * 3 * window_size**2,
                                   disp12MaxDiff=1,
                                   uniquenessRatio=10,
                                   speckleWindowSize=100,
                                   speckleRange=32,
                                   preFilterCap=63,
                                   mode=3)

    disparity = []
    parallax_map = []

    bar = FillingSquaresBar('Extracting Disparity Map', max=len(images_left))
    for k in range(len(images_left)):
        im_right = cv2.cvtColor(images_right[k], cv2.COLOR_BGR2GRAY)
        im_left = cv2.cvtColor(images_left[k], cv2.COLOR_BGR2GRAY)
        disparity = stereo.compute(im_right, im_left).astype('float64')
        disparity = (disparity - minDisparity) / numDisparities

        parallax_map.append([])
        for y in range(disparity.shape[0]):
            for x in range(disparity.shape[1]):
                parallax_map[k].append([x, y, disparity[y, x], 1])

        bar.next()

    parallax_map = np.array(parallax_map)

    bar.finish()
    return parallax_map, disparity
Example #14
def train(logbook, net, device, loss_fn, opt, train_l):
    """Run one epoch of the training experiment."""
    logbook.meter.reset()
    bar = FillingSquaresBar('Training \t', max=len(train_l))
    controllers = indiv.Controller.getControllers(net)
        
    for i_batch, data in enumerate(train_l):
        
        # load data onto device
        inputs, gt_labels = data
        inputs            = inputs.to(device)
        gt_labels         = gt_labels.to(device)
        
        # forprop
        pr_outs           = net(inputs)
        loss              = loss_fn(pr_outs, gt_labels)
        
        # update statistics
        logbook.meter.update(pr_outs, gt_labels, loss.item(), track_metric=logbook.track_metric)
        bar.suffix = 'Total: {total:} | ETA: {eta:} | Epoch: {epoch:4d} | ({batch:5d}/{num_batches:5d})'.format(
                total=bar.elapsed_td,
                eta=bar.eta_td,
                epoch=logbook.i_epoch,
                batch=i_batch + 1,
                num_batches=len(train_l))
        bar.suffix = bar.suffix + logbook.meter.bar()
        bar.next()
        
        # backprop
        opt.zero_grad()
        loss.backward()
        opt.step()
        for ctrl in controllers: 
            ctrl.step_postOptimStep()
        
    bar.finish()
    stats = {
        'train_loss':   logbook.meter.avg_loss,
        'train_metric': logbook.meter.avg_metric
    }
    for k, v in stats.items():
        if v:
            logbook.writer.add_scalar(k, v, global_step=logbook.i_epoch)
    logbook.writer.add_scalar('learning_rate', opt.param_groups[0]['lr'], global_step=logbook.i_epoch)
    return stats
Example #15
def prepare(rkeys, version, force=False, cores=1, hashing=None):
    pool = Pool(cores)
    m = Manager()
    queue = m.Queue()
    jobs = [rkey + (version, force, hashing, queue) for rkey in rkeys]
    bar = Bar("[1/3]",
              max=len(jobs),
              suffix="%(percent).1f%% / %(elapsed_td)s / ETA %(eta_td)s")
    bar.start()
    res = pool.map_async(prepare2, jobs, chunksize=1)
    todo = len(jobs)
    while todo:
        queue.get()
        todo -= 1
        bar.next()
    bar.finish()
    pool.close()
    pool.join()
Example #16
def download_audio(link):
    yt = pytube.YouTube(link)
    stream = yt.streams.filter(only_audio=True).first()
    bad_chars = [";", ":", "!", "*", ' ', "$", "@", "(", ")", "[", "]", "|", ".", "\"", "\'", ","]
    _filename = yt.title
    for ch in bad_chars:
        _filename = _filename.replace(ch, "_")
    mp4_name = "download/%s.mp4" % _filename
    mp3_name = "download/%s.mp3" % _filename
    bar = FillingSquaresBar("Downloading Audio : ", suffix="%(percent)d%%",
                            max=stream.filesize)
    # drive the bar from pytube's progress callback (pytube v10+ signature)
    # instead of re-downloading the stream 100 times, as the original loop did
    yt.register_on_progress_callback(
        lambda s, chunk, bytes_remaining: bar.goto(s.filesize - bytes_remaining))
    stream.download(SAVE_PATH, _filename)
    bar.finish()

    print("\nPerforming required conversions...")
    ffmpeg = ('ffmpeg -loglevel panic -i %s ' % mp4_name + mp3_name)
    subprocess.call(ffmpeg, shell=True)
    os.remove(mp4_name)
Example #17
def test(logbook, net, device, loss_fn, test_l, valid=False, prefix=None):
    """Run a validation epoch."""
    logbook.meter.reset()
    bar_title = 'Validation \t' if valid else 'Test \t'
    bar       = FillingSquaresBar(bar_title, max=len(test_l))
    with torch.no_grad():
        for i_batch, data in enumerate(test_l):
            
            # load data onto device
            inputs, gt_labels     = data
            inputs                = inputs.to(device)
            gt_labels             = gt_labels.to(device)
            
            # forprop
            tensor_stats, pr_outs = net.forward_with_tensor_stats(inputs)
            loss                  = loss_fn(pr_outs, gt_labels)
            
            # update statistics
            logbook.meter.update(pr_outs, gt_labels, loss.item(), track_metric=True)
            bar.suffix = 'Total: {total:} | ETA: {eta:} | Epoch: {epoch:4d} | ({batch:5d}/{num_batches:5d})'.format(
                total=bar.elapsed_td,
                eta=bar.eta_td,
                epoch=logbook.i_epoch,
                batch=i_batch + 1,
                num_batches=len(test_l))
            bar.suffix = bar.suffix + logbook.meter.bar()
            bar.next()
    bar.finish()

    if prefix is None:
        prefix = 'valid' if valid else 'test'
    stats = {
        prefix+'_loss':   logbook.meter.avg_loss,
        prefix+'_metric': logbook.meter.avg_metric
    }
    if valid:
        for k, v in stats.items():
            if v:
                logbook.writer.add_scalar(k, v, global_step=logbook.i_epoch)
        for name, tensor in tensor_stats:
            logbook.writer.add_histogram(name, tensor, global_step=logbook.i_epoch)
    return stats
Example #18
    def extract_data(self):
        folders = getattr(self.db, self.view)
        if self.range:
            folders = folders[int(self.range.split(':')[0]):int(self.range.split(':')[1])]
        if self.limit:
            folders = folders.head(self.limit)
        if self.licence_id:
            folders = folders[folders.DOSSIER_NUMERO == self.licence_id]

        bar = FillingSquaresBar('Processing licences', max=folders.shape[0])
        for id, licence in folders.iterrows():
            self.get_licence(id, licence)
            bar.next()
        bar.finish()
        export_error_csv([self.parcel_errors, self.street_errors])
        if self.iterate is True:
            try:
                self.validate_data(self.data, 'GenericLicence')
            except Exception:
                raise IterationError('Schema change during iterative process')
Example #19
    def generate_test_files(self):
        if not os.path.exists(self.test_directory):
            os.makedirs(self.test_directory)
        else:
            shutil.rmtree(self.test_directory)
            os.makedirs(self.test_directory)
        os.chdir(self.test_directory)

        bar = FillingSquaresBar('Processing', max=self.files_number)
        for i in range(self.files_number):
            filename = "file" + str(
                i
            ) + f".{self.file_extention[rand(0,len(self.file_extention) - 1 )]}"
            with open(filename, 'wb') as new_random_file:
                d1 = rand(1, 1000)
                d2 = rand(1, 999)
                dimension = d1 * self.file_size + d2
                new_random_file.write(os.urandom(dimension))
            bar.next()
        bar.finish()
Example #20
def scrape_symbols_tsx_website(
        tsx_company_directory_url='https://www.tmxmoney.com/en/research/listed_company_directory.html',
        webdriver_exec_path='/usr/lib/chromium-browser/chromedriver'):
    driver = webdriver.Chrome(executable_path=webdriver_exec_path)
    driver.get(tsx_company_directory_url)
    elem = driver.find_element_by_id("SearchKeyword")
    elem.clear()
    elem.send_keys("^")
    button = driver.find_element_by_id("btn-search")
    driver.execute_script("arguments[0].click();", button)
    time.sleep(5)

    table = driver.find_element_by_id('tresults')
    rows = table.find_elements_by_tag_name('tr')

    extracted_table = pd.DataFrame(columns=['company_name', 'symbol'])

    bar = FillingSquaresBar('Extracting', max=len(rows))

    for row in rows[1:]:
        cols = row.find_elements_by_tag_name('td')
        to_append = [None, None]

        for i in range(len(cols)):
            to_append[i] = cols[i].text.lstrip()

        extracted_table = extracted_table.append(
            {
                'company_name': to_append[0],
                'symbol': to_append[1]
            },
            ignore_index=True)

        bar.next()

    driver.close()
    bar.finish()

    final_table = extracted_table.loc[~extracted_table['symbol'].isnull(), :]

    return final_table
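A side note on this example: DataFrame.append was deprecated in pandas 1.4 and removed in pandas 2.0. On current pandas the same table is better accumulated as a list of dicts and built in one call, roughly like this (the row data here is made up for illustration):

import pandas as pd

records = []
for name, symbol in [("Acme Corp", "ACM"), ("Foo Inc", "FOO")]:
    # one dict per scraped row
    records.append({"company_name": name, "symbol": symbol})

extracted_table = pd.DataFrame(records, columns=["company_name", "symbol"])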
Example #21
 def _mine(self, post_list=None, progress=True):
     # avoid a mutable default argument; fall back to the instance's post ids
     post_list = post_list if post_list else self.posts_ids
     post_bach = [{
         'method': 'GET',
         'relative_url': p_id + self.post_fields
     } for p_id in post_list]
     n = len(post_list)
     posts = self.graph.batch(post_bach)
     if progress:
         bar = FillingSquaresBar('Mining %s:' % self.name, max=n)
         for post in posts:
             p = self.clean_post(post)
             if p is not None:
                 yield p
             bar.next()
         bar.finish()
     else:
         for post in posts:
             p = self.clean_post(post)
             if p is not None:
                 yield p
Example #22
def extract_color(images):
    '''
        Function to get the RGB color values from the images

        images: np array of images
        return: np array of the color values extracted from the images

    '''
    color_map = []

    bar = FillingSquaresBar('Extracting Color Map', max=len(images))
    for k in range(len(images)):
        color_map.append([])
        image = cv2.cvtColor(images[k], cv2.COLOR_BGR2RGB)
        for y in range(image.shape[0]):
            for x in range(image.shape[1]):
                color_map[k].append(image[y, x, :])
        bar.next()
    color_map = np.array(color_map)

    bar.finish()
    return color_map
Example #23
def download_resources(resources, path):
    """
    Download page resources to the specified directory.

    Show the download progress in terminal.

    Args:
        resources: list of (url, local storage subpath) tuples
        path: parent directory for local storage
    """
    download_progress = FillingSquaresBar(
        'Downloading page resources',
        max=len(resources),
        suffix='%(percent)d%%',  # noqa:WPS323
    )
    for resource_url, resource_filename in resources:
        logging.debug(
            'resource_url: {0}, resource_filename: {1}'.format(
                resource_url,
                resource_filename,
            ), )
        try:
            resource_content, resource_binary, _ = send_request(resource_url, )
        except Exception as resource_request_error:
            logging.warning(
                'Resource download failed: {0}'.format(
                    str(resource_request_error), ),
                exc_info=logger.isEnabledFor(logging.DEBUG),
            )
            continue

        write_to_file(
            path / resource_filename,
            resource_content,
            binary_mode=resource_binary,
        )
        download_progress.next()  # noqa: B305

    download_progress.finish()
Example #24
def make(rkeys, out=None, hashing=None):
    dat = []
    bar = Bar("[2/3]",
              max=len(rkeys),
              suffix="%(percent).1f%% / %(elapsed_td)s / ETA %(eta_td)s")
    bar.start()
    for (bid, pid, problem, limit) in rkeys:
        f_dat = expres.results.path(bid,
                                    pid,
                                    problem,
                                    limit,
                                    ext="in" if hashing else "pre")
        if out:
            tmp = open(f_dat).read().strip()
            if tmp:
                out.write(tmp)
                out.write("\n")
        else:
            dat.extend(open(f_dat).read().strip().split("\n"))
        bar.next()
    bar.finish()
    return dat if not out else None
Example #25
  def convertRange(self, start, end):
    if not len(self.targets):
      raise ValueError("There seem to be no targets...")
    elif self.outOfRange(start) or self.outOfRange(end):
      raise ValueError("Range does not exist.")

    progress = FillingSquaresBar('Converting', max = (end - start))
    for t in range(start, end): 
      
      img = self.convertTarget(self.targets[t])
      imgName = './out/'+str(t)+'.png' 
      img.save(imgName)

      img.close()

      progress.next()
    progress.finish()

    images = []
    for fn in os.listdir('out'):
      images.append(imageio.imread('out/'+fn))

    imageio.mimsave('out.gif', images, 'GIF-FI', duration = .25)
Example #26
    def execute(self):

        with open(self.config['main']['input_path'], 'r') as input_file:
            data = json.load(input_file)

        try:
            if self.config['plone']['foldermanager']:
                self.foldermanager_uid = self.search_foldermanager(
                    self.config['plone']['foldermanager'])
            bar = FillingSquaresBar('Importing licences', max=len(data))
            self.nb_licence = len(data)
            if self.limit:
                self.nb_licence = self.limit
            if self.group_licences:
                for i in range(0, self.nb_licence, self.group_licences):
                    licence = data[i:i + self.group_licences]
                    self.import_licence(licence)
                    for j in licence:
                        bar.next()
            else:
                self.group_licences = 1
                for i in range(0, self.nb_licence, self.group_licences):
                    licence = data[i:i + self.group_licences]
                    self._current_licence = licence[0]
                    if self.licence_type:
                        if licence[0]['portalType'] == self.licence_type:
                            self.import_licence(licence)
                    elif self.licence_id:
                        if self.licence_id == licence[0]['reference']:
                            self.import_licence(licence)
                    else:
                        self.import_licence(licence)

                    for j in licence:
                        bar.next()

            bar.finish()
        except Exception as e:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            self.licence_errors.append(
                ErrorToCsv("licence_errors", "Dossier non traité et en erreur",
                           self._current_licence, str(exc_type)))
            print("Erreur: {} *** Licence: {} *** traceback: {}".format(
                e, "group_list", traceback.print_tb(e.__traceback__)))
            if self.exit_on_error:
                export_error_csv([self.licence_errors])
                sys.exit(1)

        export_error_csv([self.licence_errors])

        if self._log:
            print("--- LOGS ---")
            print(
                json.dumps(self._log,
                           indent=4,
                           sort_keys=True,
                           cls=DateTimeEncoder))
        print("--- Total Duration --- %s seconds ---" %
              (time.time() - self.start_time))
        if self.benchmarking:
            print(
                json.dumps(self._benchmark,
                           indent=4,
                           sort_keys=True,
                           cls=DateTimeEncoder))
Example #27
def main():
    namefile = sys.argv[1]
    name = (namefile.split("/")[-1]).split(".")[0]

    # parameters
    DO = float(
        sys.argv[2])  # distance cut-off; there is no interaction below 8 A
    DELTA = float(sys.argv[3])  # parameter of the logistic probability function
    MIN_SIZE = int(sys.argv[4])  # minimum size of a PU
    MAX_SIZE = int(sys.argv[5])  # maximum size of a PU

    # First, the program reads the available chains from the pdb and asks the
    # user which chain to analyse
    list_chains = readChainPDB(namefile)
    chain = input(
        "Choose a chain, in your pdb file the chains are {} :".format(
            ','.join(list_chains)))
    while chain not in list_chains:
        print("It is not a chain from your pdb file")
        chain = input(
            "Choose a chain, in your pdb file the chains are {} :".format(
                ','.join(list_chains)))

    list_atoms = readPDB(namefile, chain)  # the list of atoms from the pdb file
    # Then, the program creates the contacts matrix
    contacts = contacts_matrix(namefile, DO, DELTA, chain, list_atoms)
    # It assigns secondary structures
    list_ss = dssp("DSSP/" + name + ".out", chain)

    #And calculates PI, sigma and k criteria
    list_PU = []

    M = MAX_SIZE  #max size will be decreased
    bar = FillingSquaresBar('Processing', max=(len(list_ss) - MIN_SIZE))
    for begin in range(0, len(list_ss)):
        if begin <= (len(list_ss) - MAX_SIZE):
            list_PU = list_PU + calculate_criteria(contacts, begin, MIN_SIZE,
                                                   MAX_SIZE, list_ss)
            bar.next()
        elif M >= MIN_SIZE:
            #To handle the end of the protein
            M = M - 1
            list_PU = list_PU + calculate_criteria(contacts, begin, MIN_SIZE,
                                                   M, list_ss)
            bar.next()
    bar.finish()

    #Finds the best PUs based on criteria values
    found_PU = find_PU(list_PU)

    create_file(found_PU, name, chain + "_" + name + "2.txt", MIN_SIZE, "w",
                chain)

    #Find the final best PUs
    list_best_PU = best_PU(found_PU)

    create_file(list_best_PU, name, chain + "_" + name + ".txt", MIN_SIZE, "w",
                chain)

    #Creates png of matrix with delineated PUs
    for pu in list_best_PU:
        plt.imshow(contacts)
        plt.colorbar()
        plt.axvline(pu.begin)
        plt.axvline(pu.end)
        plt.axhline(pu.begin)
        plt.axhline(pu.end)
        plt.text(pu.begin + 2, pu.end - 2, "A", fontsize=20, color="red")

        plt.savefig("resultPU/" + name + "/" + chain + "_" + name + "_" +
                    str(pu.begin) + ".png")
        plt.show()
Example #28
def move_file_from_directories(sourceDir, distDir):
    """Take files from directories and after make a directory
     named it as OTHER then check if is that file extension in
     the list or not if not then first make a directory in
     destinations directory and named it as that file extension
     then move files from that directory to destination directory
     that made based on file extension and if file extension is in
     the list so directory is already exist and file will transfer
     to that directory, but if same file name is in the directory
     then this file in a duplicate file and should be rename then
     transfer that and after renaming, and at the end bar progress
     will increase."""

    distlist = []
    boolean = True
    process_type = 'Move'
    counter = bar_file_counter(sourceDir)
    # Customizing bar progress
    bar = FillingSquaresBar('Processing', max=counter)
    for root, dir, files in os.walk(sourceDir):
        for ffile in files:
            file_format = file_format_finder(root, ffile, distDir)
            # file format is not in the known list
            if file_format == False or file_format == 1:
                lower_ffile = ffile.lower()
                # files with extension
                if lower_ffile in distlist:
                    boolean = False
                    new_file = file_renamer(ffile, distlist, boolean)
                    destdir = distDir + '/OTHER'
                    # OTHER directory for none type files
                    os.rename(os.path.join(root, ffile),
                              os.path.join(destdir, new_file))
                    distlist.append(new_file)
                    log_1 = "'I couldn't recognize the file:=> %s <= type so it is moved to %s dir as %s'" \
                            % (ffile, destdir, new_file)
                    log_file(distDir, log_1)
                    progressing_bar(process_type)
                    # After finish every step bar will forward
                    bar.next()
                # File without extension
                else:
                    src = os.path.join(root, ffile)
                    destdir = distDir + '/OTHER'
                    log_2 = "'I couldn't recognize the file:=> %s <=type so it will been moved to %s dir'" \
                            % (ffile, destdir)
                    log_file(distDir, log_2)
                    file_mover(src, destdir, distlist, ffile)
                    progressing_bar(process_type)
                    bar.next()
            # File format exist in the list
            else:
                new_dir = making_dir(file_format.upper(), distDir)
                if new_dir != 'NFOD':
                    lower_ffile = ffile.lower()
                    if lower_ffile in distlist:
                        new_file = file_renamer(ffile, distlist, boolean)
                        os.rename(os.path.join(root, ffile),
                                  os.path.join(new_dir, new_file))
                        distlist.append(new_file)
                        progressing_bar(process_type)
                        bar.next()
                    else:
                        src = os.path.join(root, ffile)
                        file_mover(src, new_dir, distlist, ffile)
                        progressing_bar(process_type)
                        bar.next()
                else:
                    pass

    bar.finish()
    destination_file_list(distDir, distlist)
    print(Fore.RED + "WARNING : check the error log file in (%s) as (%s)." % (
        distDir, "log.txt"))
    print(Fore.BLUE + "You can find the list of files without an extension in (%s) as (%s)"
          % (distDir, "noneFileType.txt"))
    print(Fore.BLUE + "You can find the list of files of the source directory in (%s) as (%s)"
          % (distDir, "sourceFileList.txt"))
    print(Fore.BLUE + "You can find the list of all files moved to the destination directory in (%s) as (%s)"
          % (distDir, "copiedOrMovedFileList.txt"))
    time.sleep(20)
Example #29
def copy_file_from_directories(sourceDir, distDir):
    # like move_file_from_directories, but copies files instead of moving them
    distlist = []
    boolean = True
    process_type = 'Copy'
    counter = bar_file_counter(sourceDir)
    bar = FillingSquaresBar('Processing', max=counter)
    for root, dir, files in os.walk(sourceDir):
        for ffile in files:
            file_format = file_format_finder(root, ffile, distDir)
            #  Check for existing of a file in destination folder
            if file_format == False or file_format == 1:
                lower_ffile = ffile.lower()
                if lower_ffile in distlist:
                    boolean = False
                    new_file = file_renamer(ffile, distlist, boolean)
                    destdir = distDir + '/OTHER'
                    os.rename(os.path.join(root, ffile),
                              os.path.join(root, new_file))
                    src = os.path.join(root, new_file)
                    file_copier(src, destdir, distlist, new_file, distDir)
                    log_1 = "'I couldn't recognize the file:=> %s <= type so it is copied to %s dir as %s'" \
                            % (ffile, destdir, new_file)
                    log_file(distDir, log_1)
                    progressing_bar(process_type)
                    bar.next()
                else:
                    src = os.path.join(root, ffile)
                    destdir = distDir + '/OTHER'
                    log_2 = "'I couldn't recognize the file:=> %s <=type so it will been copied to %s dir'" \
                            % (ffile, destdir)
                    log_file(distDir, log_2)
                    file_copier(src, destdir, distlist, ffile, distDir)
                    progressing_bar(process_type)
                    bar.next()
            else:
                new_dir = making_dir(file_format.upper(), distDir)
                if new_dir != 'NFOD':
                    lower_ffile = ffile.lower()
                    if lower_ffile in distlist:
                        new_file = file_renamer(ffile, distlist, boolean)
                        os.rename(os.path.join(root, ffile),
                                  os.path.join(root, new_file))
                        src = os.path.join(root, new_file)
                        file_copier(src, new_dir, distlist, new_file, distDir)
                        progressing_bar(process_type)
                        bar.next()
                    else:
                        src = os.path.join(root, ffile)
                        file_copier(src, new_dir, distlist, ffile, distDir)
                        progressing_bar(process_type)
                        bar.next()
                else:
                    pass

    bar.finish()
    destination_file_list(distDir, distlist)
    print(Fore.RED + "WARNING : check the error log file in (%s) as (%s)." % (
        distDir, "log.txt"))
    print(Fore.BLUE + "You can find the list of files without an extension in (%s) as (%s)"
          % (distDir, "noneFileType.txt"))
    print(Fore.BLUE + "You can find the list of files of the source directory in (%s) as (%s)"
          % (distDir, "sourceFileList.txt"))
    print(Fore.BLUE + "You can find the list of all files copied to the destination directory in (%s) as (%s)"
          % (distDir, "copiedOrMovedFileList.txt"))
    time.sleep(20)
                if len(e["acknowledges"]) > 0:
                    sheet.cell(row=row, column=10).value = time.strftime(
                        "%d/%m/%Y %H:%M:%S",
                        time.localtime(int(e["acknowledges"][0]["clock"])),
                    )
                    sheet.cell(
                        row=row,
                        column=11).value = e["acknowledges"][0]["message"]
                    if len(e["acknowledges"][0]["message"]) > Col_Message:
                        Col_Message = len(e["acknowledges"][0]["message"])

            row += 1
            eventos += 1
            bar.next()

bar.finish()
area = sheet.dimensions
sheet.auto_filter.ref = area
sheet.column_dimensions["A"].width = 18
sheet.column_dimensions["B"].width = 18
sheet.column_dimensions["C"].width = Col_Host + 3
sheet.column_dimensions["D"].width = Col_Trigger
sheet.column_dimensions["E"].width = 15
sheet.column_dimensions["F"].width = 15
sheet.column_dimensions["G"].width = Col_Grupo + 3
sheet.column_dimensions["H"].width = Col_App + 1
sheet.column_dimensions["I"].width = 7
sheet.column_dimensions["J"].width = 18
sheet.column_dimensions["K"].width = Col_Message
sheet.freeze_panes = "A2"