Example No. 1
def calculate_entry_histograms( plots, chain ) :
    ##assert canvas is not None, "Canvas must be specified in calculate_histograms"
    # setup our 2d histos
    histos = []
    chi2histos = []
    for p in plots :
        entryhisto, chi2histo = initialize_histo( p )
        histos.append(entryhisto)
        chi2histos.append(chi2histo)

    nentries = chain.GetEntries()
    prog = ProgressBar(0, nentries+1, 77, mode='fixed', char='#')
    for entry in range(0,nentries+1) :
        prog.increment_amount()
        print prog,'\r',
        stdout.flush()
        chain.GetEntry(entry)
        for h, c, plot in zip( histos, chi2histos, plots ) :
            indices = plot.get_indices()
            vals = [ chain.treeVars["predictions"][ index ] for index in indices ]
            nbins = plot.bins
            ibin = h.FindBin(*vals)
            max_bin = h.FindBin(*plot.max_vals)
            if ibin != 0 and ibin < max_bin :
                chi2 = chain.treeVars["predictions"][0]
                if chi2 < c.GetBinContent(ibin) :
                    c.SetBinContent(ibin, chi2)
                    h.SetBinContent(ibin, entry)

    print
    return histos
Example No. 2
def integrate(F,t,y,tEnd,h, **opt):
    def run_kut4(F,t,y,h):
        K0 = h*F(t,y)
        K1 = h*F(t + h/2.0, y + K0/2.0)
        K2 = h*F(t + h/2.0, y + K1/2.0)
        K3 = h*F(t + h, y + K2)
        return (K0 + 2.0*K1 + 2.0*K2 + K3)/6.0

    step = 0
    initialStep = 0
    if opt.get("step"):  # avoid a KeyError when "step" is not passed
        initialStep = opt["step"]

    T = [t]
    Y = [y]
    progress = ProgressBar('Runge Kutta 4', t, tEnd)
    while t < tEnd:
        y = y + run_kut4(F,t,y,h)
        t = t + h
        if initialStep == 0 or t >= step:
            T.append(t)
            Y.append(y)
            step += initialStep

        progress.setValue(t)
    return array(T), transpose(array(Y))
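A minimal usage sketch (not from the source): integrating y' = -y with the function above, assuming `array` and `transpose` come from NumPy and that a `ProgressBar(label, start, end)` class with a `setValue()` method is available as used by `integrate()`.

from numpy import array, transpose  # integrate() above relies on these names

def F(t, y):
    return -y  # y' = -y, exact solution y(t) = y0 * exp(-t)

# Record a point every 0.5 time units via the `step` option.
T, Y = integrate(F, 0.0, array([1.0]), 5.0, 0.01, step=0.5)
print(T[-1], Y[0][-1])  # expect t ~ 5.0 and y ~ exp(-5) ~ 0.0067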
Example No. 3
    def download_file(self, file_name, sub_url):
        """ 传入字幕页面链接, 字幕包标题, 返回压缩包类型,压缩包字节数据 """

        s = requests.session()
        r = s.get(sub_url, headers=self.headers)
        bs_obj = BeautifulSoup(r.text, 'html.parser')
        a = bs_obj.find('div', {'class': 'subtitle-links'}).a
        download_link = a.attrs['href']

        try:
            with closing(requests.get(download_link, stream=True)) as response:
                chunk_size = 1024  # maximum bytes per chunk
                # total size of the response body
                content_size = int(response.headers['content-length'])
                bar = ProgressBar(prefix + ' Get', file_name.strip(),
                                  content_size)
                sub_data_bytes = b''
                for data in response.iter_content(chunk_size=chunk_size):
                    sub_data_bytes += data
                    bar.refresh(len(sub_data_bytes))
            # sub_data_bytes = requests.get(download_link, timeout=10).content
        except requests.Timeout:
            return None, None
        if 'rar' in download_link:
            datatype = '.rar'
        elif 'zip' in download_link:
            datatype = '.zip'
        elif '7z' in download_link:
            datatype = '.7z'
        else:
            datatype = 'Unknown'

        return datatype, sub_data_bytes
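A hypothetical caller sketch (`downloader`, `file_name`, and `sub_url` are assumed names, not from the source) showing how the `(datatype, bytes)` pair returned above might be consumed:

datatype, sub_data = downloader.download_file(file_name, sub_url)
if sub_data is None:
    print('download timed out')
elif datatype == 'Unknown':
    print('unrecognized archive type')
else:
    # Save the archive next to the video using the detected extension.
    with open(file_name + datatype, 'wb') as f:
        f.write(sub_data)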
Example No. 4
	def delete_files(self, file_list ):
		'''delete files. The filenames are supplied as a list'''
		
		
		count = 0
		total = len(file_list)
		if self.verbose: print "\nTrying to delete " + str(total) + " files"
		prog = ProgressBar(count, total, 77, mode='fixed', char='#')
		
		before = time.time()
		for file_name in file_list:
			ret = self.delete_file( file_name )
			
			if self.verbose:
				count += 1
				prog.increment_amount()
				print prog, '\r',
				sys.stdout.flush()

			
			if ret == False:
				print "Failed to delete file " + str( file_name )
				return False
		print

		after = time.time()
		self.profiling_printer("Deleting " + str(len(file_list)) + " files", after - before)
		return True
Example No. 5
    def download_file(self, file_name, download_link):

        try:
            with closing(requests.get(download_link, stream=True)) as response:
                filename = response.headers['Content-Disposition']
                chunk_size = 1024  # maximum bytes per chunk
                # total size of the response body
                content_size = int(response.headers['content-length'])
                bar = ProgressBar(prefix + ' Get',
                                  file_name.strip(), content_size)
                sub_data_bytes = b''
                for data in response.iter_content(chunk_size=chunk_size):
                    sub_data_bytes += data
                    bar.refresh(len(sub_data_bytes))
        except requests.Timeout:
            return None, None  # match the two-value success return below
        if '.rar' in filename:
            datatype = '.rar'
        elif '.zip' in filename:
            datatype = '.zip'
        elif '.7z' in filename:
            datatype = '.7z'
        else:
            datatype = 'Unknown'

        with open('test.zip', 'wb') as f:
            f.write(sub_data_bytes)
        return datatype, sub_data_bytes
Example No. 6
    def compute_initial_allocation(self,disp_progress = True):

        self.to_assign = np.random.permutation(range(self.N)).tolist()

        if disp_progress:
            prog = ProgressBar(0, len(self.to_assign), 77, mode='fixed')
            oldprog = str(prog)

        while True:
            if disp_progress:
                #<--display progress
                prog.increment_amount()
                if oldprog != str(prog):
                    print prog, "\r",
                    sys.stdout.flush()
                    oldprog = str(prog)
                #-->

            i = self.to_assign.pop()
            dists = sdis.cdist(np.atleast_2d(self.X[i]),self.centroids,self.metric).ravel()
            sorted_centroids = np.argsort(dists)
            for c in sorted_centroids:
                if self.weight_per_cluster[c]+self.weights[i]<weight_limit:
                    self.clusters[c].append(i)
                    self.weight_per_cluster[c]+=self.weights[i]
                    break

            if not(self.to_assign):
                break
Example No. 7
def cuttree( oldname, newname, oldtree, branch, check ) :
    oldfile = r.TFile( oldname )
    oldtree = oldfile.Get( oldtree )
    nentries = oldtree.GetEntries()
    nTotVars = oldtree.GetLeaf( branch ).GetLen()  
    chi2data = array('d',[0]*nTotVars) 
    oldtree.SetBranchAddress( branch, chi2data )
    
    newfile = r.TFile( newname, "recreate" )
    newfile.cd()
    newtree = oldtree.CloneTree(0)

    prog = ProgressBar(0, nentries, 77, mode='fixed')
    oldprog = str(prog)

    for i in range(0,nentries) :
        prog.increment_amount()
        if oldprog != str(prog):
            print prog, "\r",
            stdout.flush()
            oldprog=str(prog)
        oldtree.GetEntry(i)
        if check( chi2data[0] ) :
            newtree.Fill()

    newtree.AutoSave()
     
    oldfile.Close()
    newfile.Close()
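For context, `check` receives the first leaf value of `branch` for each entry and decides whether the entry is copied. A hypothetical call (file, tree, and branch names are made up) might be:

import ROOT as r  # cuttree() above expects ROOT imported as `r`

# Keep only entries whose leading branch value passes a chi2 cut (illustrative names).
cuttree("scan.root", "scan_cut.root", "mcmc", "predictions",
        lambda chi2: chi2 < 100.0)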
Example No. 8
def scan_E_scale(E_range, scale_range):

    logger.init(foldername='scan E-Scale',prefix='Scan2D')
    logger.title('E, Scale, SuccessRate')    

    #bar = Progress('Main Scan',on=True)
    bar = ProgressBar()
    
    total = len(E_range)*len(scale_range)  # renamed from `all`, which shadowed the builtin
    i = 1

    for scale in scale_range:
        for E in E_range:       
            
            bar.log( float(i)/total )  # float() guards against integer division
            success_rate = tile_implementation(pick=[3,5],
                                sample=True,
                                train_sample=3000,
                                test_sample=1000,
                                standardize='normalize',
                                w_dist_type='normal',
                                w_dist_param=(0, scale),
                                b_dist_type= 'uniform',
                                b_dist_param=(0, math.pi),
                                E = E
                            )
            i+=1
            data = (E, scale, success_rate)
            logger.log(data)

    logger.finalize()
Example No. 9
 def solve_5(self, period, N):
     result = np.identity(N, dtype=np.complex)
     I = np.identity(N, dtype=np.complex)
     He = self.matrix_electric
     Hs = self.matrix_static
     prog = ProgressBar(0, self.fine_step / 6, 50, mode='fixed', char='#')
     # pure-Python version: Runge-Kutta-Fehlberg 4(5) propagation of the evolution operator
     for i in xrange(0, self.fine_step, 6):
         prog.increment_amount()
         print prog, '\r',
         sys.stdout.flush()
         k1 = (He * self.E_arr[period][i] + Hs)
         tmp = He * self.E_arr[period][i + 1] + Hs
         k2 = np.dot(tmp, I + k1 * 0.25 * self.dt)
         tmp = He * self.E_arr[period][i + 2] + Hs
         k3 = np.dot(tmp, I + (k1 * 3.0 / 32.0 + k2 * 9.0 / 32.0) * self.dt)
         tmp = He * self.E_arr[period][i + 3] + Hs
         k4 = np.dot(
             tmp, I +
             (k1 * 1932.0 - k2 * 7200.0 + k3 * 7296.0) * self.dt / 2197.0)
         tmp = He * self.E_arr[period][i + 4] + Hs
         k5 = np.dot(
             tmp, I + (k1 * 439.0 / 216.0 - k2 * 8.0 + k3 * 3680.0 / 513.0 -
                       k4 * 845.0 / 4104.0) * self.dt)
         tmp = He * self.E_arr[period][i + 5] + Hs
         k6 = np.dot(
             tmp,
             I + (-1.0 * k1 * 8.0 / 27.0 - k2 * 2.0 - k3 * 3544.0 / 2565.0 +
                  k4 * 1859.0 / 4104.0 - k5 * 11.0 / 40.0) * self.dt)
         result = np.dot(
             I + (k1 * 16.0 / 135.0 + k3 * 6656.0 / 12825.0 + k4 * 28561.0 /
                  56430.0 - k5 * 9.0 / 50.0 + k6 * 2.0 / 55.0) * self.dt,
             result)
     return result
Example No. 10
def _serial_distance_matrix(fnames, compression_fn, pairing_fn, **kwargs):
  """Serial calculation for distance matrix."""
  sys.stderr.write('Compressing individual files...\n')
  progress_bar = ProgressBar(len(fnames))

  def update_progress(fname):
    x = compression_fn(fname)
    progress_bar.increment()
    return x
  compressed_sizes = map(update_progress, fnames)

  zip_size = dict(zip(fnames, compressed_sizes))

  sys.stderr.write('\nCompressing file pairs...\n')
  file_pairs = [(fname1, fname2)
      for fname1 in fnames
      for fname2 in fnames
      if fname1 < fname2]
  progress_bar = ProgressBar(len(file_pairs))

  def update_progress(pair):
    fname1, fname2 = pair
    ncd_result = ncd(compression_fn, pairing_fn, fname1, fname2,
        (zip_size[fname1], zip_size[fname2]), **kwargs)
    progress_bar.increment()
    return ncd_result
  ncd_results = map(update_progress, file_pairs)

  sys.stderr.write('\n')
  return ncd_results
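For context, `ncd()` here presumably computes the normalized compression distance (Cilibrasi & Vitanyi): NCD(x, y) = (C(xy) - min(C(x), C(y))) / max(C(x), C(y)), where C is a compressed size. A minimal self-contained sketch using zlib, independent of the `compression_fn`/`pairing_fn` machinery above:

import zlib

def ncd_bytes(x, y):
    # Normalized compression distance for two byte strings.
    # Near 0: the inputs compress well together (similar);
    # near 1: they share little structure.
    cx = len(zlib.compress(x))
    cy = len(zlib.compress(y))
    cxy = len(zlib.compress(x + y))
    return (cxy - min(cx, cy)) / max(cx, cy)

print(ncd_bytes(b"abcabcabc" * 100, b"xyzxyzxyz" * 100))  # dissimilar inputs, closer to 1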
Example No. 11
 def __init__(self,
              input_options=[],
              output_options=[],
              quiet=False,
              max_progress=100,
              has_audio=False,
              has_video=False,
              process_audio=True,
              process_video=True,
              audio_codec="pcm_s16le",
              video_codec="h264"):
     super().__init__(input_options, output_options)
     self.progress = ProgressBar(max_value=max_progress, quiet=quiet)
     self.has_video = has_video
     self.process_video = process_video
     self.video_codec = video_codec
     if (self.has_video):
         self.prepend_output_options(["-map", "[vconc]"])
         if (self.process_video):
             self.prepend_output_options(["-pix_fmt", "yuv420p"])
         self.prepend_output_options(["-codec:v", self.video_codec])
     self.has_audio = has_audio
     self.process_audio = process_audio
     self.audio_codec = audio_codec
     if (self.has_audio):
         if (self.process_audio):
             self.prepend_output_options(["-ac", "1", "-map", "[anorm]"])
         else:
             self.prepend_output_options(["-map", "[aconc]"])
         self.prepend_output_options(["-codec:a", self.audio_codec])
     self.filters = []
Example No. 12
def fill_and_save_data_hists( mcf, plots,entry_hists, modes, contribs,predicts ) :
    axes = [ "X", "Y", "Z" ]
    chain = MCAnalysisChain( mcf )
    nentries = chain.GetEntries()
    
    KOhack=KOhack_class(mcf)
    for p , h in zip(plots,entry_hists) :
#############################################
        if check_entry_KO_hack(p,KOhack):
            KOhack.init_hack(p)
#############################################
            
        histo_cont = {}
        contrib_cont = {}
        predict_cont = {}

        print p.short_names
        firstbin, lastbin = get_histogram_bin_range(h,space=p)
        for mode in modes :
            # here need to add in check on contrib and make one for each contribution
            hname = histo_name( p.short_names, entry_histo_prefix )+ "_" + mode
            histo_cont[mode] =initialize_histo( p,hname,data =True ) 
            base_val = 1e9
            if mode == "pval" :
                base_val = 0.0
            for bin in range( firstbin, lastbin + 1 ) :
                histo_cont[mode].SetBinContent( bin, base_val )

        for c in contribs : # contribs is a list of Contribution objects
            hname = histo_name( p.short_names, entry_histo_prefix )+ "_dX_" + c.short_name
            contrib_cont[c.short_name] = initialize_histo( p,hname,data =True )
            for bin in range( firstbin, lastbin + 1 ) :
                contrib_cont[c.short_name].SetBinContent( bin, 0.0 )

        for pred in predicts : # predicts is a list of Contribution objects
            hname = histo_name( p.short_names, entry_histo_prefix )+ "_pred_" + pred.short_name
            predict_cont[pred.short_name] = initialize_histo( p,hname,data =True )
            for bin in range( firstbin, lastbin + 1 ) :
                predict_cont[pred.short_name].SetBinContent( bin, 0.0 )

        prog = ProgressBar(0, (lastbin-firstbin)+1, 77, mode='fixed', char='#')
        for i in range( firstbin, lastbin+1 ) :
            prog.increment_amount()
            print prog,'\r',
            stdout.flush()
            entry = int( h.GetBinContent(i) )
            if entry > 0 :
                chain.GetEntry(entry)
#############################################
                if check_entry_KO_hack(p,KOhack): 
                     KOhack.set_ssi_bin_centre(h,i)
#############################################
                fill_bins( histo_cont, contrib_cont,predict_cont, contribs,predicts  , i, chain, mcf, KOhack )
        perform_zero_offset( histo_cont["dchi"],space=p )
        print
        save_hdict_to_root_file( histo_cont,  mcf.FileName, mcf.DataDirectory)
        save_hdict_to_root_file( contrib_cont, mcf.FileName, mcf.DataDirectory)
        save_hdict_to_root_file( predict_cont, mcf.FileName, mcf.DataDirectory)
Example No. 13
def _annotate_frame(video: VideoTk, y_coordinate: float, lane_distances: list,
                    progress_bar: ProgressBar) -> None:
    if y_coordinate < 0.0:
        y_coordinate = 0.0
    elif y_coordinate > 1.0:
        y_coordinate = 1.0
    y_coordinate = round(y_coordinate, 2)
    print("[", video.current.index, "]: ", "height = ", y_coordinate)
    progress_bar.add_progress(video.current.index)
Example No. 14
 def __init__(self, min, max, width=0, value=None):
     self._width = None
     self._term_width = None
     self._oldsignal = None
     self._progress_bar = ProgressBar(min, max, min)
     if value is not None:
         self._progress_bar.set_value(value)
      # a width of zero means the bar should stretch across the whole window;
      # when the window is resized, the bar adjusts to the new size
     self.set_width(width)
Example No. 15
def progbar(ip):
    r = redis.Redis(ip)
    initial = r.llen('blogs')
    prog = ProgressBar(0, initial, 77, mode='fixed', char='#')
    while True:
        prog.update_amount(initial - r.llen('blogs'))
        print prog, '\r',
        sys.stdout.flush()
        if initial - r.llen('blogs') == initial:
            break
    return
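The loop above polls the length of a Redis list as fast as it can; a gentler variant (an assumption, not from the source: it presumes workers elsewhere are popping items off 'blogs') would sleep between polls:

import time
import redis

def progbar_polite(ip, poll_interval=0.5):
    # Same idea as progbar() above, but avoids hammering the server.
    r = redis.Redis(ip)
    initial = r.llen('blogs')
    while True:
        remaining = r.llen('blogs')
        print('%d/%d done' % (initial - remaining, initial), end='\r')
        if remaining == 0:
            break
        time.sleep(poll_interval)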
Example No. 16
    def sweep_multiprocessing(self,sweep_n,start,end,points,filename='./test.txt'):
        """
        nu[sweep_n] is sweeped.
        Sweep the frequency and output the result to filename.
        """
        ###############################
        ##multiprocessing preparation
        ##############################
        core = 10
        points = points//core*core  # round points down to a multiple of the worker count
        self.result = [[0.0 for i in range(self.n+1)] for j in range(points)]  # result matrix; saved to file later
        job = self.allocate_job(start,end,points,core)

        
        ################################
        ##This are codes for progress bar
        ###############################
        prog = ProgressBar(0, points, 50, mode='fixed', char='#')
        ##the linear algebra start here
        a = np.zeros(self.N)
        a[self.N-1] = 1 #1 because rho_11+rho_22 ... =1
        a = np.matrix(a)
        a = a.T

        done_queue = multiprocessing.Queue()
        process_list = []
        for x in range(core):
            process_list.append(multiprocessing.Process(target = sweep_mp,args = (job[x],self.system,self.nu2,a,self.add_freq,self.index,sweep_n,self.n,done_queue)))

        tStart = time.time()
        print 'start'
        for p in process_list:
            p.start()

        stop_num = 0
        while stop_num != core:
            msg = done_queue.get()  # renamed from `a`, which shadowed the vector above
            if msg == 'STOP':
                stop_num += 1
            else:
                self.result[msg[0]] = msg[1]
                prog.increment_amount()
                print prog, '\r',
                sys.stdout.flush()

        print '\n'
        for p in process_list:
            p.join()
            print "%s.exitcode = %s" %(p.name, p.exitcode)

        tStop = time.time()
        print"spend",(tStop - tStart),"second"
            
        self.sweep_save_file(filename,points)
Example No. 17
    def _generate_AX(self):
        self.log('Creating features and adjacency matrices..')
        pr = ProgressBar(60, len(self.data))

        data = []
        smiles = []
        data_S = []
        data_A = []
        data_X = []
        data_D = []
        data_F = []
        data_Le = []
        data_Lv = []

        max_length = max(mol.GetNumAtoms() for mol in self.data)
        max_length_s = max(len(Chem.MolToSmiles(mol)) for mol in self.data)

        for i, mol in enumerate(self.data):
            A = self._genA(mol, connected=True, max_length=max_length)
            if A is not None:
                D = np.count_nonzero(A, -1)  # degree vector; compute only once A is known to exist
                data.append(mol)
                smiles.append(Chem.MolToSmiles(mol))
                data_S.append(self._genS(mol, max_length=max_length_s))
                data_A.append(A)
                data_X.append(self._genX(mol, max_length=max_length))
                data_D.append(D)
                data_F.append(self._genF(mol, max_length=max_length))

                L = np.diag(D) - A
                Le, Lv = np.linalg.eigh(L)

                data_Le.append(Le)
                data_Lv.append(Lv)

            pr.update(i + 1)

        self.log(date=False)
        self.log(
            'Created {} features and adjacency matrices out of {} molecules!'.
            format(len(data), len(self.data)))

        self.data = data
        self.smiles = smiles
        self.data_S = data_S
        self.data_A = data_A
        self.data_X = data_X
        self.data_D = data_D
        self.data_F = data_F
        self.data_Le = data_Le
        self.data_Lv = data_Lv
        self.__len = len(self.data)
Example No. 18
def process_frame_segments(args, segments, width, height):
    """Post-process frame segments to set frame images, etc."""
    fn = "process_frame_segments"
    globals.log.info("Processing frames...")
    frame_segments = [s for s in segments if isinstance(s, FrameSegment)]
    n = len(frame_segments)
    globals.log.debug("{fn}(): num frames = {n}".format(fn=fn, n=n))
    progress = ProgressBar(max_value=n,
                           quiet=args.quiet or args.debug or n == 0)
    progress.update(0)
    for i, f in enumerate(frame_segments):
        try:
            globals.log.debug(
                "{fn}(): frame (before) = {b}".format(fn=fn, b=f))
            # Frame segments that use a frame from the previous segment.
            if (f.input_file == "^"):
                if (f.segment_number > 0):
                    prev = segments[f.segment_number - 1]
                    globals.log.debug(
                        "{fn}(): prev = {p}".format(fn=fn, p=prev))
                    prev.generate_temp_file(args.output, width=width,
                                            height=height)
                    f.use_frame(
                        prev.generate_frame(f.frame_number, args.output,
                                            width=width, height=height))
                else:
                    globals.log.error(
                        "frame segment {s} is attempting to use the last "
                        "frame of a non-existent previous "
                        "segment".format(s=f.segment_number))
                    sys.exit(1)
            # Frame segments whose frame comes from a PDF file.
            else:
                suffix = PurePath(f.input_file).suffix
                if (suffix.lower() == ".pdf"):
                    f.use_frame(f.generate_temp_file(args.output, width=width,
                                            height=height))
                else:
                    globals.log.error(
                        'unexpected input file type "{s}" for frame segment '
                        "{f}".format(s=suffix, f=f.segment_number))
                    sys.exit(1)
            progress.update(i)
            globals.log.debug("{fn}(): frame (after) = ""{a}".format(fn=fn, a=f))
        except SegmentError as e:
            progress.finish()
            globals.log.exception(e)
            sys.exit(1)
    else:
        progress.finish()
Example No. 19
    def _generate_AX(self):
        self.log('Creating features and adjacency matrices..')
        pr = ProgressBar(60, len(self.data))

        data_ax = []
        smiles = []
        data_s = []
        data_a = []
        data_x = []
        data_d = []
        data_f = []
        data_le = []
        data_lv = []

        max_length = max(mol.GetNumAtoms() for mol in self.data)
        max_length_s = max(len(Chem.MolToSmiles(mol)) for mol in self.data)

        for i, mol in enumerate(self.data):
            a = self._genA(mol, connected=True, max_length=max_length)
            if a is not None:
                d = np.count_nonzero(a, -1)  # degree vector; compute only once `a` is known to exist
                data_ax.append(mol)
                smiles.append(Chem.MolToSmiles(mol))
                data_s.append(self._genS(mol, max_length=max_length_s))
                data_a.append(a)
                data_x.append(self._genX(mol, max_length=max_length))
                data_d.append(d)
                data_f.append(self._genF(mol, max_length=max_length))

                le, lv = np.linalg.eigh(np.diag(d) - a)  # graph Laplacian L = diag(D) - A, as in the variant above

                data_le.append(le)
                data_lv.append(lv)

            pr.update(i + 1)

        self.log(date=False)
        self.log('Created {} features and adjacency matrices out of {} molecules!'.format(len(data_ax),
                                                                                           len(self.data)))

        self.data = data_ax
        self.smiles = smiles
        self.data_S = data_s
        self.data_A = data_a
        self.data_X = data_x
        self.data_D = data_d
        self.data_F = data_f
        self.data_Le = data_le
        self.data_Lv = data_lv
        self.__len = len(self.data)
Example No. 20
class ProgressDialog(Pmw.Dialog):
    def __init__(self, parent=None, **args):
        th = args.pop('toplevel_height', 200)
        tw = args.pop('toplevel_width', 400)
        ac = args.pop('activatecommand', None)
        self.no_bar = args.pop('nobar', False)
    
        Pmw.Dialog.__init__(self, parent, buttons=[],
                            activatecommand = ac)
        self.component('hull').overrideredirect(1)
        w = self.interior()
        w.pack_propagate(0)
        w.configure(bd=5, relief='raised', height=th, width=tw)
        self.text = args.pop('text', 'Progress bar')
        self.text_var = Tkinter.StringVar()
        self.text_var.set(self.text)
        Tkinter.Label(w, textvariable=self.text_var).pack(fill='y', expand=1)
        if not self.no_bar:
            self.pb = ProgressBar(w, **args)
            self.pb.pack(side='top', expand=1, anchor='n')
            self.counter = 0
            self.pb.updateProgress(self.counter)

    def incr(self):
        self.counter += 1
        self.pb.updateProgress(self.counter)

    def set(self, value, max=None):
        self.counter = value
        self.pb.updateProgress(self.counter, max)

    def message(self, text):
        self.text = text
        self.text_var.set(self.text)
Example No. 21
    def createDemand(self):
        def determineStartingInterval():
            start_interval = P_constrain(np.random.normal(), -3.0, 3.0)
            start_interval = P_map(start_interval, -3.0, 3.0, 15 * 60, self.MAX_INTERVAL - self.MAX_DURATION - 1)
            start_interval = int(P_constrain(start_interval, 0, self.MAX_INTERVAL - self.MAX_DURATION - 1))
            return start_interval

        def determineSkillAssignments():
            SKILLS = ['LEVEL_1', 'LEVEL_2', 'LEVEL_3']
            skill = random.choice(SKILLS)
            return skill

        def determineDuration():
            return int(P_map(P_constrain(np.random.normal(), -3.0, 3.0), -3.0, 3.0, 5, self.MAX_DURATION))

        print('Creating demand list...')
        toolbar_width = 40
        pb = ProgressBar(toolbar_width=toolbar_width)
        for x_ in range(self.NUM_CALLS):
            pb.update(x_, self.NUM_CALLS)

            start_interval = determineStartingInterval()
            duration = determineDuration()
            skill = determineSkillAssignments()

            self.addDemand({'id': x_ + 10, 'interval': start_interval, 'duration': duration, 'skill': skill})

        pb.update(1, 1)
        pb.clean()
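`P_constrain` and `P_map` are not defined in this excerpt; by analogy with Processing's `constrain()`/`map()`, plausible implementations (an assumption, not taken from the source) would be:

def P_constrain(value, low, high):
    # Clamp value to the closed interval [low, high] (Processing-style constrain).
    return max(low, min(high, value))

def P_map(value, in_low, in_high, out_low, out_high):
    # Linearly remap value from [in_low, in_high] to [out_low, out_high]
    # (Processing-style map); no clamping is applied.
    return out_low + (value - in_low) * (out_high - out_low) / (in_high - in_low)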
Example No. 22
def generate_hierarchy(event_list, stack_dict):
    prog = ProgressBar(0,
                       len(event_list),
                       77,
                       mode='fixed',
                       char='#',
                       autoprint=True)
    #create a hierarchal track of the allocation event
    root = CallStatistics(0)

    #create a heap to track live allocated blocks and current memory status overall
    heap = Heap()
    for event in event_list:
        #    print event.text()

        # if the event is a free then we need to find out the size of the block we are freeing
        # @todo this could be part of the event stream when using debug CRT but probably not for release builds?
        if event.event_type == Event.eFree:
            size_free = heap.block_size(event.pData)
        elif event.event_type == Event.eRealloc:
            size_free = heap.block_size(event.pDataPrev)
        else:
            size_free = 0

        if event.event_type == Event.eAlloc or event.event_type == Event.eRealloc:
            size_alloc = event.allocSize
        else:
            size_alloc = 0

        # only now that we have our size information on potential free blocks do we
        # let heap track event (since the block in question might get freed)
        heap.track(event)

        # start at the tree root and walk each stack call in this event, tallying the event with this call and its callees
        stat_obj = root
        # start at the first call and walk down until we hit the desired leaf
        stack = stack_dict[event.stack_id]
        for index in range(len(stack.calls) - 1, stack.leaf_index - 2, -1):
            call = stack.calls[index]
            child_obj = stat_obj.add_child_call(call)
            # add stats for the child to the parent
            stat_obj.sample_child_event(size_alloc, size_free)
            if index == stack.leaf_index - 1:
                # add leaf stats on the child leaf node
                child_obj.sample_self_event(size_alloc, size_free)
            stat_obj = child_obj  # step down the tree for next call
        prog.increment_amount()
    print "\n\n"
    return root
Example No. 23
class ProgressBarTestCase(unittest.TestCase):
    """Test the ProgressBar class."""

    DEFAULT_VALUE = 0
    DEFAULT_MAX_VALUE = 100
    DEFAULT_PRINT_WIDTH = 50
    DEFAULT_NEWLINE = "\r"

    def setUp(self):
        """Set up for test."""
        self.bar = ProgressBar(quiet=True)

    def tearDown(self):
        """Clean up after test."""
        self.bar = None

    def test_defaults(self):
        """Test default values are correct."""
        test_data = (
            (self.bar.value, self.DEFAULT_VALUE,
             "default value = {v}".format(v=self.DEFAULT_VALUE)),
            (self.bar.initial_value, self.DEFAULT_VALUE,
             "default initial value = {v}".format(v=self.DEFAULT_VALUE)),
            (self.bar.max_value, self.DEFAULT_MAX_VALUE,
             "default max value = {v}".format(v=self.DEFAULT_MAX_VALUE)),
            (self.bar.print_width, self.DEFAULT_PRINT_WIDTH,
             "default print width = {v}".format(v=self.DEFAULT_PRINT_WIDTH)),
            (self.bar.newline, self.DEFAULT_NEWLINE,
             "default newline = {v}".format(v=self.DEFAULT_NEWLINE)),
        )
        for actual, expected, description in test_data:
            with self.subTest(msg=description):
                self.assertEqual(actual, expected)

    def test_set_and_reset(self):
        """Test setting and resetting the current value."""
        self.bar.set(value=50)
        self.assertEqual(self.bar.value, 50)
        self.bar.reset()
        self.assertEqual(self.bar.value, 0)

    def test_update_and_draw(self):
        """Test updating and drawing the progress bar."""
        self.bar.reset()
        self.bar.quiet = False
        for i in range(self.bar.initial_value, self.bar.max_value + 1):
            percent = int(i * 100 / self.bar.max_value)
            dots = int(i * self.bar.print_width / self.bar.max_value)
            expected_bar = "{nl}[{c}{nc}] {p}% ".format(
                c="".join(["+"] * dots),
                nc="".join(["."] * (self.bar.print_width - dots)),
                p=percent,
                nl=self.bar.newline)
            with captured_output() as (out, _):
                self.bar.update(i)
            with self.subTest(msg="value = {v}".format(v=i)):
                self.assertEqual(self.bar.value, i)
            with self.subTest(msg="output = {v}".format(v=expected_bar)):
                self.assertEqual(out.getvalue(), expected_bar)
Example No. 24
def main():
    a = get_args()
    tempdir = os.path.join(a.out_dir, 'a')
    os.makedirs(tempdir, exist_ok=True)

    ptfiles = file_list(a.in_dir, 'pt')

    ptest = torch.load(ptfiles[0])
    if isinstance(ptest, list): ptest = ptest[0]
    shape = [*ptest.shape[:3], (ptest.shape[3] - 1) * 2]

    vsteps = int(a.length * 25 /
                 len(ptfiles)) if a.steps is None else a.steps  # 25 fps
    pbar = ProgressBar(vsteps * len(ptfiles))
    for px in range(len(ptfiles)):
        params1 = read_pt(ptfiles[px])
        params2 = read_pt(ptfiles[(px + 1) % len(ptfiles)])

        params, image_f, _ = fft_image(shape, resume=params1)
        image_f = to_valid_rgb(image_f)

        for i in range(vsteps):
            with torch.no_grad():
                img = image_f(
                    (params2 - params1) *
                    math.sin(1.5708 * i / vsteps)**2)[0].permute(1, 2, 0)
                img = torch.clip(img * 255, 0,
                                 255).cpu().numpy().astype(np.uint8)
            imsave(os.path.join(tempdir, '%05d.jpg' % (px * vsteps + i)), img)
            if a.verbose is True: cvshow(img)
            pbar.upd()

    os.system('ffmpeg -v warning -y -i %s/\%%05d.jpg "%s-pts.mp4"' %
              (tempdir, a.in_dir))
Example No. 25
def get_progress_bar_iterator(iterator,
                              every=None,
                              size=None,
                              labeling_fun=_default_labeling_fun,
                              display=True,
                              hide_bar_on_success=False,
                              hide_on_success=False):
    if size is None:
        try:
            size = len(iterator)
        except (AttributeError, TypeError):
            pass

    pb = ProgressBar(every=every,
                     size=size,
                     labeling_fun=labeling_fun,
                     display=display)

    def _iter():
        try:
            for item in iterator:
                yield item
                pb.increase()
            pb.stop(True)
            if hide_bar_on_success:
                pb.hide_bar()
            if hide_on_success:
                pb.hide()
        except:
            pb.stop(False)
            raise
        finally:
            pb.update()

    return pb, _iter()
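A hypothetical usage sketch: wrap any iterable so the bar advances as items are consumed. It assumes the same `ProgressBar(every=..., size=..., labeling_fun=..., display=...)` class used above; only the wrapping pattern is illustrated.

items = range(1000)
pb, it = get_progress_bar_iterator(items, every=100)
total = 0
for x in it:
    total += x  # the generator increments the bar, then stops it on success or failure
print(total)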
Example No. 26
 def create_pdf_documents(self):
     pool: multiprocessing.Pool = multiprocessing.Pool()
     results: list = list()
     print('\nCreating PDF...')
     progress: ProgressBar = ProgressBar(len(self.barcodes_dict.keys()))
     progress.show()
     for key in self.barcodes_dict.keys():
         if config.create_title_page and key != 'barcodes':
             title = key  # the original `str or None` annotation just evaluates to str
         else:
             title = None
         staging_folders: str = ''
         for i in range(1, config.num_grouping_characters):
             staging_folders += f'{key[0:i]}/'
         path: str = f'{config.pdf_documents_folder}/{staging_folders}{key}.pdf'
         folder_path: str = os.path.split(path)[0]
         if not os.path.exists(folder_path):
             os.makedirs(folder_path)
         results.append(
             pool.apply_async(self.create_pdf_document, (
                 self.barcodes_dict[key],
                 path,
                 title,
             )))
     for result in results:
         result.get(timeout=60)
         progress.inc()
         progress.show()
     self.barcodes_dict.clear()
Example No. 27
 def test_negative(self):
     bar = ProgressBar()
     self.assertRaises(
         ProgressBarNumberError,
         bar.load, 
         end=-1
     )
Example No. 28
 def test_number_string(self):
     bar = ProgressBar()
     self.assertRaises(
         ProgressBarIntegerNumberError,
         bar.load, 
         end='1'
     )
Example No. 29
def main():
    """A multi-thread tool to crop sub images."""
    input_folder = a.in_dir
    save_folder = a.out_dir
    n_thread = a.workers
    crop_size = a.size
    step = a.size // 2 if a.step is None else a.step
    min_step = a.size // 8

    os.makedirs(save_folder, exist_ok=True)

    images = img_list(input_folder, subdir=True)

    def update(arg):
        pbar.upd(arg)

    pbar = ProgressBar(len(images))

    pool = Pool(n_thread)
    for path in images:
        pool.apply_async(worker,
                         args=(path, save_folder, crop_size, step, min_step),
                         callback=update)
    pool.close()
    pool.join()
    print('All subprocesses done.')
Example No. 30
 def test_number_100(self):
     bar = ProgressBar()
     self.assertRaises(
         ProgressBarNumberError,
         bar.load, 
         end=101
     )
Example No. 31
def main():
    import numpy as np
    options = parse_options()
    os.makedirs(options.dir, exist_ok=True)
    config_logger(options.dir)

    with open(options.config, 'r') as stream:
        try:
            opt = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
            logging.exception(exc)
            exit(1)

    gt_dir = f"{options.dir}/original"
    data_dir = f"{options.dir}/spoiled"
    export_dir = f"{options.dir}/export"
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(gt_dir, exist_ok=True)
    os.makedirs(export_dir, exist_ok=True)

    seed = opt.get('seed', None)
    if not seed:
        seed = random.randint(0, 2**32 - 1)
        opt['seed'] = seed
        opt['n_workers'] = options.workers
    logging.info(f"Starting generation with seed {seed} ")
    n_workers = opt.get('n_workers', None)
    n_workers = n_workers or options.workers  # ensure determinism

    filler = [None for _ in range(n_workers - (options.size % n_workers))]
    vals = np.concatenate([np.arange(options.size),
                           filler]).reshape(-1, n_workers).transpose()
    vals = [[
        f'{options.out_name}_{val:04}' for val in values if val is not None
    ] for values in vals]
    processes = []

    random.seed(seed)
    pbar = ProgressBar(options.size)

    def update_pbar(item):
        pbar.update(f"Generated {item}")

    for i in range(n_workers):
        seed = random.randint(0, 2**32 - 1)
        generator = Generator(opt)
        spoiler = Spoiler()
        exporters = from_options(opt, export_dir)
        visitors = [spoiler] + exporters
        p = Process(target=gen_image_pool,
                    args=(generator, visitors, vals[i], options, seed,
                          update_pbar))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()

    with open(f"{options.dir}/config.yml", 'w') as f:
        yaml.dump(opt, f)
Example No. 32
 def test_numbers(self):
     bar = ProgressBar()
     self.assertRaises(
         ProgressBarHigherNumberError,
         bar.load, 
         start=10,
         end=0,
     )
Example No. 33
def build_callstack_detail_html(stack_dict):
    html = ""
    prog = ProgressBar(0,
                       len(stack_dict),
                       77,
                       mode='fixed',
                       char='#',
                       autoprint=True)
    stacks_sorted = sorted(stack_dict.values(),
                           key=operator.attrgetter('display_id'))
    for i, stack in enumerate(stacks_sorted):
        if MemTrack.call_stack_write_limit > 0 and i > MemTrack.call_stack_write_limit:
            break
        html += stack.html_table()
        html += "\n\n"
        prog.increment_amount()
    return html
Example No. 34
def _parallel_distance_matrix(fnames, compression_name, pairing_name,
                              **kwargs):
    """Parallel calculation of distance matrix."""
    num_cpus = mp.cpu_count()
    manager = mp.Manager()
    pool = mp.Pool(num_cpus)
    queue = manager.Queue(2 * num_cpus)

    sys.stderr.write('Compressing individual files...\n')
    progress_bar = ProgressBar(len(fnames))

    compression_args = [{
        'cname': compression_name,
        'fname': fname,
        'queue': queue,
        'kwargs': kwargs
    } for fname in fnames]

    async_result = pool.map_async(_parallel_compression_worker,
                                  compression_args)

    for _ in xrange(len(fnames)):
        queue.get(timeout=5)
        progress_bar.increment()

    compressed_sizes = async_result.get()

    zip_size = dict(zip(fnames, compressed_sizes))

    sys.stderr.write('\nCompressing file pairs...\n')
    file_pairs = [(fname1, fname2) for fname1 in fnames for fname2 in fnames
                  if fname1 < fname2]
    progress_bar = ProgressBar(len(file_pairs))

    ncd_args = [{
        'cname': compression_name,
        'pname': pairing_name,
        'f1': fname1,
        'f2': fname2,
        'queue': queue,
        'zip': (zip_size[fname1], zip_size[fname2]),
        'kwargs': kwargs
    } for fname1, fname2 in file_pairs]

    async_result = pool.map_async(_parallel_ncd_worker, ncd_args)
    pool.close()

    for _ in xrange(len(file_pairs)):
        queue.get(timeout=5)
        progress_bar.increment()

    ncd_results = async_result.get()
    sys.stderr.write('\n')
    return ncd_results
Example No. 35
def __main__(stat):
    engine_NM = "R4y-Search: "
    print(engine_NM+stat)
    print("Commands : \n[a] - start search\n[c] - quit")
    e = input()
    if e == "a":
        search = input("\nEnter here ")
        search_word = search
        print("Finding results for :"+search)
        progress = ProgressBar(20, fmt=ProgressBar.FULL)
        for x in range(progress.total):
            progress.current += 1
            progress()
            sleep(0.01)
        progress.done()
        Search(search)
    elif e == "c":
        print("Bye!")
        sys.exit(0)
Example No. 36
def summarize(cmds, key, type, stack_dict):
    #create a summary list based off of accumulating stats based on the given key
    prog = ProgressBar(0,
                       len(cmds),
                       77,
                       mode='fixed',
                       char='#',
                       autoprint=True)
    all_total = 0
    summary_dict = {}
    null_summary_key = CallStatistics(0)
    for cmd in cmds:
        if type == Event.eAny or cmd.event_type == type:
            size = cmd.allocSize
            all_total += size

            stack = stack_dict.get(
                cmd.stack_id)  # lookup the stack for this event
            # check stack for key first
            if stack.__dict__.has_key(key):
                summary_key = stack.__dict__[key]
            elif SymbolDB.symbol_cache.has_key(stack.leaf()):
                addr_info = SymbolDB.symbol_cache[stack.leaf()]
                summary_key = addr_info.__dict__[key]
            else:
                summary_key = null_summary_key
            summary = summary_dict.setdefault(summary_key, Summary())
            summary.sample(cmd, stack)
            summary.key = summary_key
        prog.increment_amount()
    print "\n\n"

    summaries_sorted = sorted(summary_dict.items(),
                              key=lambda (k, v): v.accumulator.value,
                              reverse=True)

    # after all the summaries are collected go ahead and generate some of the
    # final stats in each summary
    summaries = [summary[1] for summary in summaries_sorted]

    for summary in summaries:
        summary.prepare(all_total)
    return summaries
Example No. 37
def calculate_entry_histograms( plots, chain ) :
    ##assert canvas is not None, "Canvas must be specified in calculate_histograms"
    # setup our 2d histos
    vars = v.mc_variables()
    # KOhack class gets initiated, because it has to be checked "if KOhack is applied"
    KOhack=KOhack_class(plots[0].mcf)
    histos = []
    chi2histos = []
    for p in plots :
        hname = histo_name( p.short_names, entry_histo_prefix )
        cname = histo_name( p.short_names, chi2_histo_prefix )

        entryhisto = initialize_histo( p,hname,entry=True )
        chi2histo  = initialize_histo( p,cname,chi2 =True )

        histos.append(entryhisto)
        chi2histos.append(chi2histo)

        if check_entry_KO_hack(p,KOhack):
            KOhack.init_hack(p)

    nentries = chain.GetEntries()
    prog = ProgressBar(0, nentries+1, 77, mode='fixed', char='#')
    for entry in range(0,nentries+1) :
        prog.increment_amount()
        print prog,'\r',
        stdout.flush()
        chain.GetEntry(entry)
        for h, c, plot in zip( histos, chi2histos, plots ) :
            vals_list = get_values_list_from_chain_and_histo(chain,plot,vars,s,KOhack,h)
            for vals in vals_list:
                nbins = plot.bins
                ibin = h.FindBin(*vals)
                max_bin = h.FindBin(*plot.max_vals)
                if ibin != 0 and ibin < max_bin :
                    chi2 = get_modified_entry_chi2(vals,chain,KOhack)
                    if chi2 < c.GetBinContent(ibin) :
                        c.SetBinContent(ibin, chi2)
                        h.SetBinContent(ibin, entry)

    print
    return histos
Example No. 38
	def create_files(self, file_list ):
		'''create files. The filenames are supplied as a list'''
		count = 0
		total = len(file_list)
		
		if self.verbose: print "\nTrying to create " + str(total) + " files"
		
		prog = ProgressBar(count, total, 77, mode='fixed', char='#')
	
		counter = 0
		last_time =time.time()

		before = time.time()
		for file_name in file_list:
			ret = self.create_empty_file( file_name )
		    	
			if self.verbose:
				count += 1
				prog.increment_amount()
				print prog, '\r',
				sys.stdout.flush()

			if ret == False:
				print "Failed to create file " + str( file_name )
				return False
			else:
				counter+=1

			if counter % 1000 == 0 and self.profiling:
				diff = time.time() - last_time
				last_time = time.time()	
				f = open("/tmp/basicStat.csv","a")
				f.write(str(counter) + "," + str(diff) + "\n")
				f.flush()
				f.close()
		print

		after = time.time()
		files_per_second = str( len(file_list) / (after-before) )
		self.profiling_printer("Creating " + str(len(file_list)) + " files (" + files_per_second +  " per second)", after - before)

		return True
Example No. 39
 def solve_5(self, period,N):
     result = np.identity(N,dtype = np.complex)
     I = np.identity(N,dtype = np.complex)
     He = self.matrix_electric
     Hs = self.matrix_static
     prog = ProgressBar(0, self.fine_step/6, 50, mode='fixed', char='#')        
     # pure-Python version: Runge-Kutta-Fehlberg 4(5) propagation of the evolution operator
     for i in xrange(0,self.fine_step,6):
         prog.increment_amount()
         print prog, '\r',
         sys.stdout.flush()
         k1 = (He*self.E_arr[period][i] + Hs)
         tmp = He*self.E_arr[period][i+1]+Hs
         k2 = np.dot(tmp,I+k1*0.25*self.dt)
         tmp = He*self.E_arr[period][i+2]+Hs
         k3 = np.dot(tmp,I+(k1*3.0/32.0+k2*9.0/32.0)*self.dt)
         tmp = He*self.E_arr[period][i+3]+Hs
         k4 = np.dot(tmp,I+(k1*1932.0-k2*7200.0+k3*7296.0)*self.dt/2197.0)
         tmp = He*self.E_arr[period][i+4]+Hs
         k5 = np.dot(tmp,I+(k1*439.0/216.0-k2*8.0+k3*3680.0/513.0-k4*845.0/4104.0)*self.dt)
         tmp = He*self.E_arr[period][i+5]+Hs
         k6 = np.dot(tmp,I+(-1.0*k1*8.0/27.0-k2*2.0-k3*3544.0/2565.0+k4*1859.0/4104.0-k5*11.0/40.0)*self.dt)
         result = np.dot(I+(k1*16.0/135.0+k3*6656.0/12825.0+k4*28561.0/56430.0-k5*9.0/50.0+k6*2.0/55.0)*self.dt,result)
     return result
Example No. 40
def _parallel_distance_matrix(fnames, compression_name, pairing_name, **kwargs):
  """Parallel calculation of distance matrix."""
  num_cpus = mp.cpu_count()
  manager = mp.Manager()
  pool = mp.Pool(num_cpus)
  queue = manager.Queue(2*num_cpus)

  sys.stderr.write('Compressing individual files...\n')
  progress_bar = ProgressBar(len(fnames))

  compression_args = [{
    'cname': compression_name, 'fname': fname,
    'queue': queue, 'kwargs': kwargs}
    for fname in fnames]
 
  async_result = pool.map_async(_parallel_compression_worker, compression_args)

  for _ in xrange(len(fnames)):
    queue.get(timeout=5)
    progress_bar.increment()

  compressed_sizes = async_result.get()

  zip_size = dict(zip(fnames, compressed_sizes))

  sys.stderr.write('\nCompressing file pairs...\n')
  file_pairs = [(fname1, fname2)
      for fname1 in fnames
      for fname2 in fnames
      if fname1 < fname2]
  progress_bar = ProgressBar(len(file_pairs))

  ncd_args = [{
    'cname': compression_name,
    'pname': pairing_name,
    'f1': fname1, 'f2': fname2,
    'queue': queue, 'zip': (zip_size[fname1], zip_size[fname2]),
    'kwargs': kwargs} for fname1, fname2 in file_pairs]

  async_result = pool.map_async(_parallel_ncd_worker, ncd_args)
  pool.close()

  for _ in xrange(len(file_pairs)):
    queue.get(timeout=5)
    progress_bar.increment()

  ncd_results = async_result.get()
  sys.stderr.write('\n')
  return ncd_results
Example No. 41
def main(argv=None):
    if argv is None:
        argv = sys.argv
    data = {0:[]}
    songs = data[max(list(data.keys()))]
    print ("Would you like to:\n\t[1]Load a previous saved generation\n\t[2]Generate a new random generation\n\t[3]Print Previous Generation Data")
    choice = prompt("Please select an option", ["1", "2", "3"], True)
    if choice == "1":
        print ("Loading last generation")
        data = pickle.load(open(filename+".pkl", "rb"))
        songs = data[max(list(data.keys()))]
        for song in songs:
            print ("%d.%d: %s" % (song.generation, song.songnum, song.name))
            if song.score == -1:
                while True:
                    try:
                        score = float(prompt("Song score"))
                        song.score = score
                        pickle.dump(data, open(filename+".pkl", "wb"))
                        break
                    except ValueError:
                        print("Please enter a valid number")
            else:
                print ("Score: %s" % (song.score))
            print ()
            
    elif choice == "2":
        print ("Generating Songs")
        for i in ProgressBar.progressbar(range(generationSize)):
            songs.append(Song())
            s = songs[-1]
            s.songnum = i
            s.createFile()
        pickle.dump(data, open(filename+".pkl", "wb"))
        print("Generation 0 generated and saved to file")
    else:
        data = pickle.load(open(filename+".pkl", "rb"))
        pprint(data[max(list(data.keys()))])
Example No. 42
    AND key.number_of_accidentals=2
'''

parser = ArgumentParser(description="Convert notes in tunes to CSV")
parser.add_argument('db', metavar='DATABASE', type=str)
parser.add_argument('output', metavar='CSVFILE', type=str)

args = parser.parse_args()

conn = sqlite3.connect(args.db)
c = conn.cursor()

c.execute(TUNE_COUNT_SQL)
tune_count = c.fetchone()[0]

progress = ProgressBar(tune_count)
progress.width = 80

with file(args.output, 'wb') as output:
    writer = csv.writer(output)

    for i, (tuneId, abc) in enumerate(c.execute(TUNE_SQL)):
        progress.update_time(i)
        print progress, chr(27) + '[A'

        try:
            score = converter.parseData(abc, format='abc')
            row = [tuneId]

            for note in score.flat.notesAndRests:
                duration = note.quarterLength
Example No. 43
import csv

parser = ArgumentParser(description="Calculate variability positionally across tunes")
parser.add_argument("-a", "--accidentals", dest="accidentals", type=int, default=2)
parser.add_argument("-t", "--type", dest="type", type=str, default="jig")
parser.add_argument("-l", "--limit", dest="limit", type=int, default=100)
args = parser.parse_args()

tunes = Tune.objects.filter(rhythm=args.type, key__accidentals="s", key__number_of_accidentals=args.accidentals)

if args.limit > 0:
    tunes = tunes[: args.limit]

tune_count = tunes.count()

progress = ProgressBar(tune_count)
progress.width = 60
errors = 0
print progress, "{: >5,d} / {: <5,d}".format(0, tune_count), chr(27) + "[A"

byOffset = {}
byBeat = {}

for i, tune in enumerate(tunes.values_list("raw_abc", flat=True)):
    progress.update_time(i)
    print progress, "{: >5,d} / {: <5,d}".format(i, tune_count), "Errors: {}".format(errors), chr(27) + "[A"

    try:
        score = parseData(tune, format="abc")
        key = score.analyze("key")
Example No. 44
    args = parser.parse_args()

    # Get the tunes from the database
    tunes = Tune.objects.filter(rhythm=args.type, key__number_of_accidentals=args.accidentals, key__accidentals='s')
    tune_count = tunes.count()

    # Get the features from music21
    feature_list = featuresByCategory(args.categories)
    feature_count = len(feature_list)

    # Set up a CSV writer
    fieldnames = ['id'] + list(chain(*[feature().getAttributeLabels() for feature in feature_list]))
    writer = csv.DictWriter(open(args.output, 'wb'), fieldnames=fieldnames)

    # A little progress checking
    progressbar = ProgressBar(tune_count * len(feature_list))

    # Store a count by feature name of all the errors, so that if one feature is particularly
    # troublesome, we can skip it when computing the singular value decomposition
    errors = 0

    print('Extracting {} features from {} tunes'.format(len(feature_list), tunes.count()))

    for  i, tune in enumerate(tunes.values('id', 'title', 'raw_abc')):
        try:
            score = converter.parseData(tune['raw_abc'], format='abc')
            
            row = {'id': tune['id']}

            for j, feature in enumerate(feature_list):
                try:
Example No. 45
    x,y = pos // img.shape[1], pos % img.shape[1]
    return img[x][y][rgb] & 1


with Image.open(sys.argv[1]) as img:
    with open(sys.argv[2], "w+") as out:
        arrimg = np.array(img)
        pos = 0
        cur_char = ''
        size_str = ""
        while cur_char != "|":
            ord_chr = 0
            for i in range(8):
                bit = get_bit(pos, arrimg)
                pos += 1
                ord_chr = ord_chr | bit << i
            cur_char = chr(ord_chr)
            size_str += cur_char
        size = int(size_str[:-1])
        pb = ProgressBar(size)
        pb.begin()
        for i in range(size):
            ord_chr = 0
            for i in range(8):
                bit = get_bit(pos, arrimg)
                pos += 1
                ord_chr = ord_chr | bit << i
            out.write(chr(ord_chr))
            pb.add_progress()

Example No. 46
def create_patches_at_center(img_basenames, annotation_dir, img_dir, size=50, step=40, grayscale=True, progressbar=True):
    
    if progressbar:
        pb = ProgressBar(len(img_basenames))
        
    if not annotation_dir[-1] == os.path.sep:
        annotation_dir = annotation_dir + os.path.sep
        
    if not img_dir[-1] == os.path.sep:
        img_dir = img_dir + os.path.sep        
    
    if grayscale:
        channels = 1
    else:
        channels = 3

    pos = []
    neg = []
    s = 1
    for img_filename in img_basenames:
        if progressbar:
            pb.step(s)
        s +=1
        annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
        boundingboxes = get_bounding_boxes_for_single_image(annotation_filename)
        #print '%d objects in %s' % (len(boundingboxes), img_filename)
        
        colortype = cv2.IMREAD_COLOR
        if grayscale:
            colortype = cv2.IMREAD_GRAYSCALE
            img = cv2.imread(img_dir + img_filename, colortype)
            height,width=img.shape
            img = img.reshape((height, width,channels))
            img = np.rollaxis(img,2)
            image_pos = get_image_positives(img,boundingboxes, size)
            pos.append(image_pos)
            image_neg = get_image_negatives(img, boundingboxes,size,step)
            neg.append(image_neg)

        else:
            img = cv2.imread(img_dir + img_filename, colortype)
            height,width,channels=img.shape
            img = img.reshape((height, width,channels))
            img = np.rollaxis(img,2)
            image_pos = get_image_positives(img,boundingboxes, size)
            pos.append(image_pos)
            #print len(pos)
            image_neg = get_image_negatives(img, boundingboxes,size,step)
            neg.append(image_neg)
            #print len(neg)
            
    pos = [item for sublist in pos for item in sublist]
    print len(pos)
    
    
    neg = [item for sublist in neg for item in sublist]
    print len(neg)
    patches = pos+neg
    print len(patches)
    
    index = np.arange(len(patches))
    np.random.seed(0)
    np.random.shuffle(index)

    np_patches = np.empty((len(patches),channels,size,size),dtype=np.uint8)
    np_labels = np.empty(len(patches),dtype=int)
    
    
    max_pos=len(pos)
    for i,j in zip(index,xrange(len(index))):        
        if i < max_pos:
            np_patches[j,] = pos[i]
            np_labels[j] = 1
        else:
            np_patches[j,] = neg[i-max_pos]
            np_labels[j] =0
        
    #print '\nSplit each of ' + str(N) + ' images in ' + str(len(patches)) + \
             #' ' + str(size) + 'x' +  str(size) + ' patches'
    #print 'Total number of patches: ' + str(N*(len(patches)))
            
    #return  np.array(np_labels).flatten(), np_patches     
    return np_labels,np_patches
Example No. 47
glacier = GlacierWrapper(account_id, PART_SIZE)
glacier.set_vault_name(vault_name)

# find all the files with the allowed extensions
filenames = [f for f in os.listdir(directory) if f.lower().endswith(tuple(exts))]
total = len(filenames)
ind = 1
for filename in filenames:
    file_path = directory + '/' + filename
    file_size = os.path.getsize(file_path)
    expected_part_count = ceil(file_size / PART_SIZE)
    print('(' + str(ind) + '/' + str(total) + ') ' + filename + ': ' + str(round(file_size/1024/1024)) + 'MB')

    upload_id = glacier.initiate_multipart_upload(file_path)
    with open(file_path, mode = 'rb') as archive:
        progress_bar = ProgressBar(expected_part_count)
        part_ind = 0
        content = archive.read(PART_SIZE)
        while True:
            progress_bar.advance()
            if glacier.upload_multipart_part(upload_id, part_ind, content):
                # a truthy return marks the final part: complete the upload
                # and log the resulting archive id
                archive_id = glacier.complete_multipart_upload(upload_id, archive)
                print(filename + ',' + archive_id, file = log)
                progress_bar.finish()
                break
            else:
                content = archive.read(PART_SIZE)
                part_ind += 1
    ind += 1
Ejemplo n.º 48
0
API_VERSION = "0.6"

parser = argparse.ArgumentParser(description='Convert a ESRI Shapefile (POINT only) to .OSM')
parser.add_argument('INFILE', help='The path to the input ESRI shapefile, will append .shp if omitted')
parser.add_argument('OUTFILE', nargs='?', type=argparse.FileType('w'), default='out.osm', help='The path to the output OSM XML file, defaults to out.osm')
parser.add_argument('--quiet', action='store_true', default=False)
args = parser.parse_args()

osm_id = 0
dt = datetime.now()

sf = shapefile.Reader(args.INFILE)
f = sf.fields
l = len(sf.shapes())
if not args.quiet:
    p = ProgressBar(l) 

w = XMLWriter(args.OUTFILE)
w.start("osm", {"generator": "shape2osm " + str(__version__), "version": API_VERSION, "upload": "false"})
for shape in sf.shapeRecords():
    osm_id -= 1
    (x,y) = shape.shape.points[0]
    w.start("node", {"id": str(osm_id), "timestamp": dt.isoformat(), "version": "1", "visible": "true", "lon": str(x), "lat": str(y)})
    for i in range(1,len(f)):
        w.element("tag", "", {"k": str(f[i][0]), "v": str(shape.record[i-1])})
    w.end()
    if not args.quiet:
        p.update_time(-osm_id)  # osm_id is negative, so -osm_id nodes written
        print "{0}\r".format(p),
w.end()
if not args.quiet:
    print
Ejemplo n.º 49
0
from progress_bar import ProgressBar
import sys
import time
def updateBar(step):
    p.update_time(step)
    sys.stdout.write("%s\r" % p)
    sys.stdout.flush()

if __name__ == "__main__":
    wait_time = 100 # seconds
    p = ProgressBar(wait_time)
    p.unit = "%" # change the unit at the end of the progress bar
    p.empty_char = "."
    for step in range(wait_time+1):
        updateBar(step)
        time.sleep(1)
Ejemplo n.º 50
0
def recalc_to_file( collection, output_file = "" ) :
    model  = models.get_model_from_file(collection)
    lhoods = models.get_lhood_from_file(collection)
    outfile = collection.FileName if output_file == "" else output_file
    print "Output file is %s" % outfile

    # initialise the MC-variables
    MCVdict=v.mc_variables()

    chain = MCRecalcChain( collection )
    nentries = chain.GetEntries()

    begin = getattr( collection, "StartEntry", 0)
    end   = getattr( collection, "EndEntry", nentries+1)

    total_delta = 0

    # create trees in scope of outfile
    out = r.TFile(outfile,"recreate")
    chi2tree = chain.chains["predictions"].CloneTree(0)

    # might need to use address-of on contribvars
    nTotVars = chain.nTotVars["predictions"]
    contribvars = array('d',[0.0]*nTotVars)
    contribtree = r.TTree( 'contribtree', 'chi2 contributions')
    varsOutName = "vars[%d]/D" % ( nTotVars )
    contribtree.SetMaxTreeSize(10*chi2tree.GetMaxTreeSize())
    contribtree.Branch("vars",contribvars,varsOutName)

    # same with lhood
    nLHoods = len(lhoods.keys())
    lhoodvars = array('d',[0.0]*nLHoods)
    lhoodtree = r.TTree( 'lhoodtree', 'lhood contributions')
    varsOutName = "vars[%d]/D" % ( nLHoods )
    lhoodtree.SetMaxTreeSize(10*chi2tree.GetMaxTreeSize())
    lhoodtree.Branch("vars",lhoodvars,varsOutName)

    # want to save best fit point entry number: create new tree and branch
    bfname = getattr( collection, "BestFitEntryName", "BestFitEntry"  )
    bft=r.TTree(bfname, "Entry")
    bfn=array('i',[0])
    bft.Branch('EntryNo',bfn,'EntryNo/I')

    # and the minChi minEntry
    minChi=1e9
    minEntry=-1
    count=-1 # because the first entry has number 0

    prog = ProgressBar(begin, end, 77, mode='fixed', char='#')
    for entry in range(begin,end) :

        prog.increment_amount()
        print prog,'\r',
        stdout.flush()

        chain.GetEntry(entry)
        if good_point( chain.treeVars["predictions"], collection ) :
            delta = 0.
            chi2 = 0

            for constraint in model :
                MCV=MCVdict[constraint.short_name]
                v_index = MCV.get_index(collection)

                chi2_t = constraint.get_chi2( chain.treeVars["predictions"][v_index] )
                contribvars[v_index] = chi2_t
                chi2 += chi2_t
            for i,lh in enumerate(lhoods.values()) :
                chi2_t = lh.get_chi2( chain.treeVars["predictions"] )
                lhoodvars[i] = chi2_t
                chi2 += chi2_t

            chi2 += spectrum_constraints( chain.treeVars["predictions"], collection )

            if chi2 > getattr(collection, "MinChi2", 0 ) and \
               chi2 < getattr(collection, "MaxChi2", 1e9 ) :
                # This was inserted to check whether there was a significant
                # calculation error (average delta-chi2 per entry: 1e-15)
                if __DEBUG :
                    delta_chi2_val = chi2_t - chain.treeVars["contributions"][key]
                    delta = delta + delta_chi2_val
                    total_delta = total_delta + abs(delta)
                chain.treeVars["predictions"][0] = chi2
                contribvars[0] = chi2
                chi2tree.Fill()
                contribtree.Fill()
                lhoodtree.Fill()
                count+=1
                # track the running minimum chi2 and its entry number
                if chi2 < minChi:
                    minChi=chi2
                    minEntry=count

    #Saving best fit Entry number
    bft.GetEntry(0)
    bfn[0]=minEntry
    bft.Fill()

    bft.AutoSave()
    chi2tree.AutoSave()
    contribtree.AutoSave()
    lhoodtree.AutoSave()

    out.Close()

    if __DEBUG :
        print "\n--------------------------\n"
        print "   TOTAL    (    MEAN    )"
        print "%10e(%10e)" % ( total_delta, (total_delta/(end-begin)) )
        print "\n--------------------------\n"
Ejemplo n.º 51
0
        writeTagContents('comment', "", soup, 0, revfile)
        writeTagContents('text', 'id', soup, 1, revfile)

#initialize variables
revcount = 0
linecount = 0
revblock = ""
isrevisionblock = False
ispageblock = False
isknownline = False
pagetitle = ""
pageid = ""
pagecount = 0
pageRedirect = False

prog = ProgressBar(linecount, numlinesinfile, 100, mode='fixed', char='#')

for txtline in open(xmlfile):
    try:
        isknownline = False
        prog.increment_amount()
        linecount += 1
        if isrevisionblock:
            revblock = revblock + txtline
            #start: this is what we do when we've finished a revision
            if txtline.find("</revision>") > 0:
                isrevisionblock = False
                isknownline = True
                try:
                    processRevision(cleanString(revblock))
                except:
                    pass  # ignore revisions that fail to parse
Ejemplo n.º 52
0
def create_patches_at_center(img_basenames, annotation_dir, image_dir, size=50, step=40, grayscale=True, progressbar=True,downsample=1, objectclass=None, negative_discard_rate=.9):
    '''Extract a set of image patches with labels, from the supplied list of
    annotated images. Positive-labelled patches are extracted centered on the
    annotated bounding box; negative-lablled patches are extracted at random
    from any part of the image which does not overlap an annotated bounding box.'''
    if progressbar:
        pb = ProgressBar(len(img_basenames))

    if not annotation_dir[-1] == os.path.sep:
        annotation_dir = annotation_dir + os.path.sep

    if not image_dir[-1] == os.path.sep:
        image_dir = image_dir + os.path.sep

    if grayscale:
        channels = 1
    else:
        channels = 3

    pos = []
    neg = []
    s = 1
    for img_filename in img_basenames:
        if progressbar:
            pb.step(s)
        s += 1
        annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
        boundingboxes = get_bounding_boxes_for_single_image(annotation_filename, objectclass)

        # read in grayscale when requested, so the patch array really has a
        # single channel (reading in color would overwrite channels with 3)
        colortype = cv2.IMREAD_GRAYSCALE if grayscale else cv2.IMREAD_COLOR
        img = cv2.imread(image_dir + img_filename, colortype)
        if grayscale:
            height, width = img.shape
        else:
            height, width, channels = img.shape
        img = img.reshape((height, width, channels))
        img = np.rollaxis(img, 2)  # to (channels, height, width)
        image_pos = get_image_positives(img, boundingboxes, size, downsample=downsample)
        pos.append(image_pos)

        image_neg = get_image_negatives(img, boundingboxes, size, step, downsample=downsample, discard_rate=negative_discard_rate)
        neg.append(image_neg)

    pos = [item for sublist in pos for item in sublist]
    neg = [item for sublist in neg for item in sublist]
    patches = pos+neg

    index = np.arange(len(patches))
    np.random.seed(0)
    np.random.shuffle(index)

    np_patches = np.empty((len(patches),channels,size/downsample,size/downsample),dtype=np.uint8)
    np_labels = np.empty(len(patches),dtype=int)

    max_pos=len(pos)
    for i,j in zip(index,xrange(len(index))):
        if i < max_pos:
            np_patches[j,] = pos[i]
            np_labels[j] = 1
        else:
            np_patches[j,] = neg[i-max_pos]
            np_labels[j] = 0

    np_labels = np_labels.astype(np.uint8)
    return np_labels,np_patches
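
# A minimal usage sketch, assuming a directory of .jpg images with matching
# PASCAL-VOC-style .xml annotations (the paths and the 'car' class below are
# illustrative, not from the source):
#
#   img_basenames = sorted(f for f in os.listdir('images/') if f.endswith('.jpg'))
#   labels, patches = create_patches_at_center(img_basenames, 'annotations/',
#                                              'images/', size=50, step=40,
#                                              grayscale=True, objectclass='car')
#   # labels: uint8 array of 0/1
#   # patches: (N, channels, size/downsample, size/downsample) uint8 array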
Ejemplo n.º 53
0
def fill_and_save_data_hists( mcf, modes, hlist, contribs, predicts ) :
    axes = [ "X", "Y", "Z" ]
    chain = MCAnalysisChain( mcf )
    nentries = chain.GetEntries()

    for h in hlist :
        histo_cont = {}
        contrib_cont = {}
        predict_cont = {}

        h_dim = get_histogram_dimension(h)
        dim_range = range(h_dim)

        axis_nbins = []
        axis_mins = []
        axis_maxs = []
        axis_bins = []
        axis_titles = []

        th_arg_list  = []

        user_notify_format = ""
        user_notify = []

        title_format = "%s"
        title_items = [ h.GetTitle() ]
        for axis in dim_range :
            axis_nbins.append( getattr( h, "GetNbins%s" % axes[axis] )() )
            ax = getattr( h, "Get%saxis" % axes[axis] )()
            axis_mins.append( ax.GetXmin() )
            axis_maxs.append( ax.GetXmax() )
            axis_bins.append( ax.GetXbins().GetArray() )
            axis_titles.append( ax.GetTitle() )

            th_arg_list.append( axis_nbins[-1] )
            th_arg_list.append( axis_mins[-1] )
            th_arg_list.append( axis_maxs[-1] )

            user_notify_format += ": [ %.2e, %.2e ] :"
            user_notify.append( axis_mins[-1] )
            user_notify.append( axis_maxs[-1] )

            title_format += ";%s"
            title_items.append( axis_titles[-1] )

        print user_notify_format % tuple(user_notify)

        title = title_format % tuple(title_items)

        firstbin, lastbin = get_histogram_bin_range(h)
        for mode in modes :
            # here need to add in check on contrib and make one for each contribution
            histo_cont[mode] = getattr( r, "TH%dD" % h_dim )( h.GetName() + "_" + mode, title, *th_arg_list )
            base_val = 1e9
            if mode == "pval" :
                base_val = 0.0
            for bin in range( firstbin, lastbin + 1 ) :
                histo_cont[mode].SetBinContent( bin, base_val )
        for c in contribs : # contribs is a list of Contribution objects
            contrib_cont[c.short_name] = getattr( r, "TH%dD" % h_dim )( h.GetName() + "_dX_" + c.short_name, title, *th_arg_list )
            for bin in range( firstbin, lastbin + 1 ) :
                contrib_cont[c.short_name].SetBinContent( bin, 0.0 )
        for p in predicts : # predicts is a list of Contribution objects
            predict_cont[p.short_name] = getattr( r, "TH%dD" % h_dim )( h.GetName() + "_pred_" + p.short_name, title, *th_arg_list )
            for bin in range( firstbin, lastbin + 1 ) :
                predict_cont[p.short_name].SetBinContent( bin, 0.0 )

        prog = ProgressBar(0, (lastbin-firstbin)+1, 77, mode='fixed', char='#')
        for i in range( firstbin, lastbin+1 ) :
            prog.increment_amount()
            print prog,'\r',
            stdout.flush()
            entry = int( h.GetBinContent(i) )
            if entry > 0 :
                chain.GetEntry(entry)
                fill_bins( histo_cont, contrib_cont,predict_cont, contribs,predicts  , i, chain, mcf )
        perform_zero_offset( histo_cont["dchi"] )
        print
        save_hdict_to_root_file( histo_cont,  mcf.FileName, mcf.DataDirectory)
        save_hdict_to_root_file( contrib_cont, mcf.FileName, mcf.DataDirectory)
        save_hdict_to_root_file( predict_cont, mcf.FileName, mcf.DataDirectory)
Ejemplo n.º 54
0
    def greedy_for_bound(self,best_in_next = 100, direction = 'west', disp_progress=True,width=40,wgpenalty=0,start=-180,init=True):
        if init:
            print 'initialization...'
            self.to_assign = np.random.permutation(range(self.N)).tolist()
            '''
            if not(np.all([self.centroids[i][1]<=self.centroids[i+1][1] for i in range(len(self.centroids)-1)])):
                print 'warning: centroids were not sorted by longitude. I reorder them'
                self.centroids = np.array(sorted(self.centroids, key=lambda x: x[1]))
                
            self.init_clusters_with_centroids()
            '''
            self.centroids = np.zeros((0,2))
            self.K = 0
        
        shiftedlong = np.where(self.X[self.to_assign][:,1]<start,self.X[self.to_assign][:,1]+360,self.X[self.to_assign][:,1])
        if direction=='west':
            print 'sorting gifts per longitude...'
            self.to_assign = np.array(self.to_assign)[np.argsort(shiftedlong)]
        elif direction=='east':
            print 'sorting gifts per longitude...'
            self.to_assign = np.array(self.to_assign)[np.argsort(shiftedlong)[::-1]]
        elif direction=='south':
            print 'sorting gifts per latitude...'
            self.to_assign = np.array(self.to_assign)[np.argsort(self.X[self.to_assign][:,0])[::-1]]
        elif direction=='north':
            print 'sorting gifts per latitude...'
            self.to_assign = np.array(self.to_assign)[np.argsort(self.X[self.to_assign][:,0])]
        else:
            raise ValueError('direction not implemented')
        self.to_assign = self.to_assign.tolist()
        print 'done.'
        
        if disp_progress:
            prog = ProgressBar(0, len(self.to_assign), 77, mode='fixed')
            oldprog = str(prog)

        while True:
            if disp_progress:
                #<-- display progress
                prog.increment_amount()
                if oldprog != str(prog):
                    print prog, "\r",
                    sys.stdout.flush()
                    oldprog = str(prog)
                #-->
        
            
            if self.K == 0:
                self.create_new_cluster(self.to_assign[0])
                del self.to_assign[0]
                continue
            
            candidates = self.to_assign[:best_in_next]
                
            bounds_inc = []
            for i in candidates:
                km = np.searchsorted(self.centroids[:,1], self.X[i][1]-width)
                kp = np.searchsorted(self.centroids[:,1], self.X[i][1]+width)
                bounds_inc.extend([(self.bound_increase_for_adding_gift_in_cluster(i,k),i,k)
                                   for k in range(km,kp) if self.weight_per_cluster[k]+self.weights[i]<weight_limit-wgpenalty])
                
            if not bounds_inc:
                self.create_new_cluster(self.to_assign[0])
                del self.to_assign[0]
                continue
                
            sorted_bounds_inc = sorted(bounds_inc)
            assigned = False
            for inc,i,c in sorted_bounds_inc:
                if inc > 2*self.distances_to_pole[i]*sleigh_weight:
                    # even the best insertion costs more than a dedicated
                    # round trip for this gift: open a new cluster instead
                    self.create_new_cluster(self.to_assign[0])
                    assigned = True
                    del self.to_assign[0]
                    break
                if self.weight_per_cluster[c]+self.weights[i]<weight_limit-wgpenalty:
                    self.add_in_tour(i,c)
                    assigned = True
                    self.to_assign.remove(i)
                    break

            if not assigned:
                raise Exception('not able to assign a trip in this window of longitudes.')

            if not(self.to_assign):
                break
Ejemplo n.º 55
0
    def __init__(self):
        self.editors = NestedDict()
    def add(self,editor,version):
        if editor in self.editors:
            if version in self.editors[editor]:
                self.editors[editor][version] += 1
            else:
                self.editors[editor][version] = 1
        else:
            # first sighting of this editor: start counting at 1
            self.editors[editor] = {version: 1}

r = Results()
editors = open(infile,'r')
lc = 0
lines = int(os.popen('wc -l %s' % (infile)).read().split()[0])
pb = ProgressBar(lines)

for line in editors:
    lc += 1
    if '-q' not in sys.argv and not lc % 10000:
        pb.update_time(lc)
        print "{0}\r".format(pb),
#    if not lc % 10000 : continue
#    if lc == 10000: break
    if 'JOSM' in line:
        ro = re.search('\d{4}', line)
        if ro:
            version = ro.group(0)
        else:
            version = None
        r.add('JOSM',version)
Ejemplo n.º 56
0
class ProgressBarController(object):
    """управление полоской прогресса"""
    __slots__ = ('_width', '_term_width', '_progress_bar', '_oldsignal')

    def __init__(self, min, max, width=0, value=None):
        self._width = None
        self._term_width = None
        self._oldsignal = None
        self._progress_bar = ProgressBar(min, max, min)
        if value is not None:
            self._progress_bar.set_value(value)
        # a width of zero means the bar should stretch across the whole window;
        # when the window is resized, the bar adapts to the new size
        self.set_width(width)

    def update_term_width(self):
        # query the terminal width
        self._term_width = get_term_width()
        # self._term_height = int(rd[0])

    def sigwinch_handler(self, sig, frame):
        if self._width == 0:
            # auto-width mode: re-stretch the bar to the new terminal size
            self.set_width(0)
        if hasattr(self._oldsignal, '__call__'):
            self._oldsignal(sig, frame)

    def redraw(self):
        import sys
        s = str(self._progress_bar)
        l = len(s)
        # write the bar, then move the cursor back over it (CSI <l> D)
        sys.stdout.write(s + ESC + str(l) + "D")
        sys.stdout.flush()

    def set_value(self, value, animated=False):
        import sys
        if not animated:
            self._set_value(value)
            sys.stdout.write(str(self._progress_bar))
        else:
            from animation_controller import \
                AnimationController, AnimationTimingQuad
            start_value = self.value()
            delta = value - start_value

            def step_hook(progress):
                self._set_value(start_value + (delta * progress))
                self.redraw()

            def complete_hook():
                self._set_value(value)
                self.redraw()
                sys.stdout.write(ESC + str(len(str(self._progress_bar))) + "C")

            anim = AnimationController(step=step_hook,
                                       timing=AnimationTimingQuad,
                                       complete=complete_hook)
            anim.begin()

    def _set_value(self, value):
        self._progress_bar.set_value(value)

    def value(self):
        return self._progress_bar.value()

    def set_width(self, width):
        self.update_term_width()
        if width < 0:
            raise ValueError('set_width: width must be >= 0')
        elif width == 0 or width >= self._term_width - 1:
            self._width = 0
            self._progress_bar.set_width(self._term_width - 1)
            self._oldsignal = \
                signal.signal(signal.SIGWINCH, self.sigwinch_handler)
        else:
            self._width = width
            self._progress_bar.set_width(width)
            sig = self._oldsignal if self._oldsignal else signal.SIG_DFL
            signal.signal(signal.SIGWINCH, sig)
            self._oldsignal = None

    def width(self):
        if self._width == 0:
            return self._term_width - 1
        else:
            return self._width

    def progress_bar(self):
        return self._progress_bar

    def set_progress_bar(self, pb):
        self._progress_bar = pb
        pb.set_width(self._width)

    def __str__(self):
        return str(self._progress_bar)

    def __repr__(self):
        pb = self._progress_bar
        return 'ProgressBarController(min=%f, max=%f, width=%d, value=%f)' %\
            (pb.min(), pb.max(), pb.width(), pb.value())
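
# A minimal usage sketch, assuming the module-level ESC escape string and
# get_term_width() helper this class relies on are available (the loop below
# is illustrative, not from the source):
#
#   pbc = ProgressBarController(0, 100, width=0)  # width 0: fill the terminal
#   for i in range(101):
#       pbc.progress_bar().set_value(i)
#       pbc.redraw()          # redraw in place, cursor returns to bar start
#       time.sleep(0.05)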