Example no. 1
def main():
    # Sleep so rabbit can finish setting up,
    # so we don't throw errors all over the place.
    time.sleep(15)
    initialize_log()
    histogram = Histogram()
    histogram.run()
def BF_enter_hist(recongraph, uA, uB):
    '''
    Given a reconciliation graph and two mapping nodes, compute the
    histogram of the pairwise differences between the partial MPRs
    for uA and uB.
    '''
    if uA == uB:
        hist_dict = {}
        recon_trees = list(BF_enumerate_partial_MPRs(recongraph, uA))
        for recon_tree_i in range(0, len(recon_trees)):
            for recon_tree_j in range(recon_tree_i+1):
                recon_tree_A = recon_trees[recon_tree_i]
                recon_tree_B = recon_trees[recon_tree_j]
                diff_count = recon_trees_diff(recon_tree_A, recon_tree_B)
                if diff_count not in hist_dict:
                    hist_dict[diff_count] = 0
                hist_dict[diff_count] += 1
        return Histogram(hist_dict)
    else: # uA != uB
        hist_dict = {}
        uA_recon_trees = list(BF_enumerate_partial_MPRs(recongraph, uA))
        uB_recon_trees = list(BF_enumerate_partial_MPRs(recongraph, uB))
        for recon_tree_A in uA_recon_trees:
            for recon_tree_B in uB_recon_trees:
                diff_count = recon_trees_diff(recon_tree_A, recon_tree_B)
                if diff_count not in hist_dict:
                    hist_dict[diff_count] = 0
                hist_dict[diff_count] += 1
        return Histogram(hist_dict)
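# Hedged illustration (not part of the original module): the nested loops in
# BF_enter_hist implement a simple "histogram of pairwise differences"
# pattern.  The toy below shows the same pattern with made-up trees
# represented as sets of events; toy_diff stands in for recon_trees_diff.
def toy_diff(tree_a, tree_b):
    # Number of events present in exactly one of the two trees.
    return len(tree_a ^ tree_b)

def toy_pair_histogram(trees_a, trees_b):
    hist = {}
    for ta in trees_a:
        for tb in trees_b:
            d = toy_diff(ta, tb)
            hist[d] = hist.get(d, 0) + 1
    return hist

# toy_pair_histogram([{"a", "b"}, {"a", "c"}], [{"a", "b"}]) == {0: 1, 2: 1}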
def GenerateExactMRCFromTrace(exactTraceName):

    """
    -------------------------------------------------------
    This program creates an exact MRC curve using my histogram
    class by parsing the output of a Parda trace analysis.
    
    This allows us to compare the cache curves resulting
    from my implementation of SHARDS against the actual cache curve.
    -------------------------------------------------------
    """    
    
    FIRST_LINE = 2
    PATH_TO_TRACE_DIR = os.path.normpath(os.path.join(os.getcwd(), ".."))
    
    fp = open(os.path.join(PATH_TO_TRACE_DIR, "Traces", exactTraceName), "r", encoding = "utf-8")
    
    actualMRCHistogram = Histogram()
    
    for i in range(0,FIRST_LINE):
        thisRecord = fp.readline().strip()
    
    thisRecord = fp.readline().strip()
    while thisRecord != "" and thisRecord[0].isdigit():
        thisRecord = thisRecord.split()
        actualMRCHistogram.AddBucket(int(thisRecord[0]), int(thisRecord[1]))
        thisRecord = fp.readline().strip()
    
    #Need to add the infinite stack depth indicated at the end of the file
    thisRecord = fp.readline().split()
    actualMRCHistogram.AddBucket(-1, int(thisRecord[1]))
    
    fp.close()
    return actualMRCHistogram
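# Hedged usage note (the trace name below is hypothetical): the parser above
# skips FIRST_LINE header lines, reads whitespace-separated "<stack depth>
# <count>" records until a blank or non-numeric line, and then takes the next
# line's second field as the count of references at infinite stack depth
# (stored under bucket -1).
#
#     actualHistogram = GenerateExactMRCFromTrace("example.trace")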
Example no. 4
def create_bus_factor_chart(df, hotspots):
    top_hotspots = get_top_hotspots(hotspots)
    hotspot_files = [f.file for f in top_hotspots]
    hotspot_commit_df = df[df["file"].isin(hotspot_files)]
    without_github_user = hotspot_commit_df[
        hotspot_commit_df["author"] != "GitHub"]
    recent_date = datetime.now() - timedelta(days=365)
    recent_data = without_github_user[
        without_github_user["datetime"] >= recent_date]

    hotspot_unique_author_counts = Histogram("file_abbr", "author",
                                             recent_data)
    hotspot_unique_author_counts.set_aggregation("unique_count")
    hotspot_unique_author_counts.set_chart_type("barh")
    hotspot_unique_author_counts.set_max_groupings(10)
    hotspot_unique_author_counts.save_plot(
        "output/git_histogram_bus_factor.png")

    bus_factor_text = ""
    for file in recent_data["file_abbr"].unique():
        unique_authors = recent_data[recent_data["file_abbr"] ==
                                     file]["author"].unique()
        if len(unique_authors) == 1:
            bus_factor_text = bus_factor_text + f"{file:<50} - {unique_authors[0]}\n"
    if bus_factor_text != "":
        print("\n🚌 Hotspots with a high bus factor:")
        print(bus_factor_text)
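# Illustrative aside (not from the original repository): the single-author
# check above can also be expressed as a pandas groupby.  The tiny frame
# below is made-up data used only to show the idiom.
import pandas as pd

toy = pd.DataFrame({
    "file_abbr": ["a.py", "a.py", "b.py"],
    "author": ["alice", "bob", "carol"],
})
authors_per_file = toy.groupby("file_abbr")["author"].nunique()
single_author_files = authors_per_file[authors_per_file == 1].index.tolist()
# single_author_files == ["b.py"]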
Example no. 5
def main():
    bg_histogram = Histogram()
    db = DataMaker('mydb-servers')
    # make_json_urls('all_urls.json')
    # urls = load_urls('all_urls.json')
    # BigCrawler.get_servers(urls, db)

    servers = db.list_urls_servers()
    complete_histogram(bg_histogram, servers)

    # Making histogram
    bg_histogram_dict = bg_histogram.get_dict()
    sorted_x = sorted(bg_histogram_dict.items(), key=operator.itemgetter(1))

    keys = []
    values = []

    length = len(sorted_x)
    for i in range(1, 7):
        keys.append(sorted_x[length - i][0])
        values.append(sorted_x[length - i][1])

    X = list(range(len(keys)))

    plt.bar(X, values, align="center")
    plt.xticks(X, keys)

    plt.title(".bg servers")
    plt.xlabel("Server")
    plt.ylabel("Count")

    plt.savefig("all_sites.png")
Example no. 6
    def __init__(self,
                 speciesList,
                 suffix="",
                 filename="",
                 axis=None,
                 hist=None,
                 bin_edges=None,
                 fixedSpecies=[]):
        """
        Initialise a Histogram class from speciesList.

        Assuming for now all files have the same suffix.
        TODO: Add prefix?
        """
        self.speciesList = speciesList
        self.fixedSpecies = fixedSpecies
        self.suffix = suffix
        # Init Histogram
        self.Histogram = Histogram(speciesList[0] + suffix, filename, hist,
                                   bin_edges)
        for species in speciesList[1:]:
            self.Histogram.addHistogramFromFile(species + suffix,
                                                filename=filename)

        # Make sure the PDFs are normalised
        for species in speciesList:
            norm = np.sum(self.getHist(species))
            assert (norm >= .99
                    and norm <= 1.01)  # Work around float precision
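# Hedged aside (not part of the original class): the tolerance assertion
# above can also be written with numpy.isclose, which makes the intent
# explicit.  norm_example is a made-up stand-in for the computed norm.
import numpy as np
norm_example = 1.004
assert np.isclose(norm_example, 1.0, atol=0.01)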
Example no. 7
def get_histogram(all_links):
    hist = Histogram()
    our_headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
    }

    # connections
    cnt = 0
    conn = sqlite3.connect(
        "/home/kaloyan/Documents/Hack_Bulgaria/week7/websites.db")
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    update_query = '''
    INSERT INTO websites(url, server)
    VALUES(?, ?)
    '''

    # getting the data if any
    get_query = '''
    SELECT url FROM websites
    '''

    data = cursor.execute(get_query)
    conn.commit()
    rows = data.fetchall()
    conn.commit()
    sites = set()
    for row in rows:
        sites.add(tuple(row))

    name = ''
    adding = []
    num = 0
    equal = 0
    for link in all_links:
        equal = 0
        for i in sites:
            if link == i[0]:
                equal = 1
        if not equal:
            try:
                print(link)
                req = requests.head(link,
                                    headers=our_headers,
                                    timeout=4,
                                    allow_redirects=True)
                name = req.headers['Server']
                hist.add(change_name(name))
                adding.append((link, name,))
                cnt += 1
                if cnt == 5:
                    cursor.executemany(update_query, adding)
                    conn.commit()
                    adding = []
                    cnt = 0

            except Exception as ex:
                print(ex)
    return hist
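# Illustrative refactor sketch (not the original code): collecting the
# already-crawled URLs into a set makes the "have we seen this link" test
# constant-time instead of the linear scan over `sites` above.  The rows
# below are made up.
toy_rows = [("http://a.example", "nginx"), ("http://b.example", "Apache")]
known_urls = {row[0] for row in toy_rows}
assert "http://a.example" in known_urls
assert "http://c.example" not in known_urls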
Example no. 8
 def optionHistograma(option):
     img = cv2.imread(Options.getFilename("art-angels"), 0)
     if option == "gera":
         return img, Histogram.gerar(img)
     if option == "acumulado":
         return img, Histogram.acumulado(img)
     if option == "equalizado":
         return img, np.uint8(Histogram.equaliza(img))
def get_histogram(all_links):
    hist = Histogram()
    our_headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
    }

    # connections
    cnt = 0
    conn = sqlite3.connect("/home/kaloyan/Documents/Hack_Bulgaria/week7/websites.db")
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    update_query = """
    INSERT INTO websites(url, server)
    VALUES(?, ?)
    """

    # getting the data if any
    get_query = """
    SELECT url FROM websites
    """

    data = cursor.execute(get_query)
    conn.commit()
    rows = data.fetchall()
    conn.commit()
    sites = set()
    for row in rows:
        sites.add(tuple(row))

    name = ""
    adding = []
    num = 0
    equal = 0
    for link in all_links:
        equal = 0
        for i in sites:
            if link == i[0]:
                equal = 1
        if not equal:
            try:
                print(link)
                req = requests.head(link, headers=our_headers, timeout=4, allow_redirects=True)
                name = req.headers["Server"]
                hist.add(change_name(name))
                adding.append((link, name))
                cnt += 1
                if cnt == 5:
                    cursor.executemany(update_query, adding)
                    conn.commit()
                    adding = []
                    cnt = 0

            except Exception as ex:
                print(ex)
    return hist
Example no. 10
    def __init__(self, parent, id=wx.ID_ANY, size=wx.DefaultSize):
        super(HistogramPanel, self).__init__(parent, id, size=size)
        self.category = None

        self.hbs = wx.BoxSizer(wx.HORIZONTAL)
        self.categoryLabel = wx.StaticText(self, label=_('Category:'))
        self.categoryChoice = wx.Choice(self)
        self.Bind(wx.EVT_CHOICE, self.doChooseCategory, self.categoryChoice)

        self.hbs.Add(self.categoryLabel,
                     flag=wx.LEFT | wx.ALIGN_CENTRE_VERTICAL,
                     border=4)
        self.hbs.Add(self.categoryChoice, flag=wx.ALL, border=4)

        self.hbs.Add(wx.StaticText(self, label=_('Lap') + u':'),
                     flag=wx.LEFT | wx.ALIGN_CENTER_VERTICAL,
                     border=24)
        self.lapOption = wx.Choice(self, choices=[_('Last')])
        self.lapOption.SetSelection(0)
        self.lapOption.Bind(wx.EVT_CHOICE, self.doChooseLap)
        self.hbs.Add(self.lapOption, flag=wx.ALL, border=4)
        self.lap = 0

        self.hbs.Add(wx.StaticText(self, label=_('Bin by') + u':'),
                     flag=wx.LEFT | wx.ALIGN_CENTER_VERTICAL,
                     border=24)
        self.binOption = wx.Choice(self,
                                   choices=[
                                       _('Auto'),
                                       _('1 second'),
                                       _('30 seconds'),
                                       _('1 minute'),
                                       _('5 minutes')
                                   ])
        self.binOption.SetSelection(0)
        self.binOption.Bind(wx.EVT_CHOICE, self.doChooseBinOption)
        self.hbs.Add(self.binOption, flag=wx.ALL, border=4)

        self.hbs.Add(wx.StaticText(self, label=_('Bin width') + u':'),
                     flag=wx.LEFT | wx.ALIGN_CENTER_VERTICAL,
                     border=24)
        self.binWidthLabel = wx.StaticText(self)
        self.hbs.Add(self.binWidthLabel,
                     1,
                     flag=wx.LEFT | wx.ALIGN_CENTER_VERTICAL,
                     border=4)

        self.histogram = Histogram(self)
        bs = wx.BoxSizer(wx.VERTICAL)

        bs.Add(self.hbs, 0, wx.EXPAND)
        bs.Add(self.histogram, 1, wx.EXPAND)

        self.SetDoubleBuffered(True)
        self.SetSizer(bs)
    def get_single_histogram(self):
        """Create a single histogram of the local binary patterns in the
        image."""
        h = Histogram(self.bins, 0, self.bins)

        for y, x, value in self.image:
            h.add(self.pattern_callback(y, x, value))

        h.normalize()

        return h
Example no. 12
def create_single_histogram(field,
                            value_field,
                            aggregation,
                            df,
                            filename_suffix=""):
    histogram = Histogram(field, value_field, df)
    histogram.set_chart_type("barh")
    histogram.set_aggregation(aggregation)
    histogram.set_max_groupings(10)
    histogram.save_plot(
        f"output/git_histogram_{field}_{value_field}_{aggregation}{filename_suffix}.png"
    )
Example no. 13
    def getVolumeByHistIntegration(self, threshold=0):
        voxelCount = 0
        histogram = Histogram(1024, self.Reader)

        for i in range(0, histogram.getBinsCount()):
            binCenter = histogram.getBinCenter(i)
            binValue = histogram.getBinValueAtIndex(i)

            if binCenter > threshold:
                voxelCount = voxelCount + binValue

        return voxelCount * self.VoxelVolume
Example no. 14
def read_data(lab, class_list):
    h = Histogram(len(lab.get_anwerlist()) + 1, len(lab.get_anwerlist()))
    with open(str(lab.get_name()) + ".txt", 'r') as f:
        content = f.read()
        content_list = content.split("\n")
        for line in content_list:
            if not line:
                continue
            id, data = line.split(":")
            mark = lab.calculate_marks(data)
            h.append_marks(mark)
            for s in class_list:
                if id == s.get_id():
                    s.append_marks(mark)
    return h
Example no. 15
def go(traceFileName):

    #Initialize our thread manager
    myManager = Manager()
    globalProcessReferenceDict = myManager.dict()
    histogramList = myManager.list()

    #Keep a list of each of the processes that are processing a piece of the trace
    workers = []

    #Compute the partitioning of the trace
    fp = open(os.path.join(PATH_TO_TRACE_DIR, "Traces", traceFileName),
              "r",
              encoding="utf-8")
    tracePartitionLength = ChopTrace(GetNumberReferencesInFile(fp),
                                     NUMBER_OF_THREADS)
    fp.close()

    #Start the worker threads!
    for i in range(0, NUMBER_OF_THREADS):
        pid = Process(target=Worker,
                      args=[
                          i, i * tracePartitionLength, tracePartitionLength,
                          globalProcessReferenceDict, histogramList,
                          traceFileName
                      ])
        pid.start()
        workers.append(pid)

    for thisWorker in workers:
        thisWorker.join()
    for thisWorker in workers:
        thisWorker.terminate()

    #Now, merge and sum the per-worker histograms

    #Convert each sub dict (histogram) to a counter type
    for i in range(0, len(histogramList)):
        histogramList[i] = Counter(histogramList[i])

    #Sum them using counter sum method
    resultBuckets = histogramList[0]
    for i in range(1, len(histogramList)):
        resultBuckets = resultBuckets + histogramList[i]

    resultBuckets = dict(resultBuckets)

    result = Histogram()
    result.SetBuckets(resultBuckets)
    #result.CreateCacheCurve()

    return result
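# Illustrative sketch of the merge step above: converting each per-worker
# histogram dict to collections.Counter lets the buckets be summed with the
# + operator.  The dicts below are made-up worker results.
from collections import Counter

worker_histograms = [{1: 3, 2: 1}, {1: 2, 5: 4}]
merged = Counter()
for h in worker_histograms:
    merged = merged + Counter(h)
# dict(merged) == {1: 5, 2: 1, 5: 4}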
Example no. 16
    def test(self):
        numberOfBins = 4
        multiplicity = 2
        h = Histogram(numberOfBins, multiplicity)
        h.setCurrentToMinimum()
        h.accumulate(0, 1)
        h.accumulate(1, 2)
        h.accumulate(2, 2)
        h.accumulate(3, 1)

        frameTimes = [0, 1]
        recordedSpecies = [0, 1, 2]
        x = HistogramFrames(numberOfBins, multiplicity, recordedSpecies)
        x.setFrameTimes(frameTimes)
        for _ in range(2):
            x.setCurrentToMinimum()
            for i in range(len(frameTimes)):
                for j in range(len(recordedSpecies)):
                    x.histograms[i][j].merge(h)
        assert not x.hasErrors()

        stream = StringIO()
        writer = XmlWriter(stream)
        writer.beginDocument()
        x.writeXml(writer, 'model', 'method')
        writer.endDocument()
Example no. 17
def _(data: Iterable[CipherText],
      return_len: int = 5) -> Sequence[tuple[CipherText, str]]:
    with multiprocessing.Pool() as pool:
        o = pool.map(recover_plaintext, data)

    flattened = []
    for sub_list in o:
        flattened.extend(sub_list)

    histogram = Histogram()
    sorted_items = sorted(flattened,
                          key=lambda x: histogram.score(x[0].get_bytes()))

    return sorted_items[:return_len]
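# Hedged aside: the flattening loop above is equivalent to
# itertools.chain.from_iterable, shown here on made-up sub-lists.
from itertools import chain

toy_sublists = [[1, 2], [3], [4, 5]]
flattened_toy = list(chain.from_iterable(toy_sublists))
# flattened_toy == [1, 2, 3, 4, 5]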
Example no. 18
    def define(self, win, chunk):
        width, height = corner = win.getwinsize()
        bounds = (0, 0), corner
        self.chunk = chunk
        # Integer division keeps the step an int on Python 3 as well.
        self.step = (len(chunk) - 1) // (width // 2 + 1) + 1
        ydata = _make_ydata(chunk, self.step)
        return Histogram.define(self, (win, bounds, ydata, (0, 128)))
Example no. 19
 def setRecordedSpecies(self, recordedSpecies):
     """Construct histograms for each recorded species."""
     self.recordedSpecies = recordedSpecies
     self.histograms = []
     for s in self.recordedSpecies:
         self.histograms.append(
             Histogram(self.numberOfBins, self.multiplicity))
Example no. 20
    def AddHistogram(self, name, histo, layer=0, legendLine=None, stack=True):
        '''Add a ROOT histogram, with a given name.

        Histograms will be drawn by increasing layer.'''
        tmp = Histogram(name, histo, layer, legendLine, stack=stack)
        self.histos.append(tmp)
        self.histosDict[name] = tmp
        return tmp
Example no. 21
def runVisualization(reader, actor):
    renderer = vtk.vtkRenderer()
    renderer.AddActor(actor)

    h = Histogram(10, reader)
    h.plotHistogram(renderer)

    renderer.SetBackground(1, 0.25, 0.25)

    window = vtk.vtkRenderWindow()
    window.AddRenderer(renderer)

    iren = vtk.vtkRenderWindowInteractor()
    iren.SetRenderWindow(window)

    window.Render()
    iren.Start()
Example no. 22
 def initialize(self):
     """Construct histograms for each frame and recorded species."""
     assert self.frameTimes is not None and self.recordedSpecies
     self.histograms = []
     for t in self.frameTimes:
         f = []
         for s in self.recordedSpecies:
             f.append(Histogram(self.numberOfBins, self.multiplicity))
         self.histograms.append(f)
Example no. 23
 def openTargetFileDialog(self):
     options = QtWidgets.QFileDialog.Options()
     filename, _ = QtWidgets.QFileDialog.getOpenFileName(
         None,
         "QFileDialog.getOpenFileName()",
         "",
         "PNG Files (*.png);;JPG Files (*.jpg)",
         options=options)
     if filename:
         pixMap = QtGui.QPixmap(filename)
         self.target_image_label.setPixmap(pixMap)
         hist = Histogram(filename)
         output = "target_histogram.png"
         hist.createHistogramPlotImage(output)
         pixMapHist = QtGui.QPixmap(output)
         self.target_hist_label.setPixmap(pixMapHist)
         # Save the object for matching
         self.target_hist = hist
Example no. 24
    def setup_histograms(self):
        cells_in_width = int(ceil(self.image.width / float(self.cell_size)))
        cells_in_height = int(ceil(self.image.height / float(self.cell_size)))
        self.histograms = []

        for i in range(cells_in_height):
            self.histograms.append([])

            for j in range(cells_in_width):
                self.histograms[i].append(Histogram(self.bins, 0, self.bins))
Example no. 25
 def testEmptyBins(self):
     x = Histogram(10, 2)
     self.assertEqual(x.size(), 10)
     self.assertEqual(x.min(), float('inf'))
     self.assertEqual(x.max(), 1.)
     self.assertEqual(x._upperBound(), 10.)
     self.assertEqual(len(x.histograms), 2)
     p = x.getProbabilities()
     self.assertTrue((p == numpy.zeros(10, numpy.float64)).all())
Example no. 26
def addFile(snortFile, repositoryLocations, substringLength, fileToAdd):
    """Create a snort rule for a specified input file.

    Keyword arguments:
    snortFile -- path to the file to write snort rules out to
    repositoryLocations -- list of strings of the paths to each repository location
    substringLength -- length of the unique substring to select
    fileToAdd -- path of the input file the substring was generated from
    """
    
    #old: hist = Histogram(repositoryLocationFile, substringLength, fileToAdd)
    hist = Histogram(repositoryLocations, substringLength, fileToAdd)
    substring = hist.selectSubstring()
    
    #if a unique substring is not found, don't add that rule
    if substring != "":
        rule = RuleCreator(snortFile, repositoryLocations, fileToAdd, substring)
        rule.addSnortRule()
        rule.regexInRepository()
        print "rule added for " + fileToAdd
Example no. 27
def read_data(assessment, classlist):

    questions = len(assessment.get_answer_list())
    histogram = Histogram(questions + 1, questions)

    txt_name = assessment.get_name() + ".txt"
    txt_file = open(txt_name, "r")
    txt_content = txt_file.read()
    txt_content = txt_content.split()
    txt_file.close()

    for r in range(len(txt_content)):
        field = txt_content[r].split(":")
        x = assessment.calculate_marks(field[1])
        histogram.append_marks(x)
        for y in range(len(classlist)):
            if r == y:
                classlist[y].append_marks(x)

    return histogram
Example no. 28
    def __init__(self, parent=None):
        super(HistogramItem, self).__init__()

        self._histogram = Histogram()
        self._brush = QBrush()
        self._pen = QPen()
        self._polygonItem = QGraphicsPolygonItem(self)
        self._lines = []
        self._margins = QMargins(0, 0, 0, 0)
        self.functionType = self.TypeNormal

        self.setBrush(Qt.NoBrush)
        self.setPen(Qt.NoPen)
Example no. 29
def calculate_incomparable_enter_hist(zero_loss, enter_table, u, uA,
                                      uA_loss_events, uB, uB_loss_events,
                                      hist_both_exit):
    """Returns the enter table entry for [uA][uB] with the assumption that A is on a different part of the species
    tree from B
    :param zero_loss:           Whether losses should not count
    :param enter_table:         The DP table we are computing part of
    :param u:                   The gene node whose group we are in
    :param uA:                  The first mapping node to compare
    :param uA_loss_events:      A list of the loss events on that mapping node
    :param uB:                  The second mapping node to compare
    :param uB_loss_events:      A list of the loss events on that mapping node
    :param hist_both_exit:      The histogram of the double-exit that was previously calculated for uA and uB
    """
    hists = [hist_both_exit]
    lost_hists = []

    # We add up all of the hists for both uA's and uB's loss events.
    for event in uA_loss_events:
        a_child = event[1][1]
        hists.append(enter_table[u][(u,
                                     a_child)][uB] << cost(event, zero_loss))
    for event in uB_loss_events:
        b_child = event[1][1]
        hists.append(enter_table[u][uA][(u,
                                         b_child)] << cost(event, zero_loss))
    # The previous histograms will overcount the possibility of taking a loss in both children.
    # Since enter[u][(u, a_child)][(u, b_child)] is counted by both
    # enter[u][uA][(u, b_child)] and enter[u][(u, a_child)][uB]
    # Here we compute enter[u][(u, a_child)][(u, b_child)] in order to subtract it off
    for loss_event_A, loss_event_B in product(uA_loss_events, uB_loss_events):
        a_child = loss_event_A[1][1]
        b_child = loss_event_B[1][1]
        loss_cost = cost(loss_event_A, zero_loss) + cost(
            loss_event_B, zero_loss)
        lost_hists.append(enter_table[u][(u, a_child)][(u,
                                                        b_child)] << loss_cost)
    return Histogram.sum(hists) - Histogram.sum(lost_hists)
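# Illustrative inclusion-exclusion sketch (made-up numbers, not the DP
# table): if 4 pairs reach (uA, uB) through a loss under uA, 3 through a
# loss under uB, and 2 of those pairs take a loss on both sides, the 2
# both-loss pairs are counted twice and must be subtracted once, which is
# exactly what the lost_hists subtraction above does.
via_uA_loss = 4
via_uB_loss = 3
via_both_losses = 2
total_loss_pairs = via_uA_loss + via_uB_loss - via_both_losses   # == 5
assert total_loss_pairs == 5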
Example no. 30
def histogramify(input, hists):
    X = pd.DataFrame()
    for i in range(hists):
        temp = input.iloc[0: , (10*i):((10*i)+10)]
        new_column = []
        for j in range(len(input)):
            hist = Histogram(temp.iloc[j , 0:])
            new_column.append(hist)      
            
        X['h' + str(i+1)] = new_column
    
    temp = input.iloc[0:, 50:]
    temp = pd.concat([X, temp], axis = 1)
    return temp
Example no. 31
 def equalizeHistogram(self):
     if (self.input_image_label.pixmap() is not None
             and self.target_image_label.pixmap() is not None):
         filename = "result.png"
         matcher = HistogramMatcher(self.input_hist, self.target_hist,
                                    filename)
         matcher.createLookupTable()
         matcher.constructImage()
         # Show new image
         pixMap = QtGui.QPixmap(filename)
         self.result_image_label.setPixmap(pixMap)
         # Create histogram of the result
         hist = Histogram(filename)
         hist_filename = "result_histogram.png"
         hist.createHistogramPlotImage(hist_filename)
         # Show histogram
         pixMapHist = QtGui.QPixmap(hist_filename)
         self.result_hist_label.setPixmap(pixMapHist)
         self.result_hist = hist
     else:
         self.Ui_Dialog = Ui_Dialog()
         self.Ui_Dialog.setupUi(self.Ui_Dialog)
         self.Ui_Dialog.show()
Example no. 32
    def get_single_histogram(self):
        """Create a single histogram of the local binary patterns in the
        image."""
        h = Histogram(self.bins, 0, self.bins)

        for y, x, value in self.image:
            h.add(self.pattern_callback(y, x, value))

        h.normalize()

        return h