Example 1
def test_smo_fit():
    df = test_load_data()
    df.set_index("ID", inplace=True)

    som = SOM(d_=1)
    som.fit(df.values)

    return som, df
Example 2
def run_from_som():
    img = Dataset("./image/Audrey.png")
    data = img.data
    carte = SOM(data, kohonen())
    load_som_as_image("./results/deep/star_12n_3x3_500epoch_comp.png", carte)
    img.compression(carte, "reconstruction_500epoch.png")
    im2 = display_som(carte.get_som_as_list())
    im2.save(output_path + "som_500epoch.png")
Example 3
def test_smo_predict():
    df = test_load_data()
    df.set_index("ID", inplace=True)
    x = df.values
    som = SOM(d_=1, k_=4)
    som.fit(x)
    rst = som.predict(x)
    print(rst)
    return som, df
Example 4
def calc_deltat_over_t(axis, axis_err2=None):
    """
    This function takes a TOF axis and calculates the quantity Delta t / t
    for every element. 

    @param axis: The TOF axis from which Delta t / t will be calculated
    @type axis: C{nessi_list.NessiList}
    
    @param axis_err2: (OPTIONAL) The error^2 on the incoming TOF axis
    @type axis_err2: C{nessi_list.NessiList}


    @return: The calculated Delta t / t
    @rtype: C{SOM.SOM}
    """
    import nessi_list

    # Check to see if incoming is really a NessiList
    try:
        axis.__type__
    except AttributeError:
        raise RuntimeError("The object passed to this function needs to be a "\
                           +"NessiList. Do not understand how to deal with "\
                           +"%s" % type(axis))

    len_axis = len(axis)
    if axis_err2 is None:
        axis_err2 = nessi_list.NessiList(len_axis)

    deltat = nessi_list.NessiList()
    deltat_err2 = nessi_list.NessiList()

    # Calculate bin deltas, assume axis in ascending order. Variances of
    # independent values add under subtraction, hence the + on the err2 line.
    for i in xrange(len_axis - 1):
        deltat.append(axis[i + 1] - axis[i])
        deltat_err2.append(axis_err2[i + 1] + axis_err2[i])

    # Calculate bin centers
    import utils
    (binc, binc_err2) = utils.calc_bin_centers(axis, axis_err2)

    # Calculate delta t / t
    import array_manip
    dtot = array_manip.div_ncerr(deltat, deltat_err2, binc, binc_err2)

    import SOM
    som = SOM.SOM()
    so = SOM.SO()
    so.y = dtot[0]
    so.var_y = dtot[1]
    som.append(so)

    som.setDataSetType("density")
    som.setYLabel("deltat_over_t")

    return som
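A short usage sketch for the function above (hedged: it assumes the nessi_list package from the snippet is importable, and the code is Python 2 era, as the xrange call suggests):

import nessi_list

tof_axis = nessi_list.NessiList()
for tof in (1000.0, 1010.0, 1025.0, 1045.0):
    tof_axis.append(tof)

# axis_err2 defaults to a zero-filled NessiList inside the function
dtot = calc_deltat_over_t(tof_axis)
print(dtot[0])   # the single SO holding Delta t / t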
Example 5
 def somClustering(self,
                   groupedXYZ=False,
                   mapFileName=None,
                   confFileName="SOM.conf",
                   threshold=None):
     matrix = self.matrix
     vectorNames, vectors = self.matrix2vectors(matrix,
                                                groupedXYZ=groupedXYZ)
     if groupedXYZ:
         som = SOM.SOM(vectors,
                       vectorNames,
                       distFunc=self.distFunc,
                       mapFileName=mapFileName,
                       confname=confFileName)
     else:
         som = SOM.SOM(vectors,
                       vectorNames,
                       mapFileName=mapFileName,
                       confname=confFileName)
     if mapFileName is None:
         map = som.learn()
     else:
         map = som.M
     somA = somAnalysis.analysis(map=map)
     if threshold is None:
         threshold = somA.xiT()
     clustersMap = som.clusterDiscovery(map, threshold)
     #  print clustersMap
     distMatrix = som.clusterDistance(clustersMap, map)
     order = self.upgma(distMatrix, reverse=True)
     #  print order
     nameClusters = som.calibration(map, clustersMap, name=True)[1]
     clusters = nameClusters.values()
     #  print clusters
     print 'Number of clusters found: %s' % len(clusters)
     #  clusters = som.calibration(map, clustersMap, name = True)
     #  clusterValues = clusters[0]
     #  clusterNames = clusters[1]
     #  clusters = som.tree(map)
     #  print clusters
     #  clusters = [clusters[e] for e in order]
     sortedClusters = []
     for e in order:
         try:
             sortedClusters.append(clusters[e])
         except IndexError:
             pass
     return sortedClusters
Example 6
    def __set_axes(self, som):
        """
        This method sets up the x and y axes for the spectrum object

        @param som: The object to have its information read from file.
        @type som: L{SOM.SOM}
        """
        import os
        # Need the top four lines of the file to get the number of axis
        # elements
        for i in xrange(4):
            line = self.__file.readline().rstrip(os.linesep)
            if i == 1:
                self.__ny = int(line)
            elif i == 3:
                self.__nx = int(line)

        self.__axis_info = []

        so = SOM.SO(id=0, dim=2, construct=True)
        som.append(so)

        # Y axis is fastest runner, so do it first
        self.__create_axis("y", som)
        self.__create_axis("x", som)

        # Get the x and y axis label and units
        som.setAllAxisLabels([self.__axis_info[1][0], self.__axis_info[0][0]])
        som.setAllAxisUnits([self.__axis_info[1][1], self.__axis_info[0][1]])
Example 7
    def __init__(self, value, error=None, units=None, **kwargs):
        """
        The class constructor

        @param value: The parameter value
        @type value: C{float}
        
        @param error: The parameter error. Note this can be error^2
        @type error: C{float}
        
        @param units: The parameter units
        @type units: C{string}
        
        @param kwargs: A list of keyword arguments that the function accepts:
        
        @keyword is_square: Flag that tells the object if the provided error is
                            the error squared (error^2). The default behavior
                            is I{True}.
        @type is_square: C{boolean}
        """
        self.__nxpar = SOM.NxParameter(float(value), units)
        # Guard the documented default of error=None before converting
        self.__error = float(error) if error is not None else None
        try:
            self.__is_square = kwargs["is_square"]
        except KeyError:
            self.__is_square = True
Example 8
    def readSO(self, som, so_id, parts):
        """
        This method reads the data lines and creates the appropriate SOs for
        the data.

        @param som: The object to have its information set from file.
        @type som: L{SOM.SOM}

        @param so_id: The identifier for the individual spectrum
        @type so_id: C{tuple}
        
        @param parts: The object containing the data
        @type parts: C{list}
        """
        so = SOM.SO()

        # Set the spectrum ID
        so.id = so_id
        # Get the value
        so.y = float(parts[-2])
        # The file stores plain error; we carry error^2, so square it unless
        # the no-square flag was set
        if self.__no_sqr__:
            so.var_y = float(parts[-1])
        else:
            so.var_y = (float(parts[-1]) * float(parts[-1]))

        som.append(so)
Example 9
def generate_som(data_type="histogram", dim=1, number=2):
    """
    This function generates a C{SOM} for testing purposes.

    @param data_type: (OPTIONAL) is defined as histogram or density
    @type data_type: C{string}
    @param dim: (OPTIONAL) is the dimensionality of the individual C{SO}s
    @type dim: C{int}
    @param number: (OPTIONAL) is the number of C{SO}s generated
    @type number: C{int}

    @return: A C{SOM} containing the requested information
    @rtype: C{SOM.SOM}
    """

    som = SOM.SOM()
    som.setDataSetType(data_type)
    count = 0
    for i in range(number):
        so = generate_so(data_type, count, count + 5, dim)
        so.id = i + 1
        som.append(so)
        count += 5

    return som
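A hedged usage sketch for the generator above, mirroring how the __main__ test blocks elsewhere in this collection build test data:

som = generate_som(data_type="histogram", dim=1, number=2)
print(som.getDataSetType())   # "histogram"
print(som[0].id)              # 1
print(som[1].id)              # 2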
Example 10
    def getSOM(self, som_id=None, **kwargs):
        """
        This method parses the resource and creates a SOM from the information.

        @param som_id: The name of the SOM. The default value is C{None}. This
        retrieves all information.

        @param kwargs: A list of keyword arguments that the function accepts:

        @keyword no_sqr: Do not square the error values from the file. This is
                         important if the data will be subsequently plotted.
                         The default value is I{False}.
        @type no_sqr: C{boolean}        
        """
        try:
            self.__no_sqr__ = kwargs["no_sqr"]
        except KeyError:
            self.__no_sqr__ = False

        som = SOM.SOM()
        som.setDataSetType("density")

        self.__set_axes(som)
        self.__readData(som)

        som.setYLabel("Counts")
        uscale = som.getAxisUnits(1) + " " + som.getAxisUnits(0)
        som.setYUnits("Counts / " + uscale)

        som.attr_list = dst_utils.parse_spec_header(self.__file)

        return som
Example 11
def empty_result(obj1, obj2=None):
    """
    This function inspects the arguments and returns an appropriate
    return type for an operation using the arguments. The object can
    be filled using L{result_insert}.

    @param obj1: The first object to be inspected
    @type obj1: C{SOM.SOM}, C{SOM.SO} or C{tuple}
    
    @param obj2: (OPTIONAL) The second object to be inspected
    @type obj2: C{SOM.SOM}, C{SOM.SO} or C{tuple}

    
    @return: A C{tuple} containing the requested object (C{SOM}, C{SO} or
             C{tuple}) and the corresponding descriptor
    @rtype: C{tuple}
    """

    obj1_type = get_type(obj1)

    if obj2 is None:
        if obj1_type == SOM_type:
            return (SOM.SOM(), SOM_type)
        elif obj1_type == SO_type:
            return (SOM.SO(obj1.dim()), SO_type)
        elif obj1_type == list_type:
            return ([], list_type)
        else:
            return ([], num_type)
    # If obj2 is not None, go on.
    else:
        pass
    
    obj2_type = get_type(obj2)

    if obj1_type == SOM_type or obj2_type == SOM_type:
        return (SOM.SOM(), SOM_type)
    elif obj1_type == SO_type or obj2_type == SO_type:
        if obj1_type == SO_type:
            return (SOM.SO(obj1.dim()), SO_type)
        elif obj2_type == SO_type:
            return (SOM.SO(obj2.dim()), SO_type)
    elif obj1_type == list_type or obj2_type == list_type:
        return ([], list_type)
    else:
        return ([], num_type)
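A hedged usage sketch: empty_result promotes the result container to the richer of the two input types, so a SOM paired with a SO yields an empty SOM (SOM_type is a module-level descriptor tag not shown in this snippet):

import SOM

som_in = SOM.SOM()
so_in = SOM.SO()
result, descr = empty_result(som_in, so_in)
# result is an empty SOM.SOM(); descr is the SOM_type descriptor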
Example 12
def sort_cities(weights, cities_dataset):
    
    cities_indexes_dict = {}
    n_cities = cities_dataset.shape[0]
    for city_index in range(n_cities):
        nearest_idx = SOM.get_best_matching_neuron(weights, cities_dataset[city_index, :])
        cities_indexes_dict[city_index] = nearest_idx
        
    sorted_cities_index = dict(sorted(cities_indexes_dict.items(), key=lambda item: item[1]))
    return sorted_cities_index.keys()
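The snippet assumes a SOM module exposing get_best_matching_neuron, which is not shown in this collection; a minimal NumPy sketch of such a best-matching-unit lookup (an illustration, not the project's code, assuming weights is an (n_neurons, n_features) array) could be:

import numpy as np

def get_best_matching_neuron(weights, sample):
    # Index of the neuron whose weight vector is closest to the sample
    # in Euclidean distance
    distances = np.linalg.norm(weights - sample, axis=1)
    return int(np.argmin(distances))

Example 15 below uses the same helper to order animals instead of cities.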
Example 13
    def __readSO(self, dlines, nx_id, som):
        """
        This method handles parsing the data into the respective spectrum
        objects.

        @param dlines: The lines from the file containing the data information
        @type dlines: C{list} of C{string}s

        @param nx_id: The NeXus pixel ID
        @type nx_id: L{SOM.NeXusId}

        @param som: The object to have its information read from file.
        @type som: L{SOM.SOM}
        """
        len_dlines = len(dlines)
        if not len_dlines or nx_id is None:
            return

        if not len(som.getDataSetType()):
            if len(dlines[-1].split()) == self.__columns:
                som.setDataSetType("density")
            else:
                som.setDataSetType("histogram")

        try:
            so_id = nx_id.toTuple()
        except AttributeError:
            so_id = nx_id

        so = SOM.SO(construct=True, id=so_id, dim=self.__x_axes)

        if som.getDataSetType() == "histogram":
            num_lines = len_dlines - 1
        else:
            num_lines = len_dlines

        for i in xrange(num_lines):
            parts = dlines[i].split()

            for j in xrange(self.__x_axes):
                so.axis[j].val.append(float(parts[j]))

            so.y.append(float(parts[-2]))
            if self.__no_sqr__:
                so.var_y.append(float(parts[-1]))
            else:
                so.var_y.append(float(parts[-1]) * float(parts[-1]))

        if som.getDataSetType() == "histogram":
            parts = dlines[-1].split()
            for j in xrange(self.__x_axes):
                so.axis[j].val.append(float(parts[j]))

        som.append(so)
Example 14
 def plotSomBmus(self,
                 groupedXYZ=False,
                 mapFileName=None,
                 confFileName='SOM.conf'):
     matrix = self.matrix
     vectorNames, vectors = self.matrix2vectors(matrix,
                                                groupedXYZ=groupedXYZ)
     if groupedXYZ:
         som = SOM.SOM(vectors,
                       vectorNames,
                       distFunc=self.distFunc,
                       mapFileName=mapFileName,
                       confname=confFileName)
     else:
         som = SOM.SOM(vectors,
                       vectorNames,
                       mapFileName=mapFileName,
                       confname=confFileName)
     if mapFileName is None:
         map = som.loadMap('map_%sx%s.dat' % (som.X, som.Y))
     else:
         map = som.M
     cardinal = som.cardinal
     k = itertools.count()
     rm = []
     for v in vectors:
         ij = som.findBMU(k.next(), map)
         bmu = map[ij[0], ij[1]]
         if groupedXYZ:
             flattenBmu = list(bmu.flat)
             rm.extend([
                 flattenBmu[0:3 * cardinal],
                 flattenBmu[3 * cardinal:2 * 3 * cardinal],
                 flattenBmu[2 * 3 * cardinal:3 * 3 * cardinal]
             ])
         else:
             rm.append(bmu)
     return numpy.array(rm)
Example 15
def sort_animals(weights, animals_dataset):
    animals_names = get_name_animals()
    
    animals_indexes_dict = {}
    n_animals = animals_dataset.shape[0]
    for animal_index in range(n_animals):
        nearest_idx = SOM.get_best_matching_neuron(weights, animals_dataset[animal_index, :])
        animals_indexes_dict[animal_index] = nearest_idx
        
    sorted_animals_index = sorted(animals_indexes_dict.items(), key=lambda item: item[1])
    animals_sorted = []
    for animal_idx in sorted_animals_index:
        animals_sorted.append(animals_names[animal_idx[0]])
    return animals_sorted
Example 16
def main():
    env = gym.make('internet.SlitherIO-v0')
    env.configure(remotes=1)
    observation_n = env.reset()
    top_left_x = 20
    top_left_y = 85
    tiny_image_h = 12
    tiny_image_w = 18  # for tiny image
    SOM_WIDTH = 6
    SOM_HEIGHT = 6
    som = SOM.SOM(SOM_WIDTH, SOM_HEIGHT, tiny_image_w, tiny_image_h, radius=3)

    bottom_right_x = 520
    bottom_right_y = 385

    ## Define Actions on keyboard
    possible_actions = []
    left = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', True),
            ('KeyEvent', 'ArrowRight', False)]
    right = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', False),
             ('KeyEvent', 'ArrowRight', True)]
    forward = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', False),
               ('KeyEvent', 'ArrowRight', False)]
    possible_actions.append(left)
    possible_actions.append(right)
    possible_actions.append(forward)

    ## Setup SOM
    default_action_n = [forward for ob in observation_n]
    # main logic
    while True:
        # increment a counter for number of iterations
        observation_n, reward_n, done_n, info = env.step(default_action_n)
        env.render()
        if observation_n[0] is not None:
            photo_array = crop_photo(observation_n[0]["vision"], top_left_x,
                                     top_left_y, bottom_right_x,
                                     bottom_right_y)
            #shrunken_image = scipy.misc.imresize(photo_array, 0.5, "nearest")
            shrunken_image = cv2.cvtColor(photo_array, cv2.COLOR_RGB2BGR)
            #shrunken_image2 = cv2.cvtColor(shrunken_image, cv2.COLOR_BGR2GRAY)

            tiny = find_orbs(shrunken_image)

            tiny = cv2.resize(tiny, (18, 12))
            som.train(tiny)

            cv2.imshow("RobotVision", shrunken_image)
            print_som(som)
            cv2.waitKey(1)
Example 17
def generate_so(data_type, start, stop=0, dim=1, extra=0):
    """
    This function generates a C{SO} for testing purposes. The C{SO} can be
    either histogram or density data.

    @param data_type: is defined as histogram or density
    @type data_type: C{string}
    @param start: is the starting value for number generation
    @type start: C{int}
    @param stop: (OPTIONAL) is the stopping value for number generation
    @type stop: C{int}
    @param dim: (OPTIONAL) allows for making C{SO}s multi-dimensional
    @type dim: C{int}
    @param extra: (OPTIONAL) is an offset added to the B{start} and B{stop}
    values
    @type extra: C{int}              
    
    @return: A C{SO} filled with default information
    @rtype: C{SOM.SO}
    """
    
    if stop < start:
        stop = start
        start = 0
        
    so = SOM.SO(dim, construct=True)
    if start == stop:
        return so

    if data_type.lower() == "histogram":
        num = stop - start + 1
    else:
        num = stop - start

    for i in range(dim):
        so.axis[i].val.extend(range(num))
        size = len(so.axis[i].val)

        if i == 0:
            so.y.extend(range(start + extra, stop + extra))
            so.var_y.extend(range(start + extra, stop + extra))
        else:
            counter = 0
            while counter < (size - 2):
                so.y.extend(range(start + extra, stop + extra))
                so.var_y.extend(range(start + extra, stop + extra))
                counter += 1

    return so
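A hedged usage sketch matching the calls in the __main__ block of Example 26 below:

so = generate_so("histogram", 0, 5, 1, 1)   # 1-D histogram, values offset by 1
print(len(so.axis[0].val))   # 6: histogram axis carries one extra bin edge
print(len(so.y))             # 5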
Example 18
def main():
    env = gym.make('internet.SlitherIO-v0')
    env.configure(remotes=1)
    observation_n = env.reset()
    top_left_x = 20
    top_left_y = 85
    bottom_right_x = 520
    bottom_right_y = 385
    scale_factor = 0.1
    radius = 3
    SOM_WIDTH = 4
    SOM_HEIGHT = 4
    image_height = int((bottom_right_y - top_left_y) * scale_factor)
    image_width = int((bottom_right_x - top_left_x) * scale_factor)

    som = SOM.SOM(SOM_WIDTH, SOM_HEIGHT, image_width, image_height, radius)
    ## Define Actions on keyboard
    possible_actions = []
    left = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', True),
            ('KeyEvent', 'ArrowRight', False)]
    right = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', False),
             ('KeyEvent', 'ArrowRight', True)]
    forward = [('KeyEvent', 'ArrowUp', True), ('KeyEvent', 'ArrowLeft', False),
               ('KeyEvent', 'ArrowRight', False)]
    possible_actions.append(left)
    possible_actions.append(right)
    possible_actions.append(forward)

    ## Setup SOM
    default_action_n = [forward for ob in observation_n]
    # main logic
    while True:
        # increment a counter for number of iterations
        observation_n, reward_n, done_n, info = env.step(default_action_n)
        env.render()
        if observation_n[0] is not None:
            photo_array = crop_photo(observation_n[0]["vision"], top_left_x,
                                     top_left_y, bottom_right_x,
                                     bottom_right_y)
            shrunken_image = scipy.misc.imresize(photo_array, scale_factor,
                                                 "nearest")
            cvImage = cv2.cvtColor(shrunken_image, cv2.COLOR_RGB2BGR)
            som.train(shrunken_image)
            cv2.imshow("RobotVision", cvImage)
            cv2.waitKey(1)
            # print(image)
            print_som(som)
Example 19
File: Run.py Project: johng/HeatMap
def main():

    args = sys.argv

    if len(args) != 2:
        print "<file_name>"
        exit(-1)

    data_loc = args[1]

    print data_loc
    data = pd.read_csv(data_loc, sep=',', header=1).as_matrix()

    print data.shape

    som = s.SelfOrganisingMap([data.shape[1], 20 * 20], 20)
    som.Train(data, 5000, 0.5)
Example 20
def scale_proton_charge(ipc, scale_units):
    """
    This function takes a proton charge and scales it to either Coulombs
    (I{C}), milliCoulombs (I{mC}) or microCoulombs (I{uC}).

    @param ipc: The proton charge to be scaled
    @type ipc: C{SOM.NxParameter}

    @param scale_units: The short units to scale the proton charge into
    @type scale_units: C{string}


    @return: The scaled proton charge
    @rtype: C{SOM.NxParameter}
    """
    scale_info = {"C": {"scale": 1.0e-12, "units": "Coulomb"},
                  "mC": {"scale": 1.0e-9, "units": "milliCoulomb"},
                  "uC": {"scale": 1.0e-6, "units": "microCoulomb"}}

    pc_new = ipc.getValue() * scale_info[scale_units]["scale"]

    return SOM.NxParameter(pc_new, scale_info[scale_units]["units"])
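A hedged usage sketch, assuming SOM.NxParameter wraps a value/units pair as elsewhere in this collection and that the incoming charge arrives in picoCoulombs (which the 1.0e-12 factor for "C" suggests):

import SOM

raw_pc = SOM.NxParameter(3.5e12, "picoCoulomb")
scaled = scale_proton_charge(raw_pc, "uC")
print(scaled.getValue())   # 3500000.0 microCoulombs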
Example 21
 def __init__(self, MapFile=None, som=None):
     self.MapFile = MapFile
     if som is None:
         self.som = SOM.SOM(filename='ALL_vectors')
     else:
         self.som = som
     inputFile = open(self.som.AuPosSOM_inputFile, 'r')
     lines = inputFile.readlines()
     inputFile.close()
     test = 2
     KL = []
     for line in lines:
         if re.findall('<Known_ligands>', line):
             test = test - 1
         if test == 1 and not re.findall(
                 '<Known_ligands>', line) and len(line.split('#')) != 2:
             KL.append(line.strip())
     self.KL = KL  # list of known ligands
     test = 2
     UL = []
     for line in lines:
         if re.findall('<Unknown_ligands>', line):
             test = test - 1
         if test == 1 and not re.findall(
                 '<Unknown_ligands>', line) and len(line.split('#')) != 2:
             UL.append(line.strip())
     self.UL = UL  # list of unknown ligands
     if MapFile is None:
         self.som.learn()
     else:
         MapFileFile = open(MapFile, 'r')
         self.som.Map = cPickle.load(MapFileFile)
         MapFileFile.close()
     self.BMUs = []
     for k in range(len(self.som.inputvectors)):
         self.BMUs.append(self.som.findBMU(k, self.som.Map))
Example 22
 def __init__(self, dirStr_result):
     self.dirStr_result = dirStr_result
     with open(self.dirStr_result + "\\nodes.bin", "rb") as nodes:
         self.nodes = pickle.load(nodes)        
     
     #todo
     self.actNodesRealNum = self.nodes[:,-1].reshape(SOM.conf.N, SOM.conf.N)
     self.actNodes = np.round(self.actNodesRealNum)
     self.correctActNodes = SOM.getAnsNodes(np.round(self.nodes)).reshape(SOM.conf.N, SOM.conf.N)
     self.afterNodesRounded_hamming = getColoredNodes(np.round(self.nodes),
                                     color="bits-scale")
     self.afterNodesRounded = getColoredNodes(np.round(self.nodes),
                                     color="bits2decimal-scale")
 
     #todo
     self.afterNodesReverse = np.round(self.nodes)[:,0:-1] #get 6bit nodes
     #todo
     self.afterNodesReverse = getColoredNodes(self.afterNodesReverse[:,::-1], color="bits2decimal-scale")
 
     self.afterNodesSeparated = self.afterNodesRounded.copy()
     self.afterNodesColored = getColoredNodes(np.round(self.nodes), color="colored") 
     
     # mapping of all classifiers
     #mappingDataList = np.array(generateMUXNodes(100,includeAns=True))
     inp_sequencial = []
     for i in range(0,64):
         inp_sequencial.append(str(bin(i))[2:].zfill(6))
         
     inp_sequencial = [list(x) for x in inp_sequencial]
     for i, row in enumerate(inp_sequencial):
         inp_sequencial[i] = [int(x) for x in row]
     
     for cl in inp_sequencial:
         cl.append(util.getAns(cl))
     
     self.inp_sequencial = np.array(inp_sequencial).reshape(len(inp_sequencial), len(inp_sequencial[0]))
Example 23
if glob.glob(inputMatrixFileName) == []:
    print 'No inputMatrix.dat file!'
else:
    if inputMatrixFileName.split('.')[1] == 'npy':
        inputMatrix = numpy.load(inputMatrixFileName)
    else:
        inMfile = open(inputMatrixFileName)
        inputMatrix = cPickle.load(inMfile)
        inMfile.close()

#Learning #############################################################################################################
if glob.glob(mapFileName) == []:
    som = SOM.SOM(inputMatrix,
                  range(inputMatrix.shape[0]),
                  metric='euclidean',
                  autoParam=autoParam,
                  sort2ndPhase=sort2ndPhase,
                  toricMap=toricMap,
                  randomInit=randomInit,
                  autoSizeMap=autoSizeMap)
    som.learn()
else:
    mapFileName = glob.glob(mapFileName)[0]
    print "Map file: %s" % mapFileName
    som = SOM.SOM(inputMatrix,
                  range(inputMatrix.shape[0]),
                  mapFileName=mapFileName,
                  metric='euclidean',
                  autoParam=autoParam,
                  sort2ndPhase=sort2ndPhase,
                  toricMap=toricMap,
                  randomInit=randomInit,
                  autoSizeMap=autoSizeMap)
Example 24
        hlr_utils.result_insert(result, res_descr, rev_value, map_so, "x",
                                axis)

    return result


if __name__ == "__main__":
    import hlr_test
    import SOM

    ple = (20.0, 0.1)
    pa = (0.785, 0.005)
    ao = (0.785, 0.000)

    som1 = hlr_test.generate_som()
    som1.setAllAxisUnits(["microseconds"])
    som1.attr_list.instrument = SOM.ASG_Instrument()

    print "********** SOM1"
    print "* ", som1[0]
    print "* ", som1[1]

    print "********** tof_to_scalar_Q"
    print "* tof_to_scalar_Q som :", tof_to_scalar_Q(som1)
    print "* tof_to_scalar_Q so  :", tof_to_scalar_Q(som1[0],
                                                     pathlength=ple,
                                                     polar=pa)
    print "* tof_to_scalar_Q som (offset):", tof_to_scalar_Q(som1,
                                                             angle_offset=ao)
Example 25
def sum_by_rebin_frac(obj, axis_out, **kwargs):
    """
    This function uses the C{axis_manip.rebin_axis_1D_frac} function from the
    SCL to perform the rebinning. The function tracks the counts and fractional
    area from all spectra separately. The counts and fractional area are
    divided after all spectra have been parsed. 
    
    @param obj: Object to be rebinned and summed
    @type obj: C{SOM.SOM} or C{SOM.SO}
    
    @param axis_out: The axis to rebin the C{SOM} or C{SO} to
    @type axis_out: C{NessiList}

    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword configure: This is the object containing the driver configuration.
                        This will signal the function to write out the counts
                        and fractional area to files.
    @type configure: C{Configure}


    @return: Object that has been rebinned and summed according to the
             provided axis
    @rtype: C{SOM.SOM} or C{SOM.SO}


    @raise TypeError: The rebinning axis given is not a C{NessiList}
    @raise TypeError: The object being rebinned is not a C{SOM} or a C{SO}
    @raise TypeError: The dimension of the input object is not 1D
    """
    # import the helper functions
    import hlr_utils

    # set up for working through data
    try:
        axis_out.__type__
    except AttributeError:
        raise TypeError("Rebinning axis must be a NessiList!")

    o_descr = hlr_utils.get_descr(obj)

    if o_descr == "number" or o_descr == "list":
        raise TypeError("Do not know how to handle given type: %s" % \
                        o_descr)
    else:
        pass

    try:
        if obj.getDimension() != 1:
            raise TypeError("The input object must be 1D!. This one is "\
                            +"%dD." % obj.getDimension())
    except AttributeError:
        # obj is a SO
        if obj.dim() != 1:
            raise TypeError("The input object must be 1D!. This one is "\
                            +"%dD." % obj.dim())

    # Check for keywords
    try:
        config = kwargs["configure"]
    except KeyError:
        config = None

    (result, res_descr) = hlr_utils.empty_result(obj)

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    import array_manip
    import axis_manip

    len_data = len(axis_out) - 1

    counts = nessi_list.NessiList(len_data)
    counts_err2 = nessi_list.NessiList(len_data)
    frac_area = nessi_list.NessiList(len_data)
    frac_area_err2 = nessi_list.NessiList(len_data)

    for i in xrange(hlr_utils.get_length(obj)):
        axis_in = hlr_utils.get_value(obj, i, o_descr, "x", 0)
        val = hlr_utils.get_value(obj, i, o_descr)
        err2 = hlr_utils.get_err2(obj, i, o_descr)

        value = axis_manip.rebin_axis_1D_frac(axis_in, val, err2, axis_out)

        (counts, counts_err2) = array_manip.add_ncerr(counts, counts_err2,
                                                      value[0], value[1])

        (frac_area,
         frac_area_err2) = array_manip.add_ncerr(frac_area, frac_area_err2,
                                                 value[2], frac_area_err2)

    # Divide the total counts by the total fractional area
    value1 = array_manip.div_ncerr(counts, counts_err2, frac_area,
                                   frac_area_err2)
    xvals = []
    xvals.append(axis_out)

    map_so = hlr_utils.get_map_so(obj, None, 0)

    hlr_utils.result_insert(result, res_descr, value1, map_so, "all", 0, xvals)

    if config is not None:
        if o_descr == "SOM":
            import SOM
            o_som = SOM.SOM()
            o_som.copyAttributes(obj)

            so = hlr_utils.get_map_so(obj, None, 0)
            so.axis[0].val = axis_out
            so.y = counts
            so.var_y = counts_err2
            o_som.append(so)

            # Write out summed counts into file
            hlr_utils.write_file(config.output,
                                 "text/Spec",
                                 o_som,
                                 output_ext="cnt",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="summed counts")

            # Replace counts data with fractional area. The axes remain the
            # same
            o_som[0].y = frac_area
            o_som[0].var_y = frac_area_err2

            # Write out summed fractional area into file
            hlr_utils.write_file(config.output,
                                 "text/Spec",
                                 o_som,
                                 output_ext="fra",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="fractional area")

    return result
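A hedged usage sketch, reusing the hlr_test.generate_som() and hlr_utils.make_axis() helpers that the __main__ blocks in this collection rely on:

import hlr_test
import hlr_utils

som_in = hlr_test.generate_som()
axis_new = hlr_utils.make_axis(0, 5, 2.5)
print(sum_by_rebin_frac(som_in, axis_new))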
Example 26
            else:
                fixed_pixel = stripe_list[0]

            result[stripe_count].id = fixed_pixel
            stripe_count += 1

        result.attr_list["summed_ids"] = so_id_list

    return result


if __name__ == "__main__":
    import hlr_test
    import SOM

    som1 = SOM.SOM()
    so1 = hlr_test.generate_so("histogram", 0, 5, 1, 1)
    so1.id = 1
    som1.append(so1)
    so2 = hlr_test.generate_so("histogram", 0, 5, 1, 3)
    so2.id = 2
    som1.append(so2)
    so3 = hlr_test.generate_so("histogram", 0, 5, 1, 2)
    so3.id = 3
    som1.append(so3)

    axis_new = hlr_utils.make_axis(0, 5, 2.5)

    print "********** SOM1"
    print "* ", som1[0]
    print "* ", som1[1]
Example 27
dataset = data.copy()
# normalization
dataset = (dataset - dataset.min()) / (dataset.max() - dataset.min())
# initialize weights

weight = np.array([[0.61, 0.88, 0.79, 0.60], [0.59, 0.96, 0.41, 0.97],
                   [0.76, 0.15, 0.05, 0.38]])
# weight = np.around(np.random.uniform(low=0.01, high=0.99, size=(5, 4)), decimals=2)

R = 1
Alpha = 0.1
c = 0.1
Et = 18
E0 = 1
model = som.train_SOM(weight, dataset.values, Alpha, c, R, Et, E0)
klaster = som.test_SOM(model, dataset.values)
klaster = np.ravel(klaster)
sil = silhouette_score(dataset.values, klaster, metric='euclidean')
print(sil)

dataset['klaster'] = klaster
fig, ax = plt.subplots(figsize=(17, 10))
print(set(dataset['klaster'].values))
plot = ax.scatter(data['longitude'],
                  data['latitude'],
                  s=dataset['confidence'] * 100,
                  alpha=0.5,
                  c=dataset['klaster'],
                  cmap='jet')
Example 28
    # load the residential data
    data = readData.loadResidentialData()
    n_customer = data.shape[1]
    # load sum, 2 years of data
    sumLoad = np.zeros((365 * 2 * T, ))
    # sum up the load data
    for i in range(n_customer):
        customer_load = readData.getUserData(data, i)
        sumLoad += np.nan_to_num(customer_load)

    minLoad = np.min(sumLoad)
    maxLoad = np.max(sumLoad)
    sumLoad = (sumLoad - minLoad) / (maxLoad - minLoad)

    (X_train_dict, y_train_dict, X_test_dict,
     y_test_dict) = SOM.callSOM(M, N, T, n_train, n_lag, sumLoad)

    MAPE_total = 0
    RMSPE_total = 0
    days_total = 0
    for cluster in range(M * N):
        if cluster in X_train_dict:
            X_train = X_train_dict[cluster]
            y_train = y_train_dict[cluster]
            X_test = X_test_dict[cluster]
            y_test = y_test_dict[cluster]

            (MAPE, RMSPE, days) = NN_forecast(n_lag, T, X_train, y_train,
                                              X_test, y_test, maxLoad, minLoad)
            print('forecast result group 2 : MAPE: %.2f, RMSPE: %.2f' %
                  (MAPE, RMSPE))
Example 29
def sum_spectra_weighted_ave(obj, **kwargs):
    """
    This function takes a set of data and sums the individual bins by weighted
    average. That information is then assembled back into a single spectrum.
    The individual spectra should already have been rebinned.
    
    @param obj: Object containing data spectra
    @type obj: C{SOM.SOM} or C{SOM.SO}

    @param kwargs: A list of keyword arguments that the function accepts:
    
    @return: The summed spectra (one)
    @rtype: C{SOM.SOM}
    """

    if obj is None:
        return None

    # import the helper functions
    import hlr_utils

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    # Get the number of axis channels
    len_axis = len(obj[0])

    import nessi_list
    import SOM
    import utils

    # Empty SO for the final spectrum
    so = SOM.SO()

    len_som = hlr_utils.get_length(obj)

    # Slice data, calculate weighted average and repackage spectra
    for i in xrange(len_axis):

        sliced_data = nessi_list.NessiList()
        sliced_data_err2 = nessi_list.NessiList()

        for j in xrange(len_som):
            obj1 = hlr_utils.get_value(obj, j, o_descr, "all")
            if i == 0 and j == 0:
                map_so = hlr_utils.get_map_so(obj, None, j)
                hlr_utils.result_insert(so, "SO", map_so, None, "all")

            sliced_data.append(obj1.y[i])
            sliced_data_err2.append(obj1.var_y[i])

        len_fit = len(sliced_data)

        value = utils.weighted_average(sliced_data, sliced_data_err2, 0,
                                       len_fit - 1)
        so.y[i] = value[0]
        so.var_y[i] = value[1]

    hlr_utils.result_insert(result, res_descr, so, None, "all")

    return result
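A hedged usage sketch: averaging the spectra of a test SOM into a single spectrum, again via the hlr_test helper used by the __main__ blocks here:

import hlr_test

som_in = hlr_test.generate_som()
summed = sum_spectra_weighted_ave(som_in)
print(summed[0])   # the single averaged spectrum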
Example 30
    for i in xrange(hlr_utils.get_length(obj1, obj2)):
        val1 = hlr_utils.get_value(obj1, i, o1_descr, "all")
        val2 = hlr_utils.get_value(obj2, i, o2_descr, "x")

        value = common_lib.rebin_axis_1D(val1, val2)

        hlr_utils.result_insert(result, res_descr, value, None, "all")

    return result


if __name__ == "__main__":
    import hlr_test
    import SOM

    som1 = SOM.SOM()
    som1.setAllAxisUnits(["Angstroms"])
    so1 = SOM.SO()
    so1.id = 1
    so1.axis[0].val.extend(range(0, 7, 2))
    so1.y.extend(0.994, 0.943, 0.932)
    so1.var_y.extend(0.010, 0.012, 0.013)
    som1.append(so1)
    so2 = SOM.SO()
    so2.id = 2
    so2.axis[0].val.extend(range(0, 7, 2))
    so2.y.extend(0.934, 0.986, 0.957)
    so2.var_y.extend(0.011, 0.010, 0.015)
    som1.append(so2)

    som2 = hlr_test.generate_som()
Example 31
sess = tf.Session()
x = tf.placeholder("float", [None, df.shape[1]])
autoencoder = Autoencoder.create(x, [48, 24, 12])
EWMACost = 0
Autoencoder.train_AE(df=training_data, sess=sess, x=x,
                     denoising=False, verbose=False, autoencoder=autoencoder)

##########################

### Self Organizing Map ###

if not os.path.exists('./SOM_IMAGES'):
    os.makedirs('./SOM_IMAGES')

import SOM
som = SOM.trainSOM(training_data, 10, 10)

neuron_heatmap = SOM.getNeuronHeatMap(som)
neuron_heatmap = neuron_heatmap.resize((1024, 768))
neuron_heatmap.save('./SOM_IMAGES/Neuron_Heatmap.png')

from tqdm import tqdm
import numpy as np

for i in tqdm(range(100)):
    cue_distance_heatmap = SOM.getDistanceMap(som, training_data.loc[np.random.randint(0, training_data.shape[0])])
    cue_distance_heatmap = cue_distance_heatmap.resize((1024, 768))
    cue_distance_heatmap.save("./SOM_IMAGES/Cue_Distance_Heatmap_" + str(i) + ".png")

###########################