Example #1
    def load_from_file(load_filename, memory_size=10000):
        """Loads and returns a GroupStore object from file."""

        store = GroupStore(load_filename, memory_size)

        file = open(load_filename, "r")

        i = 0
        for line in file:
            input = line.split(",", 1)
            modularity = float(input[0])
            grouping = Grouping.create_from_string(input[1], None, modularity)
            store.groups[i] = grouping
            i = i + 1

            if not (i < memory_size):
                i = 0
                store.update_stats()

        # Clear the buffer...
        for j in xrange(i, memory_size):
            store.groups[j] = None

        store.update_stats()

        # Now clear the rest...
        for j in xrange(0, i):
            store.groups[j] = None

        file.close()

        return store
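A note on the parsing above: line.split(",", 1) splits on the first comma only, so the modularity field comes off cleanly while the rest of the line, whatever format Grouping.create_from_string() accepts, may itself contain commas. A minimal, self-contained illustration (the grouping string below is made up):

# The maxsplit=1 matters: only the first comma separates the modularity
# from the grouping string, which may contain commas of its own.
line = "0.357,[1, 1, 1, 0, 0, 0]\n"
modularity_str, grouping_str = line.split(",", 1)
print(float(modularity_str))    # 0.357
print(grouping_str.strip())     # [1, 1, 1, 0, 0, 0]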
Example #2
    def _get_msg_group(self):
        msg_group = []
        dataset_list = ['eth', 'eth', 'ucy', 'ucy', 'ucy']
        dataset_idx_list = [0, 1, 0, 1, 2]
        #dataset_list = ['eth']
        #dataset_idx_list = [0]

        for dataset, dataset_idx in zip(dataset_list, dataset_idx_list):
            msg = Message()
            data = DataLoader(dataset, dataset_idx, self.fps)
            msg = data.update_message(msg)
            gp = Grouping(msg, self.history)
            msg = gp.update_message(msg)
            msg_group.append(msg)

        return msg_group
Example #3
def startHenshu(basepath):
    func = {
        'G1': getInfoImage,
        'G2': getInfoZip,
        'G3': getInfoZip,
        'G4': getInfoZip,
        'G5': getInfoZip,
        'G6': getInfoZip
    }

    tmpDic = dict()
    for (path, dirs, filenames) in os.walk(basepath):

        tmpList = list()
        for filename in filenames:
            # Extract the extension; the [1:] drops the leading '.'
            fext = os.path.splitext(filename)[-1][1:].lower()

            vDic = dict()
            fullpath = os.path.join(path, filename)
            dirname = path.split('\\')[-1]

            # Classification looks at the extension, the file name (keyword
            # matches) and the folder name (keyword matches): ext is the
            # extension-based class, cat the keyword-based class.
            # File name with the extension stripped:
            fn = os.path.splitext(filename)[0]
            ext, cat = grp.getKey(fext, fn, dirname)

            # If both evaluate to the catch-all classes, the file is skipped.
            if ext == 'G6' and cat == 'C5':
                continue
            try:
                vDic = func[ext](fullpath)
            except Exception:
                # Keep going even if one file fails; just log it.
                log.printlog("ERR:{}".format(fullpath))
                continue
            else:
                # Normal case: aggregate the result
                vDic['filename'] = filename
                vDic['ext'], vDic['cat'] = ext, cat
                tmpList.append(vDic)

        if tmpList:
            tmpDic[dirname] = tmpList
            log.printlog(str(dirname))
    # print(tmpDic)
    with io.open('./fileCollection/result.txt', 'w', encoding='utf-8') as f:
        json.dump(tmpDic, f, ensure_ascii=False, indent=1)

    log.printlog("********Henshu complete*******")
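At its core, startHenshu is a dispatch table keyed by the extension class plus a try/except/else that logs a failing file and moves on. A stripped-down sketch of that pattern, with hypothetical stand-ins for the getInfo* handlers and for the codes returned by grp.getKey():

# Minimal sketch of the dispatch-and-aggregate pattern above; the handler
# functions and the 'G1'/'G2' keys are illustrative stand-ins only.
import os

def get_info_image(path):
    return {'bytes': os.path.getsize(path)}

def get_info_zip(path):
    return {'bytes': os.path.getsize(path)}

handlers = {'G1': get_info_image, 'G2': get_info_zip}

def collect(ext_code, fullpath, results):
    try:
        info = handlers[ext_code](fullpath)
    except Exception as err:
        print("ERR:{} ({})".format(fullpath, err))  # log and keep going
    else:
        info['filename'] = os.path.basename(fullpath)
        results.append(info)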
Example #4
def run_test():
    ######################################################
    #
    # Runs a couple sanity checks on the network:
    #
    #    0      0
    #    |\    /|
    #    | 0--0 |
    #    |/    \|
    #    0      0
    #
    # with the obvious correct grouping followed
    # by a slightly non-optimal grouping.
    #
    # Results should be Q1 ~ .357, Q2 ~ .122
    #
    ######################################################

    network = create_test_network()

    print "A = ", network

    group1 = Grouping.create_from_vector([1, 1, 1, 0, 0, 0], network)
    print "S1 = ", group1

    group2 = Grouping.create_from_vector([1, 1, 1, 1, 0, 0], network)
    print "S2 = ", group2

    group3 = Grouping.create_from_vector([1, 1, 0, 1, 0, 0], network)
    print "S3 = ", group3

    Q1 = group1.get_modularity()
    Q2 = group2.get_modularity()
    Q3 = group3.get_modularity()

    print "Modularities:"

    print "Q_1 = ", Q1
    print "Q_2 = ", Q2
    print "Q_3 = ", Q3

    if abs(Q1 - 0.357) < 0.001 and abs(Q2 - 0.122) < 0.001:
        print "Test passed!"
    else:
        print "*** Test Failed! ***"
        return False

    print "Trying point patch method..."

    pointpatch = PointPatch(3, 0, 1)

    Q4 = pointpatch.get_modularity(group1, network)

    print "Q_4 = ", Q4

    print group1.get_nodes()
    print group1.get_groups()

    pointpatch.modify(group1)

    print group1.get_nodes()
    print group1.get_groups()

    Q5 = group1.calculate_modularity(network)

    print "Q_5 = ", Q5

    if abs(Q5 - Q2) < 0.00001:
        print "Test passed!"
    else:
        print "*** Test Failed! ***"
        return False

    return True
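The expected values in the header comment (Q1 ~ .357, Q2 ~ .122) can be cross-checked without the Grouping class at all, assuming the test network is the two triangles joined by a single edge shown in the ASCII sketch. A standalone NumPy check of Newman's modularity:

# Cross-check of the expected modularities for the two-triangle network.
# Q = (1/2m) * sum_ij (A_ij - k_i*k_j/(2m)) * delta(c_i, c_j)
import numpy as np

def modularity(A, labels):
    k = A.sum(axis=1)                        # node degrees
    two_m = k.sum()                          # 2m = sum of degrees
    same_group = np.equal.outer(labels, labels)
    return ((A - np.outer(k, k) / two_m) * same_group).sum() / two_m

edges = [(0, 1), (0, 2), (1, 2), (2, 3), (3, 4), (3, 5), (4, 5)]
A = np.zeros((6, 6))
for i, j in edges:
    A[i, j] = A[j, i] = 1.0

print(modularity(A, np.array([1, 1, 1, 0, 0, 0])))   # ~0.357
print(modularity(A, np.array([1, 1, 1, 1, 0, 0])))   # ~0.122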
Example #5
    def create_graphml_file(self, filename, grouping=None):
        """Creates a graphml file that can be used to generate a visualization
           of the network using the optional partition to color the nodes."""

        file = open(filename + ".graphml", "w")

        if grouping is None:
            grouping = Grouping.create_from_vector([0] * self.number_of_nodes)

        n = self.number_of_nodes
        m = self.number_of_edges

        nodes = grouping.get_nodes()
        non_empty_groups = grouping.get_non_empty_groups()
        c = len(non_empty_groups)
        A = self.adjacency_matrix

        print n, m, c, len(nodes), len(colors)

        # set palettes...
        if c <= len(colors):
            if c == 1:
                palette = ["#FFFFFF"]
            else:
                palette = graphml_colors
        else:
            palette = map(lambda x: "gray" + str(x), range(0, 100))

        # Preamble stuff...
        file.write(
            '<?xml version="1.0" encoding="UTF-8" standalone="no"?>\n<graphml xmlns="http://graphml.graphdrawing.org/xmlns/graphml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns/graphml http://www.yworks.com/xml/schema/graphml/1.0/ygraphml.xsd">\n<key for="node" id="d0" yfiles.type="nodegraphics"/>\n<key attr.name="description" attr.type="string" for="node" id="d1"/>\n<key for="edge" id="d2" yfiles.type="edgegraphics"/>\n<key attr.name="description" attr.type="string" for="edge" id="d3"/>\n<key for="graphml" id="d4" yfiles.type="resources"/>\n'
        )

        # Graph header

        file.write(
            """<graph edgedefault="directed" id="G" parse.edges="%d" parse.nodes="%d" parse.order="free">\n""" % (m, n)
        )

        # Now we write the nodes...
        for i in xrange(0, n):
            file.write(
                """<node id="n%d">
      <data key="d0">
        <y:ShapeNode>
          <y:Geometry height="30.0" width="30.0" x="80.56138057743618" y="0.0"/>
          <y:Fill color="%s" transparent="false"/>
          <y:BorderStyle color="#000000" type="line" width="1.0"/>
          <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="18.701171875" modelName="internal" modelPosition="c" textColor="#000000" visible="true" width="33.0" x="-1.5" y="5.6494140625">%d</y:NodeLabel>
          <y:Shape type="ellipse"/>
        </y:ShapeNode>
      </data>
      <data key="d1"/>
    </node>\n"""
                % (i, palette[non_empty_groups.index(nodes[i])], i)
            )

        current_edge = 0
        # Now we do the edges...
        for i in range(0, n):
            for j in range(0, i + 1):
                if A[i, j] == 1:
                    file.write(
                        """<edge id="e%d" source="n%d" target="n%d">
      <data key="d2">
        <y:PolyLineEdge>
          <y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="none"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
      <data key="d3"/>
    </edge>\n"""
                        % (current_edge, i, j)
                    )
                    current_edge += 1

        # close off tags...
        file.write(
            """ </graph>
  <data key="d4">
    <y:Resources/>
  </data>
</graphml>"""
        )
        file.close()
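As a usage note: this is a method on the network object (it reads self.number_of_nodes and self.adjacency_matrix), and the output uses the yWorks GraphML dialect, so it should open directly in yEd. A sketch of a call, with placeholder names:

# Hypothetical call: writes my_network.graphml with nodes colored by group.
network = create_test_network()
group1 = Grouping.create_from_vector([1, 1, 1, 0, 0, 0], network)
network.create_graphml_file("my_network", grouping=group1)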
Example #6
File: driver.py  Project: uhasan1/stokastik
print "Reading data file..."

df = pd.read_csv(file_path, sep=",", encoding='utf-8')
df['attr_val_pairs'] = df['attr_val_pairs'].apply(lambda x: json.loads(x))
df['variant_criterias'] = df['variant_criterias'].apply(
    lambda x: json.loads(x))

print "Reading product data..."
product_data = list(df.itertuples(index=False))

print "Getting items..."
items = utils.get_unique_items_pt(product_data, item_type)

print "Creating groups..."
grp_instance = Grouping(items)
grp_instance.init_groups()

print "Cluster entropy score = ", grp_instance.get_clustering_scores()

groups1 = grp_instance.auto_groups if group_type == 'auto' else grp_instance.true_groups

print "Reading excluded attribute list..."
with open('excluded_attr_list.txt', 'rb') as x_attr:
    excluded_attrs = x_attr.readlines()

excluded_attrs = [x.strip() for x in excluded_attrs]
excluded_attrs = set(excluded_attrs)

print "Getting variants..."
get_variants(items, groups1, excluded_attrs)
Example #7
dataset = 'eth'           # e.g. 'eth' or 'ucy', as in the _get_msg_group example above
dataset_idx = 0
history = 16
frame_idx = 500
group_idx = 20

# Initialize a message
msg = Message()

# Initialize dataloader
data = DataLoader(dataset, dataset_idx)

# Update the message
msg = data.update_message(msg)

# Initialize grouping
gp = Grouping(msg, history)

# Update the message
msg = gp.update_message(msg)
# This shows what group ids are in this frame
print(msg.video_labels_matrix[frame_idx])

# Initialize group shape generation
gs_gen = GroupShapeGeneration(msg)
vertices, pedidx = gs_gen.generate_group_shape(frame_idx, group_idx)
# The returned vertices for the group shape
print(vertices)
print(pedidx)

# We can also draw it on an image (blank canvas in this case)
canvas = np.zeros((msg.frame_height, msg.frame_width, 3), dtype=np.uint8)
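The snippet stops right after allocating the canvas. Modelled on the DrawGroupShape calls in the predict() example (Example #8 below), the missing drawing step might look roughly like this; the keyword arguments are assumptions, since set_center()/set_aug() are not configured here:

# Hedged sketch of the drawing step, based on the DrawGroupShape usage in
# Example #8; the center/aug flags are assumed, not taken from the original.
dgs = DrawGroupShape(msg)
img = dgs.draw_group_shape(vertices, canvas, center=False, aug=False)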
Example #8
    def predict(self, positions, velocities):
        params = self._load_parameters()
        
        position_array = []
        velocity_array = []
        num_people = len(positions)

        if num_people == 0:
            raise ValueError('predict() needs at least one person trajectory')

        seq_length = len(positions[0])
        pred_seq_length = 8
        #gp = Grouping(self.msg, seq_length)
        #self.msg = gp.update_message(self.msg)
        for i in range(num_people):
            position_array.append(positions[i][-1])
            velocity_array.append(velocities[i][-1])

        # Group everyone once, using the most recent position and velocity.
        labels = Grouping.grouping(position_array, velocity_array, params)

        all_labels = np.unique(labels)
        num_groups = len(all_labels)
        all_pred_img_sequences = []
        #gsg = GroupShapeGeneration(self.msg)
        for curr_label in all_labels:
            group_positions = []
            group_velocities = []
            for i, l in enumerate(labels):
                if l == curr_label:
                    group_positions.append(positions[i])
                    group_velocities.append(velocities[i])
            
            vertice_sequence = []
            for i in range(seq_length):
                frame_positions = []
                frame_velocities = []
                for j in range(len(group_positions)):
                    frame_positions.append(group_positions[j][i])
                    frame_velocities.append(group_velocities[j][i])
                vertices = GroupShapeGeneration.draw_social_shapes(frame_positions, 
                                                                   frame_velocities)
                vertice_sequence.append(vertices)

            dgs = DrawGroupShape(self.msg)
            dgs.set_center(vertice_sequence)
            dgs.set_aug(angle=0)
            img_sequence = []
            for i, v in enumerate(vertice_sequence):
                canvas = np.zeros((self.msg.frame_height, self.msg.frame_width, 3), dtype=np.uint8)
                img = dgs.draw_group_shape(v, canvas, center=True, aug=False)
                img_sequence.append(img)

            pimg = ProcessImage(self.msg, img_sequence)
            for i, img in enumerate(img_sequence):
                img_sequence[i] = pimg.process_image(img, debug=False) 

            pred_img_sequence = self._predict_sequence(img_sequence, pred_seq_length)

            group_pred_img_sequence = []
            for i, img in enumerate(pred_img_sequence):
                #img = np.round(np.repeat(img, 3, axis=2)) * 255
                img = np.round(np.repeat(img, 3, axis=2))
                pred_img = pimg.reverse_process_image(img, debug=True)
                pred_img = dgs.reverse_move_center_img(pred_img)
                group_pred_img_sequence.append(pred_img[:, :, 0])
            all_pred_img_sequences.append(group_pred_img_sequence)

        fnl_pred_img_sequence = []
        for i in range(pred_seq_length):
            canvas = np.zeros((self.msg.frame_height, self.msg.frame_width), dtype=np.uint8)
            for j in range(num_groups):
                img = all_pred_img_sequences[j][i]
                #img = np.round(img)
                canvas += img
            fnl_pred_img_sequence.append(np.clip(canvas, 0, 1))

        return fnl_pred_img_sequence
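For orientation, the indexing above implies one trajectory per person, all of equal length, with the last element treated as the current state. A purely illustrative input layout (the 2-D coordinates and the owning object's name are assumptions, since the enclosing class is not shown):

# Illustrative inputs for predict(); values and dimensionality are assumed.
positions = [
    [(0.0, 0.0), (0.1, 0.0), (0.2, 0.0)],   # person 0, one point per frame
    [(1.0, 0.5), (1.0, 0.6), (1.0, 0.7)],   # person 1
]
velocities = [
    [(0.1, 0.0), (0.1, 0.0), (0.1, 0.0)],
    [(0.0, 0.1), (0.0, 0.1), (0.0, 0.1)],
]
# predicted = predictor.predict(positions, velocities)   # 'predictor' is hypothetical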