def intersectingclusters(all_clusters):
    """Build an n x n intersection matrix for the clusters' convex hulls.

    Entry [i][j] is 1 if the convex hulls of clusters i and j intersect,
    0 otherwise; the diagonal stays 0 (a cluster is never compared with
    itself).

    :param all_clusters: sequence of clusters, each acceptable to
        gps.getconvexhull.
    :return: n x n list of lists of 0/1 flags.
    """
    n = len(all_clusters)
    # Fix: compute each hull exactly once. The original recomputed hull j
    # inside the inner loop, i.e. O(n^2) hull computations instead of O(n).
    hulls = [gps.getconvexhull(cluster) for cluster in all_clusters]
    # Plain Python matrix; no need to round-trip through numpy
    # (np.zeros(...).tolist()) just to get a list of lists.
    to_merge_with = [[0] * n for _ in range(n)]
    for i in range(n):
        for j in range(n):
            if i == j:
                continue
            # Argument order (i, j) preserved from the original in case
            # dohullsintersect is not symmetric.
            to_merge_with[i][j] = 1 if gps.dohullsintersect(hulls[i], hulls[j]) else 0
    return to_merge_with
def createPolygon(cluster_points, cluster_no):
    """Emit the JavaScript that draws one cluster as a Google Maps polygon.

    Produces a ``var cluster_<n> = [...]`` vertex array (lat/lng pairs taken
    from the cluster's convex hull) followed by a ``google.maps.Polygon``
    declaration that is immediately attached to ``map``.

    :param cluster_points: points of one cluster, acceptable to
        gps.getconvexhull; each returned hull vertex is indexed as
        (lat, lng) -- vertex[0]/vertex[1].
    :param cluster_no: id used to name the generated JS variables.
    :return: the JS snippet as a string, or "" if the hull computation fails.
    """
    try:
        boundary_vertices = gps.getconvexhull(cluster_points)
    except Exception:
        # Narrowed from a bare except: so KeyboardInterrupt/SystemExit still
        # propagate; any hull failure (e.g. degenerate points) is skipped.
        print "There was an error trying to get the hull for cluster no. " + str(cluster_no) + ", skipping"
        return ""
    # Build the vertex array with one join instead of repeated += (which is
    # quadratic in the worst case).
    vertex_js = "".join("{lat:" + str(vertex[0]) + ", lng:" + str(vertex[1]) + "},\n"
                        for vertex in boundary_vertices)
    toWrite = "var cluster_" + str(cluster_no) + " = [\n" + vertex_js + "];\n\n"
    toWrite += (
        "var cluster_" + str(cluster_no) + "_c = new google.maps.Polygon({\n"
        "paths: cluster_" + str(cluster_no) + ", \n "
        "strokeColor: '#00FF00', \n"
        "strokeOpacity: 0.8, \n"
        "strokeWeight: 2, \n"
        "fillColor: '#00FF00', \n"
        "fillOpacity: 0.35\n});\ncluster_" + str(cluster_no) + "_c.setMap(map);\n\n"
    )
    return toWrite
 def get_travelling_and_stationary_clusters(self, eps_list=range(20, 51, 10), min_sample_list=[3, 5, 7]):
     '''
     Separate each participant's GPS trace into travelling and stationary
     data, DBSCAN-cluster the stationary points, and label every stationary
     cluster with the most common self-reported location context of its
     member points.

     For each data point within the participant data, distinguish between the
     travelling and non-travelling data. Once all the travelling clusters and
     non-travelling points have been extracted, perform the DBSCAN clustering
     on the non-travelling points to obtain the stationary clusters and noise
     markers.

     NOTE(review): min_sample_list uses a mutable default argument
     ([3, 5, 7]); it appears to be only read here, but a per-call binding
     (default None) would be safer.

     :param eps_list: candidate DBSCAN eps values forwarded to
         clusters.getdbscanclusters.
     :param min_sample_list: candidate DBSCAN min_samples values.
     :return: 7-tuple of (travel clusters, stationary clusters, stationary
         cluster boundaries, stationary cluster labels, noise markers,
         error-file count, stationary points).
     '''
     for data_sample in self.__participant_data:
         try:
             # Remember the self-reported location context keyed by
             # (patient, condition, session); the labelling pass below looks
             # these keys up again from each point's trailing id fields.
             self.__internal_location_info[(data_sample[SurveyConstants.PATIENT_ID],
                                            data_sample[SurveyConstants.CONDITION_ID],
                                            data_sample[SurveyConstants.SESSION_ID])] = \
                 LocationContext.LOCATION_CONTEXT_VALUES[data_sample[SurveyConstants.LOCATION_CONTEXT]]
             # data_sample[34] is presumably the raw GPS payload for this
             # sample -- TODO confirm against the survey schema.
             gps_coords_clean = pr.getcleangpsdata(data_sample[34], remove_duplicates=True,
                                                   pid=data_sample[0], cid=data_sample[1], sid=data_sample[2])
             if not gps_coords_clean:
                 # nothing usable left after cleaning; skip this sample
                 continue
         except IOError:
             # unreadable GPS file: count it and move on
             self.__error_files += 1
             continue
         # TODO: the speed limit has to be decided, are people walking also considered travelling?
         travel_result = self.find_travelling(gps_coords_clean)
         if travel_result[0]:
             # travel_clusters.append(gps_coords_clean)
             # travel_result[1]: travelling segments; travel_result[2]: the
             # remaining stationary points of a partly-travelling trace.
             if not 0 == len(travel_result[1]):
                 self.__travel_clusters.append(travel_result[1])
             if not 0 == len(travel_result[2]):
                 self.__stationary_points += travel_result[2]
         else:
             # no travelling detected: the whole trace counts as stationary
             self.__stationary_points += gps_coords_clean
     '''
     since all the stationary points are being collected for a given participants, the hull intersection functions
     never get called.
     '''
     print 'collected all points, clustering, eps_list:', eps_list, ', min_sample_list:', min_sample_list
     sc_nz = clusters.getdbscanclusters(self.__stationary_points, eps_list, min_sample_list)
     print 'done'
     if sc_nz is not None:
         # 'sc' = stationary clusters, 'nz' = noise points; an empty result
         # leaves the corresponding attribute at its prior value
         if not ([] == sc_nz['sc']):
             self.__stationary_clusters = sc_nz['sc']
         if not ([] == sc_nz['nz']):
             self.__noise_markers = sc_nz['nz']
     print 'stationary clusters: ' + str(len(self.__stationary_clusters)) + ', travel clusters: ' + str(
         len(self.__travel_clusters))
     # Labelling pass: majority location context per stationary cluster.
     x = 0
     for cluster_points in self.__stationary_clusters:
         x += 1
         try:
             boundary_points = gps.getconvexhull(cluster_points)
         except:
             # NOTE(review): bare except; also the boundaries list is NOT
             # appended on failure while the label list is, so boundaries
             # and labels can fall out of step with the cluster index.
             print 'Error getting the convex hull of the cluster. cluster #', x
             self.__stationary_cluster_label.append("Error,C-"+str(x))
             continue
         self.__stationary_cluster_boundaries.append(boundary_points)
         cluster_point_types = []
         # the last three elements of each point appear to be the
         # (pid, cid, sid) key recorded into __internal_location_info above
         for cluster_point in cluster_points:
             cluster_point_types.append(self.__internal_location_info[(cluster_point[-3],
                                                                       cluster_point[-2],
                                                                       cluster_point[-1])])
         # majority vote over the member points' location contexts
         label_counts = collections.Counter(cluster_point_types)
         most_common_label = label_counts.most_common(1)
         self.__stationary_cluster_label.append(most_common_label[0][0])
     return self.__travel_clusters, self.__stationary_clusters, self.__stationary_cluster_boundaries, \
            self.__stationary_cluster_label, self.__noise_markers, self.__error_files, self.__stationary_points