Example #1
 def calc_features(self):
     all_features = {
         name: func(self)
         for name, func in feature_funcs.feature_dict()
     }
     # all_features will also contain intermediate values that are only used
     # for calculations, so keep just the features that were actually decorated
     self.features = {
         k: v
         for (k, v) in all_features.items() if k in feature_funcs.labels()
     }
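The feature_funcs module used above is not shown in these examples. A minimal sketch of what such a registry might look like, assuming a decorator records every feature function plus a separate list of the names that end up in the final vector (the helper/feature decorator names below are made up for illustration):

# hypothetical feature_funcs module -- not the original project's code
_funcs = {}      # every registered function, including intermediate helpers
_labels = []     # names of the features that appear in the final vector

def helper(func):
    # result is only consumed by other feature functions
    _funcs[func.__name__] = func
    return func

def feature(func):
    # result becomes an entry in the feature vector
    _funcs[func.__name__] = func
    _labels.append(func.__name__)
    return func

def feature_dict():
    # (name, func) pairs, matching the unpacking in calc_features above
    return list(_funcs.items())

def labels():
    return list(_labels)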
def classify_all(src, contour_list, train=True):

    print "Classifying..."
    image = Sar_Image(src)

    print "number of contours:", len(contour_list)
    potential_spill_regions = [Region(x) for x in contour_list if cv2.contourArea(x) > MINSIZE]
    print "potential spill regions:", len(potential_spill_regions)


    data_dir = "data"
    region_name_base = "region"
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    for num, region in enumerate(potential_spill_regions):
#        print "setting up neighborhood matrix..."
        region.setup_mat(image)
        if train:
            region.save(data_dir, region_name_base + "_" + str(num))
        else:
            region.calc_features()
#        print "cleaning up region data"
        region.cleanup()

    if train:
        print "chipping done"
        sys.exit()

    print "pickling feature vectors"
    # maybe export feature vector so that it doesn't need to be recalculated while testing classifiers
    with open("region_vectors.pkl",'w') as pklfile:
        pickle.dump(potential_spill_regions,pklfile)

    centroids, labels = classify(potential_spill_regions,8)
    print [x for x in feature_funcs.labels()]
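As the pickling comment suggests, the regions can be reloaded later so their features don't have to be recomputed while experimenting with classifiers. A small sketch of that reload step, assuming the Region class (and feature_funcs) are importable when unpickling:

import pickle

# reload the regions whose features were computed and pickled above
with open("region_vectors.pkl", 'rb') as pklfile:
    potential_spill_regions = pickle.load(pklfile)
# these can now be passed straight to classify() without re-running calc_features()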
Example #3
 def to_vector(self):
     return [self.features[L] for L in feature_funcs.labels()]
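Because to_vector walks feature_funcs.labels() in a fixed order, every region yields a vector whose columns line up. A short sketch of stacking those vectors into a matrix, where regions is a hypothetical list of Region objects on which calc_features has already been called:

import numpy as np

# rows correspond to regions, columns to feature_funcs.labels()
feature_matrix = np.array([region.to_vector() for region in regions])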
Example #4
def classify_all(src, contour_list, train=True):

    print "Classifying..."
    image = Sar_Image(src)

    print "number of contours:", len(contour_list)
    potential_spill_regions = [
        Region(x) for x in contour_list if cv2.contourArea(x) > MINSIZE
    ]
    print "potential spill regions:", len(potential_spill_regions)

    data_dir = "data"
    region_name_base = "region"
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    for num, region in enumerate(potential_spill_regions):
#        print "setting up neighborhood matrix..."
        region.setup_mat(image)
        if train:
            region.save(data_dir, region_name_base + "_" + str(num))
        else:
            region.calc_features()
#        print "cleaning up region data"
        region.cleanup()

    if train:
        print "chipping done"
        sys.exit()

    print "pickling feature vectors"
    # maybe export feature vector so that it doesn't need to be recalculated while testing classifiers
    with open("region_vectors.pkl", 'w') as pklfile:
        pickle.dump(potential_spill_regions, pklfile)

    centroids, labels = classify(potential_spill_regions, 8)
    print [x for x in feature_funcs.labels()]
    print "centroids:"
    for c in centroids:
        print c

    result_image = cv2.cvtColor(fit_to_8bit(image.to_array()),
                                cv2.cv.CV_GRAY2RGB)

    print "Saving result image..."

    result_loc = "detections.jpg"
    colors = [get_rand_color(0, 255) for x in xrange(max(labels) + 1)]

    for label, contour in zip(labels,
                              [x.coords for x in potential_spill_regions]):
        cv2.drawContours(result_image, [contour], -1, colors[label], 3)


#    # outline oil in red and false alarms in green
#    cv2.drawContours(result_image,oil,-1,(0,255,0),3)
#    cv2.drawContours(result_image,false_alarms,-1,(0,0,255),3)
#    try:
#        assert len(entropy_list) == len(potential_oil_spills)
#    except:
#        print len(entropy_list),len(potential_oil_spills),"are not equal!"
#    for i,e in enumerate(entropy_list):
#        M = cv2.moments(potential_oil_spills[i])
#        if M['m00'] == 0:
#            print "spill has no area..."
#            print cv2.contourArea(potential_oil_spills[i])
#        else:
#            x,y = (int(M['m10']/M['m00']),int(M['m01']/M['m00']))
#            cv2.putText(result_image,str(round(e,3)),(x,y),cv2.FONT_HERSHEY_PLAIN,1,(255,255,255))

    # save the result image for visual inspection
    cv2.imwrite(result_loc, result_image)

    print "File is:", result_loc

    # TODO: return the list of contours identified as oil (currently returns None)
    return None
 def calc_features(self):
     all_features = {name: func(self) for name, func in feature_funcs.feature_dict()}
     # all_features will also contain intermediate values that are only used
     # for calculations, so keep just the features that were actually decorated
     self.features = {k: v for (k, v) in all_features.items() if k in feature_funcs.labels()}
 def to_vector(self):
     return [self.features[L] for L in feature_funcs.labels()]
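The classify() function called in classify_all is not included in these snippets; it takes the regions plus k=8 and returns (centroids, labels), so a clustering step over the per-region feature vectors seems likely. The sketch below shows one plausible shape for it using SciPy's k-means, not the original project's implementation:

import numpy as np
from scipy.cluster.vq import whiten, kmeans2

def classify(regions, k):
    # one row of features per region, in feature_funcs.labels() order
    data = np.array([r.to_vector() for r in regions], dtype=np.float64)
    # scale each feature to unit variance so no single feature dominates the distance
    data = whiten(data)
    # labels[i] is the cluster index assigned to regions[i]
    centroids, labels = kmeans2(data, k, minit='points')
    return centroids, labels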