def demo(sess, net, image_name):
    """Detect object classes in an image using pre-computed object proposals.

    Runs the detector on <image_name>.jpg, applies per-class NMS, draws the
    surviving detections plus the ground-truth boxes from <image_name>.xml,
    and shows the result in an OpenCV window (blocks until a key is pressed).
    """
    # Load the demo image.
    # NOTE(review): hard-coded dataset path — consider cfg.DATA_DIR instead.
    im_file = '/home/makalo/workspace/code/kaggle/Faster-RCNN_TF/data/VOCdevkit2007/VOC2007/JPEGImages/' + image_name + '.jpg'
    img = cv2.imread(im_file)
    im = np.copy(img)

    # Detect all object classes and regress object bounds.
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(sess, net, im)
    timer.toc()
    # BUG FIX: the original Python-2-style code called .format() on the
    # return value of print() (None), raising AttributeError on Python 3.
    print('Detection took {:.3f}s for '
          '{:d} object proposals'.format(timer.total_time, boxes.shape[0]))

    CONF_THRESH = 0.8
    NMS_THRESH = 0.3
    for cls_ind, cls in enumerate(CLASSES[1:]):
        cls_ind += 1  # because we skipped background
        # Each class owns a 4-column slice of the box regression output.
        cls_boxes = boxes[:, 4 * cls_ind:4 * (cls_ind + 1)]
        cls_scores = scores[:, cls_ind]
        dets = np.hstack(
            (cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
        keep = nms(dets, NMS_THRESH)
        dets = dets[keep, :]
        im = vis_detections_cv(im, cls, dets, image_name, thresh=CONF_THRESH)

    # Overlay the ground-truth boxes (red, BGR (0,0,255)) from the XML.
    for bbox in read_xml(image_name + '.xml'):
        cv2.rectangle(im, (bbox[0], bbox[1]), (bbox[2], bbox[3]),
                      (0, 0, 255), 3)
    cv2.imshow('img', im)
    cv2.waitKey(0)
def vis_detections(im, class_name, dets, ax, image_name, thresh=0.5):
    """Draw detected bounding boxes."""
    # Keep only detections whose score column clears the threshold.
    keep = np.where(dets[:, -1] >= thresh)[0]
    if len(keep) == 0:
        return

    for idx in keep:
        x1, y1, x2, y2 = dets[idx, :4]
        conf = dets[idx, -1]
        ax.add_patch(
            plt.Rectangle((x1, y1), x2 - x1, y2 - y1,
                          fill=False, edgecolor='red', linewidth=3.5))
        ax.text(x1, y1 - 2,
                '{:s} {:.3f}'.format(class_name, conf),
                bbox=dict(facecolor='red', alpha=0.5),
                fontsize=14, color='white')

    ax.set_title(('{} detections with '
                  'p({} | box) >= {:.1f}').format(class_name, class_name,
                                                  thresh), fontsize=14)

    # Ground-truth boxes from the annotation XML, drawn in blue.
    for gt in read_xml(image_name + '.xml'):
        ax.add_patch(
            plt.Rectangle((gt[0], gt[1]), gt[2] - gt[0], gt[3] - gt[1],
                          fill=False, edgecolor='blue', linewidth=3.5))

    plt.axis('off')
    plt.tight_layout()
    plt.draw()
def sent_sentiment(sent):
    """Predict the aspect category and sentiment polarity of one sentence.

    Returns a human-readable summary string combining both predictions.
    """
    # --- category prediction: dependency parse -> aspect counts -> classifier
    dep = sent_to_dep(sent)
    counts = nltk.FreqDist(extractor(dep))
    vectorizer = load(open('data/vectorizer.pkl', 'rb'))
    features = vectorizer.transform(counts)
    category_clf = load(open('clf.jbl', 'rb'))
    category = category_clf.predict(features)

    # --- sentiment prediction: rebuild the POS vocabulary from the training
    # corpus, featurize the sentence, and run the stored Bayes model.
    f_path = "./data/Restaurants_Train_v2.xml"
    reviews = read_xml(f_path, single_cate=True, print_len=False)
    voc = Vocabularies(reviews)
    POS = voc.getPOS()
    feature = Features(data=sent, vocabulary=POS, neg=False, file=True)
    feature_set = feature.binary()
    senti_clf = load(open('classifiers/bayes-m-POS_Binary.jbl', 'rb'))
    sentiment = senti_clf.predict(feature_set)

    return ("This sentence is expressing a " + str(list(sentiment)[0])
            + " attitude about " + str(list(category)[0]))
"""Extract nodule center points from LIDC XML annotation files into a CSV."""
import sys
import csv
# BUG FIX: `listdir` was called below but never imported (only sys/csv were),
# which raises NameError at runtime.
from os import listdir
#import pdb

import read_xml

# Paths may be supplied on the command line; otherwise fall back to defaults.
if (len(sys.argv) == 3):
    path = sys.argv[1]
    outfile = sys.argv[2]
else:
    path = "/media/mario/DATAPART1/LIDC_xml"
    outfile = "/home/mario/Documents/LIDC_resample_ROIs/Calculate_Centerpoints/output/centers.csv"
    print("Using", path, "and", outfile,
          ". optional: path as arg1, output as arg2")

outwriter = csv.writer(open(outfile, 'w', newline=''))

files = listdir(path)
for file in files:
    if (file.find(".xml") == -1):  # if some weird files sneak in, skip them
        continue
    # Each XML yields a list of center-point tuples; one CSV row per tuple.
    centers_list = read_xml.read_xml((path + "/" + file))
    for tup in centers_list:
        outwriter.writerow(tup)
# NOTE(review): this chunk appears to start *inside* an
# `if not os.path.isdir(_SAVE_ANNO):` block defined earlier in the file —
# the first two statements create the annotation output directory.
print("Make Dir: ",_SAVE_ANNO)
os.makedirs(_SAVE_ANNO)
if not os.path.isdir(_SAVE_IMAGE):
    print("Make Dir: ",_SAVE_IMAGE)
    os.makedirs(_SAVE_IMAGE)

# Source images whose filename contains '_' are skipped — presumably they
# are already-augmented outputs; TODO confirm against the naming scheme.
list_images = [f for f in os.listdir(PATH_IMAGE) if '_' not in f]
# Augmentations applied per image: three rotations plus a flip.
angle=[90,180,270,'Flip']

for image in tqdm(list_images):
    root_name=image[:-4]  # strip the 4-character extension (e.g. '.jpg')
    _image=os.path.join(PATH_IMAGE,image)
    _anno=os.path.join(PATH_ANNO,root_name+'.xml')
    #read and save image
    r_img=cv2.imread(_image)
    data_anno = read_xml(_anno)
    r_bbox=[]
    r_obj_name=[]
    # Images with no annotated object get a single dummy zero box; how
    # r_bbox is filled for annotated images is not visible in this chunk.
    if 'object' not in data_anno:
        r_bbox=[[0,0,0,0]]
    for idx,ang in enumerate(angle):
        if ang =='Flip':
            temp_img,temp_bbox=Flip_image(r_img.copy(),r_bbox.copy())
        else:
            temp_img,temp_bbox=Rotate_image(ang,r_img.copy(),r_bbox.copy())
        _name= root_name+'_'+str(ang)
        # NOTE(review): Writer receives r_img's shape, not temp_img's —
        # verify this is intended for 90/270-degree rotations.
        writer = Writer(os.path.join(_SAVE_IMAGE,_name+'.jpg'), r_img.shape[0], r_img.shape[1])
        # for idx_1,box in enumerate(temp_bbox):
        #     __label = r_obj_name[idx_1]
# Poll a list of cameras described in an XML file and log device information.
from camera import Camera
import configparser
from DeviceInfo import deviceInfo
from read_xml import read_xml
from Time import deviceTime
from Video import deviceVideo
import time

if __name__ == '__main__':
    start_time = time.time()
    # Runtime settings (input XML path, log path, ...) come from config.ini.
    config_ini = configparser.ConfigParser()
    config_ini.read('config.ini')
    #print(config_ini['DEFAULT']['program_mode'])
    # Each parsed entry describes one camera: ip / port / login / password.
    dict_list = read_xml(config_ini['DEFAULT']['read_file'])
    camerasArray = []
    for index in range(len(dict_list)):
        camerasArray.append(
            Camera(ip=dict_list[index]['ip'],
                   port=int(dict_list[index]['port']),
                   login=dict_list[index]['login'],
                   password=dict_list[index]['password']))
    #result = open(config_ini['DEFAULT']['path_to_save_file'], "w", encoding='utf8')
    log = open(config_ini['DEFAULT']['log_file'], 'w')
    # Tab-separated header row; NOTE(review): this write() call is left open
    # here because the string literal continues past this visible chunk.
    log.write(
        "ip\tstatus_code_deviceInfo\tdeviceName\tdeviceID\tmodel\tserialNumber\tfirmwareVersion\t"
        # NOTE(review): this chunk begins inside a parser.add_argument(...)
        # call of a Parser() function whose `def` lies before this view.
        help='train and test the model')
    # parser.add_argument("--run", dest='run', nargs=2,
    #                     metavar=('model_name', 'file_name'),
    #                     help='run the chosen model on text file')
    args = parser.parse_args()
    return args


# Call the Parser function
args = Parser()

if args.train_test:
    # Import restaurant data
    # training data
    f_path = "./data/Restaurants_Train_v2.xml"
    train = read_xml(f_path, print_len=False, single_cate=True)
    # The third field of each review tuple is its gold category label.
    train_target = [s[2] for s in train]
    # testing data
    f_path = "./data/Restaurants_Test_Gold.xml"
    test = read_xml(f_path, print_len=False, single_cate=True)
    test_target = [s[2] for s in test]

    # Generate vocabularies for creating features for models
    # Use training data
    voc = Vocabularies(train)
    Raw = voc.getRaw()
    Raw_neg = voc.getRaw_neg()
    POS = voc.getPOS()
    POS_neg = voc.getPOS_neg()
    POS_DT = voc.getPOSnDT()
"""Solve a structure defined in an XML file and time the solver."""
from read_xml import read_xml
# BUG FIX: time.clock() was deprecated in Python 3.3 and removed in 3.8;
# perf_counter() is the documented replacement for interval timing.
from time import perf_counter

# create a structure reading the input data
struct = read_xml("xml_files/2_3y.xml")

t1 = perf_counter()
# solve it
struct.solve()
t2 = perf_counter()

print("Total time running struct.solve: %s seconds" % str(t2 - t1))
# output the solution
print(struct)
from joblib import dump, load
# import pickle
import nltk
import time
import random
import argparse
from read_xml import read_xml
from stanfordNLP import sent_to_dep
from aspect_extractor import extractor

# Load the restaurant-review corpora, keeping only reviews that carry a
# single aspect category (single_cate=True).
f_path = "./data/Restaurants_Train_v2.xml"
reviews = read_xml(f_path, single_cate=True, print_len=False)

test_path = './data/Restaurants_Test_Gold.xml'
test_reviews = read_xml(test_path, single_cate=True, print_len=False)


def p_args():
    """Parse the boolean command-line flags selecting the stage to run."""
    parser = argparse.ArgumentParser()
    for flag in ('--extract', '--train', '--test'):
        parser.add_argument(flag, action='store_true')
    return parser.parse_args()


args = p_args()
# Open a connection to every non-local controller; abort on any failure.
for c in my_config.ctrl_list:
    if not c.is_local:
        if not c.open_conn(my_config.port):
            cleanexit()
print(
    "\nAll controllers listed in the director's controller list responded and are connected"
)

# Play the configured song list numoftimes times.
loopcount = 0
while loopcount < my_config.numoftimes:
    for songname in my_config.songlist:
        print("\n***********************************")
        print("Playing: ", songname)
        print("***********************************\n")
        try:
            xml_file = read_xml.read_xml(songname, my_config.ctrl_list)
        # BUG FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; `except Exception` keeps Ctrl-C working.
        except Exception:
            print("Error reading ", songname, " xml file")
            cleanexit()
        if xml_file.error:
            cleanexit()
        # Verify all controllers match director's *_Network.xml file
        # Also each controller in *_Network.xml is marked as available
        # in my_config.ctrl_list
        if not dir_module.attach_ctrls(xml_file.dir_ctrl_list, my_config):
            cleanexit()
        for c in my_config.ctrl_list:  # calculate slicesize from director:controllers_list
            if c.available:
                my_config.slicesize += c.tot_num_ch
except OSError: print("Error reading config.xml") cleanexit() if not my_config.is_director: print("Not Director!!!") cleanexit() #get xml filename to open and read if len(sys.argv) < 2: print("Error!!!! Format: python3 fseq_parse.py <filename>.xml") cleanexit() ###################### # Read the .xml file # ###################### try: xml_file = read_xml.read_xml(sys.argv[1], my_config.ctrl_list) except: print("Error reading ", sys.argv[1], " xml file") cleanexit() if xml_file.error: cleanexit() # Update controller.tot_num_ch (from network.xml file) for controllers that is not us for c in my_config.ctrl_list: for x in xml_file.dir_ctrl_list: if c.index == x[0]: break #found controller in dir_ctrl_list c.tot_num_ch = x[3] for c in my_config.ctrl_list: # calculate slicesize from controllers_list my_config.slicesize += c.tot_num_ch