def __init__(self, filename):
    """Reporter that accumulates its XML report in memory.

    filename -- destination path for the report; stored here, the actual
    write happens elsewhere (not visible in this block).
    """
    BaseReporter.__init__(self)
    # buffer the whole document in memory first
    from cStringIO import StringIO
    self._sio = StringIO()
    from elementtree.SimpleXMLWriter import XMLWriter
    self.writer = XMLWriter(self._sio, "utf-8")
    self.filename = filename
    # per-test start bookkeeping -- populated by other methods; TODO confirm
    self.test_starts = {}
def __init__(self):
    """Reporter that accumulates its XML report in an in-memory buffer."""
    BaseReporter.__init__(self)
    from cStringIO import StringIO
    self._sio = StringIO()
    # prefer the standalone elementtree package; fall back to the copy
    # bundled with testoob when it is not installed
    try:
        from elementtree.SimpleXMLWriter import XMLWriter
    except ImportError:
        from testoob.compatibility.SimpleXMLWriter import XMLWriter
    self.writer = XMLWriter(self._sio, "utf-8")
    # per-test start bookkeeping -- populated by other methods; TODO confirm
    self.test_starts = {}
# Serializes a keymap (an iterable of (context, action, key) triples) to an
# XML file: one element per distinct context, each containing a <keyboard>
# element with one <key id=...> entry per binding in that context.
# NOTE(review): whitespace in this block appears collapsed onto one line; the
# original indentation (in particular which of the three trailing w.end()
# calls sit inside the loop) must be restored before editing. The `actions`
# list is computed but never used below -- confirm against the original and
# remove if dead.
def write_keymap(keymap, filename): contexts = list(set([c for c, a, k in keymap])) actions = list(set([a for c, a, k in keymap])) w = XMLWriter(filename, "utf-8") doc = w.start("keymap") for context in contexts: w.start(context) w.start("keyboard") for c, a, k in keymap: if c == context: w.element("key", a, id=k) w.end() w.end() w.end() w.close(doc)
def main():
    """Convert crime_geo_coordinates.txt ("lat,lng[,...]" per line) into
    geo_coordinates.xml, one <dataelement> per input line.

    Fixes over the previous version:
    - the longitude text kept its trailing newline (``line.split(",")[1]``
      includes ``"\\n"`` for two-field rows); both fields are now stripped.
    - the output file handle was opened but never closed; it is now managed
      by a ``with`` block so the XML is reliably flushed to disk.
    """
    fout_name = os.path.join(data_dir, "geo_coordinates.xml")
    f_name = os.path.join(data_dir, "crime_geo_coordinates.txt")
    with open(fout_name, "w") as fout:
        w = XMLWriter(fout)
        w.start("root")
        with open(f_name) as f:
            for line in f:
                fields = line.split(",")
                lat = fields[0].strip()
                lng = fields[1].strip()  # strip() drops the trailing newline
                w.start('dataelement')
                w.element('text', "")
                w.start("geodata")
                w.element("latitude", lat)
                w.element("longitude", lng)
                w.end("geodata")
                w.end("dataelement")
        w.end("root")
def writeXML(frames, path, args):
    """Write per-frame annotation-vs-mapping comparison results to *path*.

    frames -- dict: frame number -> {object name -> result dict with keys
              "lens", "err", "ann_x", "ann_y", "map_x", "map_y"}
    args   -- argv-style list; args[1]=mapper file, args[2]=annotation file,
              args[4]=mark in, args[5]=mark out
    """
    writer = XMLWriter(path)
    writer.declaration()
    doc = writer.start("AnnotationEvaluation")

    # source information
    writer.element("video", mark_in=str(args[4]), mark_out=str(args[5]))
    writer.element("mapper", os.path.basename(args[1]))
    writer.element("annotation", os.path.basename(args[2]))
    writer.element("comparison", mean_err=str(calMean(frames)))

    # compared points
    total = str((args[5] - 1) - (args[4] + 1) + 1)
    writer.start("frames", total=total, compared=str(len(frames)))
    for fnum in frames.keys():
        writer.start("frame", num=str(fnum))
        for name in frames[fnum]:
            result = frames[fnum][name]
            writer.start("object",
                         lens=Theta.name(result["lens"]),
                         name=name,
                         err=str(result["err"]))
            writer.element("annotatedCentroid",
                           x=str(result["ann_x"]), y=str(result["ann_y"]))
            writer.element("mappedCentroid",
                           x=str(result["map_x"]), y=str(result["map_y"]))
            writer.end()  # object
        writer.end()  # frame

    # clean up -- closes any still-open elements down to the root
    writer.close(doc)
# Exports self.song as a MusicXML "score-partwise" document: a single part
# ("P1"), per-measure <attributes> (key signature, time, two staves and
# G/F clefs emitted on the first measure), melody notes on staff 1 --
# dotted-half notes that straddle a half-beat are split into a tied pair --
# then a <backup> followed by chord voicings on staff 2.
# NOTE(review): whitespace appears collapsed across these two source lines
# (the expression `measure.duration * self.divisions` is split at the line
# boundary); restore the original line structure before editing further.
# The time signature assumes quarter-note beat-type ("4") -- confirm.
def write(self, fileName): file = open(fileName, "w") file.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n") self.writer = XMLWriter(file) structure = self.writer.start("score-partwise", {"version": "3.0"}) self.writer.start("part-list") self.writer.start("score-part", {"id": "P1"}) self.writer.element("part-name", "Music") self.writer.end("score-part") self.writer.end("part-list") self.writer.start("part", {"id": "P1"}) for index, measure in enumerate(self.song.get_measures()): #raw_input() self.currentMeasureIndex = index self.currentBeat = 0 self.writer.start("measure", {"number": str(index + 1)}) self.writer.start("attributes") self.writer.element("divisions", str(self.divisions)) if (index == 0): self.writer.start("key") self.writer.element( "fifths", str(KeySignature.key_sig_values[self.song.key.value])) self.writer.element("mode", "major") self.writer.end("key") self.writer.start("time") self.writer.element("beats", str(measure.duration)) self.writer.element("beat-type", "4") self.writer.end("time") self.writer.element("staves", "2") self.writer.start("clef", {"number": "1"}) self.writer.element("sign", "G") self.writer.element("line", "2") self.writer.end("clef") self.writer.start("clef", {"number": "2"}) self.writer.element("sign", "F") self.writer.element("line", "4") self.writer.end("clef") self.writer.end("attributes") self.writeChordSymbol(measure.chords[0]) for note in measure._notes: if note.duration == 1.5 and self.currentBeat % 1 == 0.5: splitNotes = self.splitDottedHalf(note) #for x in splitNotes: #print str(x.pitch)+", "+str(x.duration) self.writeNoteXML(splitNotes[0], 1, "start") self.writeNoteXML(splitNotes[1], 1, "stop") else: if len(measure.harmonies) == 1: if self.currentBeat == measure.harmonies[0][0]: #print "harmony found: "+str(index+1) self.writeNoteXML([note, measure.harmonies[0][1]], 1) else: self.writeNoteXML(note, 1) else: self.writeNoteXML(note, 1) self.writer.start("backup") self.writer.element("duration", str(measure.duration 
* self.divisions)) self.writer.end("backup") ### OLD #c = measure.chords[0].get_random_voicing(measure.duration) #self.writeNoteXML(c,2) ### NEW newThing = make_chord_measure(measure.chords[0], measure.duration) for note_tuple in newThing: self.writeNoteXML(note_tuple, 2) #### self.writer.end("measure") self.writer.end("part") self.writer.end(structure)
# Verifies album-art image-URL size substitution: fetches a reference image
# (with device-link authentication only when required and not custom browse),
# determines the reference size token in the URL, then for each
# (substitution, dimension) pair downloads the substituted URL, writes the
# image into an HTML results file (built with XMLWriter), and either checks
# the file size (raster images, only when the image library is installed) or
# validates well-formedness and DTD conformance (SVG images).
# NOTE(review): whitespace appears collapsed across these two source lines
# (a call argument list is split at the boundary); restore the original
# indentation before editing. Size "0" is skipped for custom browse because
# the substitution can be anything (see log message).
def verify_image_url_substitution(self, sample_image_url, images_substitution_list, images_sizes_list, custom_browse=False): #""" #Required authentication will only be enabled if the following: #1) Service is using the device link authentication scheme #2) Service has returned "requiredAuthentication" attribute as part of the albumArtUri #3) This is a test for high-res album art and not custom browse #""" require_auth = True if self.requires_auth and self.device_link_enabled and not custom_browse else False #inspect the original image reference_image = self.inspect_art_url(sample_image_url, require_auth) if reference_image is None: self.stop("Could not successfully retrieve reference image.") #get the default image size value defined_image_size_value = self.get_best_substitution_value( sample_image_url, images_substitution_list) #check if we have "one sizer" self.verifyIsNotNoneOrStop( defined_image_size_value, "The reference image size ({0}) in the url should be " "found in the substitution list.".format(defined_image_size_value)) #open html output file once with open(self.output_file_name, 'w') as result_file: writer = XMLWriter(result_file, encoding=UTF8) self.verifyTrueOrFailCase( writer, "Framework: XMLWriter constructor should produce a valid file handle to " "write to.") #write the header to results file (html_element, body_element, result_table) = self.write_results_file_header( writer, sample_image_url) #iterate trough the size maps and do the rest of work for item, dimension in zip(images_substitution_list, images_sizes_list): if custom_browse and dimension == '0': log_msg = "Skipping size check for size 0 because the substitution can be anything" self.logger.info(log_msg) self.console_logger.info(log_msg) continue #create a local name that has size in it substitution_local_name = self.create_name_for_local_image_file( str(dimension), sample_image_url) substitution_url_name = self.create_name_for_substitute_url( item, sample_image_url, 
defined_image_size_value) #inspect the image image = self.inspect_art_url(substitution_url_name, require_auth) if '.svg' not in substitution_url_name: #save file to local directory self.save_substitution_file_to_local_disc( image, substitution_local_name) # explicitly close the opened url image.close() #put an art image in the file self.write_results_file_images(writer, str(dimension), substitution_url_name) if self.image_lib_install_flag == 0: #inspect the file size self.inspect_file_size(substitution_local_name, dimension) #close html output file else: log_msg = "{0} SVG images do not require size validation so skipping test." \ .format(substitution_url_name) self.logger.info(log_msg) self.console_logger.info(log_msg) # Validate SVG file svg_validator = SvgValidator(image, substitution_url_name) well_formed, error_message = svg_validator.validate_well_formed( ) self.verifyTrueOrStop(well_formed, error_message) dtd_conform, error_message = svg_validator.validate_against_dtd( ) self.verifyTrueOrFailCase(dtd_conform, error_message) # explicitly close the opened url image.close() self.write_results_file_closer(writer, html_element, body_element, result_table)
# Fragment: the tail of an OAuth request helper (its `def` line is outside
# this chunk), a search() helper that queries the Twitter v1.1 search API
# geocoded near Washington DC (100 results), and the start of a __main__
# block that writes geotagged tweets into tweet_data.xml via XMLWriter.
# NOTE(review): the __main__ block is truncated here (no closing end()/close()
# visible) and whitespace appears collapsed onto one line -- restore the
# original structure before editing.
client = oauth.Client(consumer, access_token) resp, content = client.request(url, method=http_method) return content def search(): URL = 'https://api.twitter.com/1.1/search/tweets.json?' Count = '&count=100' DCGeocode = '&geocode=38.895,-77.036389,4.5mi' URL = URL + Count + DCGeocode return oauth_request(URL) if __name__ == "__main__": f = codecs.open('tweet_data.xml', mode='w') w = XMLWriter(f) dictionary = json.loads(search()) w.start('root') for n in range(0, len(dictionary["statuses"])): entry = dictionary["statuses"][n] if (not (entry["geo"] == None)): w.start('dataelement') w.element( 'text', "Status: " + entry["text"].encode('ascii', 'ignore') + '\n') w.start('geodata') w.element('latitude', str(entry["geo"]["coordinates"][0])) w.element('longitude', str(entry["geo"]["coordinates"][1]))
# Fragment: builds (download URL, title) pairs for Cicero texts hosted on a
# PHI-style site, then for each text derives a punctuation-free filename and
# starts writing "cicero_<name>.xml" with <meta> author/title/source entries,
# iterating up to 500 pages per text (stated safe upper bound for Cicero).
# NOTE(review): the enclosing loop/function header is outside this chunk and
# the page loop is truncated mid-body; whitespace appears collapsed onto one
# line -- restore the original structure before editing.
# creates a reference list with download addresses for every text refs.append((base + 'dx/text/474/%s/' % str(counter), entry.lower())) counter += 1 for param in refs: source = param[0] title = param[1] # debug print '[%s]' % title filename = title.replace(' ', '_') for p in string.punctuation.replace('_', ''): filename = filename.replace(p, '') w = XMLWriter('cicero_' + filename + '.xml', encoding='utf-8') xml = w.start("document") # metadata entries of the output files w.element("meta", name="author", value="marcus tullius cicero") w.element("meta", name="title", value=title) w.element("meta", name="source", value=source + '0') # upon checking it no text in PHI attributed to cicero # has more than 500 pages, so this is a safe download limit for x in range(0, 500): lines = [] entry = [] section = source + str(x) reference = base + 'loc/474/' + str(x) + '/0'
# Fragment: command-line handling for an HTML-markup-rendering tool --
# validates the --file_input option (exiting with -1 when missing), derives
# "<input>_HTMLMarkupRendered.xml", opens the output and debug files, builds
# an XMLWriter over the output file, runs writeObjects(), then closes both
# files and reports the object count.
# NOTE(review): begins with the stray closing ")" of an option-parser call
# whose opening is outside this chunk; `countObjects` is presumably a module
# global updated by writeObjects() -- confirm. Whitespace appears collapsed
# onto one line.
) (options, args) = parser.parse_args() # Validate data source uri if not options.file_input: parser.error("Requires input file. Try --help for usage") sys.exit(-1) if len(options.file_input) < 1: parser.error("Requires input file. Try --help for usage") sys.exit(-1) inputFile = options.file_input # Open outputFiles outputFileName = inputFile.split('.')[0] + "_HTMLMarkupRendered.xml" FILE_out_tree = openFile(outputFileName, "w") FILE_out_debug = openFile("RenderHTMLMarkupWithinXMLDebug.xml", "w") xmlWriter = XMLWriter(FILE_out_tree) # constructed for output writeObjects(inputFile) FILE_out_tree.close() FILE_out_debug.close() print("Found %d objects" % countObjects) print("Output is in %s" % outputFileName)
# Maps VICON CSV marker rows onto camera frames: validates CLI arguments,
# opens a Mapper for each lens side (buttonside/backside), then iterates all
# CSVs in lockstep writing one <frameInformation> element per frame with a
# projected <centroid>, a fixed-size <boxinfo>, and <visibility> counts,
# until every CSV reaches EOF. Buttonside x-coordinates get +960 because the
# Ricoh video places both lens frames side by side.
# NOTE(review): whitespace appears collapsed across these three source
# lines; restore the original indentation before editing. If a point is
# visible to neither side, `side` is passed to Theta.name() without having
# been assigned on that path (NameError on the first such frame) -- confirm
# against the original and initialize a default. The bare `except` blocks
# around the float conversions will also mask unrelated errors.
def main(sysargs): args = EasyArgs(sysargs) cfg = EasyConfig(args.config, group="mapper") if "help" in args: usage() return 0 if ["calib", "trainer", "output"] not in args: print "Must specify: -calib, -trainer, -output files" usage() return 1 if len(args) == 1: print "Not enough input CSV files" usage() return 1 if len(args) > 2 and args.map_trainer_mode: print "Too many CSV for trainer-mapping mode" usage() return 1 if "force_side" in args: side = Theta.resolve(args.force_side) if side == Theta.NonDual: print "Invalid force_side argument:", args.force_side usage() return 1 # set side overrides force_button = (side == Theta.Buttonside) force_back = not force_button else: force_button = force_back = False # working vars csvs = {} frame_num = 0 # open source CSV datasets for i in range(1, len(args)): print args[i] csvs[i] = Memset(args[i]) # reel all the files up to their first flash for i in csvs: csvs[i].restrict() if len(csvs[i].row()) < 10: print "CSV file:", args[i], "contains no marker data!\nAborting." 
return 1 # override csv name if args.map_trainer_mode: csvs[1]._name = cfg.trainer_target # open calib files try: buttonside = Mapper(args.calib, args.trainer, cfg, Theta.Buttonside) backside = Mapper(args.calib, args.trainer, cfg, Theta.Backside) except Exception as e: print e.message return 1 count = {'bts': 0, 'bks': 0, 'rej': 0} # open destination XML with open(args.output, "w") as xmlfile: w = XMLWriter(xmlfile) w.declaration() xmlfile.write( "<!DOCTYPE dataset SYSTEM \"http://storage.gwillz.com.au/eagleeye_v2.dtd\">" ) doc = w.start("dataset") # iterates until all vicon csvs reach eof while True: w.start("frameInformation") w.element("frame", number=str(frame_num)) #iterates through each vicon csv at the current row for i in csvs: c = csvs[i] # determine marker quality try: max_reflectors = int(c.row()[8]) visible_reflectors = int(c.row()[9]) except: print "Error in reading quality at row {}".format(i) return 1 try: # read VICON data x = float(c.row()[2]) y = float(c.row()[3]) z = float(c.row()[4]) # TODO: is this necessary? We never use the object's rotation rx = float(c.row()[5]) ry = float(c.row()[6]) rz = float(c.row()[7]) except: print "Error occurred when converting VICON data at row {}".format( i) return 1 # run projection/mapping on VICON data if backside.isVisible((x, y, z)): points = backside.reprojpts((x, y, z)) side = 'backside' count['bks'] += 1 elif buttonside.isVisible((x, y, z)): points = buttonside.reprojpts((x, y, z)) points[ 0] += 960 # add 960 to x for rightside points (Ricoh video is both frames side by side) side = 'buttonside' count['bts'] += 1 # TODO don't write non visible dots? else: points = [0., 0.] 
count['rej'] += 1 # TODO: Change DTD and double check with Manjung w.start("object", id=str(i), name=c.name(), lens=Theta.name(side)) w.element("boxinfo", height="99", width="99", x=str(points[0] - 50), y=str(points[1] - 50)) w.element("centroid", x=str(points[0]), y=str(points[1]), rx=str(rx), ry=str(ry), rz=str(rz)) w.element("visibility", visible=str(visible_reflectors), visibleMax=str(max_reflectors)) w.end() w.end() # test end of files eofs = 0 for i in csvs: if csvs[i].eof(): eofs += 1 if len(csvs) == eofs: print "end of all datasets" break # load next vicon frame frame_num += 1 for i in csvs: csvs[i].next() w.close(doc) print "\nbuttonside", count['bts'] print "backside", count['bks'] print "rejected", count['rej'] return 0
# Writes a dual-lens intrinsic calibration XML file (<dual_intrinsic> root
# with a Buttonside and a Backside child): for each side with non-empty
# camera-matrix and distortion arrays it emits <CamMat>, a <DistCoe> element
# sized for the 5-, 8- (rational model) or 12-coefficient (prism model,
# OpenCV 3.0.0) layout, and optional <Error> stats; empty sides are emitted
# as empty elements. Any failure is re-raised as a generic Exception for
# main() to catch.
# NOTE(review): whitespace appears collapsed across these two source lines
# (the final print statement spans the boundary); restore the original
# structure before editing. When `buttonside` or `backside` is None, the
# indexing (buttonside[0].size) raises TypeError which gets swallowed into
# the generic wrapper -- confirm that is intended (the commented-out
# num_sides line suggests it was being worked on).
def intWriter(path, buttonside=None, backside=None): try: status = "" print 'Generating Intrinsic Parameters to:', path, '...' with open(path, 'w') as int_xml: w = XMLWriter(int_xml) w.declaration() # Camera Intrinsic (Root) root = w.start('dual_intrinsic') num_sides = range(0, 2) #num_sides = range(0, 1) if buttonside is None or backside is None else range(0, 2) for i in num_sides: w.start("Buttonside" if i == 0 else "Backside") if i == 0 and buttonside[0].size > 0 and buttonside[1].size > 0: status += 'Buttonside' camMat = buttonside[0] distCoe = buttonside[1] calibError = buttonside[2] elif i == 1 and backside[0].size > 0 and backside[1].size > 0: if status == "": status += 'Backside' else: status += ' & Backside' camMat = backside[0] distCoe = backside[1] calibError = backside[2] else: w.end() continue # Camera Matrix w.element('CamMat', fx=str(camMat[0][0]), fy=str(camMat[1][1]), cx=str(camMat[0][2]), cy=str(camMat[1][2])) # Distortion Coefficients if (len(distCoe[0]) == 8): # 8 coefficients Rational Model, k4 k5 k6 enabled w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4]), k4=str(distCoe[0][5]), k5=str(distCoe[0][6]), k6=str(distCoe[0][7])) elif (len(distCoe[0]) == 12): # 12 coefficients Prism Model, c1, c2, c3, c4 enabled, new in OpenCV 3.0.0 w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4]), k4=str(distCoe[0][5]), k5=str(distCoe[0][6]), k6=str(distCoe[0][7]), c1=str(distCoe[0][8]), c2=str(distCoe[0][9]), c3=str(distCoe[0][10]),c4=str(distCoe[0][11])) else: w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4])) # Error values if len(calibError) > 0: w.element('Error', rms=str(calibError['rms']), total=str(calibError['tot_err']), arth=str(calibError['arth_err'])) w.end() #buttonside/backside w.close(root) print 
status, 'Intrinsic calibration has been generated successfully.' except Exception as e: # keep it bubbling up, to catch in main() raise Exception("{}: {}\n'ERROR: occurred in writing intrinsic XML file.'".format(type(e), e.message))
# Builds a SUAZ accident-report XML document ("XML-File fuer Dale") from the
# context object and the requesting principal's address data: writes unb/unh
# envelope segments, uvt, vin (person), ufb (employer/institution address),
# eti/ksd/ufd/ebh/dis/afb/abs/uaz segments into a StringIO via XMLWriter,
# then re-serializes it through lxml.etree with ISO-8859-1 encoding (utf-8 is
# deliberately not used -- see the German comment near the end: the consuming
# "cusa" system only accepts ISO-8859-1) into self.base_file.
# NOTE(review): whitespace appears collapsed across these four source lines;
# restore the original indentation before editing. Date handling hard-codes
# two-digit-year prefixes ("19%s"/"20%s") and slices like prsgeb[6:11] --
# verify field formats before touching. Much commented-out legacy code
# (session lookups, the IMitglied branch) is left inline; the duplicated
# `principal = self.request.principal` assignment looks accidental -- confirm.
def generate(self): ''' XML-File fuer Dale erzeugen ''' context = self.context # Layout dc = IZopeDublinCore(context, None) if dc: creator = dc.creators[0] principal = Principal(creator, creator, creator) else: principal = self.request.principal principal = self.request.principal #stammdaten = IStammdaten(self.request.principal) adresse = principal.getAdresse() traegeroid = u'' oid = str(adresse['oid']).strip() if len(oid) == 9: oid = '000000' + oid #session = Session() #s = select([traegeruaz], and_(traegeruaz.c.trgrcd == str(oid))) #res = session.execute(s).fetchone() print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" print oid print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" #if res: # traegeroid = str(res['trgmnr']).strip() xml_file = self.base_file io = StringIO() xml = XMLWriter(io, encoding='utf-8') bv = '' iknr = '' #Ik-Nummer des Mitgliedsunternehmens muss noch ermittelt werden ikdav = '99999999' iknum = '120692198' date = strftime("%d.%m.%Y", localtime()) time = strftime("%H:%M", localtime()) unb_6 = '0' docid = context.__name__ if len(docid.split('-')) == 2: unb_6 = docid.split('-')[1] # Daten schreiben suaz = xml.start('suaz_file') xml.start("unb") xml.element("unb_2", iknr) xml.element("unb_3", ikdav) xml.element("unb_4", date) xml.element("unb_5", '00:00') xml.element("unb_6", unb_6) xml.element("unb_9", '01') xml.end("unb") xml.start("unh") xml.element("unh_2", 'SUAZ:09:1:01:UV') xml.element("unh_3", iknum) xml.end("unh") xml.start("uvt") xml.element("uvt_1", bv) xml.element("uvt_2", iknum) xml.element("uvt_3", date) uvt_4 = "%s.%s.20%s" % (context.unfdatum[0:2], context.unfdatum[3:5], context.unfdatum[8:11]) xml.element("uvt_4", uvt_4) xml.element("uvt_5", '') xml.end("uvt") xml.start("vin") xml.element("vin_1", context.prsname[:30]) xml.element("vin_2", context.prsvor[:30]) xml.element("vin_3", context.prssta) xml.element("vin_4", context.prssex[0]) xml.element("vin_5", 
context.ikzplz[:6]) xml.element("vin_6", context.ikzort[:30]) vin_7 = "%s %s" % (context.ikstr, context.iknr) xml.element("vin_7", vin_7[:46]) xml.element("vin_8", '') if int(context.prsgeb[6:11]) > int(strftime("%Y", localtime())): vin_9 = "%s.%s.19%s" % (context.prsgeb[0:2], context.prsgeb[3:5], context.prsgeb[8:10]) else: vin_9 = "%s.%s.%s" % (context.prsgeb[0:2], context.prsgeb[3:5], context.prsgeb[6:10]) xml.element("vin_9", vin_9) xml.element("vin_10", '') xml.element("vin_11", '') xml.end("vin") xml.start("ufb") # Mitglied if False: pass #if IMitglied.providedBy(self.request.principal): # enummer = context.enummer # if enummer == None: # enummer = '' # name1 = context.ename # name2 = context.estrasse + ' ' + enummer # name3 = context.eplz + ' ' + context.eort # stra = context.estrasse + ' ' + enummer # plz = context.eplz # ort = context.eort # adressetraeger = adresse['iknam1'] # WICHTIG Hier noch die weitere Adresse einfuegen und testen # Einrichtung else: if 'iknam1' in adresse: name1 = adresse['iknam1'] name2 = adresse['iknam2'] name3 = adresse['iknam3'] stra = adresse['ikstr'] + adresse['ikhnr'] plz = str(adresse['ikhplz']) ort = adresse['ikhort'] else: name1 = "fehlt:adresse['iknam1']" name2 = "fehlt:adresse['iknam2']" name3 = "fehlt:adresse['iknam3']" stra = "fehlt:adresse['ikstr'] + adresse['ikhnr']" plz = "fehlt:str(adresse['ikhplz'])" ort = "fehlt:adresse['ikhort']" adressetraeger = context.traeger # Daten einfuegen... 
ufb_1 = '%s %s %s %s' % (name1, name2, name3, adressetraeger) xml.element("ufb_1", ufb_1[:200]) xml.element("ufb_2", '') xml.element("ufb_3", plz) xml.element("ufb_4", ort[:30]) xml.element("ufb_5", stra[:46]) xml.element("ufb_6", '') xml.element("ufb_7", '') xml.end("ufb") xml.start("eti") xml.element("eti_1", date) xml.element("eti_2", time) xml.end("eti") xml.start("ksd") xml.element("ksd_1", 'unbekannt') xml.element("ksd_5", '1') xml.element("ksd_2", '') xml.element("ksd_3", '') xml.element("ksd_4", '') xml.end("ksd") xml.start("ufd") xml.element("ufd_1", context.unfzeit) xml.element("ufd_2", context.uadbavon) xml.element("ufd_3", context.uadbabis) xml.end("ufd") xml.start("ebh") # ebh_1 Pseudodatum eigefuegt 30.11.2011 xml.element("ebh_1", '99.99.9999') if context.unfeba1: xml.element("ebh_2", context.unfeba1[:30]) else: xml.element("ebh_2", '') xml.end("ebh") xml.start("dis") dis_1 = "%s %s" % (context.diavkt, context.diaadv) if context.prstkz == 'ja': dis_1 = u'tödlicher Unfall: %s' % dis_1 xml.element("dis_1", dis_1[:100]) xml.element("dis_4", '') xml.element("dis_3", '') xml.end("dis") xml.start("afb") if context.unfae1: eingestellt = context.unfae1 else: eingestellt = ' ' toedlich = context.prstkz if eingestellt == 'nein': afb_1 = '0' afb_4 = '' elif toedlich == 'ja': afb_1 = '1' afb_4 = '' else: afb_1 = '1' xml.element("afb_1", afb_1) if 'sofort' in eingestellt: afb_4 = uvt_4 elif 'spaeter' in eingestellt: afb_4 = '%s.%s.20%s' % (context.unfaedatum[0:2], context.unfaedatum[3:5], context.unfaedatum[8:11]) #In der Unfallanzeige kann das Datum in afb_4 nicht verarbeitet werden #afb_4 = '%s.%s.20%s %s' % (context.unfaedatum[0:2],context.unfaedatum[3:5], # context.unfaedatum[8:11], context.unfaezeit) xml.element("afb_4", afb_4) arbeitsfaehig = context.unfwa1 if arbeitsfaehig == 'nein' or arbeitsfaehig == None: afb_7 = '' else: if context.unfwax == None: afb_7 = '' else: afb_7 = '%s.%s.%s' % (context.unfwax[0:2], context.unfwax[3:5], context.unfwax[6:11]) 
xml.element("afb_7", afb_7) xml.element("afb_8", '') xml.end("afb") xml.start("abs") xml.element("abs_1", context.unfus2[:81]) xml.element("abs_2", '') xml.element("abs_3", '') xml.element("abs_4", 'Extranet') xml.element("abs_5", '') xml.element("abs_6", context.anspfon) xml.element("abs_7", context.anspname) xml.end("abs") xml.start("uaz") xml.element("uaz_1", context.unfhg1[:3000]) uaz_2 = context.unfhg2 if uaz_2 == 'des Versicherten': uaz_2 = '1' else: uaz_2 = '2' xml.element("uaz_2", uaz_2) xml.element("uaz_3", context.unfort[:200]) if context.prstkz == 'ja': uaz_4 = '1' else: uaz_4 = '2' xml.element("uaz_4", uaz_4) xml.element("uaz_5", context.unfkn1) if context.unfkn2 == 'ja': uaz_6 = '1' else: uaz_6 = '2' xml.element("uaz_6", uaz_6) xml.element("uaz_7", 'nein') xml.element("uaz_8", '1') xml.element("uaz_9", '0') xml.element("uaz_10", context.prsvtr) xml.element("uaz_11", oid) xml.end("uaz") xml.close(suaz) io.seek(0) # utf-8 wurde auskommentiert, da cusa nur Dateien mit header iso-8859-1 verarbeiten kann (das verstehe wer will) # xml_file.write(etree.tostring(etree.parse(io), pretty_print=True, encoding="utf-8", xml_declaration=True)) xml_file.write( etree.tostring(etree.parse(io), pretty_print=True, encoding="ISO-8859-1", xml_declaration=True)) xml_file.close() return io
# Interactive click-trainer: plays back a marker-cropped video side by side
# with VICON CSV rows, lets the user left-click marker positions (right-click
# or arrow keys to skip/back, backspace to remove, 1/2 to switch lenses in
# dual mode, Enter to finish), tracks a data-quality cooldown for
# bad/negative VICON readings, and on completion writes the collected
# training points to a TrainingSet XML file (per-lens buttonside/backside or
# plain frames sections with plane/vicon/visibility per clicked frame).
# NOTE(review): whitespace appears collapsed across these four source lines;
# restore the original indentation before editing. The bare `except` around
# int(mark_in)/int(mark_out) hides anything but bad integers -- confirm
# intent. Recording is skipped while dataQuality > 0 (quality cooldown) and
# once max_clicks is reached for the current lens.
def main(sysargs): # settings args = EasyArgs(sysargs) cfg = EasyConfig(args.config, group="trainer") max_clicks = args.clicks or cfg.default_clicks window_name = "EagleEye Trainer" if "help" in args: usage() return 0 # grab marks from args if len(args) > 5: mark_in = args[4] mark_out = args[5] # test integer-ness try: int(mark_in) and int(mark_out) except: usage() return 1 elif len(args) > 3: ret, mark_in, mark_out = marker_tool(args[1], cfg.buffer_size, window_name) if not ret: print "Not processing - exiting." return 1 else: usage() return 1 ## clicking time! cropped_total = mark_out - mark_in print "video cropped at:", mark_in, "to", mark_out, "- ({} frames)".format( cropped_total) # clicking function def on_mouse(event, x, y, flags, params): # left click to mark if event == cv2.EVENT_LBUTTONDOWN: params['pos'] = (x, y) params['status'] = Status.record # right click to skip elif event == cv2.EVENT_RBUTTONDOWN: params['status'] = Status.skip # working variables params = {'status': Status.wait, 'pos': None} write_xml = False textstatus = "" dataQuality = 0 # 0 = good, >0 = bad/potentially bad # default right side (buttonside) if cfg.dual_mode: lens = Theta.Right trainer_points = {Theta.Right: {}, Theta.Left: {}} else: # both sides otherwise lens = Theta.Both trainer_points = {Theta.Both: {}} print "Minimum reflectors: {} | Ignore Negative xyz: {}".format( cfg.min_reflectors, cfg.check_negatives) # load video (again) in_vid = BuffSplitCap(args[1], crop=(0, 0, 0, 0), rotate=BuffSplitCap.r0, buff_max=cfg.buffer_size) in_vid.restrict(mark_in, mark_out) # load csv (with appropriate ratio) in_csv = Memset(args[2]) in_csv.restrict() in_csv.setRatio(cropped_total) # test for marker data if len(in_csv.row()) < 10: print "This CSV contains no marker data!\nAborting." 
return 1 # status print "" print "Writing to:", args[3] print "Number of clicks at:", max_clicks print "" cv2.namedWindow(window_name) cv2.setMouseCallback(window_name, on_mouse, params) # grab clicks (Process 2) while in_vid.isOpened(): frame = in_vid.frame(side=lens) sys.stdout.write(in_vid.status() + " | Clicks {} / {}\r".format( len(trainer_points[lens]), max_clicks)) sys.stdout.flush() # prepare CSV data, click data tx = float(in_csv.row()[2]) ty = float(in_csv.row()[3]) tz = float(in_csv.row()[4]) rx = float(in_csv.row()[5]) ry = float(in_csv.row()[6]) rz = float(in_csv.row()[7]) # data quality status visible = int(in_csv.row()[9]) max_visible = int(in_csv.row()[8]) # status text to write textrow = "VICON - x: {:.4f} y: {:.4f} z: {:.4f} | rx: {:.4f} ry: {:.4f} rz: {:.4f}".format( tx, ty, tz, rx, ry, rz) textquality = "Visible: {} , Max Visible: {}".format( visible, max_visible) textstatus = "{} | {}/{} clicks".format(in_vid.status(), len(trainer_points[lens]), max_clicks) if lens == Theta.Left: textstatus += " - back side" elif lens == Theta.Right: textstatus += " - button side" #else none, no lens split # if data is qualified bad, reduce timeout by one if dataQuality > 0: dataQuality -= 1 if dataQuality == 0: dataStatus = " - Good data!!" dataStatus_colour = (0, 255, 0) # green else: dataStatus = " - Potentially bad data (wait {})".format( dataQuality) dataStatus_colour = (0, 255, 255) # yellow # Data tests # values must be above 0 and minimum reflectors if (cfg.check_negatives and (tx <= 0 or ty <= 0 or tz <= 0)) \ or visible < cfg.min_reflectors: dataStatus = " - Bad data!!" dataStatus_colour = (0, 0, 255) # red if cfg.ignore_baddata: dataStatus += " Ignored." 
dataQuality = 1 + cfg.quality_delay # draw the trainer dot (if applicable) if in_vid.at() in trainer_points[lens]: cv2.circle(frame, trainer_points[lens][in_vid.at()][0], 1, cfg.dot_colour, 2) cv2.circle(frame, trainer_points[lens][in_vid.at()][0], 15, cfg.dot_colour, 1) # draw text and show displayText(frame, textrow, top=True) displayText(frame, textquality) displayText(frame, textstatus) displayText(frame, dataStatus, endl=True, colour=dataStatus_colour) cv2.imshow(window_name, frame) # pause for input while params['status'] == Status.wait: key = cv2.waitKey(10) if key == Key.esc: params['status'] = Status.stop elif key == Key.enter: write_xml = True params['status'] = Status.stop elif key == Key.right: params['status'] = Status.skip elif key == Key.left: params['status'] = Status.back elif key == Key.backspace: params['status'] = Status.remove elif Key.char(key, '1') and cfg.dual_mode: params['status'] = Status.still lens = Theta.Left elif Key.char(key, '2') and cfg.dual_mode: params['status'] = Status.still lens = Theta.Right # catch exit status if params['status'] == Status.stop: print "\nprocess aborted!" 
break # write data if params['status'] == Status.record \ and len(trainer_points[lens]) != max_clicks: # TODO: does this disable recording clicks on the last frame if dataQuality == 0: trainer_points[lens][in_vid.at()] = (params['pos'], in_csv.row()[2:5], in_csv.row()[8:10]) params['status'] = Status.skip # or remove it elif params['status'] == Status.remove \ and in_vid.at() in trainer_points[lens]: del trainer_points[lens][in_vid.at()] print "\nremoved dot" # load next csv frame if params['status'] == Status.skip: if in_vid.next(): in_csv.next() else: write_xml = True print "\nend of video: {}/{}".format(in_vid.at() - 1, mark_out - 1) break # or load previous csv frame elif params['status'] == Status.back: if in_vid.back(): in_csv.back() # reset status params['status'] = Status.wait # clean up cv2.destroyAllWindows() ## write xml if write_xml: out_xml = XMLWriter(args[3]) out_xml.declaration() doc = out_xml.start("TrainingSet") # source information out_xml.start("video", mark_in=str(mark_in), mark_out=str(mark_out)) out_xml.data(os.path.basename(args[1])) out_xml.end() out_xml.element("csv", os.path.basename(args[2])) # training point data for lens in trainer_points: if lens == Theta.Right: out_xml.start("buttonside", points=str(len(trainer_points[lens]))) elif lens == Theta.Left: out_xml.start("backside", points=str(len(trainer_points[lens]))) else: # non dualmode out_xml.start("frames", points=str(len(trainer_points[lens]))) for i in trainer_points[lens]: pos, row, markers = trainer_points[lens][i] x, y = pos out_xml.start("frame", num=str(i)) out_xml.element("plane", x=str(x), y=str(y)) out_xml.element("vicon", x=str(row[0]), y=str(row[1]), z=str(row[2])) out_xml.element("visibility", visibleMax=str(markers[0]), visible=str(markers[1])) out_xml.end() out_xml.end() # frames # clean up out_xml.close(doc) print "Data was written." else: print "No data was written" print "\nDone." return 0
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 28 21:43:03 2014

@author: jmalinchak
"""
from elementtree.SimpleXMLWriter import XMLWriter
import sys

# Emit a small HTML document to stdout, exercising the SimpleXMLWriter API
# (start/element/data/end). Module-level names are kept as-is because the
# script may continue beyond this chunk.
w = XMLWriter(sys.stdout)
html = w.start("html")

# document head
w.start("head")
w.element("title", "my document")
w.element("meta", name="generator", value="my application 1.0")
w.end()  # head

# document body
w.start("body")
w.element("h1", "this is a heading")
w.element("p", "this is a paragraph")

# mixed inline content inside a paragraph
w.start("p")
w.data("this is ")
w.element("b", "bold")
w.data(" and ")
w.element("i", "italic")
w.data(".")
w.end("p")
# Fragment: parses annotator edit annotations (M2-style tokens: span start/
# end, error type, tokenized correction, annotator id) into ref_annot --
# "noop" edits with span (-1,-1) mean the source sentence is already correct
# -- then begins writing an evaluation XML ("scripts"/"script" with a single
# assumed script) and clusters each sentence's edits sorted from longest to
# shortest range.
# NOTE(review): the enclosing function/loop header is outside this chunk and
# the clustering loop is truncated mid-body; whitespace appears collapsed
# onto one line. The sort key `x[0] - x[1]` orders by negative span length
# (longest first) -- confirm.
correction = tokens[2].split("||")[0] correction = ' '.join(nltk.word_tokenize(correction)) required = tokens[3] annotator = int(tokens[5]) annotators[s].add(annotator) if c_start == -1 and c_end == -1 and etype.lower() == "noop": # Noop --> empty set of edits (source is right) pass else: ref_annot[s].append([c_start, c_end, etype, correction, annotator]) f_in.close() # Create the output XML if not out_file: out_file = in_file + ".ieval.xml" f_out = XMLWriter(out_file, "UTF-8") f_out.declaration() f_out.start("scripts") f_out.start("script", id="1") # Assume only one script # Do clustering for s in xrange(len(ref_annot)): sys.stdout.write("\rSentence %s..." % (s+1)) sys.stdout.flush() clusters = [] # Sort edits from longest to shortest range ref_annot[s].sort(key=lambda x: x[0] - x[1]) for e in ref_annot[s]: # Go through each edit merge = False for c in clusters:
def intWriter(path, camMat, distCoe, calibError={}): try: print 'Generating Intrinsic Parameters to:', path, '...' with open(path, 'w') as int_xml: w = XMLWriter(int_xml) w.declaration() # Camera Intrinsic (Root) root = w.start('StdIntrinsic') # Camera Matrix w.element('CamMat', fx=str(camMat[0][0]), fy=str(camMat[1][1]), cx=str(camMat[0][2]), cy=str(camMat[1][2])) # Distortion Coefficients if (len(distCoe[0]) == 8 ): # 8 coefficients Rational Model, k4 k5 k6 enabled w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4]), k4=str(distCoe[0][5]), k5=str(distCoe[0][6]), k6=str(distCoe[0][7])) elif ( len(distCoe[0]) == 12 ): # 12 coefficients Prism Model, c1, c2, c3, c4 enabled, new in OpenCV 3.0.0 w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4]), k4=str(distCoe[0][5]), k5=str(distCoe[0][6]), k6=str(distCoe[0][7]), c1=str(distCoe[0][8]), c2=str(distCoe[0][9]), c3=str(distCoe[0][10]), c4=str(distCoe[0][11])) else: w.element('DistCoe', k1=str(distCoe[0][0]), k2=str(distCoe[0][1]), p1=str(distCoe[0][2]), p2=str(distCoe[0][3]), k3=str(distCoe[0][4])) # Error values if len(calibError) > 0: w.element('Error', rms=str(calibError['rms']), total=str(calibError['tot_err']), arth=str(calibError['arth_err'])) w.close(root) print 'Intrinsic calibration has been generated successfully.' except Exception as e: print 'ERROR: occurred in writing intrinsic XML file.' raise e # keep it bubbling up, to catch in main()