def __call__(self, args): """ Execute command for adding dive buddy into UDDF file. """ import kenozooid.uddf as ku if args.member: org, number = args.member else: org, number = None, None id = args.id fn, mn, ln = _name_parse(args.name[0]) fout = args.output[0] if os.path.exists(fout): doc = ku.parse(fout).getroot() else: doc = ku.create() ku.create_buddy_data(doc, id=id, fname=fn, mname=mn, lname=ln, org=org, number=number) ku.save(doc, fout)
def __call__(self, args): """ Execute command for removal of dive buddies from UDDF file. """ import kenozooid.uddf as ku query = ku.XP_FIND_BUDDY fin = args.input[0] doc = ku.parse(fin) ku.remove_nodes(doc, query, buddy=args.buddy[0]) ku.save(doc.getroot(), fin)
def _save_dives(drv, time, data, fout):
    """
    Convert raw dive computer data into UDDF format and store it in
    output file.

    :Parameters:
     drv
        Dive computer driver used to parse raw data.
     time
        Time of raw dive computer data fetch.
     data
        Raw dive computer data.
     fout
        Output file.
    """
    model = drv.version(data)
    dc_id = ku.gen_id(model)
    log.debug('dive computer version {}'.format(model))

    # convert raw data into dive data and store in output file
    bdata = kd.BinaryData(datetime=time, data=data)
    eq = ku.create_dc_data(dc_id, model)
    dump = ku.create_dump_data(dc_id=dc_id, datetime=time, data=data)

    p = kc.params(drv.__class__)
    if 'gas' in p['data']:
        log.debug('gas data pipeline')
        with kf.buffer_open(2) as (f_g, f_d):
            # store gas and dives in two separate files,
            # then merge the data into one UDDF file
            save = kf.sink(partial(ku.save, fout=fout))
            m = kf.concat(2, partial(cat_gd, equipment=eq, dump=dump), save)
            kf.send(
                kf.pipe(drv.dives(bdata), kd.sort_dives, kd.uniq_dives),
                kf.split(
                    extract_gases(uniq_gases(kf.buffer(f_g, m))),
                    create_dives(kf.buffer(f_d, m), equipment=(dc_id,))
                )
            )
    else:
        log.debug('simple data pipeline')
        dives = kf.pipe(drv.dives(bdata), kd.sort_dives, kd.uniq_dives,
            partial(ku.create_dives, equipment=(dc_id,)))
        ku.save(ku.create_uddf(equipment=eq, dives=dives, dump=dump), fout)
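# The gas branch of _save_dives() composes the coroutine helpers from
# kenozooid.flow (kf.split, kf.buffer, kf.concat). The snippet below is a
# minimal, self-contained sketch of the same fan-out-and-collect idea using
# plain generator coroutines only; fan_out() and collector() are illustrative
# stand-ins, not the kenozooid.flow API.

def fan_out(items, *consumers):
    """ Send every item to each consumer coroutine, then close them. """
    for item in items:
        for c in consumers:
            c.send(item)
    for c in consumers:
        c.close()

def collector(results):
    """ Coroutine appending every received item to a result list. """
    try:
        while True:
            results.append((yield))
    except GeneratorExit:
        pass

dives = [{'depth': 32.5, 'gas': 'EAN32'}, {'depth': 41.0, 'gas': 'TX18/45'}]

gases, profiles = [], []
g = collector(gases)
d = collector(profiles)
next(g)   # prime the coroutines
next(d)
fan_out(iter(dives), g, d)

print(sorted({v['gas'] for v in gases}))   # ['EAN32', 'TX18/45']
print(len(profiles))                       # 2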
def enum_dives(files, total=1):
    """
    Enumerate dives with day dive number (when UDDF 3.2 is introduced)
    and total dive number.

    :Parameters:
     files
        Collection of UDDF files having dives to enumerate.
     total
        Start of total dive number.
    """
    fields = ('id', 'date')
    queries = (
        ku.XPath('@id'),
        ku.XPath('uddf:informationbeforedive/uddf:datetime/text()'),
    )
    parsers = (str, lambda dt: ku.dparse(dt).date())

    fnodes = ((f, n) for f in files
        for n in ku.find(f, ku.XP_FIND_DIVES, nodes=None, dives=None))
    data = ((f, ku.dive_data(n, fields, queries, parsers)) for f, n in fnodes)
    data = ((item[0], item[1].id, item[1].date) for item in data) # flatten data
    data = sorted(data, key=itemgetter(2))

    # enumerate dives with _day_ dive number and flatten the groups
    data = ichain(
        enumerate(g, 1) for k, g in itertools.groupby(data, itemgetter(2)))

    # enumerate dives with total dive number and transform into
    # { (f, id) => (n, k) }
    cache = dict(
        ((v[0], v[1]), (n, k)) for n, (k, v) in enumerate(data, total))

    # update data
    for f in files:
        doc = ku.parse(f)
        for n in ku.XP_FIND_DIVES(doc, nodes=None, dives=None):
            id = n.get('id')
            dnn = ku.xp_first(n, 'uddf:informationbeforedive/uddf:divenumber')
            if dnn is None:
                pn = ku.xp_first(
                    n, 'uddf:informationbeforedive/uddf:internaldivenumber')
                if pn is None:
                    pn = ku.xp_first(
                        n, 'uddf:informationbeforedive/uddf:datetime')
                *_, dnn = ku.create_node('uddf:divenumber')
                pn.addprevious(dnn)
            dnn.text = str(cache[f, id][0])
        ku.save(doc.getroot(), f)
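# The numbering in enum_dives() rests on sorting by date, grouping per day and
# then enumerating across all files. A standalone sketch with made-up dive
# records (no UDDF parsing involved):

import itertools
from datetime import date
from operator import itemgetter

# hypothetical (file, dive id, dive date) records
records = [
    ('a.uddf', 'd1', date(2011, 5, 1)),
    ('a.uddf', 'd2', date(2011, 5, 1)),
    ('b.uddf', 'd3', date(2011, 5, 2)),
]

records = sorted(records, key=itemgetter(2))

# day dive number within each date...
by_day = itertools.chain.from_iterable(
    enumerate(g, 1) for _, g in itertools.groupby(records, itemgetter(2)))

# ...then total dive number across all files, keyed by (file, id)
cache = {(v[0], v[1]): (n, k) for n, (k, v) in enumerate(by_day, 1)}

print(cache)
# {('a.uddf', 'd1'): (1, 1), ('a.uddf', 'd2'): (2, 2), ('b.uddf', 'd3'): (3, 1)}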
def __call__(self, args): """ Execute command UDDF file upgrade. """ import kenozooid.uddf as ku import kenozooid.logbook as kl for fin in args.input: try: print('Upgrading {}'.format(fin)) doc = kl.upgrade_file(fin) ku.save(doc.getroot(), fin) except Exception as ex: print('Cannot upgrade file {}'.format(fin), file=sys.stderr) print('Error: {}'.format(ex))
def copy_dives(files, nodes, n_dives, lfile):
    """
    Copy dive nodes to logbook file.

    The logbook file is created if it does not exist.

    :Parameters:
     files
        Collection of files.
     nodes
        Collection of dive ranges.
     n_dives
        Numeric range of total dive number, `None` if any dive.
     lfile
        Logbook file.
    """
    if os.path.exists(lfile):
        doc = et.parse(lfile).getroot()
    else:
        doc = ku.create()

    dives = find_dive_nodes(files, nodes, n_dives)
    gases = find_dive_gas_nodes(files, nodes)

    _, rg = ku.create_node('uddf:profiledata/uddf:repetitiongroup',
            parent=doc)
    gn = ku.xp_first(doc, 'uddf:gasdefinitions')
    existing = gn is not None
    if not existing:
        *_, gn = ku.create_node('uddf:gasdefinitions', parent=doc)

    with ku.NodeCopier(doc) as nc:
        copied = False
        for n in gases:
            copied = nc.copy(n, gn) is not None or copied

        if not existing and not copied:
            p = gn.getparent()
            p.remove(gn)

        copied = False
        for n in dives:
            copied = nc.copy(n, rg) is not None or copied

        if copied:
            ku.reorder(doc)
            ku.save(doc, lfile)
        else:
            log.debug('no dives copied')
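# copy_dives() delegates the actual node copying to kenozooid's NodeCopier.
# The fragment below is only a rough illustration of the underlying lxml
# operations (deep-copy an element into another tree, drop the container when
# nothing was copied); the element names are made up and this is not the UDDF
# schema.

import copy
from lxml import etree as et

src = et.XML('<logbook><dive id="d1"/><dive id="d2"/></logbook>')
dst = et.XML('<logbook/>')

rg = et.SubElement(dst, 'repetitiongroup')   # container for copied dives

copied = False
for n in src.findall('dive'):
    rg.append(copy.deepcopy(n))   # deep copy keeps the source tree intact
    copied = True

if not copied:
    dst.remove(rg)   # remove the empty container, as copy_dives does above

print(et.tostring(dst, pretty_print=True).decode())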
def test_dive_add_with_buddy(self):
    """
    Test adding dive with time, depth, duration and buddy
    """
    f = '{}/dive_add_buddy.uddf'.format(self.tdir)

    doc = ku.create()
    ku.create_buddy_data(doc, id='b1', fname='F', lname='N')
    ku.save(doc, f)

    d = kd.Dive(datetime=datetime(2010, 1, 2, 5, 7), depth=33.0,
            duration=3540)
    kl.add_dive(d, f, qbuddies=['b1'])

    nodes = ku.find(f, '//uddf:dive')
    dn = next(nodes)
    self.assertEquals('b1', ku.xp_first(dn, './/uddf:link/@ref'))
def test_dive_add_with_site(self):
    """
    Test adding dive with time, depth, duration and dive site
    """
    f = '{}/dive_add_site.uddf'.format(self.tdir)

    doc = ku.create()
    ku.create_site_data(doc, id='s1', location='L1', name='N1')
    ku.save(doc, f)

    d = kd.Dive(datetime=datetime(2010, 1, 2, 5, 7), depth=33.0,
            duration=3102)
    kl.add_dive(d, f, qsite='s1')

    nodes = ku.find(f, '//uddf:dive')
    dn = next(nodes)
    self.assertEquals('s1', ku.xp_first(dn, './/uddf:link/@ref'))
def __call__(self, args): """ Execute command for adding dive site into UDDF file. """ import kenozooid.uddf as ku id = args.id if args.coords: x, y = args.coords else: x, y = None, None location = args.location[0] name = args.name[0] fout = args.output[0] if os.path.exists(fout): doc = ku.parse(fout).getroot() else: doc = ku.create() ku.create_site_data(doc, id=id, location=location, name=name, x=x, y=y) ku.save(doc, fout)
def add_dive(dive, lfile, qsite=None, qbuddies=()):
    """
    Add new dive to logbook file.

    The logbook file is created if it does not exist.

    If a dive site or buddy search term is specified and no unique match
    is found in the logbook file, then ValueError exception is raised.

    :Parameters:
     dive
        Dive data.
     lfile
        Logbook file.
     qsite
        Dive site search term.
     qbuddies
        Buddy search terms.
    """
    if os.path.exists(lfile):
        doc = et.parse(lfile).getroot()
    else:
        doc = ku.create()

    if qbuddies is None:
        qbuddies = []

    site_id = None
    if qsite:
        nodes = ku.find(lfile, ku.XP_FIND_SITE, site=qsite)
        n = next(nodes, None)
        if n is None:
            raise ValueError('Cannot find dive site in logbook file')
        if next(nodes, None) is not None:
            raise ValueError('Found more than one dive site')
        site_id = n.get('id')

    buddy_ids = []
    log.debug('looking for buddies {}'.format(qbuddies))
    for qb in qbuddies:
        log.debug('looking for buddy {}'.format(qb))
        nodes = ku.find(lfile, ku.XP_FIND_BUDDY, buddy=qb)
        n = next(nodes, None)
        if n is None:
            raise ValueError('Cannot find buddy {} in logbook file'.format(qb))
        if next(nodes, None) is not None:
            raise ValueError('Found more than one buddy for {}'.format(qb))
        buddy_ids.append(n.get('id'))

    log.debug('creating dive data')
    ku.create_dive_data(doc, datetime=dive.datetime, depth=dive.depth,
            duration=dive.duration, site=site_id, buddies=buddy_ids)
    ku.reorder(doc)
    ku.save(doc, lfile)
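# For reference, a typical call to add_dive() follows the pattern used in the
# tests above. The import paths assume kd and kl stand for kenozooid.data and
# kenozooid.logbook, as elsewhere in this code; the logbook file name and
# buddy id are made up for the example.

from datetime import datetime

import kenozooid.data as kd
import kenozooid.logbook as kl

# buddy 'b1' must already exist in the logbook file, otherwise add_dive
# raises ValueError
dive = kd.Dive(datetime=datetime(2011, 6, 5, 9, 30), depth=32.5, duration=2940)
kl.add_dive(dive, 'logbook.uddf', qbuddies=['b1'])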