def test_read_cached_gfile(mock_func):
    mock_func.side_effect = ValueError()
    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    gsum = GarminSummary(FITFILE)
    gfile = gsum.read_file()
    gc_.write_cached_gfile(gfile)
    gfile_new = gc_.read_cached_gfile(gfbname=gfile.filename)
    assert gfile_new is not False
def read_garmin_file(fname, msg_q=None, options=None, s3_files=None):
    """ read single garmin file """
    from garmin_app.garmin_cache import pool
    from garmin_app.garmin_cache import GarminCache
    from garmin_app.garmin_parse import GarminParse
    from garmin_app.garmin_report import GarminReport
    from garmin_app.garmin_corrections import list_of_corrected_laps

    if options is None:
        options = {'cache_dir': CACHEDIR, 'do_update': False}

    # start the tcx/gpx conversions in the background while the file is parsed
    tcx_job = pool.submit(convert_fit_to_tcx, fname)
    gpx_job = pool.submit(convert_gmn_to_gpx, fname)

    cache_dir = options['cache_dir']
    corr_list_ = list_of_corrected_laps(json_path='%s/run' % cache_dir)
    pickle_file_ = '%s/run/garmin.pkl.gz' % cache_dir
    cache_dir_ = '%s/run/cache' % cache_dir
    avro_file = '%s/%s.avro' % (cache_dir, os.path.basename(fname))

    # pull the cached avro file from S3 if it is not present locally
    if not os.path.exists(avro_file):
        s3_cache_files = get_list_of_keys('garmin-scripts-cache-ddboline')
        s3_key = os.path.basename(avro_file)
        if s3_key in s3_cache_files:
            download_from_s3('garmin-scripts-cache-ddboline', s3_key, avro_file)

    cache_ = GarminCache(cache_directory=cache_dir_, corr_list=corr_list_)
    _temp_file = None
    if not options['do_update']:
        _temp_file = cache_.read_cached_gfile(gfbname=os.path.basename(fname))
    if _temp_file:
        _gfile = _temp_file
    else:
        _gfile = GarminParse(fname, corr_list=corr_list_)
        _gfile.read_file()
    if not _gfile:
        return False
    if options['do_update'] or not os.path.exists(avro_file):
        cache_.write_cached_gfile(garminfile=_gfile)

    _report = GarminReport(cache_obj=cache_, msg_q=msg_q, gfile=_gfile)
    print(_report.file_report_txt())
    _report.file_report_html(options=options)

    # collect the background conversion results and move them into place
    for fn0, fn1 in ((tcx_job.result(), '/tmp/temp.tcx'),
                     (gpx_job.result(), '/tmp/temp.gpx')):
        if fn0 and os.path.exists(fn0):
            os.rename(fn0, fn1)
    return True
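# Hedged usage sketch (not part of the original source): how a caller might
# invoke read_garmin_file defined above for a single track.  The helper name
# and file path are illustrative assumptions; CACHEDIR is the cache directory
# referenced elsewhere in this module.
def _example_read_single_file():
    options = {'cache_dir': CACHEDIR, 'do_update': False}
    # the path is a placeholder; any .fit/.gmn track under gps_tracks should work
    return read_garmin_file('%s/run/gps_tracks/test.fit' % CACHEDIR, options=options)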
def test_garmin_cache_makedir(mock_os):
    mock_path = mock.MagicMock()
    mock_os.path = mock_path
    mock_path.exists.return_value = False
    gcache = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                         cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    mock_os.makedirs.assert_called_once_with('%s/run/cache' % CURDIR)
def test_garmin_cache_pickle_error(mock_pickle):
    b = bytes([0])
    s = str('TEST')
    mock_pickle.load.side_effect = UnicodeDecodeError(s, b, 0, 1, s)
    gfile = GarminParse(FITFILE)
    gfile.read_file()
    gcache = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                         cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    write_pickle_object_to_file(gfile, gcache.pickle_file)
    del gfile
    result = read_pickle_object_in_file(gcache.pickle_file)
    assert result is None
def test_pickle_fit(self):
    """ test cache dump pickle to dataframe """
    gfile = GarminParse(FITFILE)
    gfile.read_file()
    gcache = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                         cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    write_pickle_object_to_file(gfile, gcache.pickle_file)
    del gfile
    gfile = read_pickle_object_in_file(gcache.pickle_file)
    gdf = GarminDataFrame(garmin_class=GarminPoint,
                          garmin_list=gfile.points).dataframe
    gdf.to_csv('temp.fit.point.csv', index=False, float_format='%.4f')
    md5 = md5_command('cat temp.fit.point.csv | md5sum')
    self.assertEqual(md5, '9b5dd53949c7f9555d97c4a95be1934e')
def do_summary(directory_, msg_q=None, options=None):
    """ produce summary report """
    from garmin_app.garmin_cache import (GarminCache, read_pickle_object_in_file,
                                         write_pickle_object_to_file)
    from garmin_app.garmin_corrections import list_of_corrected_laps
    from garmin_app.garmin_corrections_sql import write_corrections_table
    from garmin_app.garmin_report import GarminReport

    if options is None:
        options = {'cache_dir': CACHEDIR}
    cache_dir = options['cache_dir']
    corr_list_ = list_of_corrected_laps(json_path='%s/run' % cache_dir)
    pickle_file_ = '%s/run/garmin.pkl.gz' % cache_dir
    cache_dir_ = '%s/run/cache' % cache_dir

    cache_ = GarminCache(cache_directory=cache_dir_, corr_list=corr_list_, use_sql=True,
                         do_tunnel=options.get('do_tunnel', False),
                         check_md5=options.get('do_check', False))

    if 'build' in options and options['build']:
        # rebuild the summary cache from the full run directory, dump it to the
        # pickle file, and sync the corrections table
        summary_list_ = cache_.get_cache_summary_list(directory='%s/run' % cache_dir,
                                                      options=options)
        cache_ = GarminCache(pickle_file=pickle_file_, cache_directory=cache_dir_,
                             corr_list=corr_list_, use_sql=False, check_md5=True,
                             cache_read_fn=read_pickle_object_in_file,
                             cache_write_fn=write_pickle_object_to_file,
                             do_tunnel=options.get('do_tunnel', False))
        cache_.cache_write_fn(cache_.cache_summary_file_dict)
        write_corrections_table(corr_list_, do_tunnel=options.get('do_tunnel', False))
        return summary_list_

    summary_list_ = cache_.get_cache_summary_list(directory=directory_, options=options)
    if not summary_list_:
        return False
    _report = GarminReport(cache_obj=cache_, msg_q=msg_q)
    print(_report.summary_report(summary_list_.values(), options=options))
    return True
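# Hedged usage sketch (assumption): summarizing a directory of tracks with
# do_summary defined above.  The helper name and directory are illustrative;
# the options dict mirrors the function's own default.
def _example_run_summary():
    options = {'cache_dir': CACHEDIR}
    return do_summary('%s/run/gps_tracks' % CACHEDIR, options=options)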
def garmin_parse_arg_list(args, options=None, msg_q=None):
    """ parse command line arguments """
    from garmin_app.garmin_cache import GarminCache
    from garmin_app.garmin_cache_sql import write_postgresql_table
    from garmin_app.garmin_corrections import list_of_corrected_laps
    from garmin_app.garmin_corrections_sql import write_corrections_table

    if options is None:
        options = {'cache_dir': CACHEDIR}
    cache_dir = options['cache_dir']

    s3_files = get_list_of_keys()

    gdir = set()
    for arg in args:
        if arg == 'build':
            if msg_q is not None:
                return
            options['build'] = True
        elif arg == 'backup':
            if msg_q is not None:
                return
            fname = '%s/garmin_data_%s.tar.gz' \
                % (cache_dir, datetime.date.today().strftime('%Y%m%d'))
            run_command('cd %s/run/ ; ' % cache_dir + 'tar zcvf %s gps_tracks/ ' % fname +
                        'garmin_corrections.json')
            if os.path.exists('%s/public_html/backup' % os.getenv('HOME')):
                run_command('cp %s %s/public_html/backup/garmin_data.tar.gz' %
                            (fname, os.getenv('HOME')))
            if os.path.exists('%s/public_html/garmin/tar' % os.getenv('HOME')):
                run_command('mv %s %s/public_html/garmin/tar' % (fname, os.getenv('HOME')))
            pickle_file_ = '%s/run/garmin.pkl.gz' % cache_dir
            cache_dir_ = '%s/run/cache' % cache_dir
            corr_list_ = list_of_corrected_laps(json_path='%s/run' % cache_dir)
            write_corrections_table(corr_list_, do_tunnel=options.get('do_tunnel', False))
            cache_ = GarminCache(pickle_file=pickle_file_, cache_directory=cache_dir_,
                                 corr_list=corr_list_, check_md5=True)
            summary_list_ = cache_.cache_read_fn()
            # backup garmin.pkl.gz info to postgresql database
            write_postgresql_table(summary_list_, do_tunnel=options.get('do_tunnel', False))
            return
        elif arg == 'occur':
            options['occur'] = True
        elif os.path.isfile(arg):
            gdir.add(arg)
        elif arg != 'run' and os.path.isdir(arg):
            gdir.add(arg)
        elif arg != 'run' and os.path.isdir('%s/run/%s' % (cache_dir, arg)):
            gdir.add('%s/run/%s' % (cache_dir, arg))
        elif arg == 'correction':
            add_correction(' '.join(args[1:]), json_path='%s/run' % cache_dir,
                           options=options)
            return
        elif arg in options:
            options[arg] = True
        elif 'do_%s' % arg in options:
            options['do_%s' % arg] = True
        else:
            spts = [x for x in SPORT_TYPES if arg in x]
            if len(spts) > 0:
                options['do_sport'] = spts[0]
            elif arg == 'bike':
                options['do_sport'] = 'biking'
            elif '-' in arg or arg in ('%4d' % _
                                       for _ in range(2008, datetime.date.today().year + 1)):
                gdir.update(find_gps_tracks(arg, cache_dir, s3_files=s3_files))
            elif '.gmn' in arg or 'T' in arg:
                files = glob.glob('%s/run/gps_tracks/%s' % (cache_dir, arg))
                gdir.update(files)
            else:
                print('unhandled argument:', arg)

    if not gdir:
        gdir.add('%s/run/gps_tracks' % cache_dir)
    gdir = sorted(gdir)
    if len(gdir) == 1 and os.path.isfile(gdir[0]):
        return read_garmin_file(gdir[0], msg_q=msg_q, options=options, s3_files=s3_files)
    return do_summary(gdir, msg_q=msg_q, options=options)
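# Hedged usage sketch (assumption, not in the original source): driving the
# argument parser above directly.  The date string and sport name are
# illustrative; with options=None the function falls back to
# {'cache_dir': CACHEDIR}.
def _example_parse_args():
    # '2015-06' hits the date branch (contains '-') and 'running' matches
    # SPORT_TYPES, so this summarizes June 2015 running activities
    return garmin_parse_arg_list(['2015-06', 'running'])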
class GarminCacheSQL(object):
    """ cache in SQL database using sqlalchemy """

    def __init__(self, sql_string='', pickle_file='', cache_directory='', corr_list=None,
                 garmin_cache=None, summary_list=None):
        if garmin_cache is not None:
            self.garmin_cache = garmin_cache
        else:
            self.garmin_cache = GarminCache(pickle_file=pickle_file,
                                            cache_directory=cache_directory,
                                            corr_list=corr_list,
                                            cache_read_fn=self.read_sql_table,
                                            cache_write_fn=self.write_sql_table,
                                            use_sql=False)
        self.sql_string = sql_string
        self.summary_list = {}
        if isinstance(summary_list, dict):
            self.summary_list.update(summary_list)
        self.engine = create_engine(self.sql_string, echo=False)
        self.create_table()

    def create_table(self):
        """ create table """
        metadata.create_all(self.engine)

    def delete_table(self):
        """ drop table """
        metadata.drop_all(self.engine)

    def read_sql_table(self):
        """ deserialize from database """
        session = sessionmaker(bind=self.engine)
        session = session()
        for row in session.query(GarminSummaryTable).all():
            gsum = GarminSummary()
            for sl_ in DB_ENTRIES:
                if sl_ == 'begin_datetime':
                    # begin_datetime is stored as text; parse and normalize to EST
                    tmp = parse(getattr(row, sl_))
                    tmp = tmp.astimezone(est)
                    setattr(gsum, sl_, tmp)
                else:
                    setattr(gsum, sl_, getattr(row, sl_))
            self.summary_list[gsum.filename] = gsum
        session.commit()
        session.close()
        return self.summary_list

    def write_sql_table(self, summary_list):
        """ serialize into database """
        slists = []

        def convert_to_sql(sl_):
            """ convert a GarminSummary into a GarminSummaryTable row """
            sld = {x: getattr(sl_, x) for x in DB_ENTRIES}
            sld['begin_datetime'] = sld['begin_datetime'].astimezone(utc)
            sld['begin_datetime'] = sld['begin_datetime'].replace(tzinfo=None)
            return GarminSummaryTable(**sld)

        session = sessionmaker(bind=self.engine)
        session = session()
        for fn_, sl_ in summary_list.items():
            if not isinstance(sl_, GarminSummary):
                raise Exception('Bad type %s' % type(sl_))
            fn_ = sl_.filename
            if fn_ in self.summary_list:
                sl0 = self.summary_list[fn_]
                if not all(getattr(sl_, x) == getattr(sl0, x) for x in DB_ENTRIES):
                    # entry changed; replace the existing row
                    obj = session.query(GarminSummaryTable)\
                        .filter_by(filename=fn_).all()[0]
                    session.delete(obj)
                    session.commit()
                    slists.append(convert_to_sql(sl_))
            else:
                slists.append(convert_to_sql(sl_))
        session.add_all(slists)
        session.commit()
        session.close()

    def get_cache_summary_list(self, directory, options=None):
        """ redirect call """
        return self.garmin_cache.get_cache_summary_list(directory, options=options)
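# Hedged usage sketch (assumption): building the summary cache through the SQL
# backend against a local sqlite file, mirroring how the tests below construct
# GarminCacheSQL.  The helper name and database path are illustrative.
def _example_sql_cache():
    sqlite_str = 'sqlite:///%s/run/cache/test.db' % CURDIR  # assumed location
    gc_sql = GarminCacheSQL(sql_string=sqlite_str)
    # the wrapped GarminCache is wired to read_sql_table/write_sql_table, so
    # summaries can be round-tripped through the database
    summaries = gc_sql.get_cache_summary_list(directory='%s/tests' % CURDIR)
    stored = gc_sql.read_sql_table()
    return summaries, stored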
def test_summary_report_file(self):
    """ test GarminCache.summary_report """
    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_read_fn=read_pickle_object_in_file,
                      cache_write_fn=write_pickle_object_to_file,
                      cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
    rp_ = GarminReport(cache_obj=gc_)
    options = {
        'do_plot': False,
        'do_year': True,
        'do_month': True,
        'do_week': True,
        'do_day': True,
        'do_file': True,
        'do_sport': 'running',
        'do_update': False,
        'do_average': True
    }
    script_path = CURDIR
    options['script_path'] = '%s/garmin_app' % script_path
    options['cache_dir'] = script_path
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), '79e43ff052e6a2238f54d7c2cd78b0ad')

    options['do_sport'] = None
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), '927f7ac0f1eb04b20ffe78c23bc6936c')

    options['do_sport'] = 'running'
    options['do_week'] = False
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), '88d026c8736bef031d84bede447471cc')

    options['do_sport'] = 'running'
    options['do_month'] = False
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), '94954edf36e8625143735f8b3e263c6b')

    options = {
        'do_plot': False,
        'do_year': False,
        'do_month': False,
        'do_week': False,
        'do_day': False,
        'do_file': False,
        'do_sport': None,
        'do_update': False,
        'do_average': False,
        'occur': True
    }
    options['script_path'] = '%s/garmin_app' % script_path
    options['cache_dir'] = script_path
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), '6256eb536104110794d9fc2123a8c104')
def test_garmin_cache(self):
    """ test GarminCache.get_cache_summary_list """
    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_directory='%s/run/cache' % CURDIR,
                      cache_read_fn=read_pickle_object_in_file,
                      cache_write_fn=write_pickle_object_to_file, use_sql=False)
    sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
    output = '\n'.join('%s' % s for s in sorted(sl_.values(), key=lambda x: x.filename))
    test_output = open('tests/test_cache_summary.out', 'rt').read().strip()
    test_output0 = test_output.replace('10:43:08-05:00', '11:43:08-04:00')
    mstr = hashlib.md5()
    mstr.update(output.encode())
    print(output)
    # print(test_output)
    self.assertIn(output, [test_output, test_output0])

    sqlite_str = 'sqlite:///%s/run/cache/test.db' % CURDIR
    gc_ = GarminCacheSQL(sql_string=sqlite_str)
    sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
    output = '\n'.join('%s' % s for s in sorted(sl_.values(), key=lambda x: x.filename))
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertIn(mstr.hexdigest(), [
        'c06f13236f9abed0723e4af7537ca3d4', 'a59c8ee120e789eda36e0cc8592ffce1',
        '35475bfdd07e72c9cd3988c83a07b083', 'f1749a2ec48d1ca814b570d2bf36d587'
    ])

    gc0 = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
    sqlite_str = 'sqlite:///%s/run/cache/test.db' % CURDIR
    gc1 = GarminCacheSQL(sql_string=sqlite_str, garmin_cache=gc0, summary_list=sl_)
    output = '\n'.join('%s' % s
                       for s in sorted(gc1.summary_list.values(), key=lambda x: x.filename))
    mstr = hashlib.md5()
    mstr.update(output.encode())
    # self.assertIn(mstr.hexdigest(), [
    #     '06465ba08d19d59c963e542bc19f12b7', 'a59c8ee120e789eda36e0cc8592ffce1',
    #     '34605a1d755eda499022946e46d46c1a', '9fbf84e57a513d875f471fbcabe20e22',
    #     '9e23c7a7bc3c436ef319a5a3d1003264'
    # ])

    with OpenPostgreSQLsshTunnel(port=5435, do_tunnel=True) as pport:
        postgre_str = '%s:%d/test_garmin_summary' % (POSTGRESTRING, pport)
        gc_ = GarminCacheSQL(sql_string=postgre_str)
        sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
        output = '\n'.join('%s' % s for s in sorted(sl_.values(), key=lambda x: x.filename))
        print(output)
        mstr = hashlib.md5()
        mstr.update(output.encode())
        # self.assertIn(mstr.hexdigest(), [
        #     'c06f13236f9abed0723e4af7537ca3d4', 'a59c8ee120e789eda36e0cc8592ffce1',
        #     '35475bfdd07e72c9cd3988c83a07b083', '34605a1d755eda499022946e46d46c1a',
        #     '9fbf84e57a513d875f471fbcabe20e22', 'f1749a2ec48d1ca814b570d2bf36d587',
        #     '9e23c7a7bc3c436ef319a5a3d1003264'
        # ])

    with OpenPostgreSQLsshTunnel(port=5436, do_tunnel=True) as pport:
        postgre_str = '%s:%d/test_garmin_summary' % (POSTGRESTRING, pport)
        gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                          cache_directory='%s/run/cache' % CURDIR,
                          cache_read_fn=read_pickle_object_in_file,
                          cache_write_fn=write_pickle_object_to_file, use_sql=False)
        sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
        sl_ = _write_postgresql_table(sl_, get_summary_list=True,
                                      dbname='test_garmin_summary', port=pport)
        sl_ = _write_postgresql_table(sl_, dbname='test_garmin_summary', port=pport)
        print(len(sl_))
        output = '\n'.join('%s' % s for s in sorted(sl_.values(), key=lambda x: x.filename))
        gc_ = GarminCacheSQL(sql_string=postgre_str)
        gc_.delete_table()
        mstr = hashlib.md5()
        mstr.update(output.encode())
        # self.assertIn(mstr.hexdigest(), [
        #     '06465ba08d19d59c963e542bc19f12b7', '34605a1d755eda499022946e46d46c1a',
        #     '9fbf84e57a513d875f471fbcabe20e22', '9e23c7a7bc3c436ef319a5a3d1003264'
        # ])

    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    gsum = GarminSummary(FITFILE)
    gfile = gsum.read_file()
    gc_.write_cached_gfile(gfile)
    gfile_new = gc_.read_cached_gfile(gfbname=gfile.filename)
    self.assertEqual(gfile, gfile_new)

    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR, use_sql=False)
    gfile_new = gc_.read_cached_gfile(gfbname=gfile.filename)
    self.assertEqual(gfile_new, False)

    gc_ = GarminCache(pickle_file='%s/temp.pkl.gz' % CURDIR,
                      cache_read_fn=read_pickle_object_in_file,
                      cache_write_fn=write_pickle_object_to_file,
                      cache_directory='%s/run/cache' % CURDIR, use_sql=False)
    sl_ = gc_.get_cache_summary_list(directory='%s/tests' % CURDIR)
    rp_ = GarminReport(cache_obj=gc_)
    options = {
        'do_plot': False,
        'do_year': False,
        'do_month': False,
        'do_week': False,
        'do_day': False,
        'do_file': False,
        'do_sport': None,
        'do_update': False,
        'do_average': False
    }
    script_path = CURDIR
    options['script_path'] = '%s/garmin_app' % script_path
    options['cache_dir'] = script_path
    output = rp_.summary_report(sl_, copy_to_public_html=False, options=options)
    mstr = hashlib.md5()
    mstr.update(output.encode())
    self.assertEqual(mstr.hexdigest(), 'dd7cc23be0f6f21a6d05782e506cb647')