def parse(self):
    """Parse command-line arguments and the configuration file.

    Populates ``self._args``, ``self._opts``, ``self._config``,
    ``self._logger`` and the node connection settings in ``self._node``
    (url / username / password).  Terminates the process (exit code 1)
    when the configuration file is missing, the node section in the
    secrets file is invalid, or no node was specified at all.
    """
    self._args = self._parser.parse_args()
    self._opts = Options(self._options, self._args, '[config]', prefix='PIMATIC')
    self._config = self._opts.config_parser
    if self._config is None:
        LogAdapter(get_logger(), {'package': self.app}).critical("Missing configuration file!")
        exit(1)
    self._logger = LogAdapter(self._opts.logger, {'package': self.app})
    # region Get node configuration and logger settings
    # set log level of requests module
    logging.getLogger('requests').setLevel(log_level(self._opts.loglevel_requests))
    logging.getLogger('urllib3').setLevel(log_level(self._opts.loglevel_requests))
    self._logger.debug(u'Parsing configuration file %s' % (self._opts.config_file))
    if self._config.has_option('options', 'secrets'):
        secrets = self._config.get('options', 'secrets')
        path = os.path.expanduser(secrets)
        if os.path.isfile(path):
            secret = ConfigParser()
            secret.read(path)
            if self._opts.node:
                if self._opts.node.startswith('http'):
                    # node given as a full URL on the command line
                    self._node['url'] = self._opts.node
                else:
                    # node given as a tag: look up [node_<tag>] in the secrets file
                    node_section = 'node_' + self._opts.node
                    if secret.has_section(node_section):
                        protocol = secret.get(node_section, 'protocol')
                        server = secret.get(node_section, 'server')
                        port = secret.get(node_section, protocol + '_port')
                        self._node['url'] = protocol + '://' + server + ':' + port
                        self._node['username'] = secret.get(node_section, 'username')
                        self._node['password'] = secret.get(node_section, 'password')
                    else:
                        self._logger.critical(u'Invalid node tag %s' % (node_section))
                        exit(1)
            else:
                self._logger.critical(u'Missing pimatic node specification!')
                exit(1)
    # NOTE(review): command-line credentials override values from the secrets
    # file; placement outside the secrets block reconstructed from collapsed
    # source — confirm against original indentation.
    if self._opts.username:
        self._node['username'] = self._opts.username
    if self._opts.password:
        self._node['password'] = self._opts.password
    # endregion
    self._logger.info(u'url: %s' % (self._node['url']))
def __init__(self, cache='~/.tdapi', client_id=None, client_secret=None, logger=None):
    """Initialize the Toodledo API wrapper.

    cache: directory holding the JSON cache files (``~`` is expanded).
    client_id / client_secret: OAuth credentials; fall back to the
        TOODLEDO_CLIENT_ID / TOODLEDO_CLIENT_SECRET environment variables.
    logger: optional pre-configured logger; a default 'tdapi' DEBUG
        logger is created otherwise.

    Raises ToodledoApiError when the cache directory or the session
    cache file does not exist.
    """
    # region Logging
    from pyutils import get_logger, LogAdapter
    if logger is None:
        self._logger = get_logger('tdapi', 'DEBUG')
    else:
        self._logger = logger
    self._adapter = LogAdapter(self._logger, {'package': 'tdapi'})
    # endregion
    self._cache_dir = os.path.expanduser(cache)
    if not os.path.isdir(self._cache_dir):
        error = 'Cache directory [%s] not found!' % (self._cache_dir)
        # NOTE(review): uses self.logger (presumably a property alias of
        # self._logger defined elsewhere in the class) — confirm.
        self.logger.critical(error)
        raise ToodledoApiError(error)
    if not os.path.isfile(self.cache_file('session')):
        error = 'Session cache file [%s] not found!' % (self.cache_file('session'))
        self.logger.critical(error)
        raise ToodledoApiError(error)
    self._client_id = client_id or os.environ.get('TOODLEDO_CLIENT_ID')
    self._client_secret = client_secret or os.environ.get('TOODLEDO_CLIENT_SECRET')
    self._cache = {'session': None, 'account': None, 'lists': None, 'tasks': None}
    self._offline = None
    self._account = None
    self._lists = None
    self._tasks = None
    # local import to avoid a circular import at module load time
    from tdapi import ToodledoFolders, ToodledoContexts, ToodledoGoals, ToodledoLocations
    from tdapi import ToodledoFolder, ToodledoContext, ToodledoGoal, ToodledoLocation
    self._class_map = {
        'folders': {'collection': ToodledoFolders, 'item': ToodledoFolder, 'auto': True},
        'contexts': {'collection': ToodledoContexts, 'item': ToodledoContext, 'auto': True},
        'goals': {'collection': ToodledoGoals, 'item': ToodledoGoal, 'auto': True},
        'locations': {'collection': ToodledoLocations, 'item': ToodledoLocation, 'auto': True},
    }
    # load cache files
    for key in self._cache:
        fn = self.cache_file(key)
        if os.path.isfile(fn):
            with codecs.open(fn, 'r', encoding='utf-8') as fh:
                self._cache[key] = json.load(fh)
def __init__(self, left=None, right=None, opts=None, logger=None):
    """Initialize the PySync engine with a left and a right backend.

    left / right: backend engine instances to synchronize.
    opts: option dict; must contain a 'sync' entry (the sync map).
    logger: optional pre-configured logger; a default 'pysync' DEBUG
        logger is created otherwise.
    """
    if logger is None:
        self._logger = get_logger('pysync', logging.DEBUG)
    else:
        self._logger = logger
    self._adapter = LogAdapter(self._logger, {'package': 'pysync'})
    # NOTE(review): 'Initalizing' typo kept — it is a runtime log string.
    self.logger.debug(u'Initalizing PySync with %s and %s ...' % (left, right))
    self._opts = opts
    self._left = left
    self._right = right
    self._sync = opts['sync']
    self._new_sync = None
def __init__(self, server, user=None, password=None, logger=None, verify=True):
    """Initialize the Open-Xchange HTTP API client.

    server: base URL of the OX server.
    user / password: login credentials (may be set later).
    logger: optional pre-configured logger; a default 'oxapi' DEBUG
        logger is created otherwise.
    verify: whether to verify TLS certificates.
    """
    from pyutils import LogAdapter, get_logger
    self._server = server
    self._user = user
    self._password = password
    self._verify = verify
    self._session = None
    self._cookies = None
    self._offline = None
    self._utc_offset = None
    if logger is None:
        self._logger = get_logger('oxapi', logging.DEBUG)
    else:
        self._logger = logger
    # hide_password callback keeps credentials out of log output
    self._adapter = LogAdapter(self._logger,
                               {'package': 'oxapi', 'callback': OxHttpAPI.hide_password})
def __init__(self, server="http://localhost:8080", username="******", password="******", logger=None):
    """Initialize the pimatic HTTP API client.

    server: base URL of the pimatic node (defaults to localhost:8080).
    username / password: login credentials (placeholder defaults).
    logger: optional pre-configured logger; a default 'pimatic' DEBUG
        logger is created otherwise.
    """
    self._server = server
    self._username = username
    self._password = password
    self._session = None
    self._cookies = None
    self._offline = None
    self._response = None
    self._content = None
    self._devices = None
    self._rules = None
    if logger is None:
        self._logger = get_logger('pimatic', logging.DEBUG)
    else:
        self._logger = logger
    # hide_password callback keeps credentials out of log output
    self._adapter = LogAdapter(self._logger,
                               {'package': 'pimatic', 'callback': PimaticAPI.hide_password})
def __init__(self, options, logger=None, package='sync'):
    """Initialize a sync backend engine.

    options: engine option dict; may provide 'filter_expr' and
        'filter_module' entries.
    logger: optional pre-configured logger; a default 'sync' DEBUG
        logger is created otherwise.
    package: package tag used by the log adapter.
    """
    if logger is None:
        self._logger = get_logger('sync', logging.DEBUG)
    else:
        self._logger = logger
    self._adapter = LogAdapter(self._logger, {'package': package})
    self._options = options
    self._key = None
    self._items = {}
    self._filter_expr = options.get('filter_expr')
    self._filter_module = options.get('filter_module')
    # per-run change tracking
    self._deleted = {}
    self._modified = {}
    self._created = {}
    self._changes = {'deleted': 0, 'created': 0, 'modified': 0}
#!/usr/bin/env python # -*- coding: utf-8 -*- import os, sys, time from pyutils import get_logger from oxapi import * if __name__ == '__main__': logger = get_logger("_export", "INFO") with OxHttpAPI.get_session(logger=logger) as ox: # result = ox.get("export", "ICAL", {'folder': 1607}) try: result = ox.export("126") print result except ValueError as e: logger.error(e.message)
def __init__(self, cache='~/.tdapi', client_id=None, client_secret=None, logger=None):
    """Initialize the Toodledo API wrapper.

    cache: directory holding the JSON cache files (``~`` is expanded).
    client_id / client_secret: OAuth credentials; fall back to the
        TOODLEDO_CLIENT_ID / TOODLEDO_CLIENT_SECRET environment variables.
    logger: optional pre-configured logger; a default 'tdapi' DEBUG
        logger is created otherwise.

    Raises ToodledoApiError when the cache directory or the session
    cache file does not exist.
    """
    # region Logging
    from pyutils import get_logger, LogAdapter
    if logger is None:
        self._logger = get_logger('tdapi', 'DEBUG')
    else:
        self._logger = logger
    self._adapter = LogAdapter(self._logger, {'package': 'tdapi'})
    # endregion
    self._cache_dir = os.path.expanduser(cache)
    if not os.path.isdir(self._cache_dir):
        error = 'Cache directory [%s] not found!' % (self._cache_dir)
        # NOTE(review): uses self.logger (presumably a property alias of
        # self._logger defined elsewhere in the class) — confirm.
        self.logger.critical(error)
        raise ToodledoApiError(error)
    if not os.path.isfile(self.cache_file('session')):
        error = 'Session cache file [%s] not found!' % (self.cache_file('session'))
        self.logger.critical(error)
        raise ToodledoApiError(error)
    self._client_id = client_id or os.environ.get('TOODLEDO_CLIENT_ID')
    self._client_secret = client_secret or os.environ.get('TOODLEDO_CLIENT_SECRET')
    self._cache = {'session': None, 'account': None, 'lists': None, 'tasks': None}
    self._offline = None
    self._account = None
    self._lists = None
    self._tasks = None
    # local import to avoid a circular import at module load time
    from tdapi import ToodledoFolders, ToodledoContexts, ToodledoGoals, ToodledoLocations
    from tdapi import ToodledoFolder, ToodledoContext, ToodledoGoal, ToodledoLocation
    self._class_map = {
        'folders': {'collection': ToodledoFolders, 'item': ToodledoFolder, 'auto': True},
        'contexts': {'collection': ToodledoContexts, 'item': ToodledoContext, 'auto': True},
        'goals': {'collection': ToodledoGoals, 'item': ToodledoGoal, 'auto': True},
        'locations': {'collection': ToodledoLocations, 'item': ToodledoLocation, 'auto': True},
    }
    # load cache files
    for key in self._cache:
        fn = self.cache_file(key)
        if os.path.isfile(fn):
            with codecs.open(fn, 'r', encoding='utf-8') as fh:
                self._cache[key] = json.load(fh)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Example: list all Toodledo tasks via ToodledoAPI (Python 2 script)."""
import time

if __name__ == '__main__':
    from pyutils import get_logger
    from tdapi import *
    logger = get_logger('example', 'DEBUG')
    td = ToodledoAPI.get_session()
    tasks = td.get_tasks(folder=None)
    for task in tasks:
        logger.debug('%s' % (task))
    # DAY = 24*60*60
    # timestamp = int(time.time())
    # region Toodledo task attributes
    # id, title, modified, completed,
    # folder, context, goal, location, tag,
    # startdate, duedate, duedatemod, starttime, duetime,
    # remind, repeat,
    # status, star, priority,
    # length, timer, added, note,
    # parent, children, order,
    # meta, previous, attachment,
    # shared, addedby, via, attachments
    # endregion
def main():
    """PySync command-line entry point.

    Parses options, then for each configured relation acquires a lock,
    creates the left/right backend sessions and engines, runs an
    incremental or initial sync, commits the resulting sync map and
    releases the lock.  Exits with code 1 on missing configuration or
    missing --relations.

    NOTE(review): this body was reconstructed from whitespace-collapsed
    source; string literals broken across physical lines were rejoined
    and ambiguous nesting (reset/process error paths, end_session
    placement) is marked below — confirm against the original file.
    """
    from argparse import ArgumentParser
    from pysync import __version__, __author__
    from pysync import SyncError, SyncSessionError, SyncInitError
    options = {
        'secrets': '~/.pysync.secrets',
        'loglevel_requests': 'ERROR'
        # 'loglevel': 'INFO'
    }
    # region Command line arguments
    parser = ArgumentParser(description='PySnc Engine Rev. %s (c) %s' % (__version__, __author__))
    parser.add_argument('-c', '--config', type=str, help='use alternate configuration file(s)')
    parser.add_argument('--relations', type=str, help='list of pysync relations to process')
    parser.add_argument('--rebuild', action='store_true', help='rebuild map file')
    parser.add_argument('--reset', type=str, help='delete entries and recreate from left/right')
    parser.add_argument('--update', type=str, help='force update on left/right side')
    parser.add_argument('-l', '--loglevel', type=str,
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        help='debug log level')
    args = parser.parse_args()
    opts = Options(options, args, '[config]', prefix='PYSYNC')
    config = opts.config_parser
    if config is None:
        LogAdapter(get_logger(), {'package': 'main'}).critical("Missing configuration file!")
        exit(1)
    logger = LogAdapter(opts.logger, {'package': 'main'})
    # endregion
    # region Basic configuration and logger settings
    # set log level of requests module
    logging.getLogger('requests').setLevel(log_level(opts.loglevel_requests))
    logging.getLogger('urllib3').setLevel(log_level(opts.loglevel_requests))
    logger.debug(u'Parsing configuration file %s' % (opts.config_file))
    if opts.relations:
        relations = list(opts.relations.split(','))
    else:
        logger.critical('Missing relations!')
        exit(1)
    # endregion
    for relation in relations:
        try:
            left_opts, right_opts, relation_opts = parse_config(relation, config, opts.logger)
        except Exception as e:
            logger.exception('Error parsing configuration options. Skipping sync for [%s]' % (relation))
            continue
        if lock(relation, relation_opts, opts.logger):
            # initialise web service sessions via @staticmethod session()
            # and initialize sync engine classes
            try:
                label = left_opts.get('label')
                session_lockfile = left_opts.get('session_lockfile', True)
                left_session = left_opts['class'].session(left_opts, opts.logger)
                label = right_opts.get('label')
                session_lockfile = right_opts.get('session_lockfile', True)
                right_session = right_opts['class'].session(right_opts, opts.logger)
            except Exception as e:
                # TODO: check exception type, unlock() only in case of an temp. network error etc.
                logger.exception('Session initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                if not session_lockfile:
                    unlock(relation, relation_opts, opts.logger)
                continue
            # initialize sync map
            relation_opts['sync'] = {'map': None}
            if os.path.isfile(relation_opts['map']):
                ####################
                # incremental sync #
                ####################
                with codecs.open(relation_opts['map'], 'r', encoding='utf-8') as fp:
                    relation_opts['sync'] = json.load(fp)
                logger.info(u'%s: starting incremental sync for %d items'
                            % (relation, len(relation_opts['sync'].get('map'))))
                # merge signature from map file
                left_opts.update({'signature': relation_opts['sync']['left']})
                right_opts.update({'signature': relation_opts['sync']['right']})
                try:
                    engine_lockfile = left_opts.get('engine_lockfile', True)
                    left = left_opts['class'](left_session, left_opts, logger=opts.logger)
                    engine_lockfile = right_opts.get('engine_lockfile', True)
                    right = right_opts['class'](right_session, right_opts, logger=opts.logger)
                except Exception as e:
                    # TODO: check exception type, unlock() only in case of an temp. network error etc.
                    logger.exception('Engine initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                    if not engine_lockfile:
                        unlock(relation, relation_opts, opts.logger)
                    continue
                if opts['update']:
                    try:
                        pysync = PySync(left, right, relation_opts, opts.logger)
                        relation_opts['sync'] = pysync.update(opts['update'])
                    except Exception as e:
                        logger.exception('Unexpected error when processing update option! Skipping sync for [%s]' % relation)
                        unlock(relation, relation_opts, opts.logger)
                        continue
                if opts.reset:
                    try:
                        pysync = PySync(left, right, relation_opts, opts.logger)
                        relation_opts['sync'] = pysync.reset(opts.reset)
                    except Exception as e:
                        logger.exception('Unexpected error when processing reset option!')
                    # reset finishes this relation: checkpoint and move on
                    check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
                    continue
                if opts.rebuild:
                    relation_opts['sync'] = {'map': None}
            else:
                ################
                # initial sync #
                ################
                logger.info(u'%s: Starting initial sync' % (relation))
                for opt in ['update', 'reset', 'rebuild']:
                    if opts.get(opt):
                        logger.warning('Ignoring option [%s] for initial sync' % (opt))
                try:
                    left = left_opts['class'](left_session, left_opts, logger=opts.logger)
                    right = right_opts['class'](right_session, right_opts, logger=opts.logger)
                except Exception as e:
                    # TODO: check exception type, unlock() only in case of an temp. network error etc.
                    logger.exception('Engine initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                    # unlock(relation, relation_opts, opts.logger)
                    continue
            try:
                pysync = PySync(left, right, relation_opts, opts.logger)
                relation_opts['sync'] = pysync.process()
            except Exception as e:
                logger.exception('Unexpected error when processing sync map!')
                check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
                continue
            # check/modify sync map by backend engine
            relation_opts = left.commit_sync('left', relation_opts, logger)
            relation_opts = right.commit_sync('right', relation_opts, logger)
            count, errors = check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
            unlock(relation, relation_opts, logger)
            logger.info(u'%s: %s %s' % (relation, left.label, left._changes))
            logger.info(u'%s: %s %s' % (relation, right.label, right._changes))
            logger.info(u'%s: finished %s sync for %d items with %d errors'
                        % (relation, pysync.direction, count, errors))
    # NOTE(review): end_session placement after the loop reconstructed from
    # collapsed source — confirm (refers to the last relation's classes).
    left_opts['class'].end_session(logger)
    right_opts['class'].end_session(logger)
# -*- encoding:utf-8 -*-
# @Time    : 2020/5/28 10:09 AM
# @Author  : jiang.g.f
# @File    : test_task2_split_data.py
# @Software: PyCharm
import os
import os.path as osp
import pyutils
import multiprocessing as mp
import queue as queue

from .produce_consume import MultiTask

logger = pyutils.get_logger()


def load(q, data, woker_id=0, display=10000):
    """Producer: read the given file(s) line by line and feed queue ``q``.

    Single-process reader; not a time bottleneck (translated from the
    original Chinese docstring).

    q: queue consumed by downstream workers.
    data: one file path or a list of file paths.
    woker_id: worker id [sic]; unused in the visible code.
    display: progress-report interval — presumably used further below in
        the original file; TODO confirm.
    """
    count = 0
    if not isinstance(data, list):
        data = [data]
    for file in data:
        with open(file, 'r') as reader:
            line = reader.readline()
            # skip a header row containing 'vid'
            if 'vid' in line:
                line = reader.readline()
            while line:
                # time.sleep(0.0001)
                q.put(line)
                line = reader.readline()
                # NOTE(review): count increment placed inside the loop —
                # reconstructed from collapsed source, confirm.
                count += 1
def main():
    """PySync command-line entry point.

    Parses options, then for each configured relation acquires a lock,
    creates the left/right backend sessions and engines, runs an
    incremental or initial sync, commits the resulting sync map and
    releases the lock.  Exits with code 1 on missing configuration or
    missing --relations.

    NOTE(review): this body was reconstructed from whitespace-collapsed
    source; string literals broken across physical lines were rejoined
    and ambiguous nesting (reset/process error paths, end_session
    placement) is marked below — confirm against the original file.
    """
    from argparse import ArgumentParser
    from pysync import __version__, __author__
    from pysync import SyncError, SyncSessionError, SyncInitError
    options = {
        'secrets': '~/.pysync.secrets',
        'loglevel_requests': 'ERROR'
        # 'loglevel': 'INFO'
    }
    # region Command line arguments
    parser = ArgumentParser(description='PySnc Engine Rev. %s (c) %s' % (__version__, __author__))
    parser.add_argument('-c', '--config', type=str, help='use alternate configuration file(s)')
    parser.add_argument('--relations', type=str, help='list of pysync relations to process')
    parser.add_argument('--rebuild', action='store_true', help='rebuild map file')
    parser.add_argument('--reset', type=str, help='delete entries and recreate from left/right')
    parser.add_argument('--update', type=str, help='force update on left/right side')
    parser.add_argument('-l', '--loglevel', type=str,
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        help='debug log level')
    args = parser.parse_args()
    opts = Options(options, args, '[config]', prefix='PYSYNC')
    config = opts.config_parser
    if config is None:
        LogAdapter(get_logger(), {'package': 'main'}).critical("Missing configuration file!")
        exit(1)
    logger = LogAdapter(opts.logger, {'package': 'main'})
    # endregion
    # region Basic configuration and logger settings
    # set log level of requests module
    logging.getLogger('requests').setLevel(log_level(opts.loglevel_requests))
    logging.getLogger('urllib3').setLevel(log_level(opts.loglevel_requests))
    logger.debug(u'Parsing configuration file %s' % (opts.config_file))
    if opts.relations:
        relations = list(opts.relations.split(','))
    else:
        logger.critical('Missing relations!')
        exit(1)
    # endregion
    for relation in relations:
        try:
            left_opts, right_opts, relation_opts = parse_config(relation, config, opts.logger)
        except Exception as e:
            logger.exception('Error parsing configuration options. Skipping sync for [%s]' % (relation))
            continue
        if lock(relation, relation_opts, opts.logger):
            # initialise web service sessions via @staticmethod session()
            # and initialize sync engine classes
            try:
                label = left_opts.get('label')
                session_lockfile = left_opts.get('session_lockfile', True)
                left_session = left_opts['class'].session(left_opts, opts.logger)
                label = right_opts.get('label')
                session_lockfile = right_opts.get('session_lockfile', True)
                right_session = right_opts['class'].session(right_opts, opts.logger)
            except Exception as e:
                # TODO: check exception type, unlock() only in case of an temp. network error etc.
                logger.exception('Session initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                if not session_lockfile:
                    unlock(relation, relation_opts, opts.logger)
                continue
            # initialize sync map
            relation_opts['sync'] = {'map': None}
            if os.path.isfile(relation_opts['map']):
                ####################
                # incremental sync #
                ####################
                with codecs.open(relation_opts['map'], 'r', encoding='utf-8') as fp:
                    relation_opts['sync'] = json.load(fp)
                logger.info(u'%s: starting incremental sync for %d items'
                            % (relation, len(relation_opts['sync'].get('map'))))
                # merge signature from map file
                left_opts.update({'signature': relation_opts['sync']['left']})
                right_opts.update({'signature': relation_opts['sync']['right']})
                try:
                    engine_lockfile = left_opts.get('engine_lockfile', True)
                    left = left_opts['class'](left_session, left_opts, logger=opts.logger)
                    engine_lockfile = right_opts.get('engine_lockfile', True)
                    right = right_opts['class'](right_session, right_opts, logger=opts.logger)
                except Exception as e:
                    # TODO: check exception type, unlock() only in case of an temp. network error etc.
                    logger.exception('Engine initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                    if not engine_lockfile:
                        unlock(relation, relation_opts, opts.logger)
                    continue
                if opts['update']:
                    try:
                        pysync = PySync(left, right, relation_opts, opts.logger)
                        relation_opts['sync'] = pysync.update(opts['update'])
                    except Exception as e:
                        logger.exception('Unexpected error when processing update option! Skipping sync for [%s]' % relation)
                        unlock(relation, relation_opts, opts.logger)
                        continue
                if opts.reset:
                    try:
                        pysync = PySync(left, right, relation_opts, opts.logger)
                        relation_opts['sync'] = pysync.reset(opts.reset)
                    except Exception as e:
                        logger.exception('Unexpected error when processing reset option!')
                    # reset finishes this relation: checkpoint and move on
                    check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
                    continue
                if opts.rebuild:
                    relation_opts['sync'] = {'map': None}
            else:
                ################
                # initial sync #
                ################
                logger.info(u'%s: Starting initial sync' % (relation))
                for opt in ['update', 'reset', 'rebuild']:
                    if opts.get(opt):
                        logger.warning('Ignoring option [%s] for initial sync' % (opt))
                try:
                    left = left_opts['class'](left_session, left_opts, logger=opts.logger)
                    right = right_opts['class'](right_session, right_opts, logger=opts.logger)
                except Exception as e:
                    # TODO: check exception type, unlock() only in case of an temp. network error etc.
                    logger.exception('Engine initialization for [%s] failed! Skipping sync for [%s]' % (label, relation))
                    # unlock(relation, relation_opts, opts.logger)
                    continue
            try:
                pysync = PySync(left, right, relation_opts, opts.logger)
                relation_opts['sync'] = pysync.process()
            except Exception as e:
                logger.exception('Unexpected error when processing sync map!')
                check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
                continue
            # check/modify sync map by backend engine
            relation_opts = left.commit_sync('left', relation_opts, logger)
            relation_opts = right.commit_sync('right', relation_opts, logger)
            count, errors = check_sync_map(relation, pysync.direction, left, right, relation_opts, logger)
            unlock(relation, relation_opts, logger)
            logger.info(u'%s: %s %s' % (relation, left.label, left._changes))
            logger.info(u'%s: %s %s' % (relation, right.label, right._changes))
            logger.info(u'%s: finished %s sync for %d items with %d errors'
                        % (relation, pysync.direction, count, errors))
    # NOTE(review): end_session placement after the loop reconstructed from
    # collapsed source — confirm (refers to the last relation's classes).
    left_opts['class'].end_session(logger)
    right_opts['class'].end_session(logger)