def main(): """Main """ start = time() parser = ArgumentParser(description='Sync', prog='python __main__.py') add = parser.add_argument add('--clean', action='store_true', help='delete all .gz files') add('--console-debug', nargs='?', default='INFO', help='console debug level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']) add('--download', action='store_true', help='download JSON files') add('--host', nargs='?', default='/', help='host, ex: /seriv/') add('--no-debug', nargs='?', default=0, const=1, type=int, help="remove debug code from the javascript") add('--no-process', nargs='?', default=0, const=1, type=int, help="don't process the images") add('--zip', action='store_true', help='create .gz files') args = parser.parse_args() args_dict = vars(args) if args.download: download_json() else: sync = Sync(**args_dict) sync.synchronise() end = time() print(f'\nELAPSED: {end-start:.3f} seconds')
def run(self):
    sync = Sync(show_progress=self._isManual, run_silent=self._runSilent,
                library=self._library, api=globals.traktapi)
    sync.sync()

    if utilities.getSettingAsBool('tagging_enable') and utilities.getSettingAsBool('tagging_tag_after_sync'):
        q = queue.SqliteQueue()
        q.append({'action': 'updatetags'})
def main():
    parser = argparse.ArgumentParser(description='Sync current folder to your flickr account.')
    parser.add_argument('--monitor', action='store_true', help='starts a daemon after sync for monitoring')
    parser.add_argument('--starts-with', type=str, help='only sync that path that starts with')
    parser.add_argument('--download', type=str, help='download the photos from flickr specify a path or . for all')
    parser.add_argument('--ignore-videos', action='store_true', help='ignore video files')
    parser.add_argument('--ignore-images', action='store_true', help='ignore image files')
    parser.add_argument('--version', action='store_true', help='output current version')
    parser.add_argument('--sync-path', type=str, default=os.getcwd(), help='specify the sync folder (default is current dir)')
    parser.add_argument('--custom-set', type=str, help='customize your set name from path with regex')
    parser.add_argument('--custom-set-builder', type=str, help='build your custom set title (default just merge groups)')
    parser.add_argument('--update-custom-set', action='store_true', help='updates your set title from custom set')
    parser.add_argument('--username', type=str, help='token username')  # token username argument for api
    parser.add_argument('--keyword', action='append', type=str, help='only upload files matching this keyword')
    args = parser.parse_args()

    if args.version:
        # todo get from setup.cfg
        logger.info('v0.1.17')
        exit()

    # validate args
    args.is_windows = os.name == 'nt'
    args.sync_path = args.sync_path.rstrip(os.sep) + os.sep
    if not os.path.exists(args.sync_path):
        logger.error('Sync path does not exist')
        exit(0)

    local = Local(args)
    remote = Remote(args)
    sync = Sync(args, local, remote)
    sync.start_sync()
def onLogin(self, host, username, passwd, ssl):
    """
    Slot. Triggers a log in request to the server.

    :param host: Indicates the hostname of the FTP server
    :param username: Username to log in into the FTP server
    :param passwd: Password to log in into the FTP server
    :param ssl: Indicates whether the FTP needs SSL support
    """
    self.sync = Sync(host, ssl)
    self.syncStarted.connect(self.sync.initQueue)
    self.sync.server.downloadProgress.connect(self.onDownloadProgress)
    self.sync.server.uploadProgress.connect(self.onUploadProgress)
    self.sync.server.fileEvent.connect(self.onFileEvent)
    self.sync.server.badFilenameFound.connect(self.badNameWarning)
    self.sync.server.loginCompleted.connect(self.onLoginCompleted)
    self.sync.server.fileEventCompleted.connect(self.onFileEventCompleted)
    self.sync.server.ioError.connect(self.onIOError)
    # Added by Si
    self.sync.server.textStatus.connect(self.setStatus)

    self.sync.statusChanged.connect(self.setStatus)
    self.loginRequested.connect(self.sync.server.onLogin)

    self.syncThread = QThread()
    self.sync.moveToThread(self.syncThread)
    self.syncThread.start()

    QApplication.instance().lastWindowClosed.connect(self.syncThread.quit)
    self.loginRequested.emit(username, passwd)
def sync(self, dst, event):
    """Sync pdb-rest.db to local target"""
    pdbsync = Sync(self, dst, event)
    pdbsync.run()
def run(self):
    sync = Sync(show_progress=self._isManual, run_silent=self._runSilent, api=globals.traktapi)
    sync.sync()

    if utilities.getSettingAsBool('tagging_enable') and utilities.getSettingAsBool('tagging_tag_after_sync'):
        q = queue.SqliteQueue()
        q.append({'action': 'updateTags'})
def create_sync(self):
    """
    creates a sync object.
    """
    if self.certificate_file is None:
        self.sync = Sync(self.server_address, self.username, self.password)
    else:
        self.sync = Sync(self.server_address, self.username, self.password,
                         self.certificate_file.name)
def main():
    parser = argparse.ArgumentParser(description='Sync current folder to your flickr account.')
    parser.add_argument('--monitor', action='store_true', help='starts a daemon after sync for monitoring')
    parser.add_argument('--dry-run', action='store_true', help='do not perform any remote action')
    parser.add_argument('--starts-with', type=str, help='only sync that path starts with this text, e.g. "2015/06"')
    parser.add_argument('--download', type=str, help='download the photos from flickr, specify a path or . for all')
    parser.add_argument('--ignore-videos', action='store_true', help='ignore video files')
    parser.add_argument('--ignore-images', action='store_true', help='ignore image files')
    parser.add_argument('--ignore-ext', type=str, help='comma separated list of extensions to ignore, e.g. "jpg,png"')
    parser.add_argument('--version', action='store_true', help='output current version: ' + version)
    parser.add_argument('--sync-path', type=str, default=os.getcwd(), help='specify the sync folder (default is current dir)')
    parser.add_argument('--sync-from', type=str, help='Only supported value: "all". Uploads anything that isn\'t on flickr, and download anything that isn\'t on the local filesystem')
    parser.add_argument('--custom-set', type=str, help='customize your set name from path with regex, e.g. "(.*)/(.*)"')
    parser.add_argument('--custom-set-builder', type=str, help='build your custom set title, e.g. "{0} {1}" to join the first two groups (default merges groups with hyphen)')
    parser.add_argument('--update-custom-set', action='store_true', help='updates your set title from custom-set (and custom-set-builder, if given)')
    parser.add_argument('--custom-set-debug', action='store_true', help='for testing your custom sets, asks for confirmation when creating an album on flickr')
    parser.add_argument('--username', type=str, help='token username')  # token username argument for api
    parser.add_argument('--add-photo-prefix', type=str, help='Add a specific prefix to the remote files whose local files have that prefix')
    parser.add_argument('--iphoto', action='store_true', help='Backup iPhoto Masters folder')
    parser.add_argument('--keyword', action='append', type=str, help='only upload files matching this keyword')
    args = parser.parse_args()

    if args.version:
        logger.info(version)
        exit()

    # validate args
    args.is_windows = os.name == 'nt'
    args.sync_path = args.sync_path.rstrip(os.sep) + os.sep
    if not os.path.exists(args.sync_path):
        logger.error('Sync path does not exist')
        exit(0)

    local = Local(args)
    remote = Remote(args)
    sync = Sync(args, local, remote)
    sync.start_sync()
def _get_sync_object():
    """Get sync object."""
    try:
        sync = Sync()
        sync.bucket = args.bucket
        return sync
    except NoCredentialsError as ex:
        log.debug('Cannot connect to Amazon: credentials not found.')
        log.debug(repr(ex))
        log.debug('Exit')
        exit(1)
def main():
    parser = argparse.ArgumentParser(description="Sync current folder to your flickr account.")
    parser.add_argument("--monitor", action="store_true", help="starts a daemon after sync for monitoring")
    parser.add_argument("--starts-with", type=str, help='only sync that path starts with this text, e.g. "2015/06"')
    parser.add_argument("--download", type=str, help="download the photos from flickr, specify a path or . for all")
    parser.add_argument("--ignore-videos", action="store_true", help="ignore video files")
    parser.add_argument("--ignore-images", action="store_true", help="ignore image files")
    parser.add_argument("--ignore-ext", type=str, help='comma separated list of extensions to ignore, e.g. "jpg,png"')
    parser.add_argument("--version", action="store_true", help="output current version: " + version)
    parser.add_argument("--sync-path", type=str, default=os.getcwd(), help="specify the sync folder (default is current dir)")
    parser.add_argument("--sync-from", type=str, help="Only supported value: \"all\". Uploads anything that isn't on flickr, and download anything that isn't on the local filesystem")
    parser.add_argument("--custom-set", type=str, help='customize your set name from path with regex, e.g. "(.*)/(.*)"')
    parser.add_argument("--custom-set-builder", type=str, help='build your custom set title, e.g. "{0} {1}" to join the first two groups (default merges groups with hyphen)')
    parser.add_argument("--update-custom-set", action="store_true", help="updates your set title from custom-set (and custom-set-builder, if given)")
    parser.add_argument("--custom-set-debug", action="store_true", help="for testing your custom sets, asks for confirmation when creating an album on flickr")
    parser.add_argument("--username", type=str, help="token username")  # token username argument for api
    parser.add_argument("--keyword", action="append", type=str, help="only upload files matching this keyword")
    args = parser.parse_args()

    if args.version:
        logger.info(version)
        exit()

    # validate args
    args.is_windows = os.name == "nt"
    args.sync_path = args.sync_path.rstrip(os.sep) + os.sep
    if not os.path.exists(args.sync_path):
        logger.error("Sync path does not exist")
        exit(0)

    local = Local(args)
    remote = Remote(args)
    sync = Sync(args, local, remote)
    sync.start_sync()
def run(self): """ Thread: Run """ s = Sync(self.src, self.dst, **self.kwargs) s.diff() self.loadInitContents(s) s.progressfnc = self.progress while True: s.diff() s.difftrim(create=self.getInitContents()) s.run() time.sleep(self.freq)
def main():
    global sync
    print(sys.argv)
    url = "index.html"
    if len(sys.argv) > 1:
        url = sys.argv[1]
    print("main thread id:", threading.get_ident())

    data_dir = appdirs.user_data_dir(APPNAME)
    if not os.path.exists(data_dir):
        os.mkdir(data_dir)

    setting = read_setting_db()
    if not setting:
        setting = {
            "workspace": os.path.join(Path.home(), "gitCloud"),
            "interval": config.SYNC_INTERVAL
        }
    print("setting:", setting)

    excludesFile = os.path.join(appdirs.user_data_dir(APPNAME), ".gitignore")
    createExcludesFile(excludesFile)
    print("excludesFile:", excludesFile)

    workspace = setting["workspace"]
    if not os.path.exists(workspace):
        os.mkdir(workspace)

    if os.path.isabs(config.GIT):
        path = os.path.dirname(config.GIT)
        env_path = os.getenv("PATH")
        if env_path:
            env_path = env_path + ":" + path
        else:
            env_path = path
        set_env_path(env_path)

    api = Api(setting)
    repos = [repo.copy() for repo in api.repos]
    sync = Sync(repos, event_q, setting["interval"], excludesFile)
    window = webview.create_window('gitCloud', url, width=400, height=680, js_api=api)
    api.window = window
    api.start()
    sync.start(sync_q, workspace)
    webview.start(debug=config.DEBUG)
def __init__(self, config=None):
    if config is None:
        config = get_config_from_file("config.ini")
    self.config = config

    # Seed the trust stores
    Sync(self.config).seed()
    self.cert_processor = CertProcessor(config)
def init_sync(self):
    self.model.set_status(Status.INIT.value)
    sync_create_result = Sync.create_sync(Path(self.path_to_wow), self.repo_url)
    result = sync_create_result["status"]
    self.sync = sync_create_result["sync"]
    self.handle_result(result)
def op_sync(self):
    conf = self.get_config()
    pname = self._load_profile()

    sync = Sync(conf, pname, self.get_db(), dr=self.is_dry_run())
    if self.is_dry_run():
        sync.prep_lists(self.get_sync_dir())
    else:
        try:
            startt = conf.get_curr_time()
            result = sync.sync(self.get_sync_dir())
            if result:
                conf.set_last_sync_start(pname, val=startt)
                conf.set_last_sync_stop(pname)
                logging.info('Updating item inventory...')
                sync.save_item_lists()
                logging.info('Updating item inventory...done')
            else:
                logging.info('timestamps not reset for profile %s due to '
                             'errors (previously identified).', pname)
        except Exception, e:
            logging.critical('Exception (%s) while syncing profile %s',
                             str(e), pname)
            logging.critical(traceback.format_exc())
            return False
def __init__(self, port, virtual_world, camera_mgr, sync_session):
    self.port = port
    self.virtual_world = virtual_world
    self.cam_mgr = camera_mgr
    self.task_mgr = virtual_world.taskMgr

    self.cManager = QueuedConnectionManager()
    self.cListener = QueuedConnectionListener(self.cManager, 0)
    self.cReader = QueuedConnectionReader(self.cManager, 0)
    self.cReader.setRawMode(True)
    self.cWriter = ConnectionWriter(self.cManager, 1)
    self.cWriter.setRawMode(True)
    self.tcpSocket = self.cManager.openTCPServerRendezvous(port, BACKLOG)
    self.cListener.addConnection(self.tcpSocket)
    self.activeSessions = {}
    self.connection_map = {}
    self.set_handlers()

    hostname = socket.gethostname()
    a, b, address_list = socket.gethostbyname_ex(hostname)
    self.ip = address_list[0]
    logging.info("Addresses %s" % address_list)
    logging.info("Server is running on ip: %s, port: %s" % (self.ip, self.port))

    self.client_counter = 0
    self.read_buffer = ''
    self.read_state = 0
    self.read_body_length = 0
    self.packet = SocketPacket()

    controller = virtual_world.getController()
    self.sync = Sync(self.task_mgr, controller, camera_mgr, sync_session)
    self.vv_id = None
    if sync_session:
        logging.info("Waiting for Sync Client!")

    self.showing_info = False
    virtual_world.accept("i", self.toggleInfo)
    self.sync_session = sync_session
    self.createInfoLabel()

    atexit.register(self.exit)
async def run():
    async def face_loop():
        nonlocal face, running
        while running:
            face.processEvents()
            await asyncio.sleep(0.01)

    event_loop = asyncio.get_event_loop()
    event_loop.create_task(face_loop())

    running = True
    face = Face()
    keychain = KeyChain()
    face.setCommandSigningInfo(keychain, keychain.getDefaultCertificateName())

    sync = Sync(prefix=Name("/git"), face=face, on_update=on_update)
    sync.run()
    while True:
        await sync.publish_data(branch="test_branch", timestamp=None)
        await asyncio.sleep(5)
def main():
    patch_win_unicode()
    try:
        config = Config()
        config.read()
        src_storage = _get_storage(config, config.src)
        if config.list_only or config.list_folders:
            walker = _get_walker(config, src_storage, config.list_format)
            walker.walk()
        else:
            dest_storage = _get_storage(config, config.dest)
            sync = Sync(config, src_storage, dest_storage)
            sync.run()
    except urllib2.URLError as e:
        logger.error("Error connecting to server. {!r}".format(e))
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit()
def run(self, orig_args):
    options, args = self.parser.parse_args(orig_args)
    args = args[1:]
    if len(args) <= 0:
        self.print_usage()
        return

    project_dir = self.git.module_name(args[0])
    if project_dir is None or len(project_dir) <= 0:
        print("Unknown project name. Please use git clone, then use mgit sync in the project dir.")
        return

    cmd = ['git', 'clone'] + args
    if options.branch is not None:
        cmd += ["-b", options.branch]
    if 0 != os.system(" ".join(cmd)):
        return

    cwd = os.getcwd()
    os.chdir("/".join([cwd, project_dir]))
    sync = Sync()
    sync.run(['sync'] + orig_args[1:])
    os.chdir(cwd)
def Execute(self, opt, args):
    if os.path.exists(os.path.join(self.repodir, "projects")):
        print >>sys.stderr, "this working directory has already been cloned into. exiting."
        sys.exit(1)

    # run Sync to clone the repos into the tree the way Repo expects them
    sync_cmd = Sync()
    sync_cmd.NAME = 'sync'
    sync_cmd.manifest = self.manifest
    sync_cmd.repodir = self.repodir
    argv = sys.argv
    argv = argv[argv.index('--') + 2:]
    newopts, newargs = sync_cmd.OptionParser.parse_args(argv)
    sync_cmd.Execute(newopts, newargs)

    # checkout master everywhere
    for project in self.GetProjects(''):
        print >>sys.stdout, "%s:" % project.name
        out = project.work_git.checkout("-t", "-b", "master", "remotes/origin/master")
        print >>sys.stdout, out
        print >>sys.stdout
def __init__(self):
    parser = ArgumentParser()
    parser.add_argument('-c')
    args = parser.parse_args()
    if not os.path.exists(args.c):
        raise Exception('Error while reading config')

    config = ConfigParser()
    config.read(args.c)
    fileConfig(args.c)
    self.Logger = getLogger(__name__)

    self.client = Sync(config)
    self.to_get_queue = gevent.queue.Queue(maxsize=500)
    self.to_put_queue = gevent.queue.Queue(maxsize=500)

    db_url = create_db_url(
        config.get('user', 'username'),
        config.get('user', 'password'),
        config.get('db', 'host'),
        config.get('db', 'port'),
    )
    self.get_db(db_url, config.get('db', 'name'))
class TestSync(unittest.TestCase):
    """Class to test all methods but constructor and parallel sync"""

    @patch('sync.GlanceSync', auto_spec=True)
    def setUp(self, glancesync):
        """create constructor, mock with glancesync, Set a master region"""
        self.regions = []
        self.sync = Sync(self.regions)
        self.glancesync = glancesync
        config = {'return_value.master_region': 'MasterRegion'}
        self.glancesync.configure_mock(**config)

    def test_report_status(self):
        """check that calls to export_sync_region_status are done"""
        self.sync.regions = ['region1', 'region2']
        self.sync.report_status()
        calls = [call('region1', ANY), call('region2', ANY)]
        self.glancesync.return_value.export_sync_region_status.\
            assert_has_calls(calls)

    def test_sequential_sync(self):
        """check that calls to sync_region are done"""
        self.sync.regions = ['region1', 'region2']
        self.sync.sequential_sync(dry_run=True)
        calls = [call('region1', dry_run=True), call('region2', dry_run=True)]
        self.glancesync.return_value.sync_region.assert_has_calls(calls)

    def test_show_regions(self):
        """check that calls to get_regions are done"""
        targets = {'master': None, 'other_target': None}
        config = {'return_value.targets': targets}
        self.glancesync.configure_mock(**config)
        self.sync.show_regions()
        calls = [call(), call(target='other_target')]
        self.glancesync.return_value.get_regions.assert_has_calls(calls)

    @patch('sync.os')
    @patch('sync.datetime')
    def test_make_backup(self, datetime_mock, os_mock):
        """check make backup; calls are correct and mkdir is invoked with
        right parameters"""
        datetime_str = '2020-02-06T23:57:09.205378'
        config = {'datetime.now.return_value.isoformat.return_value': datetime_str}
        datetime_mock.configure_mock(**config)
        self.sync.make_backup()
        dir_name = 'backup_glance_' + datetime_str
        os_mock.mkdir.assert_called_with(dir_name)
        self.glancesync.return_value.backup_glancemetadata_region.\
            assert_called_with('MasterRegion', dir_name)
def syncElement(self, path, filename, extension, imdbid, istvshow, oldelement=None):
    printl(str(path) + " " + str(filename) + " " + str(extension) + " " + str(imdbid) + " " + str(istvshow), self)

    element = None
    if oldelement is None:
        element = MediaInfo(path, filename, extension)
        element.parse()
        element.ImdbId = imdbid
    else:
        element = oldelement  # .copy()

    if istvshow:
        element.setMediaType(MediaInfo.SERIE)
    else:
        element.setMediaType(MediaInfo.MOVIE)

    results = Sync().syncWithId(element)

    if results is not None:
        return results
    else:
        # Retry with the opposite media type if the first lookup found nothing
        if istvshow is False:
            element.setMediaType(MediaInfo.SERIE)
        else:
            element.setMediaType(MediaInfo.MOVIE)

        results = Sync().syncWithId(element)
        if results is not None:
            return results

    return None
def updateAll(self, notifyOutput=None, notifyProgress=None, notifyRange=None):
    episodes = self.getAll(self.TVSHOWSEPISODES)
    total = len(episodes)
    progress = 0
    if notifyRange is not None:
        notifyRange(total)
    if notifyProgress is not None:
        notifyProgress(0)

    for episode in episodes:
        if episode.Title is None or episode.Season is None or episode.Episode is None:
            continue
        tvshow = self.getMedia(episode.ParentId)
        if episode.Title == tvshow.Title:
            printl("Episode has same title as tvshow so probably update needed (%s %dx%d)"
                   % (episode.Title, episode.Season, episode.Episode), self, "I")
            if notifyOutput is not None:
                notifyOutput(Utf8.utf8ToLatin("Updating %s %dx%d"
                             % (episode.Title, episode.Season, episode.Episode)))
            id = episode.Id
            seen = self.isMediaSeen(episode.Id)
            episode.setMediaType(episode.SERIE)
            newElement = Sync().syncWithId(episode)
            if newElement is not None:
                if len(newElement) == 2:
                    episode = newElement[1]
                else:
                    episode = newElement[0]
                self.deleteMedia(id)
                ret = self.insertMedia(episode)
                if seen:
                    self.MarkAsSeen(ret["id"])

        progress = progress + 1
        printl("Update progress %.2f (%d/%d)" % ((progress / total) * 100.0, progress, total), self, "I")
        if notifyProgress is not None:
            notifyProgress(progress)

    notifyProgress(total)
def start(self):
    """
    Starts an experiment.
    """
    print("Starting experiment...")
    self.sync = Sync(
        device=self.attr_device,
        counter_input=self.attr_counter_input,
        counter_output=self.attr_counter_output,
        counter_bits=self.attr_counter_bits,
        event_bits=self.attr_event_bits,
        output_path=self.attr_output_path,
        freq=self.attr_pulse_freq,
        verbose=True,
        force_sync_callback=False,
    )
    lines = eval(self.attr_line_labels)
    for index, line in enumerate(lines):
        self.sync.add_label(index, line)
    self.sync.start()
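The start() above parses attr_line_labels with eval. A safer sketch of the same parsing using only the standard library; the attribute format (a Python list literal held in a string) is taken from the snippet, while parse_line_labels and the sample labels are hypothetical, not part of the original device.

import ast

def parse_line_labels(raw):
    """Parse a line-label attribute such as "['vsync', 'frame']" without eval (assumed format)."""
    labels = ast.literal_eval(raw)
    if not isinstance(labels, list):
        raise ValueError('line_labels must be a list literal, got %r' % (raw,))
    return labels

# Hypothetical usage mirroring the labelling loop in start():
for index, line in enumerate(parse_line_labels("['vsync', 'frame']")):
    print(index, line)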
class SyncFrame(Frame):
    def __init__(self, master=None, controller=None, conn=None):
        Frame.__init__(self, master)
        self.controller = controller
        self.conn = self.controller.get_conn()
        self.logo = self.controller.get_logo()
        self.remotePgmTotal = 0
        self.remoteTrackTotal = 0

    def onShow(self):
        print(self.controller.get_srvAddr())
        self.sync = Sync(srvAddr=self.controller.get_srvAddr(), conn=self.conn,
                         session=self.controller.get_session())
        if self.sync is not None:
            self.sync.dosync()
            self.remotePgmTotal = self.sync.getRemoteProgramTotalCount()
            self.remoteTrackTotal = self.sync.getRemoteTrackTotalCount()

        banner = Frame(self)
        banner.grid(row=0, column=0, sticky="WE")
        self.label = Label(banner, image=self.logo)
        self.label.image = self.logo
        self.label.pack()

        body = Frame(self)
        body.grid(row=1, column=0, sticky="NSWE")
        # Label text: "歌曲同步中..." ("Syncing tracks...")
        Label(body, text="歌曲同步中...", padding=(10, 5, 10, 5)).grid(row=1, column=0, sticky="WE")
        self.pgb_media = Progressbar(body, orient="horizontal", length=200, mode="determinate")
        self.pgb_media.grid(row=1, column=1, sticky="WE")
        self.pgb_media["value"] = 0
        self.pgb_media["maximum"] = self.remoteTrackTotal

        self.sync.start()
        self.checkstatus()

    def checkstatus(self):
        self.pgb_media["value"] = 0 if self.sync is None else self.sync.getTrackIdx()
        if self.pgb_media["value"] >= self.remoteTrackTotal:
            self.gotoMain()
        else:
            self.after(200, self.checkstatus)

    def gotoMain(self):
        print('going to main....')
        self.controller.show_frame(EnvoMaster)
def op_sync(self):
    conf = self.get_config()
    pname = self._load_profile()

    startt_old = conf.get_last_sync_start(pname)
    stopt_old = conf.get_last_sync_stop(pname)

    if self.is_sync_all():
        # This is the case the user wants to force a sync ignoring the
        # earlier sync states. This is useful when ASynK code changes -
        # and let's say we add support for synching a new field, or some
        # such.
        #
        # This works by briefly resetting the last sync start and stop
        # times to fool the system. If the user is doing a dry run, we
        # will restore his earlier times dutifully.
        if self.is_dry_run():
            logging.debug('Temporarily resetting last sync times...')
        conf.set_last_sync_start(pname, val="1980-01-01T00:00:00.00+00:00")
        conf.set_last_sync_stop(pname, val="1980-01-01T00:00:00.00+00:00")

    sync = Sync(conf, pname, self.get_db(), dr=self.is_dry_run())
    if self.is_dry_run():
        sync.prep_lists(self.get_sync_dir())
        # Since it is only a dry run, resetting to the timestamps to the
        # real older sync is sort of called for.
        conf.set_last_sync_start(pname, val=startt_old)
        conf.set_last_sync_stop(pname, val=stopt_old)
        logging.debug('Reset last sync timestamps to real values')
    else:
        try:
            startt = conf.get_curr_time()
            result = sync.sync(self.get_sync_dir())
            if result:
                conf.set_last_sync_start(pname, val=startt)
                conf.set_last_sync_stop(pname)
                logging.info('Updating item inventory...')
                sync.save_item_lists()
                logging.info('Updating item inventory...done')
            else:
                logging.info('timestamps not reset for profile %s due to '
                             'errors (previously identified).', pname)
        except Exception, e:
            logging.critical('Exception (%s) while syncing profile %s',
                             str(e), pname)
            logging.critical(traceback.format_exc())
            return False
def setUp(self, glancesync):
    """create constructor, mock with glancesync, Set a master region"""
    regions = ['region1', 'region2']
    self.sync = Sync(regions)
    self.glancesync = glancesync
    self.log = logging.getLogger('glancesync')
    config = {
        'return_value.master_region': 'MasterRegion',
        'return_value.log': self.log,
        'return_value.sync_region.side_effect': lambda region: time.sleep(1.5)
            or self.log.info('Sync ' + region + ' ' + str(time.time()))
    }
    self.glancesync.configure_mock(**config)

    path = os.path.abspath(os.curdir)
    self.dir_name = os.path.join(path, 'sync_20200206_2357')
    self.tearDown()
def op_sync(self):
    conf = self.get_config()
    pname = self._load_profile()

    startt_old = conf.get_last_sync_start(pname)
    stopt_old = conf.get_last_sync_stop(pname)

    if self.is_sync_all():
        # This is the case the user wants to force a sync ignoring the
        # earlier sync states. This is useful when ASynK code changes -
        # and let's say we add support for synching a new field, or some
        # such.
        #
        # This works by briefly resetting the last sync start and stop
        # times to fool the system. If the user is doing a dry run, we
        # will restore his earlier times dutifully.
        if self.is_dry_run():
            logging.debug('Temporarily resetting last sync times...')
        conf.set_last_sync_start(pname, val=utils.time_start)
        conf.set_last_sync_stop(pname, val=utils.time_start)

    sync = Sync(conf, pname, [x.get_db() for x in self.get_colls()],
                dr=self.is_dry_run())
    if self.is_dry_run():
        sync.prep_lists(self.get_sync_dir())
        # Since it is only a dry run, resetting to the timestamps to the
        # real older sync is sort of called for.
        conf.set_last_sync_start(pname, val=startt_old)
        conf.set_last_sync_stop(pname, val=stopt_old)
        logging.debug('Reset last sync timestamps to real values')
    else:
        try:
            startt = conf.get_curr_time()
            result = sync.sync(self.get_sync_dir())
            if result:
                conf.set_last_sync_start(pname, val=startt)
                conf.set_last_sync_stop(pname)
                logging.info('Updating item inventory...')
                sync.save_item_lists()
                logging.info('Updating item inventory...done')
            else:
                logging.info('timestamps not reset for profile %s due to '
                             'errors (previously identified).', pname)
        except Exception, e:
            logging.critical('Exception (%s) while syncing profile %s',
                             str(e), pname)
            logging.critical(traceback.format_exc())
            return False
def prepare_sync(key: bytes, app_id: int, installation_id: int) -> Sync:
    gh = GitHub()
    gh.login_as_app_installation(key, app_id, installation_id)
    auth = "x-access-token:" + gh.session.auth.token

    repos = {}
    print(f"Checking GitHub repositories for installation {installation_id}")
    for r in _app_installation_repositories(gh):
        if not r.homepage:
            print(f"NOTE: Skipping repository {r.full_name} (no homepage)")
            continue
        assert r.name not in repos, 'Duplicate repository name: ' + r.name
        # FIXME: Should we always append .git?
        src = parse_url(r.homepage + ".git")
        dest = parse_url(r.clone_url)._replace(auth=auth)
        repos[r.name] = Repo(src, dest)

    return Sync(repos)
def Execute(self, opt, args):
    opt.smart_sync = True
    Sync.Execute(self, opt, args)
def _Options(self, p):
    Sync._Options(self, p, show_smart=False)
def test_pull_empty_request(self):
    sync = Sync("https://ersatzworld.net/ctpwdgen-server/", 'inter', 'op', 'file.pem')
    status, blob = sync.pull()
    self.assertTrue(status)
    self.assertEqual('', blob)
def create_sync(self):
    """
    creates a sync object.
    """
    self.sync = Sync(self.server_address, self.username, self.password,
                     self.certificate_file.name)
if purge_time:
    self.purge_counter += 1
    if self.purge_counter == purge_sleep_ratio:
        try:
            purge = Purge(self.__prefs, purge_time)
        except Exception, e:
            logging.getLogger().exception(e)
            raise
        self.purge_counter = 0

if sync_sleep_ratio:
    #debug("sync count: %d", self.sync_counter)
    self.sync_counter += 1
    if self.sync_counter == sync_sleep_ratio:
        try:
            sync = Sync(self.__prefs)
            if self.__sync_upload:
                debug("sync upload")
                timestamp = sync.send_new_hosts()
            if self.__sync_download:
                debug("sync download")
                new_hosts = sync.receive_new_hosts()
                if new_hosts:
                    info("received new hosts: %s", str(new_hosts))
                    self.get_denied_hosts()
                    self.update_hosts_deny(new_hosts)
            sync.xmlrpc_disconnect()
        except Exception, e:
            logging.getLogger().exception(e)
            raise
        self.sync_counter = 0
class SyncDevice(Device, metaclass=DeviceMeta): """ Tango Sync device class. Parameters ---------- None Examples -------- >>> from PyTango.server import server_run >>> server_run((SyncDevice,)) """ time = attribute() # read only is default error_handler = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) device = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) counter_input = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) counter_output = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) pulse_freq = attribute( dtype=float, access=AttrWriteType.READ_WRITE, ) output_path = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) line_labels = attribute( dtype=str, access=AttrWriteType.READ_WRITE, ) # ------------------------------------------------------------------------------ # INIT # ------------------------------------------------------------------------------ def init_device(self): """ Device constructor. Automatically run by Tango upon device export. """ self.set_state(DevState.ON) self.set_status("READY") self.attr_error_handler = "" self.attr_device = 'Dev1' self.attr_counter_input = 'ctr0' self.attr_counter_output = 'ctr2' self.attr_counter_bits = 64 self.attr_event_bits = 24 self.attr_pulse_freq = 10000000.0 self.attr_output_path = "C:/sync/output/test.h5" self.attr_line_labels = "[]" print("Device initialized...") # ------------------------------------------------------------------------------ # Attribute R/W # ------------------------------------------------------------------------------ def read_time(self): return time.time() def read_error_handler(self): return self.attr_error_handler def write_error_handler(self, data): self.attr_error_handler = data def read_device(self): return self.attr_device def write_device(self, data): self.attr_device = data def read_counter_input(self): return self.attr_counter_input def write_counter_input(self, data): self.attr_counter_input = data def read_counter_output(self): return self.attr_counter_output def write_counter_output(self, data): self.attr_counter_output = data def read_pulse_freq(self): return self.attr_pulse_freq def write_pulse_freq(self, data): self.attr_pulse_freq = data def read_output_path(self): return self.attr_output_path def write_output_path(self, data): self.attr_output_path = data def read_line_labels(self): return self.attr_line_labels def write_line_labels(self, data): self.attr_line_labels = data # ------------------------------------------------------------------------------ # Commands # ------------------------------------------------------------------------------ @command(dtype_in=str, dtype_out=str) def echo(self, data): """ For testing. Just echos whatever string you send. """ return data @command(dtype_in=str, dtype_out=None) def throw(self, msg): print(("Raising exception:", msg)) # Send to error handler or sequencing engine @command(dtype_in=None, dtype_out=None) def start(self): """ Starts an experiment. """ print("Starting experiment...") self.sync = Sync( device=self.attr_device, counter_input=self.attr_counter_input, counter_output=self.attr_counter_output, counter_bits=self.attr_counter_bits, event_bits=self.attr_event_bits, output_path=self.attr_output_path, freq=self.attr_pulse_freq, verbose=True, force_sync_callback=False, ) lines = eval(self.attr_line_labels) for index, line in enumerate(lines): self.sync.add_label(index, line) self.sync.start() @command(dtype_in=None, dtype_out=None) def stop(self): """ Stops an experiment and clears the NIDAQ tasks. 
""" print("Stopping experiment...") try: self.sync.stop() except Exception as e: print(e) self.sync.clear(self.attr_output_path) self.sync = None del self.sync @command(dtype_in=str, dtype_out=None) def load_config(self, path): """ Loads a configuration from a .pkl file. """ print(("Loading configuration: %s" % path)) with open(path, 'rb') as f: config = pickle.load(f) self.attr_device = config['device'] self.attr_counter_input = config['counter'] self.attr_counter_output = config['pulse'] self.attr_counter_bits = int(config['counter_bits']) self.attr_event_bits = int(config['event_bits']) self.attr_pulse_freq = float(config['freq']) self.attr_output_path = config['output_dir'] self.attr_line_labels = str(config['labels']) @command(dtype_in=str, dtype_out=None) def save_config(self, path): """ Saves a configuration to a .pkl file. """ print(("Saving configuration: %s" % path)) config = { 'device': self.attr_device, 'counter': self.attr_counter_input, 'pulse': self.attr_counter_output, 'freq': self.attr_pulse_freq, 'output_dir': self.attr_output_path, 'labels': eval(self.attr_line_labels), 'counter_bits': self.attr_counter_bits, 'event_bits': self.attr_event_bits, } with open(path, 'wb') as f: pickle.dump(config, f) @command(dtype_in=str, dtype_out=None) def copy_dataset(self, folder): """ Copies last dataset to specified folder. """ source = self.attr_output_path dest = os.path.join(folder, os.path.basename(source)) copyfile(source, dest)
def run(self):
    sync = Sync(show_progress=self._isManual, run_silent=self._runSilent,
                library=self._library, api=globals.traktapi)
    sync.sync()
def test_push(self):
    sync = Sync("https://ersatzworld.net/ctpwdgen-server/", 'inter', 'op', 'file.pem')
    self.assertTrue(sync.push(str(b64encode(b'Test'), encoding='utf-8')))
present_mode = not present_mode
if present_mode:
    write("Present mode")
else:
    write("Attendance mode")


# Calls the sync_data method and alerts user on success or failure
def sync(data):
    global present_mode
    write("Syncing...")
    if s.sync_data(data + ',' + ('P' if present_mode else 'A')):
        write("Successfully synced data")
    else:
        write("Error syncing, please try again")


# Calls the scan method and returns the found data
def scan_code():
    write("Please display code")
    return scan()


write("System starting up...")
present_mode = False
s = Sync()
cad = pifacecad.PiFaceCAD()
listener = pifacecad.SwitchEventListener(chip=cad)
listener.register(4, pifacecad.IODIR_FALLING_EDGE, scan_and_sync)
listener.register(0, pifacecad.IODIR_FALLING_EDGE, switch_modes)
listener.activate()
write("Startup complete...")
def run(self):
    sync = Sync(show_progress=self._isManual, api=globals.traktapi)
    sync.sync()
class SyncManager(object): """ Synchronization manager. This initializes and stores settings and handles the Sync object. """ def __init__(self): self.server_address = "" self.username = "" self.password = "" self.certificate = "" self.certificate_file = None self.sync = None def __del__(self): if self.certificate_file: self.certificate_file.close() def get_binary_sync_settings(self): """ returns packed sync settings :return: binary settings :rtype: bytes """ if self.sync: return Packer.compress(json.dumps({ "server-address": self.server_address, "username": self.username, "password": self.password, "certificate": self.certificate }).encode('utf-8')) else: return b'' def load_binary_sync_settings(self, data): """ loads sync settings :param bytes data: packed json data of sync settings """ settings_dict = json.loads(str(Packer.decompress(data), encoding='utf-8')) if "server-address" in settings_dict and \ "username" in settings_dict and \ "password" in settings_dict and \ "certificate" in settings_dict: self.server_address = settings_dict["server-address"] self.username = settings_dict["username"] self.password = settings_dict["password"] self.certificate = settings_dict["certificate"] if self.certificate_file: self.certificate_file.close() self.certificate_file = NamedTemporaryFile() self.certificate_file.write(self.certificate.encode('utf-8')) self.certificate_file.seek(0) self.create_sync() def ask_for_sync_settings(self): """ Ask the user for sync settings: Asks for server-URL, username and password. """ print("Bitte geben Sie die Einstellungen für Ihren Synchronisations-Server an...") self.server_address = input("URL: ") self.username = input("Benutzername: ") self.password = input("Passwort: ") line = input("Zertifikat im .pem-Format (beenden mit einer Leerzeile): ") while len(line) > 0: self.certificate += line + "\n" line = input("") self.certificate += line if self.certificate_file: self.certificate_file.close() self.certificate_file = NamedTemporaryFile() self.certificate_file.write(self.certificate.encode('utf-8')) self.certificate_file.seek(0) self.create_sync() def set_server_address(self, url): """ Sets the url without ajax folder and php file names but with https:// :param url: the url :type url: str """ self.server_address = url def set_username(self, username): """ Sets the username. :param username: the username :type username: str """ self.username = username def set_password(self, password): """ Sets the password. :param password: the password :type password: str """ self.password = password def set_certificate(self, certificate): """ Sets the certificate from a string in PEM format. :param certificate: certificate in PEM format :type certificate: str """ self.certificate = certificate if self.certificate_file: self.certificate_file.close() self.certificate_file = NamedTemporaryFile() self.certificate_file.write(self.certificate.encode('utf-8')) self.certificate_file.seek(0) def create_sync(self): """ creates a sync object. """ self.sync = Sync(self.server_address, self.username, self.password, self.certificate_file.name) def has_settings(self): """ Returns true if pull or push are possible :return: Are there settings? :rtype: bool """ return bool(self.sync) def pull(self): """ pulls data from the sync server. Returns an empty string if no connection is possible. :return: pulled base64 data :rtype: str """ if self.sync: return self.sync.pull() else: return False, '' def push(self, data): """ pushes data to the sync server. If the push fails an error message is displayed. 
:param str data: base64 data """ if self.sync: if not self.sync.push(data): print("Synchronisation fehlgeschlagen.") else: print("Sie haben keine gültigen Einstellungen für den sync server.")
class TestSyncParallel(unittest.TestCase): """Test the parallel functionality""" @patch('sync.GlanceSync', auto_spec=True) def setUp(self, glancesync): """create constructor, mock with glancesync, Set a master region""" regions = ['region1', 'region2'] self.sync = Sync(regions) self.glancesync = glancesync self.log = logging.getLogger('glancesync') config = { 'return_value.master_region': 'MasterRegion', 'return_value.log': self.log, 'return_value.sync_region.side_effect': lambda region: time.sleep(1.5) or self.log.info('Sync ' + region + ' ' + str(time.time())) } self.glancesync.configure_mock(**config) path = os.path.abspath(os.curdir) self.dir_name = os.path.join(path, 'sync_20200206_2357') self.tearDown() def tearDown(self): """clean directory and files created during the test""" if os.path.exists(self.dir_name): for name in os.listdir(self.dir_name): os.unlink(os.path.join(self.dir_name, name)) os.rmdir(self.dir_name) def _check_sync_invoked(self, datetime_mock): """Check that the files indicating than the regions are synchronised are invoked and return the difference of the timestamp where each file is printed. This way is possible to determine if both process are invoked at the some time or not. :param datetime_mock: the absolute difference time, in seconds (float) :return: """ match_obj1 = None match_obj2 = None dt = datetime.datetime(2020, 2, 6, 23, 57) config = {'datetime.now.return_value': dt} datetime_mock.configure_mock(**config) self.sync.parallel_sync() file1 = os.path.join(self.dir_name, 'region1.txt') file2 = os.path.join(self.dir_name, 'region2.txt') assert(os.path.exists(file1)) assert(os.path.exists(file2)) data1 = open(file1).read() data2 = open(file2).read() # The expected values for data1 and data2 are: # 'Sync region<region id> <timestamp>' or 'INFO:Sync region<region id> <timestamp>' regular_expression = r'(INFO:)?Sync region.* (.*)' match_obj1 = re.match(regular_expression, data1, re.M | re.I) assert(match_obj1 is not None), 'The file {} does not contain the expected value'.format(file1) match_obj2 = re.match(regular_expression, data2, re.M | re.I) assert(match_obj2 is not None), 'The file {} does not contain the expected value'.format(file2) time1 = float(match_obj1.group(2)) time2 = float(match_obj2.group(2)) return abs(time1 - time2) @patch('sync.datetime') def test_parallel_sync(self, datetime_mock): """test with support for two clients, so both processes run at the some time""" config = { 'return_value.max_children': 2, } self.glancesync.configure_mock(**config) diff = self._check_sync_invoked(datetime_mock) assert(diff <= 1) @patch('sync.datetime') def test_noparallel_sync(self, datetime_mock): """test with support for only one client, so one process run first and then the other one""" config = { 'return_value.max_children': 1, } self.glancesync.configure_mock(**config) diff = self._check_sync_invoked(datetime_mock) assert(diff > 1)
# Operations
group = optparse.OptionGroup(parser, 'Operations', 'Type of sync to run, only supply one.')
group.add_option('--watch',
                 help='Watch the source folder for changes and sync them to the destination folder',
                 dest='watch', action='store_true', default=False)
group.add_option('--run', help='Run sync with custom flags specified below',
                 dest='run', action='store_true', default=False)
parser.add_option_group(group)

help = {'update': 'Update files that are changed in the source folder to the destination',
        'newer': 'Only update if the source file is newer than the destination',
        'create': 'Create files that don\'t currently exist in destination',
        'purge': 'Delete files that don\'t currently exist in destination',
        'watch': 'Keep the sync alive and monitor source folder for changes'}

# Flags
s = Sync()
opts = s.getopts()
group = optparse.OptionGroup(parser, 'Flags', 'Flags to adjust sync')
for k in opts.keys():
    typ = 'string'
    kwargs = {}
    if isinstance(opts[k], bool):
        typ = 'choice'
        kwargs['choices'] = ['True', 'False']
    group.add_option('--{0}'.format(k),
                     help=(help[k] + ' ' if help.has_key(k) else '') + 'Default: {0}'.format(opts[k]),
                     dest=k, action='store', default=None, type=typ, **kwargs)
del s
parser.add_option_group(group)

# Watch folder customization
group = optparse.OptionGroup(parser, 'Watch Folder Attributes')
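Because the snippet above exposes boolean Sync options as 'True'/'False' string choices, the parsed optparse values presumably need coercing back before they are handed to Sync; a hedged sketch under that assumption — the helper name and the fall-back-to-defaults behaviour are mine, not the original project's.

def coerce_flag_options(parsed_options, defaults):
    """Convert optparse string values back to the types found in Sync's defaults (assumed helper)."""
    coerced = {}
    for key, default in defaults.items():
        value = getattr(parsed_options, key, None)
        if value is None:
            continue  # flag not supplied on the command line; keep Sync's own default
        coerced[key] = (value == 'True') if isinstance(default, bool) else value
    return coerced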
def test_pull(self):
    sync = Sync("https://ersatzworld.net/ctpwdgen-server/", 'inter', 'op', 'file.pem')
    status, blob = sync.pull()
    self.assertTrue(status)
    self.assertEqual(str(b64encode(b'Test'), encoding='utf-8'), blob)
class SocketServer(): def __init__(self, port, virtual_world, camera_mgr, sync_session): self.port = port self.virtual_world = virtual_world self.cam_mgr = camera_mgr self.task_mgr = virtual_world.taskMgr self.cManager = QueuedConnectionManager() self.cListener = QueuedConnectionListener(self.cManager, 0) self.cReader = QueuedConnectionReader(self.cManager, 0) self.cReader.setRawMode(True) self.cWriter = ConnectionWriter(self.cManager, 1) self.cWriter.setRawMode(True) self.tcpSocket = self.cManager.openTCPServerRendezvous(port, BACKLOG) self.cListener.addConnection(self.tcpSocket) self.activeSessions = {} self.connection_map = {} self.set_handlers() hostname = socket.gethostname() a, b, address_list = socket.gethostbyname_ex(hostname) self.ip = address_list[0] logging.info("Addresses %s" % address_list) logging.info("Server is running on ip: %s, port: %s" %(self.ip, self.port)) self.client_counter = 0 self.read_buffer = '' self.read_state = 0 self.read_body_length = 0 self.packet = SocketPacket() controller = virtual_world.getController() self.sync = Sync(self.task_mgr, controller, camera_mgr, sync_session) self.vv_id = None if sync_session: logging.info("Waiting for Sync Client!") self.showing_info = False virtual_world.accept("i", self.toggleInfo) self.sync_session = sync_session self.createInfoLabel() atexit.register(self.exit) def createInfoLabel(self): string = self.generateInfoString() self.info_label = OST(string, pos=(-1.3, -0.5), fg=(1,1,1,1), bg=(0,0,0,0.7), scale=0.05, align=TextNode.ALeft) self.info_label.hide() def generateInfoString(self,): string = " IP:\t%s \n" % self.ip string += " PORT:\t%s \n" % self.port if self.sync_session: string += " MODE:\tSync Client\n" string += " VV ID:\t%s\n" % self.vv_id else: string += " MODE:\tAutomatic\n" cameras = self.cam_mgr.getCameras() num_cameras = len(cameras) for camera in cameras: id = camera.getId() type = camera.getTypeString() string += " Cam%s:\t%s\n" %(id, type) string += "\n" return string def set_handlers(self): self.task_mgr.add(self.connection_polling, "Poll new connections", -39) self.task_mgr.add(self.reader_polling, "Poll reader", -40) self.task_mgr.add(self.disconnection_polling, "PollDisconnections", -41) def connection_polling(self, taskdata): if self.cListener.newConnectionAvailable(): rendezvous = PointerToConnection() netAddress = NetAddress() newConn = PointerToConnection() if self.cListener.getNewConnection(rendezvous,netAddress, newConn): conn = newConn.p() self.cReader.addConnection(conn) # Begin reading connection conn_id = self.client_counter logging.info("New Connection from ip:%s, conn:%s" % (conn.getAddress(), conn_id)) self.connection_map[conn_id] = conn self.client_counter += 1 message = eVV_ACK_OK(self.ip, self.port, conn_id) self.sendMessage(message, conn) return Task.cont def reader_polling(self, taskdata): if self.cReader.dataAvailable(): datagram = NetDatagram() # catch the incoming data in this instance # Check the return value; if we were threaded, someone else could have # snagged this data before we did if self.cReader.getData(datagram): self.read_buffer = self.read_buffer + datagram.getMessage() while (True): if self.read_state == 0: if len(self.read_buffer) >= self.packet.header_length: bytes_consumed = self.packet.header_length self.packet.header = self.read_buffer[:bytes_consumed] self.read_body_length = self.packet.decode_header() self.read_buffer = self.read_buffer[bytes_consumed:] self.read_state = 1 else: break if self.read_state == 1: if len(self.read_buffer) >= self.read_body_length: 
bytes_consumed = self.read_body_length self.packet.data = self.read_buffer[:bytes_consumed] self.packet.offset = 0 self.read_body_length = 0 self.read_buffer = self.read_buffer[bytes_consumed:] self.read_state = 0 self.new_data_callback(self.packet) else: break return Task.cont def new_data_callback(self, packet): packet = copy.deepcopy(packet) message_type = packet.get_int() conn_id = packet.get_int() if message_type == VP_SESSION: conn = self.connection_map[conn_id] type = packet.get_char() pipeline = packet.get_char() req_cam_id = packet.get_int() logging.debug("Received VP_SESSION message from conn:%s, " \ "type=%s, pipeline=%s requested camera id=%d" %(conn_id, VP_TYPE[type], PIPELINE[pipeline], req_cam_id)) self.newVPSession(conn, type, pipeline, conn_id, req_cam_id) elif message_type == SYNC_SESSION: vv_id = packet.get_int() self.vv_id = vv_id string = self.generateInfoString() self.info_label.setText(string) conn = self.connection_map[conn_id] logging.debug("Received SYNC_SESSION message from conn:%s" %conn_id) self.newSyncSession(conn, conn_id, vv_id) logging.info("Sync client connected") elif message_type == VP_REQ_CAM_LIST: logging.debug("Received VP_REQ_CAM_LIST message from conn:%s" % conn_id) cameras = self.cam_mgr.getCameras() pipeline = self.activeSessions[conn_id].getPipeline() camera_type = None if pipeline == STATIC_PIPELINE: camera_type = VP_STATIC_CAMERA elif pipeline == PTZ_PIPELINE: camera_type = VP_ACTIVE_CAMERA cam_list = [] for camera in cameras: if camera_type == camera.getType() and not camera.hasSession(): cam_list.append(camera.getId()) message = eVV_CAM_LIST(self.ip, self.port, cam_list) conn = self.connection_map[conn_id] logging.debug("Sent VV_CAM_LIST message to conn:%s" % conn_id) self.sendMessage(message, conn) elif message_type == VP_REQ_IMG: cam_id = packet.get_int() frequency = packet.get_char() width = packet.get_int() height = packet.get_int() jpeg = packet.get_bool() data = (frequency, width, height, jpeg) camera = self.cam_mgr.getCameraById(cam_id) logging.debug("Received VV_REQ_IMG message from conn:%s" % conn_id) if camera and not camera.hasSession(): session = self.activeSessions[conn_id] session.addCamera(cam_id) camera.setSession(session, VP_BASIC, self.ip, self.port, data) else: if conn_id in self.activeSessions: self.activeSessions[conn_id].newMessage(message_type, packet) def newVPSession(self, conn, type, pipeline, conn_id, req_cam_id): if type == VP_ADVANCED: camera_type = -1 if pipeline == STATIC_PIPELINE: camera_type = STATIC_CAMERA ## Change this to use a different static camera class elif pipeline == PTZ_PIPELINE: camera_type = ACTIVE_CAMERA if camera_type != -1: #cam = self.cam_mgr.getAvailableCamera(camera_type) cam = self.cam_mgr.getRequestedCamera(camera_type, req_cam_id) if cam: session = VPSession(conn_id, conn, self, VP, pipeline) session.addCamera(cam.getId()) self.activeSessions[conn_id] = session message = eVV_VP_ACK_OK(self.ip, self.port, cam.getId()) logging.debug("Sent VV_VP_ACK_OK message to conn:%s" % conn_id) self.sendMessage(message, conn) cam.setSession(session, type, self.ip, self.port) else: message = eVV_VP_ACK_FAILED(self.ip, self.port) logging.debug("Sent VV_VP_ACK_FAILED message to conn:%s" % conn_id) self.sendMessage(message, conn) else: message = eVV_VP_ACK_FAILED(self.ip, self.port) logging.debug("Sent VV_VP_ACK_FAILED message to conn:%s" % conn_id) self.sendMessage(message, conn) def newSyncSession(self, conn, conn_id, vv_id): session = SyncSession(conn_id, conn, self, SYNC) self.sync.setSession(session, 
vv_id, self.ip, self.port) self.activeSessions[conn_id] = session message = eVV_SYNC_ACK(self.ip, self.port, vv_id) logging.debug("Sent VV_SYNC_ACK message to conn:%s" % conn_id) self.sendMessage(message, conn) def sendMessage(self, message, conn): self.cWriter.send(message, conn) def disconnection_polling(self, taskdata): if(self.cManager.resetConnectionAvailable()): connectionPointer = PointerToConnection() self.cManager.getResetConnection(connectionPointer) lostConnection = connectionPointer.p() for session in self.activeSessions.values(): if session.conn == lostConnection: logging.info("Lost Connection from ip:%s, conn:%s" %(session.client_address, session.conn_id)) conn_id = session.conn_id if session.getSessionType() == VP: cameras = session.getCameras() for cam_id in cameras: camera = self.cam_mgr.getCameraById(cam_id) camera.clearSession() del self.activeSessions[conn_id] del self.connection_map[conn_id] break self.cManager.closeConnection(lostConnection) return Task.cont def toggleInfo(self): if self.showing_info: self.info_label.hide() self.showing_info = False else: self.info_label.show() self.showing_info = True def exit(self): for connection in self.connection_map.values(): self.cReader.removeConnection(connection) self.cManager.closeConnection(self.tcpSocket) self.tcpSocket.getSocket().Close()
import gevent
import sys

from sync import Sync
from settings import generateDefaultSetting
from BoardCode import HAKSA

if __name__ == "__main__":
    setting = generateDefaultSetting()
    sync = Sync(setting, HAKSA)

    if len(sys.argv) > 1:
        if sys.argv[1] == "firstRun":
            sync.firstRun()
            print(len(sync.keys()))
        elif sys.argv[1] == "Run":
            keys = len(sync.keys())
            [sync.delete(i) for i in range(keys - 3, keys + 1)]
            print(len(sync.keys()))
            sync.Run()
            print(len(sync.keys()))

            [sync.delete(i) for i in range(keys - 30, keys + 1)]
            print(len(sync.keys()))
            sync.Run()
            print(len(sync.keys()))
def main():
    parser = argparse.ArgumentParser(description='Sync current folder to your flickr account.')
    parser.add_argument('--monitor', action='store_true', help='Start monitoring daemon.')
    parser.add_argument('--starts-with', type=str, help='Only sync those paths that start with this text, e.g. "2015/06".')
    parser.add_argument('--download', type=str, help='Download photos from flickr. Specify a path or use "." for all.')
    parser.add_argument('--dry-run', action='store_true', help='Do not download or upload anything.')
    parser.add_argument('--ignore-videos', action='store_true', help='Ignore video files.')
    parser.add_argument('--ignore-images', action='store_true', help='Ignore image files.')
    parser.add_argument('--ignore-ext', type=str, help='Comma separated list of filename extensions to ignore, e.g. "jpg,png".')
    parser.add_argument('--fix-missing-description', action='store_true', help='Replace missing set description with set title.')
    parser.add_argument('--version', action='store_true', help='Output current version: ' + version)
    parser.add_argument('--sync-path', type=str, default=os.getcwd(), help='Specify sync path (default: current dir).')
    parser.add_argument('--sync-from', type=str, help='Only one supported value: "all". Upload anything not on flickr. Download anything not on the local filesystem.')
    parser.add_argument('--custom-set', type=str, help='Customize set name from path with regex, e.g. "(.*)/(.*)".')
    parser.add_argument('--custom-set-builder', type=str, help='Build custom set title, e.g. "{0} {1}" joins first two groups (default behavior merges groups using a hyphen).')
    parser.add_argument('--update-custom-set', action='store_true', help='Updates set title from custom-set (and custom-set-builder, if given).')
    parser.add_argument('--custom-set-debug', action='store_true', help='When testing custom sets: ask for confirmation before creating an album.')
    parser.add_argument('--username', type=str, help='Token username argument for API.')
    parser.add_argument('--keyword', action='append', type=str, help='Only upload files matching this keyword.')
    args = parser.parse_args()

    if args.version:
        logger.info(version)
        exit()

    # Windows OS
    args.is_windows = os.name == 'nt'
    args.sync_path = args.sync_path.rstrip(os.sep) + os.sep
    if not os.path.exists(args.sync_path):
        logger.error('Sync path does not exist.')
        exit(0)

    local = Local(args)
    remote = Remote(args)
    sync = Sync(args, local, remote)
    sync.start_sync()
import logging.config
import os

import yaml

from config import ConfigLoader
from sync import Sync
from util import read_file
from worker import Worker

# Load all properties from INI and point to the folder where resources are stored
props_file = "app.ini"
cl = ConfigLoader.load(props_file, 'sync_resource')

# Just for process output
with open("logging.yml") as log_cfg:
    logging.config.dictConfig(yaml.safe_load(log_cfg))

sync_options = cl.get_config('sync')
resources_config = cl.get_resource_config()

log_folder = sync_options.get("log_folder")
os.makedirs(log_folder, exist_ok=True)

# The "base" config into which sync config gets merged
template_config = resources_config.merge_with(sync_options.values)
template_config.set_value("log_folder", os.path.abspath(log_folder))

# Read in the set of example syncs
example_sync_data = read_file("../example_sync/example.yml")

# Execute (single thread for now)
for config in example_sync_data:
    sync = Sync(config)
    w = Worker(sync, template_config)
    w.run()
import os
import re

from settings import Settings  # assumed import: Settings is used below but not imported in the snippet
from sync import Sync
import logger

INVALID_FILENAME_CHARS = '\\/:*?"<>|'
STRIPTAGS = re.compile(r'<[^>]+>')
STRIPHEAD = re.compile("<head>.*?</head>", re.DOTALL)
EMPTYP = re.compile('<p style="-qt-paragraph-type:empty;.*(?=<p>)', re.DOTALL)

NOTESPATH = os.path.expanduser('~/.ownnotes/')

COLOR_TITLE = '#441144'
COLOR_LINK = '#115511'
COLOR_SUBTITLE = '#663366'

settings = Settings()
sync = Sync()

if not os.path.exists(NOTESPATH):
    os.makedirs(NOTESPATH)


def _getValidFilename(filepath):
    dirname, filename = os.path.dirname(filepath), os.path.basename(filepath)
    return os.path.join(dirname,
                        ''.join(car for car in filename
                                if car not in INVALID_FILENAME_CHARS))


def setColors(title_color, subtitle_color, link_color):
    global COLOR_TITLE
    global COLOR_LINK
    global COLOR_SUBTITLE
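# Illustration only (not part of the module above): the same basename-sanitizing
# idea as _getValidFilename, shown standalone so it can be run directly.
# Invalid characters are stripped from the basename; the directory part is kept.
# The sample path is made up.
import os

INVALID = '\\/:*?"<>|'

def sanitize(filepath):
    dirname, filename = os.path.dirname(filepath), os.path.basename(filepath)
    return os.path.join(dirname, ''.join(c for c in filename if c not in INVALID))

print(sanitize('/home/user/.ownnotes/meeting: 10?00.txt'))
# -> /home/user/.ownnotes/meeting 1000.txt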
class SocketServer():
    def __init__(self, port, virtual_world, camera_mgr, sync_session):
        self.port = port
        self.virtual_world = virtual_world
        self.cam_mgr = camera_mgr
        self.task_mgr = virtual_world.taskMgr

        self.cManager = QueuedConnectionManager()
        self.cListener = QueuedConnectionListener(self.cManager, 0)
        self.cReader = QueuedConnectionReader(self.cManager, 0)
        self.cReader.setRawMode(True)
        self.cWriter = ConnectionWriter(self.cManager, 1)
        self.cWriter.setRawMode(True)
        self.tcpSocket = self.cManager.openTCPServerRendezvous(port, BACKLOG)
        self.cListener.addConnection(self.tcpSocket)

        self.activeSessions = {}
        self.connection_map = {}
        self.set_handlers()

        hostname = socket.gethostname()
        a, b, address_list = socket.gethostbyname_ex(hostname)
        self.ip = address_list[0]
        logging.info("Addresses %s" % address_list)
        logging.info("Server is running on ip: %s, port: %s"
                     % (self.ip, self.port))

        self.client_counter = 0
        self.read_buffer = ''
        self.read_state = 0
        self.read_body_length = 0
        self.packet = SocketPacket()

        controller = virtual_world.getController()
        self.sync = Sync(self.task_mgr, controller, camera_mgr, sync_session)
        self.vv_id = None
        if sync_session:
            logging.info("Waiting for Sync Client!")

        self.showing_info = False
        virtual_world.accept("i", self.toggleInfo)
        self.sync_session = sync_session
        self.createInfoLabel()

        atexit.register(self.exit)

    def createInfoLabel(self):
        string = self.generateInfoString()
        self.info_label = OST(string, pos=(-1.3, -0.5), fg=(1, 1, 1, 1),
                              bg=(0, 0, 0, 0.7), scale=0.05,
                              align=TextNode.ALeft)
        self.info_label.hide()

    def generateInfoString(self):
        string = " IP:\t%s \n" % self.ip
        string += " PORT:\t%s \n" % self.port
        if self.sync_session:
            string += " MODE:\tSync Client\n"
            string += " VV ID:\t%s\n" % self.vv_id
        else:
            string += " MODE:\tAutomatic\n"
        cameras = self.cam_mgr.getCameras()
        num_cameras = len(cameras)
        for camera in cameras:
            id = camera.getId()
            type = camera.getTypeString()
            string += " Cam%s:\t%s\n" % (id, type)
        string += "\n"
        return string

    def set_handlers(self):
        self.task_mgr.add(self.connection_polling, "Poll new connections", -39)
        self.task_mgr.add(self.reader_polling, "Poll reader", -40)
        self.task_mgr.add(self.disconnection_polling, "PollDisconnections", -41)

    def connection_polling(self, taskdata):
        if self.cListener.newConnectionAvailable():
            rendezvous = PointerToConnection()
            netAddress = NetAddress()
            newConn = PointerToConnection()
            if self.cListener.getNewConnection(rendezvous, netAddress, newConn):
                conn = newConn.p()
                self.cReader.addConnection(conn)  # Begin reading connection
                conn_id = self.client_counter
                logging.info("New Connection from ip:%s, conn:%s"
                             % (conn.getAddress(), conn_id))
                self.connection_map[conn_id] = conn
                self.client_counter += 1
                message = eVV_ACK_OK(self.ip, self.port, conn_id)
                self.sendMessage(message, conn)
        return Task.cont

    def reader_polling(self, taskdata):
        if self.cReader.dataAvailable():
            datagram = NetDatagram()  # catch the incoming data in this instance
            # Check the return value; if we were threaded, someone else could have
            # snagged this data before we did
            if self.cReader.getData(datagram):
                self.read_buffer = self.read_buffer + datagram.getMessage()
                while True:
                    if self.read_state == 0:
                        if len(self.read_buffer) >= self.packet.header_length:
                            bytes_consumed = self.packet.header_length
                            self.packet.header = self.read_buffer[:bytes_consumed]
                            self.read_body_length = self.packet.decode_header()
                            self.read_buffer = self.read_buffer[bytes_consumed:]
                            self.read_state = 1
                        else:
                            break
                    if self.read_state == 1:
                        if len(self.read_buffer) >= self.read_body_length:
                            bytes_consumed = self.read_body_length
                            self.packet.data = self.read_buffer[:bytes_consumed]
                            self.packet.offset = 0
                            self.read_body_length = 0
                            self.read_buffer = self.read_buffer[bytes_consumed:]
                            self.read_state = 0
                            self.new_data_callback(self.packet)
                        else:
                            break
        return Task.cont

    def new_data_callback(self, packet):
        packet = copy.deepcopy(packet)
        message_type = packet.get_int()
        conn_id = packet.get_int()
        if message_type == VP_SESSION:
            conn = self.connection_map[conn_id]
            type = packet.get_char()
            pipeline = packet.get_char()
            logging.debug("Received VP_SESSION message from conn:%s, "
                          "type=%s, pipeline=%s"
                          % (conn_id, VP_TYPE[type], PIPELINE[pipeline]))
            self.newVPSession(conn, type, pipeline, conn_id)
        elif message_type == SYNC_SESSION:
            vv_id = packet.get_int()
            self.vv_id = vv_id
            string = self.generateInfoString()
            self.info_label.setText(string)
            conn = self.connection_map[conn_id]
            logging.debug("Received SYNC_SESSION message from conn:%s" % conn_id)
            self.newSyncSession(conn, conn_id, vv_id)
            logging.info("Sync client connected")
        elif message_type == VP_REQ_CAM_LIST:
            logging.debug("Received VP_REQ_CAM_LIST message from conn:%s" % conn_id)
            cameras = self.cam_mgr.getCameras()
            pipeline = self.activeSessions[conn_id].getPipeline()
            camera_type = None
            if pipeline == STATIC_PIPELINE:
                camera_type = VP_STATIC_CAMERA
            elif pipeline == PTZ_PIPELINE:
                camera_type = VP_ACTIVE_CAMERA
            cam_list = []
            for camera in cameras:
                if camera_type == camera.getType() and not camera.hasSession():
                    cam_list.append(camera.getId())
            message = eVV_CAM_LIST(self.ip, self.port, cam_list)
            conn = self.connection_map[conn_id]
            logging.debug("Sent VV_CAM_LIST message to conn:%s" % conn_id)
            self.sendMessage(message, conn)
        elif message_type == VP_REQ_IMG:
            cam_id = packet.get_int()
            frequency = packet.get_char()
            width = packet.get_int()
            height = packet.get_int()
            jpeg = packet.get_bool()
            data = (frequency, width, height, jpeg)
            camera = self.cam_mgr.getCameraById(cam_id)
            logging.debug("Received VV_REQ_IMG message from conn:%s" % conn_id)
            if camera and not camera.hasSession():
                session = self.activeSessions[conn_id]
                session.addCamera(cam_id)
                camera.setSession(session, VP_BASIC, self.ip, self.port, data)
        else:
            if conn_id in self.activeSessions:
                self.activeSessions[conn_id].newMessage(message_type, packet)

    def newVPSession(self, conn, type, pipeline, conn_id):
        if type == VP_ADVANCED:
            camera_type = -1
            if pipeline == STATIC_PIPELINE:
                camera_type = STATIC_CAMERA  # Change this to use a different static camera class
            elif pipeline == PTZ_PIPELINE:
                camera_type = ACTIVE_CAMERA
            if camera_type != -1:
                cam = self.cam_mgr.getAvailableCamera(camera_type)
                if cam:
                    session = VPSession(conn_id, conn, self, VP, pipeline)
                    session.addCamera(cam.getId())
                    self.activeSessions[conn_id] = session
                    message = eVV_VP_ACK_OK(self.ip, self.port, cam.getId())
                    logging.debug("Sent VV_VP_ACK_OK message to conn:%s" % conn_id)
                    self.sendMessage(message, conn)
                    cam.setSession(session, type, self.ip, self.port)
                else:
                    message = eVV_VP_ACK_FAILED(self.ip, self.port)
                    logging.debug("Sent VV_VP_ACK_FAILED message to conn:%s" % conn_id)
                    self.sendMessage(message, conn)
            else:
                message = eVV_VP_ACK_FAILED(self.ip, self.port)
                logging.debug("Sent VV_VP_ACK_FAILED message to conn:%s" % conn_id)
                self.sendMessage(message, conn)

    def newSyncSession(self, conn, conn_id, vv_id):
        session = SyncSession(conn_id, conn, self, SYNC)
        self.sync.setSession(session, vv_id, self.ip, self.port)
        self.activeSessions[conn_id] = session
        message = eVV_SYNC_ACK(self.ip, self.port, vv_id)
        logging.debug("Sent VV_SYNC_ACK message to conn:%s" % conn_id)
        self.sendMessage(message, conn)

    def sendMessage(self, message, conn):
        self.cWriter.send(message, conn)

    def disconnection_polling(self, taskdata):
        if self.cManager.resetConnectionAvailable():
            connectionPointer = PointerToConnection()
            self.cManager.getResetConnection(connectionPointer)
            lostConnection = connectionPointer.p()
            for session in self.activeSessions.values():
                if session.conn == lostConnection:
                    logging.info("Lost Connection from ip:%s, conn:%s"
                                 % (session.client_address, session.conn_id))
                    conn_id = session.conn_id
                    if session.getSessionType() == VP:
                        cameras = session.getCameras()
                        for cam_id in cameras:
                            camera = self.cam_mgr.getCameraById(cam_id)
                            camera.clearSession()
                    del self.activeSessions[conn_id]
                    del self.connection_map[conn_id]
                    break
            self.cManager.closeConnection(lostConnection)
        return Task.cont

    def toggleInfo(self):
        if self.showing_info:
            self.info_label.hide()
            self.showing_info = False
        else:
            self.info_label.show()
            self.showing_info = True

    def exit(self):
        for connection in self.connection_map.values():
            self.cReader.removeConnection(connection)
        self.cManager.closeConnection(self.tcpSocket)
        self.tcpSocket.getSocket().Close()
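# A minimal, standalone sketch of the framing pattern reader_polling uses above:
# accumulate bytes in a buffer, read a fixed-size header to learn the body length
# (state 0), then wait until the full body is buffered before emitting it (state 1).
# The 4-byte big-endian length header here is an assumption for illustration only;
# the real header format is defined by SocketPacket.decode_header, not shown here.
import struct

HEADER_LENGTH = 4  # assumed header size

def feed(buffer, chunk, state, body_length, on_packet):
    """Append chunk to buffer and emit each complete packet body via on_packet."""
    buffer += chunk
    while True:
        if state == 0:
            if len(buffer) < HEADER_LENGTH:
                break
            body_length = struct.unpack('>I', buffer[:HEADER_LENGTH])[0]
            buffer = buffer[HEADER_LENGTH:]
            state = 1
        if state == 1:
            if len(buffer) < body_length:
                break
            on_packet(buffer[:body_length])
            buffer = buffer[body_length:]
            state = 0
    return buffer, state, body_length

# Example: two messages arriving split across arbitrary chunk boundaries.
stream = struct.pack('>I', 5) + b'hello' + struct.pack('>I', 3) + b'bye'
buf, st, blen = b'', 0, 0
for chunk in (stream[:3], stream[3:10], stream[10:]):
    buf, st, blen = feed(buf, chunk, st, blen, lambda body: print(body))
# prints b'hello' then b'bye'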
import json
import sys

from enko import EnkoAPI
from isilist_api import IsilistAPI
from repository import UserLists
from sync import Sync
from transformer import ISIEnkoTransformer

if __name__ == "__main__":
    isilist_api = IsilistAPI()
    email, password = sys.argv[1], sys.argv[2]
    isilist_api.init(email, password)

    sync = Sync(isilist_api.get_sync())
    sync.sync()

    user_lists = UserLists.extract_as_enko()
    user_lists = [ISIEnkoTransformer.transform(user_list)
                  for user_list in user_lists]

    enko_api = EnkoAPI()
    enko_api.process_sales(user_lists, '2016-01-01')
def main():
    parser = argparse.ArgumentParser(
        description='Sync current folder to your flickr account.')
    parser.add_argument('--monitor', action='store_true',
                        help='starts a daemon after sync for monitoring')
    parser.add_argument('--starts-with', type=str,
                        help='only sync paths that start with this text, e.g. "2015/06"')
    parser.add_argument('--download', type=str,
                        help='download the photos from flickr, specify a path or . for all')
    parser.add_argument('--ignore-videos', action='store_true',
                        help='ignore video files')
    parser.add_argument('--ignore-images', action='store_true',
                        help='ignore image files')
    parser.add_argument('--ignore-ext', type=str,
                        help='comma separated list of extensions to ignore, e.g. "jpg,png"')
    parser.add_argument('--version', action='store_true',
                        help='output current version: ' + version)
    parser.add_argument('--sync-path', type=str, default=os.getcwd(),
                        help='specify the sync folder (default is current dir)')
    parser.add_argument('--sync-from', type=str,
                        help='only supported value: "all". Uploads anything that isn\'t on flickr '
                             'and downloads anything that isn\'t on the local filesystem')
    parser.add_argument('--custom-set', type=str,
                        help='customize your set name from path with regex, e.g. "(.*)/(.*)"')
    parser.add_argument('--custom-set-builder', type=str,
                        help='build your custom set title, e.g. "{0} {1}" to join the first two '
                             'groups (default merges groups with hyphen)')
    parser.add_argument('--update-custom-set', action='store_true',
                        help='updates your set title from custom-set (and custom-set-builder, if given)')
    parser.add_argument('--custom-set-debug', action='store_true',
                        help='for testing your custom sets, asks for confirmation when creating an album on flickr')
    parser.add_argument('--username', type=str,
                        help='token username')  # token username argument for api
    parser.add_argument('--keyword', action='append', type=str,
                        help='only upload files matching this keyword')

    args = parser.parse_args()

    if args.version:
        logger.info(version)
        exit()

    # validate args
    args.is_windows = os.name == 'nt'
    args.sync_path = args.sync_path.rstrip(os.sep) + os.sep
    if not os.path.exists(args.sync_path):
        logger.error('Sync path does not exist')
        exit(0)

    local = Local(args)
    remote = Remote(args)
    sync = Sync(args, local, remote)
    sync.start_sync()
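# A hedged sketch, not the actual implementation (which lives in the Local/Remote/Sync
# classes referenced above), of the --custom-set / --custom-set-builder behavior the
# help text describes: the regex captures groups from the folder path, the builder
# format string joins them, and without a builder the groups are merged with a hyphen.
# The sample path and function name are made up for illustration.
import re

def build_set_title(path, custom_set, custom_set_builder=None):
    match = re.search(custom_set, path)
    if not match:
        return path  # fall back to the raw path when the regex does not match
    groups = match.groups()
    if custom_set_builder:
        return custom_set_builder.format(*groups)
    return '-'.join(groups)

print(build_set_title('2015/06/Holiday', r'(.*)/(.*)'))             # 2015/06-Holiday
print(build_set_title('2015/06/Holiday', r'(.*)/(.*)', '{0} {1}'))  # 2015/06 Holiday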