def handle_message(self, body, attributes, messages_attributes):
    """Handle a queue message: log it, pull the git repo, sync DAGs."""
    if body:
        print("[%s] %s" % (datetime.now().isoformat(), body))
        git_pull(GIT_REPO_DIR)
        # copy_tree(GIT_REPO_DIR, DAGS_DIR)
        sync(GIT_REPO_DIR, DAGS_DIR, "sync", purge=True)
        print("[%s] GIT PULL COMPLETE" % datetime.now().isoformat())
def sync_source_folder(folder, target, ignore=None):
    """Sync *folder* (relative to the global ``source_folder``) into *target*.

    ignore, when given, is forwarded to dirsync as an exclude list.
    """
    src = os.path.abspath(os.path.join(source_folder, folder))
    print("Sync source {0} to {1}".format(src, target))
    if ignore is None:
        dirsync.sync(src, target, "sync")
    else:
        dirsync.sync(src, target, "sync", exclude=ignore)
def push(self, response):
    """Mirror the local project tree to the remote fs_url, purging extras."""
    fs_logger = logging.getLogger(dirsync.__name__)
    dirsync.sync(self.project.local_fs_path, self.fs_url, "sync",
                 purge=True, logger=fs_logger)
    return response
def sync_manifest_files(options):
    """Copy the generated AndroidManifest.xml into the app's src/main dir."""
    main_dir = os.path.join(constants.DIR_APP_ROOT, "src", "main")
    public_apk_gen_dir = os.path.join(
        options.chromium_root, "out", options.buildtype,
        "gen/chrome/android/chrome_public_apk")
    sync(public_apk_gen_dir, main_dir, "sync", only=['AndroidManifest\\.xml'])
def sync_java_files(options):
    """Sync Chromium java sources, aidl files and generated java into the app.

    Parameters
    ----------
    options : namespace with ``chromium_root`` and ``buildtype`` attributes.
    """
    app_java_dir = os.path.join(constants.DIR_APP_ROOT, "src", "main", "java")
    chrome_java_dir = os.path.join(
        options.chromium_root, "chrome", "android", "java", "src")
    sync(chrome_java_dir, app_java_dir, "sync", exclude=[r'\S+\.aidl'])

    # sync aidl files
    app_aidl_dir = os.path.join(constants.DIR_APP_ROOT, "src", "main", "aidl")
    sync(chrome_java_dir, app_aidl_dir, "sync",
         only=[r'\S+\.aidl'], ignore=[r'\S*common.aidl'])

    # Shared exclude list for generated java.  BUGFIX: original second
    # pattern ended in "bookmarks)S*" — the backslash was missing, so the
    # regex matched a literal 'S'; fixed to r'\S*'.
    gen_exclude = [
        r'org/chromium/(android_webview|base|blink_public|content'
        r'|content_public|media|net|sync|ui)\S*',
        r'org/chromium/components/(dom_distiller|bookmarks)\S*',
    ]

    # sync generated enums files
    gen_enums_dir = os.path.join(
        options.chromium_root, "out", options.buildtype, "gen", "enums")
    for entry in os.listdir(gen_enums_dir):  # `entry` avoids shadowing dir()
        sync(os.path.join(gen_enums_dir, entry), app_java_dir, "sync",
             exclude=gen_exclude)

    # sync generated template files
    gen_template_dir = os.path.join(
        options.chromium_root, "out", options.buildtype, "gen", "templates")
    for entry in os.listdir(gen_template_dir):
        sync(os.path.join(gen_template_dir, entry), app_java_dir, "sync",
             exclude=gen_exclude)

    # sync NativeLibraries.java
    native_libraries_dir = os.path.join(
        options.chromium_root, "out", options.buildtype,
        "chrome_public_apk", "native_libraries_java")
    java_dir = os.path.join(
        constants.DIR_APP_ROOT, "src", "main", "java",
        "org", "chromium", "base", "library_loader")
    sync(native_libraries_dir, java_dir, "sync")
def save_h5p(h5p, baseDir=None, force_fresh=False, fetch_media=True):
    """Export *h5p* to disk: libraries, media, content.json and h5p.json.

    Parameters
    ----------
    h5p : object exposing title, content_json, h5p_json and fetch_media().
    baseDir : str or None — export root; defaults to export_dir/<title>.
    force_fresh : bool — remove any existing export first.
    fetch_media : bool — download referenced media into the export.
    """
    if baseDir is None:
        baseDir = os.path.join(export_dir, h5p.title)
    # BUGFIX: the content dir used to be created *before* the force_fresh
    # rmtree, so a fresh export deleted it again and the later write of
    # content.json failed.  Handle removal/creation of the base tree first.
    if os.path.exists(baseDir) and force_fresh:
        dbg("Removing existing dir: %s" % baseDir, 4)
        shutil.rmtree(baseDir)
    if not os.path.exists(baseDir):
        dbg("Creating H5P base dir %s" % baseDir, 4)
        mkdir(baseDir)
    contentDir = os.path.join(baseDir, "content")
    mkdir(contentDir)
    dirsync.sync(h5p_libs_dir, baseDir, "sync", ignore=[
        r".*/\.git/",
        r".*\.swp",
        r"\#.*",
    ])
    if fetch_media:
        dbg("Downloading media...")
        h5p.fetch_media(baseDir, recursive=True)
    dbg("Populating content.json...")
    with open(os.path.join(contentDir, "content.json"), "w") as content_fh:
        content_fh.write(h5p.content_json)
    dbg("Populating h5p.json...")
    # TODO: copy required libraries (`preloadedDependencies`) in
    # h5p_json from some known location into baseDir?
    with open(os.path.join(baseDir, "h5p.json"), "w") as h5p_fh:
        h5p_fh.write(h5p.h5p_json)
    dbg("DONE.")
def test_del_dst_dir_nopurge(self):
    """Without purge, a dir removed from dst is restored from src."""
    self.rm('dst/dir')
    sync('src', 'dst', 'sync')
    for present in ('src/dir', 'dst/dir'):
        self.assertExists(present)
def test_only(self):
    """only= restricts the sync to paths matching the given regexes."""
    sync('src', 'dst', 'sync', create=True, only=('^.*\.py$', ))
    self.assertExists('dst/file2.py')
    self.assertNotExists('dst/file1.txt')
    self.assertNotExists('dst/dir/file4.txt')
    self.assertNotExists('dst/dir')
def test_ignore_dir(self):
    """A dir in ignore= deleted from src is left untouched in dst."""
    self.rm('src/dir')
    sync('src', 'dst', 'sync', ignore=('dir', ))
    self.assertNotExists('src/dir')
    self.assertExists('dst/dir')
def copy_static_files(self):
    """Sync STATIC_ROOT into the baked static destination, creating it
    if missing."""
    for d in [settings.STATIC_ROOT]:
        dir_loc = self.get_static_destination()
        print("syncing {0}".format(d))
        # idiomatic truth test (was `os.path.isdir(...) is False`)
        if not os.path.isdir(dir_loc):
            os.makedirs(dir_loc)
        sync(d, dir_loc, "sync")
def test_del_src_dir(self):
    """An 'update' action never deletes from dst even if src lost the dir."""
    self.rm('src/dir')
    sync('src', 'dst', 'update')
    self.assertNotExists('src/dir')
    self.assertExists('dst/dir')
def sync(sourcedir=None, exclude_gitignore_entries=None,
         exclude_regex_list=None):
    """Create and populate ``workdir.options.path`` (memoized so that it
    only runs once)."""
    _set_log_level()
    sourcedir = sourcedir or options.sync_sourcedir or os.getcwd()
    if exclude_gitignore_entries is None:
        exclude_gitignore_entries = options.sync_exclude_gitignore_entries
    exclude_regex_list = exclude_regex_list or copy.copy(
        options.sync_exclude_regex_list)
    gitignore_path = os.path.join(sourcedir, '.gitignore')
    if exclude_gitignore_entries and os.path.isfile(gitignore_path):
        # translate every non-comment .gitignore entry into an exclude regex
        entries = []
        with open(gitignore_path) as gitignore:
            for raw in gitignore.readlines():
                raw = raw.strip()
                if raw and not raw.startswith('#'):
                    entries.append(_gitignore_entry_to_regex(raw))
        exclude_regex_list += entries
    dirsync_logger = logging.getLogger('dirsync')
    dirsync_logger.setLevel(logging.INFO if options.debug else logging.FATAL)
    logger.info('syncing {} to {}'.format(sourcedir, options.path))
    logger.debug('excluding {} from sync'.format(exclude_regex_list))
    dirsync.sync(sourcedir=sourcedir, targetdir=options.path, action='sync',
                 create=True, exclude=exclude_regex_list,
                 logger=dirsync_logger)
def put_theme(self, source, output):
    """Put builtin theme into the source.

    Must be called after the output tree has been made.

    Parameters
    ----------
    source : str
        source of presentation
    output: str
        output path

    Returns
    -------
    str
        source of presentation with theme included
    """
    source_themed = source
    if self.theme:
        themes = os.path.join(os.path.dirname(__file__), 'utils/builtin_themes')
        for theme in os.listdir(themes):
            if theme == self.theme:
                # copy the theme dir next to the output and prepend the
                # matching $include directives to the presentation source
                theme_path = os.path.join(os.path.join(themes, theme), 'theme.yaml')
                if os.path.exists(theme_path):
                    sync_logger = logging.getLogger('sync_logger')
                    sync(os.path.join(themes, theme), 'theme-' + theme, 'sync', create=True, logger=sync_logger)
                    source_themed = r'\$include(' + os.path.join('theme-' + theme, 'theme.yaml') + ')\n' + source_themed
                # metadata.yaml is included even when theme.yaml is absent
                metadata_path = os.path.join(os.path.join(themes, theme), 'metadata.yaml')
                if os.path.exists(metadata_path):
                    source_themed = r'\$include(' + os.path.join('theme-' + theme, 'metadata.yaml') + ')\n' + source_themed
    return source_themed
def run(cls):
    """Copy every eligible source subdir into the destination, skipping
    special entries and ones already present in dest."""
    sourcedir = MagicPath.FilePath(os.path.abspath(source_dir))
    targetdir = MagicPath.FilePath(os.path.abspath(dest_dir))
    listdirs = sourcedir.ls_dir()
    print(len(listdirs))
    sourcedirs = []
    for entry in listdirs:
        print(entry)
        name = entry.basename()
        # skip the _iso dir, recycle-bin style "$..." dirs and hidden dirs
        if name != "_iso" and name[0] != "$" and name[0] != ".":
            sourcedirs.append(entry)
    print(len(sourcedirs))
    for source in sourcedirs:
        if len(source.ls()) < 1:
            print(source.basename())
            print("Not file")
            continue
        if targetdir.addpath(source.basename()).is_dir():
            print(source.basename())
            print("Is already in dest")
            continue
        print(source.path())
        sync(source.path(), targetdir.addpath(source.basename()).path(),
             'sync', create=True)
def copy_static_files(self):
    """Sync each STATICFILES_DIRS entry into the bake static dir."""
    for d in settings.STATICFILES_DIRS:
        dir_loc = os.path.join(settings.BAKE_LOCATION, "static")
        # parenthesized print: valid in both py2 and py3, matches the
        # py3-style siblings in this codebase
        print("syncing {0}".format(d))
        # idiomatic truth test (was `... == False`)
        if not os.path.isdir(dir_loc):
            os.makedirs(dir_loc)
        sync(d, dir_loc, "sync")
def file_sync(self):
    """Sync every source -> dest mapping configured under ``self.dir_key``."""
    mappings = self.conf_data[self.dir_key].items()
    for source, dest in mappings:
        # quiet sync, skipping anything matching the configured regex
        sync(Path(source), Path(dest), "sync",
             verbose=False, exclude=(self.regex,))
def sync_dir(source_dir, target_dir):
    """Ensure *target_dir* exists, then mirror *source_dir* into it."""
    try:
        mkdir(target_dir)
    except Exception:
        # best-effort: the dir probably already exists.  The original bare
        # `except:` also swallowed KeyboardInterrupt/SystemExit — narrowed.
        pass
    finally:
        sync(source_dir, target_dir, "sync")
def save(self, config, output):
    """Save the html form of presentation into external file.

    Parameters
    ----------
    config : MatisseConfig
        MaTiSSe configuration
    output : str
        output path
    """
    # ensure the output tree exists before writing anything into it
    if not os.path.exists(output):
        os.makedirs(output)
    with open(os.path.join(output, 'index.html'), 'w') as html:
        html.write(self.to_html(config=config))
    # copy user defined directories if set
    if len(self.metadata['dirs_to_copy'].value) > 0:
        for data in self.metadata['dirs_to_copy'].value:
            sync_logger = logging.getLogger('sync_logger')
            sync(data, os.path.join(output, data), 'sync', create=True, logger=sync_logger)
    # css files
    with open(os.path.join(output, 'css/theme.css'), 'w') as css_theme:
        css_theme.writelines(self.theme.css)
    # one extra css file per slide that carries a custom overtheme
    for chapter in self.chapters:
        for section in chapter.sections:
            for subsection in section.subsections:
                for slide in subsection.slides:
                    if slide.overtheme.custom:
                        with open(os.path.join(output, 'css/slide-' + str(slide.number) + '-overtheme.css'), 'w') as css_theme:
                            css_theme.writelines(slide.overtheme.css)
    return
def bake_static():
    """Sync every STATICFILES_DIRS entry into the bake static directory."""
    target = os.path.join(settings.BAKE_LOCATION, "static")
    for d in settings.STATICFILES_DIRS:
        print("syncing {0}".format(d))
        sync(d, target, "sync")
def process(self, instance):
    """Sync the bundled PYTHONPATH dir next to the current file and point
    the Deadline job environment at it."""
    directory = os.path.join(
        os.path.dirname(instance.context.data["currentFile"]),
        "workspace", "deadline", "PYTHONPATH")
    sync(os.path.join(os.path.dirname(__file__), "PYTHONPATH"),
         directory, "sync", create=True, purge=True, modtime=True)

    # Add event script to Deadline submission
    data = instance.data.get("deadlineData", {"job": {}, "plugin": {}})

    # Add required environment.
    data["job"].setdefault("EnvironmentKeyValue", {}).update(
        {"PYTHONPATH": directory})

    # Setting data
    instance.data["deadlineData"] = data
def copy_static_files(self):
    """Sync STATIC_ROOT into the baked static destination."""
    for d in [settings.STATIC_ROOT]:
        dir_loc = self.get_static_destination()
        # parenthesized print: valid in both py2 and py3, matches the
        # py3-style siblings in this codebase
        print("syncing {0}".format(d))
        # idiomatic truth test (was `... == False`)
        if not os.path.isdir(dir_loc):
            os.makedirs(dir_loc)
        sync(d, dir_loc, "sync")
def bake_static():
    """ syncs the static file location to the bake directory """
    for d in settings.STATICFILES_DIRS:
        # parenthesized print: valid in both py2 and py3, matches the
        # py3-style siblings in this codebase
        print("syncing {0}".format(d))
        sync(d, os.path.join(settings.BAKE_LOCATION, "static"), "sync")
def save_h5p(h5p, baseDir=None, force_fresh=False, fetch_media=True):
    """Export *h5p* to disk: libraries, media, content.json and h5p.json.

    Parameters
    ----------
    h5p : object exposing title, content_json, h5p_json and fetch_media().
    baseDir : str or None — export root; defaults to export_dir/<title>.
    force_fresh : bool — remove any existing export first.
    fetch_media : bool — download referenced media into the export.
    """
    if baseDir is None:
        baseDir = os.path.join(export_dir, h5p.title)
    # BUGFIX: the content dir used to be created *before* the force_fresh
    # rmtree, so a fresh export deleted it again and the later write of
    # content.json failed.  Handle removal/creation of the base tree first.
    if os.path.exists(baseDir) and force_fresh:
        dbg("Removing existing dir: %s" % baseDir, 4)
        shutil.rmtree(baseDir)
    if not os.path.exists(baseDir):
        dbg("Creating H5P base dir %s" % baseDir, 4)
        mkdir(baseDir)
    contentDir = os.path.join(baseDir, "content")
    mkdir(contentDir)
    dirsync.sync(
        h5p_libs_dir, baseDir, "sync",
        ignore=[
            r".*/\.git/",
            r".*\.swp",
            r"\#.*",
        ])
    if fetch_media:
        dbg("Downloading media...")
        h5p.fetch_media(baseDir, recursive=True)
    dbg("Populating content.json...")
    with open(os.path.join(contentDir, "content.json"), "w") as content_fh:
        content_fh.write(h5p.content_json)
    dbg("Populating h5p.json...")
    # TODO: copy required libraries (`preloadedDependencies`) in
    # h5p_json from some known location into baseDir?
    with open(os.path.join(baseDir, "h5p.json"), "w") as h5p_fh:
        h5p_fh.write(h5p.h5p_json)
    dbg("DONE.")
def put_theme(self, source, output):
    """Put builtin theme into the source.

    Must be called after the output tree has been made.

    Parameters
    ----------
    source : str
        source of presentation
    output: str
        output path

    Returns
    -------
    str
        source of presentation with theme included
    """
    source_themed = source
    if self.theme:
        themes = os.path.join(os.path.dirname(__file__), 'utils/builtin_themes')
        for theme in os.listdir(themes):
            if theme == self.theme:
                # copy the theme dir next to the output and prepend the
                # matching $include directives to the presentation source
                theme_path = os.path.join(os.path.join(themes, theme), 'theme.yaml')
                if os.path.exists(theme_path):
                    sync_logger = logging.getLogger('sync_logger')
                    sync(os.path.join(themes, theme), 'theme-' + theme, 'sync', create=True, logger=sync_logger)
                    source_themed = r'$include(' + os.path.join('theme-' + theme, 'theme.yaml') + ')\n' + source_themed
                # metadata and titlepage are included even without theme.yaml
                metadata_path = os.path.join(os.path.join(themes, theme), 'metadata.yaml')
                if os.path.exists(metadata_path):
                    source_themed = r'$include(' + os.path.join('theme-' + theme, 'metadata.yaml') + ')\n' + source_themed
                titlepage_path = os.path.join(os.path.join(themes, theme), 'titlepage.md')
                if os.path.exists(titlepage_path):
                    source_themed = r'$include(' + os.path.join('theme-' + theme, 'titlepage.md') + ')\n' + source_themed
    return source_themed
def test_ignore_file_rm_dir(self):
    """An ignored file removed from src must survive in dst."""
    self.rm('src/file1.txt')
    sync('src', 'dst', 'sync', ignore=('file1.txt', ))
    self.assertNotExists('src/file1.txt')
    self.assertExists('dst/file1.txt')
def store_backup(folder_to_backup: list):
    '''Store every folder into the output folder, appending a (N) suffix
    when a backup of the same name already exists, to keep history.'''

    def get_duplicate_suffix(path: Path, name: str) -> str:
        '''Return "(N)" where N is the number of existing duplicates of
        *name* under *path*, or '' if there are none.'''
        suffix = ''
        if path.exists():
            from re import escape
            # BUGFIX: the original pattern `name + r'[\(\d+\)]*'` was a
            # character class (any run of parens/digits/plus) and did not
            # escape *name*; now match "name" or "name(N)" exactly.
            regex = compile(escape(name) + r'(\(\d+\))?$')
            folders_in_path = [item.stem for item in path.iterdir()]
            duplicates = list(filter(regex.match, folders_in_path))
            if duplicates:
                suffix = '({})'.format(len(duplicates))
        return suffix

    logging.info('Beginning backup')
    for folder in folder_to_backup:
        # If there's a duplicate, add (N) to the target name
        duplicate_suffix = get_duplicate_suffix(
            folder['target'], folder['source'].stem)
        sync(folder['source'],
             folder['target'] / (folder['source'].stem + duplicate_suffix),
             'sync', purge=True, create=True, verbose=True)
def sync(sourcedir=None, exclude_gitignore_entries=None,
         exclude_regex_list=None):
    """Create and populate ``workdir.options.path``; memoized so that it
    only runs once."""
    _set_log_level()
    sourcedir = sourcedir or options.sync_sourcedir or os.getcwd()
    if exclude_gitignore_entries is None:
        exclude_gitignore_entries = options.sync_exclude_gitignore_entries
    exclude_regex_list = exclude_regex_list or copy.copy(
        options.sync_exclude_regex_list)
    gitignore_path = os.path.join(sourcedir, '.gitignore')
    if exclude_gitignore_entries and os.path.isfile(gitignore_path):
        # each non-comment .gitignore entry becomes an exclude regex
        converted = []
        with open(gitignore_path) as gitignore:
            for raw in gitignore.readlines():
                raw = raw.strip()
                if raw and not raw.startswith('#'):
                    converted.append(_gitignore_entry_to_regex(raw))
        exclude_regex_list += converted
    dirsync_logger = logging.getLogger('dirsync')
    dirsync_logger.setLevel(logging.INFO if options.debug else logging.FATAL)
    logger.info('syncing {} to {}'.format(sourcedir, options.path))
    logger.debug('excluding {} from sync'.format(exclude_regex_list))
    dirsync.sync(sourcedir=sourcedir,
                 targetdir=options.path,
                 action='sync',
                 create=True,
                 exclude=exclude_regex_list,
                 logger=dirsync_logger)
def test_del_src_dir_purge(self):
    """With purge=True a dir deleted in src is also deleted from dst."""
    self.rm('src/dir')
    sync('src', 'dst', 'sync', purge=True)
    for gone in ('src/dir', 'dst/dir'):
        self.assertNotExists(gone)
def sync_BIDS(root_directory_server, root_directory_local, subdirectory,
              from_to="server_to_local"):
    """Sync a BIDS *subdirectory* between server and local trees.

    from_to selects the direction: "server_to_local" (server is the
    source) or "local_to_server" (local is the source).
    """
    checkPathError(root_directory_server)
    checkPathError(root_directory_local)
    directory_server = join(root_directory_server, subdirectory)
    directory_local = join(root_directory_local, subdirectory)
    if from_to == "server_to_local":
        # sync SERVER to LOCAL. Source is the SERVER
        dirsync.sync(directory_server, directory_local, 'sync',
                     verbose=True, ctime=True, create=True)
    elif from_to == "local_to_server":
        # sync LOCAL to SERVER. Source is the LOCAL directory
        dirsync.sync(directory_local, directory_server, 'sync',
                     verbose=True, ctime=True, create=True)
def pull(self):
    """Mirror the remote fs_url into the local project tree."""
    fs_logger = logging.getLogger(dirsync.__name__)
    dirsync.sync(self.fs_url, self.project.local_fs_path, "sync",
                 create=True, purge=True, logger=fs_logger)
def sync_jar_files(options):
    """Copy the java libs the app needs (minus chrome_java.jar) into libs/."""
    app_lib_dir = os.path.join(constants.DIR_APP_ROOT, "libs")
    chrome_java_lib_dir = os.path.join(
        options.chromium_root, "out", options.buildtype, "lib.java")
    wanted = ['\w+_java\\.jar$',
              'cacheinvalidation_javalib\\.jar$',
              'jsr_305_javalib\\.jar$',
              'protobuf_nano_javalib\\.jar$',
              'web_contents_delegate_android_java\\.jar$']
    sync(chrome_java_lib_dir, app_lib_dir, "sync",
         only=wanted, ignore=['chrome_java\\.jar$'])
def pull(self):
    """Pull the remote filesystem into the local project copy."""
    source, target = self.fs_url, self.project.local_fs_path
    dirsync.sync(source, target, "sync", create=True, purge=True,
                 logger=logging.getLogger(dirsync.__name__))
def test_sync_all(self):
    """A create-sync copies files, populated dirs and empty dirs alike."""
    sync('src', 'dst', 'sync', create=True)
    self.assertIsFile('dst/file1.txt')
    self.assertIsDir('dst/dir')
    self.assertListDir('dst/dir', ['file4.txt'])
    self.assertIsDir('dst/empty_dir')
    self.assertListDir('dst/empty_dir', [])
def test_exclude_include(self):
    """include= re-admits a file that exclude= would drop."""
    opts = dict(create=True, exclude=('^.*\.py$',), include=('^file2\.py$',))
    sync('src', 'dst', 'sync', **opts)
    self.assertExists('dst/file2.py')
def test_ignore_dir(self):
    """A dir listed in ignore= is never deleted from dst."""
    self.rm('src/dir')
    sync('src', 'dst', 'sync', ignore=('dir',))
    self.assertNotExists('src/dir')
    self.assertExists('dst/dir')
def test_ignore_file_rm_dir(self):
    """An ignored file deleted in src stays present in dst."""
    self.rm('src/file1.txt')
    sync('src', 'dst', 'sync', ignore=('file1.txt',))
    self.assertNotExists('src/file1.txt')
    self.assertExists('dst/file1.txt')
def push(self, response):
    """Push local project files to the remote fs, purging stale entries."""
    source, target = self.project.local_fs_path, self.fs_url
    dirsync.sync(source, target, "sync", purge=True,
                 logger=logging.getLogger(dirsync.__name__))
    return response
def test_simple_exclude(self):
    """exclude= drops both matching dirs and matching files."""
    sync('src', 'dst', 'sync', create=True,
         exclude=('^dir.*$', '^.*\.py$'))
    self.assertNotExists('dst/file2.py')
    self.assertNotExists('dst/dir')
def sync_folders_once(self):
    """Sync the source folder into a per-day subfolder of the destination,
    then pause briefly."""
    tday = str(date.today())
    # removed the unused local `dest` the original assigned and never read
    sync(self.source_folder.get(),
         self.destination_folder.get() + "/" + tday,
         'sync', verbose=True, create=True)
    time.sleep(5)
def sync_dirs(current_client, current_identifier, no_of_dirs_temp):
    """Mirror each named dir from PATH into the client's identifier dir.

    no_of_dirs_temp : iterable of dir names relative to the global PATH.
    """
    # https://www.geeksforgeeks.org/python-os-path-join-method/
    client_dir = os.path.join(client_path, current_identifier)  # invariant
    # iterate directly (was `for i in range(len(...))` with indexing)
    for name in no_of_dirs_temp:
        src = os.path.join(PATH, name)
        dest = os.path.join(client_dir, name)
        sync(src, dest, 'sync')
def overwrite_with_export(repo, output):
    """Sync *repo* into *output*, offering to create the output dir first."""
    repo = path(repo)
    output = path(output)
    if not os.path.exists(output):
        log.critical("Output dir does not exist: {}".format(output))
        create = raw_input("Shall I create the output directory? [N/y] ")
        # BUGFIX: `create is 'y'` compared object identity with a string
        # literal, which is implementation-dependent; use equality.
        if create in ('y', 'Y'):
            os.mkdir(output)
    sync(repo, output, 'sync')
def run_task(self, from_location, to_location):
    """Record a 'last backup' timestamp for the job, then sync the files."""
    # copy files from source to target and update the 'last update' field
    self.connect_to_db()
    timestamp = dt.datetime.now()
    # Parameterized query: the old version interpolated values straight
    # into the SQL string, which breaks on quotes and is injection-prone.
    self.db.execute(
        'UPDATE jobs SET last_backup = ? '
        'WHERE from_location == ? AND to_location == ?',
        (str(timestamp), from_location, to_location))
    self.dbconn.commit()
    dirsync.sync(from_location, to_location, action='sync', verbose=True)
def sync_data_files(options):
    """Sync the chrome_public assets into the app's assets dir."""
    # TODO(alex)
    # locales_dir = os.path.join(constants.DIR_LIBRARIES_ROOT, "chrome_res",
    #                            "src", "main", "res", "raw")
    # pak_gen_dir = os.path.join(options.chromium_root, "out",
    #                            options.buildtype, "locales")
    # args = {'only': ['en-US.pak', 'zh-CN.pak']}
    # sync(pak_gen_dir, locales_dir, "sync", **args)
    assets_dir = os.path.join(constants.DIR_APP_ROOT, "src", "main", "assets")
    chrome_public_assets_dir = os.path.join(
        options.chromium_root, "out", options.buildtype,
        "assets", "chrome_public_apk")
    sync(chrome_public_assets_dir, assets_dir, "sync")
def sync_content_res_files(options):
    """Sync content res files plus the grd-generated zh-rCN strings."""
    library_res_dir = os.path.join(
        constants.DIR_LIBRARIES_ROOT, "content_res", "src", "main", "res")
    content_res_dir = os.path.join(
        options.chromium_root, "content", "public", "android", "java", "res")
    sync(content_res_dir, library_res_dir, "sync")
    # sync grd generated string resources: drop all values-* but zh-rCN
    content_grd_res_dir = os.path.join(
        options.chromium_root, "out", options.buildtype, "obj", "content",
        "content_strings_grd.gen", "content_strings_grd", "res_grit")
    sync(content_grd_res_dir, library_res_dir, "sync",
         exclude=['values-\S+'], include=['values-zh-rCN'])
def test_only(self):
    """Nothing outside the only= patterns makes it into dst."""
    sync('src', 'dst', 'sync', create=True, only=('^.*\.py$',))
    self.assertExists('dst/file2.py')
    for missing in ('dst/file1.txt', 'dst/dir/file4.txt', 'dst/dir'):
        self.assertNotExists(missing)
def test_exclude_include_ignore(self):
    """ignore= keeps files out even when include= would re-admit them."""
    sync('src', 'dst', 'sync', create=True,
         exclude=('^.*\.py$',), ignore=('^.*\.txt$',),
         include=('^file2\.py$',))
    self.assertNotExists('dst/file1.txt')
    self.assertNotExists('dst/dir/file4.txt')
def sync_manifest_files(options):
    """Sync the generated manifest and the policy meta xml files."""
    main_dir = os.path.join(constants.DIR_APP_ROOT, "src", "main")
    public_apk_gen_dir = os.path.join(
        options.chromium_root, "out", options.buildtype,
        "gen", "chrome_public_apk_manifest")
    sync(public_apk_gen_dir, main_dir, "sync")
    # sync meta xml files
    xml_dir = os.path.join(
        constants.DIR_APP_ROOT, "src", "main", "res", "xml")
    policy_gen_dir = os.path.join(
        options.chromium_root, "out", options.buildtype, "gen", "policy")
    # BUGFIX(review): the original computed these paths and the filter but
    # never called sync — the call announced by the comment was missing.
    sync(policy_gen_dir, xml_dir, "sync", only=['\S+\\.xml'])
def test_sync_modif(self):
    """A modified source file is reported and re-copied on the next sync."""
    sync('src', 'dst', 'sync', create=True)
    with open('src/file1.txt', 'r+') as fh:
        fh.write('modifying file')
    result = sync('src', 'dst', 'sync', create=True)
    self.assertSetEqual(result, set([os.path.join('dst', 'file1.txt')]))
    with open('dst/file1.txt', 'r') as fh:
        self.assertEqual(fh.read(), 'modifying file')
def do_sync(self, *args, **options):
    """Run one dirsync pass from the wfs dir to the sfs dir, updating the UI.

    Reschedules itself via ``after`` when the directories are not ready
    yet; extra keyword *options* are forwarded to ``sync``.
    """
    debug('do sync', args, options)
    if not self.dirs_okay():
        # dirs not ready: schedule a single re-activation and bail out
        if self.activate_id is None:
            self.activate_id = self.after(400, self.reactivate)
        return
    if 'exclude' not in options.keys():
        # exclude hidden files, lock files, backup files
        options['exclude'] = [
            ".*\.DS_Store",
            ".+/\.[\w \.]+$",
            ".+/~.+\.idlk$",
            ".+/#.+$",
            ".+/.+~$",
        ]
    self.ticking()
    self.action.set(self.pad('Syncing'))
    self.update_idletasks()
    # log to a per-class file under the user log dir
    logfile = os.path.join(
        self.dirs.user_log_dir,
        '{}.log'.format(self.__class__.__name__)
    )
    logging.basicConfig(filename=logfile, level=logging.DEBUG)
    # sync() returns the collection of files it copied
    files = sync(
        self.wfs_dir.get(),
        self.sfs_dir.get(),
        'sync',
        logger=logging,
        **options
    )
    self.stop_ticking()
    self.queue_status('Copied {} files'.format(len(files)))
def syncFolder(self, source, target):
    """Sync *source* into *target* and return dirsync's result."""
    # BUGFIX: log line used to read "...target<path>" (missing space)
    print("Syncing folders, source " + source + " target " + target)
    logger = FileLogger()
    result = dirsync.sync(source, target, 'sync', verbose=True, logger=logger)
    return result
def test_del_src_dir(self):
    """A 'diff' action only reports: after removing src/dir it must show
    the dir as present only in dst, with the common entries listed."""
    self.rm('src/dir')
    sync('src', 'dst', 'diff', logger=self.logger)
    # first 11 lines of the captured diff output, in dirsync's format
    self.assertListEqual(self.output.splitlines()[:11],
                         ['Difference of directory dst from src',
                          '',
                          'Only in dst',
                          '<< dir',
                          '<< dir%sfile4.txt' % os.sep,
                          '',
                          'Common to src and dst',
                          '-- empty_dir',
                          '-- file1.txt',
                          '-- file2.py',
                          '-- file3.js'])
def fetch(self):
    """Pull the remote fs; if anything changed, bump the fs_hash revision."""
    synced = dirsync.sync(
        self.fs_url, self.project.local_fs_path, "sync",
        create=True, purge=True,
        logger=logging.getLogger(dirsync.__name__))
    if synced:
        # dirsync reported changed paths -> record a fresh hash
        revision.get(Project)(self.project).set(
            keys=["pootle.fs.fs_hash"], value=uuid.uuid4().hex)
def sync_local_files(self):
    """ Sync non-binary files in source to dest_path """
    src_path = self.paths['shelves']
    dest_path = self.options['dest_sync']
    if 'shelf' in self.options:
        shelf = self.options['shelf']
        src_path = os.path.join(src_path, shelf)
        dest_path = os.path.join(dest_path, shelf)
    # No need to sync a path onto itself
    if src_path != dest_path:
        dirsync.sync(src_path, dest_path, 'sync', logger=self.logger,
                     create=True, verbose=True, exclude=['^.*\.pitem$'])