def __enter__(self):
    self._load_db()
    if self._safe_dump and os.path.isfile(self._db_file):
        # Backup the database before any modification (safe mode)
        backup_file = '%s.orig' % self._db_file
        self._logger.info('Safe mode, backup db %s' % backup_file)
        copy_file(source=self._db_file,
                  dest=backup_file,
                  dry_run=self._dry_run)
        if not check_file_consistency(source=self._db_file,
                                      dest=backup_file,
                                      dry_run=self._dry_run):
            raise Exception('Unable to do safe load. Copy %s' % self._db_file)
    return self
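# The safe-mode backup above relies on check_file_consistency() to verify
# the copy. Its implementation is not shown in this excerpt; the log message
# about sums not matching suggests a checksum comparison. A minimal,
# self-contained sketch of that idea (hypothetical helper names, not the
# project's actual implementation):

import hashlib
import os


def _sha256_digest(path, chunk_size=65536):
    """Return the SHA-256 hex digest of a file, read in chunks."""
    sha = hashlib.sha256()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            sha.update(chunk)
    return sha.hexdigest()


def _files_match(source, dest):
    """Compare two files by size first, then by SHA-256 digest."""
    if os.path.getsize(source) != os.path.getsize(dest):
        return False
    return _sha256_digest(source) == _sha256_digest(dest)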
def sync_dir():
    "Sync source dir with dest dir"
    LOG.warning('Start sync dir ...')
    # Open the database connection
    with Db(db_name='sync',
            db_file='%s/db.json' % ARGS.target,
            dry_run=DRY_RUN,
            safe_dump=True) as db:
        # Walk every directory under the source
        for dir_path, dirs, files in os.walk(ARGS.source):
            if not files:
                continue
            # For each file in the directory
            for file_name in files:
                # Skip the database files
                if file_name in ('db.json', 'db.json.orig'):
                    continue
                # Example: -s Video/foo -t dest
                # gives: src = Video/foo, relative = foo, dst = dest/foo
                dir_source = dir_path
                dir_relative = re.sub(r'%s/?' % ARGS.source, '', dir_source)
                dir_dest = join(ARGS.target, dir_relative)
                file_source = join(dir_source, file_name)
                file_relative = join(dir_relative, file_name)
                file_dest = join(dir_dest, file_name)
                if db.get(file_relative) is None:
                    if is_include(file_relative) \
                            or not is_exclude(file_relative):
                        create_dir(dir_dest, dry_run=DRY_RUN)
                        try:
                            copy_file(file_source, file_dest, dry_run=DRY_RUN)
                        except IOError as error:
                            LOG.critical("Error can't copy file %s : %s"
                                         % (file_dest, error))
                        if check_file_consistency(file_source, file_dest,
                                                  dry_run=DRY_RUN):
                            db.save(file_relative, 'unused')
                        else:
                            LOG.critical("Error file is not consistent, "
                                         "the checksums do not match")
    # Clean up empty directories after the sync
    remove_empty_dir(ARGS.target, dry_run=DRY_RUN)
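# sync_dir() reads ARGS, DRY_RUN and LOG as module-level globals. A minimal
# sketch of how they might be populated and how the function would then be
# invoked (the -s/-t flag names come from the example comment above; the
# --dry-run flag and logger name are assumptions, not the project's actual
# entry point):

import argparse
import logging

if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(description='Sync source dir with dest dir')
    PARSER.add_argument('-s', '--source', required=True, help='source directory')
    PARSER.add_argument('-t', '--target', required=True, help='target directory')
    PARSER.add_argument('--dry-run', action='store_true',
                        help='log actions without writing anything')
    ARGS = PARSER.parse_args()
    DRY_RUN = ARGS.dry_run
    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger('sync')
    sync_dir()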