def __init__(self, subject):
    """Set up one experiment session for *subject* (a dict of subject info).

    Builds the session record, trial list, feedback sounds, instruction
    texts, response device mapping, and opens the per-subject data file.
    """
    # Session record: subject info plus run date and the machine it ran on.
    self.session = subject.copy()
    self.session['date'] = data.getDateStr()
    self.session['computer'] = socket.gethostname()
    self.trials = Trials(**subject)
    self.load_sounds('stimuli/sounds')
    self.feedback = {}
    # Map response correctness (0 = wrong, 1 = right) to a feedback sound.
    self.feedback['audio'] = {
        0: sound.Sound('stimuli/feedback/incorrect.wav'),
        1: sound.Sound('stimuli/feedback/correct.wav'),
    }
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input and deprecated in newer PyYAML — confirm texts.yaml
    # is trusted, or switch to yaml.safe_load.
    self.texts = yaml.load(open('texts.yaml'))
    # Gamepad buttons 0/3 and keyboard y/n map to response codes 1/0.
    self.device = ResponseDevice(gamepad={0: 1, 3: 0},
                                 keyboard={'y': 1, 'n': 0})
    data_dir = Path(DATA_FILE.format(**subject)).parent
    if not data_dir.isdir():
        data_dir.mkdir()
    # buffering=0 (unbuffered) so every trial hits disk immediately;
    # this third positional arg is Python 2 semantics for text files.
    self.data_file = open(DATA_FILE.format(**subject), 'w', 0)
    self.write_trial()  # write header
def content(request=None):
    """Resolve *request* to a path inside the configured package repo.

    Returns a ``Dirs`` listing for a directory, the ``Path`` itself for a
    file, and otherwise aborts with an appropriate HTTP error.
    """
    base = Path(current_app.config.get('INUPYPI_REPO', Path('.', 'packages')))
    if request:
        repo = Path(base, request)
    else:
        repo = base
    try:
        repo = repo.absolute()
        base = base.absolute()
        if not repo.exists():
            if base == repo:
                raise InuPyPIMissingRepoPath
            # sets the request to lowercase and compares it with
            # the existing items in the repository in lowercase
            repo = search_path(repo, base)
            if not repo:
                raise InuPyPI404Exception
        if repo.isdir():
            return Dirs(repo)
        if repo.isfile():
            return repo
    except InuPyPIMissingRepoPath:
        abort(500, 'Missing repository or package path!')
    except InuPyPI404Exception:
        abort(404, 'Path or File could not be found!')
    except Exception:
        # bug fix: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; catch only real errors here.
        abort(500, 'Internal Server Error!')
    return repo
def go_home(domain):
    """chdir into AQUATONE_ROOT/<domain>; exit with status 1 if it is
    missing or not a directory."""
    path_domain = Path(AQUATONE_ROOT, domain)
    if path_domain.isdir():
        path_domain.chdir()
    else:
        # bug fix: print() does not %-interpolate extra positional args the
        # way logging does; the original printed the tuple verbatim.
        print("error %s is not a dir" % domain)
        sys.exit(1)
def get_closest_uid(path):
    """Walk upward from *path* to the closest existing directory and
    return its owner's uid; return False once the walk hits the root."""
    candidate = Path(path)
    while True:
        if candidate.isdir():
            return candidate.stat().st_uid
        candidate = candidate.ancestor(1)
        # Give up when we have climbed all the way to the filesystem root.
        if candidate == '/':
            return False
def main():
    """Controller: run the script given on the command line with the
    Python interpreter of the virtualenv recorded in its .venv file."""
    args = sys.argv[1:]
    if len(args) == 0:
        print_usage()
        sys.exit(0)
    args = check_args(args)
    prg = args[0]  # script in venv
    args = args[1:]  # arguments of the script in venv
    p = Path(prg).absolute()
    # Locate the .venv marker file in (an ancestor of) the script's dir.
    venv_file = find_venv_file(p.parent)
    venv_dir = Path(venv_file.read_file().strip())
    # .venv can also contain a relative path
    if not venv_dir.isabsolute():
        venv_dir = Path(venv_file.parent, venv_dir).norm()
    if not venv_dir.isdir():
        print("Error: {vd} is not a directory.".format(vd=venv_dir), file=sys.stderr)
        sys.exit(1)
    #
    # The venv must provide its own python binary.
    python_path = Path(venv_dir, "bin/python")
    if not python_path.isfile():
        print("Error: {pp} is missing.".format(pp=python_path), file=sys.stderr)
        sys.exit(1)
    if DEBUG:
        print("# venv dir: {d}".format(d=venv_dir), file=sys.stderr)
    # Hand off to the venv's interpreter with the original arguments.
    my_call(python_path, prg, args)
def copy_sounds(ctx, force=False):
    """Copy sounds from acoustic-similarity to use in this experiment.

    Seeds listed in stimuli/messages.csv are copied as .wav files into
    stimuli/sounds; existing copies are kept unless ``force`` is true.
    """
    source = Path('../acoustic-similarity/data/sounds')
    assert source.isdir(), 'expecting sounds to be in {}'.format(source)

    target = Path('stimuli/sounds')
    if not target.isdir():
        target.mkdir()

    messages = pandas.read_csv('stimuli/messages.csv')
    for seed in messages.seed_id.unique():
        wav_name = '{}.wav'.format(seed)
        destination = Path(target, wav_name)
        # Skip sounds that are already present, unless forced.
        if not destination.exists() or force:
            Path(source, wav_name).copy(destination)
class Manager(object):
    """Abstract base class for entity managers backed by a git-controlled
    admin repository directory.

    Subclasses must implement get(), create() and delete().
    """

    __metaclass__ = ABCMeta  # Python 2 style ABC declaration

    def __init__(self, admin_repository):
        # Keep both a filesystem handle and a git handle on the same dir.
        self.path = Path(admin_repository)
        self.git = Git(admin_repository)
        if not self.path.isdir():
            raise ValueError('Admin repository path should point to directory')

    def get_or_create(self, lookup_entity, *args, **kwargs):
        # Fall back to create() only when get() returns a falsy result.
        return self.get(lookup_entity) or self.create(lookup_entity, *args, **kwargs)

    @abstractmethod
    def get(self, entity):
        raise NotImplementedError("Each manager needs a get method")

    @abstractmethod
    def create(self, entity):
        raise NotImplementedError("Each manager needs a create method")

    @abstractmethod
    def delete(self, entity_name):
        raise NotImplementedError("Each manager needs a delete method")
def get(project=None):
    """Get the data from different experiments.

    Warning! Experiment directories are expected in a particular location
    outside of this (words-in-transition) directory. Options are:

        telephone-app
        acoustic-similarity
        learning-sound-names
    """
    if project is None or project == 'telephone-app':
        app_dir = Path('../telephone-app')
        snapshot_dir = Path(app_dir, 'words-in-transition')
        # NOTE(review): src_dir is never defined in this function — it is
        # presumably a module-level constant defined elsewhere in the file;
        # verify, otherwise this branch raises NameError.
        if src_dir.exists():
            src_dir.rmtree()
        copytree(snapshot_dir, src_dir)
    if project is None or project == 'acoustic-similarity':
        # src
        proj_dir = Path('../acoustic-similarity/data')
        judgments = Path(proj_dir, 'judgments')
        # dst
        acoustic_similarity_dir = Path(data_raw, 'acoustic-similarity')
        if not acoustic_similarity_dir.isdir():
            acoustic_similarity_dir.mkdir()
        # copy the csvs in the root proj data dir
        for csv in proj_dir.listdir('*.csv'):
            csv.copy(Path(acoustic_similarity_dir, csv.name))
        # concat and save judgments files
        judgments_csv = Path(acoustic_similarity_dir, 'judgments.csv')
        # `judgments` is rebound from a Path to a list of DataFrames here.
        judgments = [pd.read_csv(x) for x in judgments.listdir('*.csv')]
        if judgments:
            (pd.concat(judgments, ignore_index=True)
             .to_csv(judgments_csv, index=False))
    if project is None or project == 'learning-sound-names':
        src = Path('../learning-sound-names/data')
        dst = Path(data_raw, 'learning_sound_names.csv')
        data = pd.concat([pd.read_csv(x) for x in src.listdir('LSN*.csv')])
        data['is_correct'] = data.is_correct.astype(int)
        data.to_csv(dst, index=False)
        # also get subject info and questionnaire data
        to_get = ['questionnaire_v1', 'subject_info']
        for x in to_get:
            src_file = Path(src, '{}.csv'.format(x))
            dst_file = Path(data_raw, 'learning_sound_names_{}.csv'.format(x))
            run('cp {} {}'.format(src_file, dst_file))
def __check_directory(self):
    """
    Check if the entered directory exists

    :return: (unipath.Path or False) the path to the existing directory
    """
    target = Path(self.arguments['<directory>'])
    # Valid only when the path exists AND is a directory (De Morgan of the
    # usual "missing or not a dir" rejection).
    if target.exists() and target.isdir():
        return target
    self.__output('{} is not a valid directory'.format(target.absolute()),
                  error=True)
    return False
def get(project=None):
    """Get the data from different experiments.

    Warning! Experiment directories are expected in a particular location
    outside of this (words-in-transition) directory. Options are:

        telephone-app
        acoustic-similarity
        learning-sound-names
    """
    if project is None or project == 'telephone-app':
        app_dir = Path('../telephone-app')
        snapshot_dir = Path(app_dir, 'words-in-transition')
        # NOTE(review): src_dir is never defined in this function — it is
        # presumably a module-level constant defined elsewhere in the file;
        # verify, otherwise this branch raises NameError.
        if src_dir.exists():
            src_dir.rmtree()
        copytree(snapshot_dir, src_dir)
    if project is None or project == 'acoustic-similarity':
        # src
        proj_dir = Path('../acoustic-similarity/data')
        judgments = Path(proj_dir, 'judgments')
        # dst
        acoustic_similarity_dir = Path(data_raw, 'acoustic-similarity')
        if not acoustic_similarity_dir.isdir():
            acoustic_similarity_dir.mkdir()
        # copy the csvs in the root proj data dir
        for csv in proj_dir.listdir('*.csv'):
            csv.copy(Path(acoustic_similarity_dir, csv.name))
        # concat and save judgments files
        judgments_csv = Path(acoustic_similarity_dir, 'judgments.csv')
        # `judgments` is rebound from a Path to a list of DataFrames here.
        judgments = [pd.read_csv(x) for x in judgments.listdir('*.csv')]
        if judgments:
            (pd.concat(judgments, ignore_index=True)
             .to_csv(judgments_csv, index=False))
    if project is None or project == 'learning-sound-names':
        src = Path('../learning-sound-names/data')
        dst = Path(data_raw, 'learning_sound_names.csv')
        data = pd.concat([pd.read_csv(x) for x in src.listdir('LSN*.csv')])
        data['is_correct'] = data.is_correct.astype(int)
        data.to_csv(dst, index=False)
        # also get subject info and questionnaire data
        to_get = ['questionnaire_v1', 'subject_info']
        for x in to_get:
            src_file = Path(src, '{}.csv'.format(x))
            dst_file = Path(data_raw, 'learning_sound_names_{}.csv'.format(x))
            run('cp {} {}'.format(src_file, dst_file))
def dump_path(path, prefix="", tab=" ", file=None):
    # Recursively print a tree listing of *path* to *file* (default stdout).
    # Python 2 `print >>file` syntax throughout.
    if file is None:
        file = sys.stdout
    p = Path(path)
    if p.islink():
        # Show the symlink target instead of following it.
        print >>file, "%s%s -> %s" % (prefix, p.name, p.read_link())
    elif p.isdir():
        print >>file, "%s%s:" % (prefix, p.name)
        # Indent children by one extra tab level.
        for p2 in p.listdir():
            dump_path(p2, prefix+tab, tab, file)
    else:
        # Regular file: name and size in bytes.
        print >>file, "%s%s (%d)" % (prefix, p.name, p.size())
def process(appname):
    """Scaffold the namespaced static/, templates/ and urls.py for an
    existing Django app called *appname*."""
    app_path = Path(appname)
    if not app_path.isdir():
        print("Error: there is no app called {0}.".format(app_path))
        sys.exit(1)
    # else
    # Namespaced asset dirs: <app>/static/<app> and <app>/templates/<app>.
    for subdir in ('static', 'templates'):
        Path(appname, subdir, appname).mkdir(True)
    # Seed a urls.py only when the app does not already have one.
    urls_file = Path(appname, 'urls.py')
    if not urls_file.isfile():
        urls_file.write_file(urls_py)
def clean(options):
    '''
    Clean the last build
    '''
    cfg = options.cfg.default
    # Expand %-style placeholders in each configured dir using the config.
    clean_dirs = [_dir % cfg for _dir in cfg.clean_dirs]
    # NOTE(review): Path.isdir(_dir) is an unbound-method call on what looks
    # like a plain string; this only works if the values are already Path
    # instances (unipath Path subclasses str) — verify, else Path(_dir).isdir().
    clean_dirs = [Path(_dir).absolute() for _dir in clean_dirs if _dir and Path.isdir(_dir)]
    for _dir in clean_dirs:
        # Safety net: refuse to delete paths too close to the drive root.
        if _dir.components() < 4:
            print 'Directory %s is too close to the root. Skipping.' % _dir
            continue
        # Windows-specific recursive delete (rd /s /q).
        cmd = 'rd /s /q %(_dir)s' % locals()
        sub.check_call(cmd, shell=True)
def clean(options):
    '''
    Clean the last build
    '''
    cfg = options.cfg.default
    # Expand %-style placeholders in each configured dir using the config.
    clean_dirs = [_dir % cfg for _dir in cfg.clean_dirs]
    # NOTE(review): Path.isdir(_dir) is an unbound-method call on what looks
    # like a plain string; this only works if the values are already Path
    # instances (unipath Path subclasses str) — verify, else Path(_dir).isdir().
    clean_dirs = [
        Path(_dir).absolute()
        for _dir in clean_dirs
        if _dir and Path.isdir(_dir)
    ]
    for _dir in clean_dirs:
        # Safety net: refuse to delete paths too close to the drive root.
        if _dir.components() < 4:
            print 'Directory %s is too close to the root. Skipping.' % _dir
            continue
        # Windows-specific recursive delete (rd /s /q).
        cmd = 'rd /s /q %(_dir)s' % locals()
        sub.check_call(cmd, shell=True)
def __init__(self, input_file, output_file=None, binary=None):
    """Record source/destination paths; sizing options start unset."""
    self.width = self.height = self.quality = self.group = None
    if binary:
        # Only override the default binary when one is supplied.
        self.binary = binary
    self.input_file = Path(input_file)
    stem = self.input_file.name.rsplit(".", 1)[0]
    if not output_file:
        # Default destination: sibling of the input, named after its stem.
        self.output_file = self.input_file.parent.child(self.input_file.stem)
    else:
        candidate = Path(output_file)
        # A directory destination gets the input's basename appended.
        if candidate.isdir():
            self.output_file = candidate.child(stem)
        else:
            self.output_file = candidate
class ReleaseUnpacker(object):
    """ReleaseUnpacker.

    Scans a search dir for RAR releases, extracts whitelisted media files
    into an unpack dir (via a tmp dir), and removes the release dirs.
    """

    def __init__(self, release_search_dir, tmp_dir, unpack_dir,
                 no_remove=False):
        """Initialize and validate ReleaseUnpacker."""
        self.release_search_dir = Path(release_search_dir)
        self.release_search_dir_abs = self.release_search_dir.absolute()
        self.tmp_dir = Path(tmp_dir)
        self.unpack_dir = Path(unpack_dir)
        self.no_remove = no_remove
        # Fail fast with a specific error for each invalid directory.
        if not self.release_search_dir_abs.exists():
            raise ReleaseUnpackerError(
                "Release search dir {} doesn't exist".format(
                    self.release_search_dir))
        elif not self.release_search_dir_abs.isdir():
            raise ReleaseUnpackerError(
                "Release search dir {} is not a dir".format(
                    self.release_search_dir))
        elif not self.tmp_dir.exists():
            raise ReleaseUnpackerError("Tmp dir {} doesn't exist".format(
                self.tmp_dir))
        elif not self.tmp_dir.isdir():
            raise ReleaseUnpackerError("Tmp dir {} is not a dir".format(
                self.tmp_dir))
        elif not self.unpack_dir.exists():
            raise ReleaseUnpackerError("Unpack dir {} doesn't exist".format(
                self.unpack_dir))
        elif not self.unpack_dir.isdir():
            raise ReleaseUnpackerError("Unpack dir {} is not a dir".format(
                self.unpack_dir))

    def __repr__(self):
        """Return object string representation."""
        return "<ReleaseUnpacker: {} ({}) ({})>".format(
            self.release_search_dir_abs, self.tmp_dir, self.unpack_dir)

    def file_exists_size_match(self, unpack_file_path, size_in_rar):
        """Return True if unpack_file_path exists already and size matches."""
        if (unpack_file_path.exists()
                and unpack_file_path.size() == size_in_rar):
            log.info("%s already exists and size match", unpack_file_path)
            return True
        else:
            return False

    def unpack_release_dir_rars(self):
        """Run unpacker.

        Unpack all whitelisted file extensions found in RAR files.
        Returns False when nothing was found, otherwise self.
        """
        # Scan for RAR files
        self.rar_files = self.scan_rars()
        if not self.rar_files:
            log.debug("No RARs found in %s", self.release_search_dir_abs)
            return False

        # Process the RAR files in any were found
        for rar_file_path in self.rar_files:
            log.debug("Found RAR file %s", rar_file_path)
            release_unpacker_rar_file = ReleaseUnpackerRarFile(rar_file_path)
            # Subs RARs get special nested handling.
            if release_unpacker_rar_file.subs_dir:
                self.unpack_subs_rar(release_unpacker_rar_file)
            else:
                self.unpack_rar(release_unpacker_rar_file)

        # Remove release dirs when unpack is done
        self.remove_release_dirs()
        return self

    def scan_rars(self):
        """Scan release_search_dir for .rar files.

        Find all sub folders and return a list of the first .rar file in
        each folder if any is found.
        """
        scan_dirs = [
            dir for dir in self.release_search_dir_abs.walk(filter=DIRS)
        ]
        scan_dirs.append(self.release_search_dir_abs)
        rar_files = []
        for dir in scan_dirs:
            rar_files_found = dir.listdir(pattern="*.rar", filter=FILES)
            if rar_files_found:
                # Only the first RAR per dir — multi-part volumes share it.
                rar_files.append(rar_files_found[0])
        return rar_files

    def remove_release_dirs(self):
        """Remove all release dirs from rar_files list."""
        for rar_file_path in self.rar_files:
            release_dir = rar_file_path.parent
            if release_dir.exists():
                if self.no_remove:
                    log.info("No remove active, not removing %s", release_dir)
                else:
                    log.info("Unpack complete, removing %s", release_dir)
                    release_dir.rmtree()

    def unpack_subs_rar(self, release_unpacker_rar_file):
        """Unpack a RAR in a Subs folder (recurses into nested RARs)."""
        log.debug(
            "Processing subs RAR file %s",
            release_unpacker_rar_file.rar_file_path_abs,
        )
        for rarfile_file in release_unpacker_rar_file.file_list:
            # File in RAR is not a RAR file, extract
            if rarfile_file["name"].ext != ".rar":
                unpack_filename = "{}{}".format(release_unpacker_rar_file.name,
                                                rarfile_file["name"].ext)
                unpack_file_path_abs = Path(self.unpack_dir, unpack_filename)

                # File exists and size match
                if self.file_exists_size_match(unpack_file_path_abs,
                                               rarfile_file["size"]):
                    continue

                self.unpack_move_rar_file(
                    release_unpacker_rar_file,
                    rarfile_file["name"],
                    unpack_file_path_abs,
                )
            # RAR file in RAR, extract to Subs folder and extract RAR
            else:
                log.debug(
                    "RAR file %s in %s",
                    rarfile_file["name"],
                    release_unpacker_rar_file.rar_file_path_abs,
                )

                # Extract the RAR to Subs folder
                subs_dir = release_unpacker_rar_file.rar_file_path_abs.parent
                log.debug("Extracting %s to %s", rarfile_file["name"],
                          subs_dir)
                extracted_file_path = release_unpacker_rar_file.extract_file(
                    rarfile_file["name"], subs_dir)

                # Extract the extracted Subs RAR file
                self.unpack_subs_rar(
                    ReleaseUnpackerRarFile(extracted_file_path))

                # Remove RAR file in Subs folder
                log.debug("Removing extracted Subs RAR %s",
                          extracted_file_path)
                extracted_file_path.remove()

    def unpack_rar(self, release_unpacker_rar_file):
        """Unpack RAR files. Only process whitelisted file extensions."""
        for rarfile_file in release_unpacker_rar_file.file_list:
            # Check file extension
            if rarfile_file["name"].ext not in (
                    ".avi",
                    ".mkv",
                    ".img",
                    ".iso",
                    ".mp4",
            ):
                log.info("Skipping %s, unwanted ext", rarfile_file["name"])
                continue

            unpack_filename = "{}{}".format(release_unpacker_rar_file.name,
                                            rarfile_file["name"].ext)
            unpack_file_path_abs = Path(self.unpack_dir, unpack_filename)

            # File exists and size match
            if self.file_exists_size_match(unpack_file_path_abs,
                                           rarfile_file["size"]):
                continue

            # Unpack file in RAR
            self.unpack_move_rar_file(
                release_unpacker_rar_file,
                rarfile_file["name"],
                unpack_file_path_abs,
            )
        return True

    def unpack_move_rar_file(self, release_unpacker_rar_file,
                             rarfile_file_name, unpack_file_path):
        """Unpack and move RAR file.

        Extract an individual file from release_unpacker_rar_file to
        unpack_file_path.
        """
        # Extract file to tmp_dir
        log.debug("Extracting %s to %s", rarfile_file_name, self.tmp_dir)
        log.info("%s unpack started", unpack_file_path.name)
        unpack_start = datetime.now().replace(microsecond=0)
        extracted_file_path = release_unpacker_rar_file.extract_file(
            rarfile_file_name, self.tmp_dir)
        unpack_end = datetime.now().replace(microsecond=0)
        # human() renders the elapsed time; empty string means "instant".
        unpack_time = human(unpack_end - unpack_start, past_tense="{}")
        if not unpack_time:
            log.info("%s unpack done", unpack_file_path.name)
        else:
            log.info("%s unpack done, %s", unpack_file_path.name, unpack_time)

        # Move file and rename to unpack_dir
        log.debug("Moving %s to %s", extracted_file_path, unpack_file_path)
        extracted_file_path.move(unpack_file_path)
def get_eggbaskets():
    """Return the directory entries of the configured INUPYPI_REPO.

    Aborts with HTTP 500 when the configured path is missing or is not a
    directory.
    """
    path = Path(app.config.get('INUPYPI_REPO', ''))
    # bug fix: the original `not exists() and not isdir()` reduces to just
    # `not exists()` (a missing path is never a dir), so an existing
    # non-directory path fell through to os.listdir() and crashed.
    # `or` rejects both invalid cases.
    if not path.exists() or not path.isdir():
        abort(500, "%s doesn't exist." % path)
    return os.listdir(path)
class Project(TimeStampedModel):
    """A hosted web project: one domain mapped to a /data/www directory
    plus Apache vhost and log files (Python 2 / Django)."""

    domain = models.CharField(max_length=100, blank=False, unique=True,
                              validators=[DomainValidator])
    path = models.CharField(max_length=255, blank=True)
    status = models.BooleanField(default=True)
    # Cached unipath.Path wrapper around self.path (built lazily).
    _unipath = None

    def __unicode__(self):
        return self.domain

    """ --------------------------------------------------------- """

    def save(self, *args, **kwargs):
        self.path = Path('/data/www', self.safe_domain_name())
        # create directory before create object
        if self.pk == None:
            # @todo do not allowed create on root permission
            if not self.path.isdir():
                uid = 0
                gid = 0
                # find uid and gid of closest parent directory
                iterator = self.path.ancestor(1)
                component = len(iterator.components())
                for i in xrange(component):
                    if iterator.isdir():
                        stat = iterator.stat()
                        uid = stat.st_uid
                        gid = stat.st_gid
                        break
                    iterator = iterator.ancestor(1)
                # create all necessary files and directories
                self.path.child('public').mkdir(True)
                self.path.child('logs').mkdir(True)
                self.apache_vhost_file().write_file('')
                self.apache_access_log().write_file('')
                self.apache_error_log().write_file('')
                # need 777 or apache won't sent errors here
                self.php_error_log().write_file('')
                self.php_error_log().chmod(0777)
                # make files available to user
                shell_exec(["chown", "-R", "%d:%d" % (uid, gid), iterator])
        else:
            # get its previous domain value, if it changes, rename virtualhost file
            old = Project.objects.get(pk=self.pk)
            if self.domain != old.domain:
                old.apache_vhost_file().rename(self.apache_vhost_file())
        # update database, /etc/hosts and apache virtualhost then reload apache
        super(Project, self).save(*args, **kwargs)
        #update_hostfile()
        self.apache_vhost_file().write_file(
            render_to_string("vhost.html", {'project': self}))
        Apache().reload()

    def clean_all(self, *args, **kwargs):
        # Remove the vhost and the whole project tree, then refresh Apache.
        self.apache_vhost_file().remove()
        self.get_path().rmtree()
        update_hostfile()
        Apache().reload()

    def safe_domain_name(self):
        # Replace anything outside [a-zA-Z0-9.-] so the domain is safe as a
        # filesystem name.
        return re.sub(r"[^a-zA-Z0-9\.\-]+", "-", self.domain)

    def get_path(self):
        # Lazily build and cache the Path wrapper.
        if self._unipath == None:
            self._unipath = Path(self.path)
        return self._unipath

    def document_root(self):
        return Path(self.path).child('public')

    def apache_vhost_file(self):
        return Path('/data/vhosts/%s.conf' % self.safe_domain_name())

    def apache_access_log(self):
        return self.get_path().child('logs', 'access.log')

    def apache_error_log(self):
        return self.get_path().child('logs', 'error.log')

    def php_error_log(self):
        return self.get_path().child('logs', 'error-php.log')
print(Path("/home/luke/..").norm()) # Expands .. and . notation print(Path("$HOME/..").expand()) # Expands system variables, ~ and also .. # Expands system variable and ~. Will also normalise the path ( remove redundant # .. . incorrect slashes and correct capitalisation on case sensitive file systems. Calls os.path.normpath # File Attributes and permissions print("\n*** File Attributes and permissions") # noinspection PyArgumentList print(here.atime()) # Last access time; seconds past epcoh # noinspection PyArgumentList print(here.ctime() ) # Last permission or ownership modification; windows is creation time; # noinspection PyArgumentList print(here.isfile()) # Is a file; symbolic links are followed. print(here.isdir()) # Is a directory; symbolic links are followed. # noinspection PyArgumentList print(here.islink()) # Is a symbolic link # noinspection PyArgumentList print(here.ismount() ) # Is a mount point; ie the parent is on a different device. # noinspection PyArgumentList print(here.exists()) # File exists; symbolic links are followed. # noinspection PyArgumentList print(here.lexists()) # Same as exists but symbolic links are not followed. # noinspection PyArgumentList print(here.size()) # File size in bytes. print(Path("/foo").isabsolute()) # Is absolute and not relative path # Epoch? print("\n*** gmtime")
class SubtitleDownloader(object):
    """Find media files under a search dir and download subtitles for them."""

    def __init__(self, search_dir, search_all=False):
        self.search_dir = Path(search_dir)
        # When True, ignore the one-week modification-time cutoff.
        self.search_all = search_all
        # Make sure the sort dir is a dir and cd into it
        if not self.search_dir.isdir():
            raise SubtitleDownloaderError('Invalid search-dir {}'.format(
                search_dir))

    @staticmethod
    def relative_path(path, root_path):
        """Return the relative path of path in root_path"""
        relative_path = path.replace(root_path, '')
        # Strip a single leading slash left over from the prefix removal.
        if relative_path[0:1] == '/':
            return relative_path[1:]
        else:
            return relative_path

    def scan_for_search_files(self):
        """Scan search dir and return all files to search subtitles for"""
        log.debug('Searching for files in dir {}'.format(self.search_dir))
        search_files = []
        for file_path in self.search_dir.walk(filter=FILES, top_down=False):
            # Only consider known media containers.
            if not file_path.ext in ('.mkv', '.avi'):
                continue

            subtitle_download = SubtitleDownload(file_path)

            # Search for subtitle if self.search_all is True or if the file
            # modified time is in the last week
            search_subtitle = self.search_all or \
                subtitle_download.time_since_modified < timedelta(weeks=1)

            # Don't search subtitle for this file
            if not search_subtitle:
                continue

            # Check if subtitle already exists
            if subtitle_download.subtitle_exist():
                log.debug('Subtitle for {} already exists'.format(
                    self.relative_path(file_path, self.search_dir)))
                continue

            search_files.append(subtitle_download)
        return search_files

    def scan_search(self):
        """Scan for files to download subtitles for and try to download
        subtitle.
        """
        search_files = self.scan_for_search_files()
        num_searches = len(search_files)
        for i, subtitle_download in enumerate(search_files):
            log.info('Subtitle search for {}'.format(subtitle_download.name))
            subtitle_download.search_download_subtitle()
            # Sleep between searches if it's not the last search file
            # (rate limiting against the subtitle service).
            if i + 1 != num_searches:
                log.info('Sleeping for {} seconds'.format(SLEEP_TIME))
                sleep(SLEEP_TIME)

    def cleanup(self):
        """Remove subtitle files left over where the media file is removed"""
        log.debug('Running subtitle cleanup on dir {}'.format(self.search_dir))
        subtitle_extensions = ('.srt', '.sub', '.idx')
        for file_path in self.search_dir.walk(filter=FILES, top_down=False):
            if not file_path.ext in subtitle_extensions:
                continue
            # Remove the subtitle file if no media file exists in the same dir
            media_file_path_mkv = Path(file_path.parent, '{}.mkv'.format(
                file_path.stem))
            media_file_path_avi = Path(file_path.parent, '{}.avi'.format(
                file_path.stem))
            if (not media_file_path_mkv.exists()
                    and not media_file_path_avi.exists()):
                log.info('Removing leftover subtitle file {}'.format(
                    self.relative_path(file_path, self.search_dir)))
                file_path.remove()
class Base(object):
    """
    set and make directory

    Parameters
    ----------
    home : str
        set a directory as home
    """

    def __init__(self, home='.'):
        """
        set home directory

        Parameters
        ----------
        home : str
            set a directory as home

        Returns
        -------
        """
        # Stored as an absolute unipath.Path; exposed as str via `home`.
        self._home = Path(home).absolute()

    def __str__(self):
        return self.home

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.home)

    # def __abs(self, string):
    #     return os.path.abspath(string)

    @property
    def home(self):
        return self._home.__str__()

    @home.setter
    def home(self, path):
        self._home = Path(path).absolute()

    def make_home(self, force=False):
        """
        make home directory

        Parameters
        ----------
        force : bool
            if True, if home exists and is a dir that containing contents,
            then delete contents in it, if exists and not a dir, remove it
            and make dir

        Returns
        -------
        """
        self.__mkdir(force)

    def __mkdir(self, force=False):
        # NOTE(review): nesting reconstructed from a flattened source —
        # verify the force/else pairing against the original file.
        if self._home.exists():
            if not self._home.isdir():
                if not force:
                    raise Exception('%s exists but is not a dir' % self.home)
                # force: replace the non-dir entry with a directory.
                self._home.remove()
                self._home.mkdir()
            if force:
                # force: wipe any existing contents and recreate empty.
                self._home.rmtree()
                self._home.mkdir()
        else:
            self._home.mkdir(parents=True)

    def __rmdir(self, force=False):
        # NOTE(review): nesting reconstructed from a flattened source —
        # verify the force/else pairing against the original file.
        if self._home.exists():
            if not self._home.isdir():
                if not force:
                    raise Exception('%s exists but is not a dir' % self.home)
                self._home.remove()
            if force:
                # force: remove the dir and everything in it.
                self._home.rmtree()
            else:
                # non-force rmdir fails unless the dir is empty.
                self._home.rmdir()

    def rm_home(self, force=False):
        """
        remove home directory

        Parameters
        ----------
        force : bool
            if True, if home exists and is a dir that containing contents,
            then delete it and it's contents, if exists and not a dir,
            remove then

        Returns
        -------
        """
        self.__rmdir(force)
def _clear_figs_for_report(report):
    # Delete the figs/ directory that sits beside the given report file.
    # (Python 2 print statement.)
    report_dir = Path(report).parent
    figs_dir = Path(report_dir, 'figs')
    if figs_dir.isdir():
        print 'removing figs dir:\n\t{}'.format(figs_dir)
        figs_dir.rmtree()
def get_package_path(eggbasket):
    """Return the path of *eggbasket* inside the configured INUPYPI_REPO.

    Aborts with HTTP 500 when the path is missing or is not a directory.
    """
    path = Path(app.config.get('INUPYPI_REPO', ''), eggbasket)
    # bug fix: `not exists() and not isdir()` collapses to `not exists()`
    # (a missing path is never a dir); use `or` so an existing
    # non-directory entry is also rejected.
    if not path.exists() or not path.isdir():
        abort(500)
    return path
from unipath import Path # proj_root/py_pkg/settings.py PROJ_ROOT = Path(__file__).ancestor(2).absolute() DATA_DIR = Path(PROJ_ROOT, 'data') if not DATA_DIR.isdir(): DATA_DIR.mkdir() SQLITE_PATH = Path(DATA_DIR, 'article_qualities.sqlite')
class ReleaseUnpacker(object):
    """Unpack whitelisted media files from RAR releases found under a
    search dir into an unpack dir, working through a tmp dir."""

    def __init__(self, release_search_dir, tmp_dir, unpack_dir,
                 no_remove=False):
        self.release_search_dir = Path(release_search_dir)
        self.release_search_dir_abs = self.release_search_dir.absolute()
        self.tmp_dir = Path(tmp_dir)
        self.unpack_dir = Path(unpack_dir)
        self.no_remove = no_remove
        # Fail fast with a specific error for each invalid directory.
        if not self.release_search_dir_abs.exists():
            raise ReleaseUnpackerError(
                'Release search dir {} doesn\'t exist'.format(
                    self.release_search_dir))
        elif not self.release_search_dir_abs.isdir():
            raise ReleaseUnpackerError(
                'Release search dir {} is not a dir'.format(
                    self.release_search_dir))
        elif not self.tmp_dir.exists():
            raise ReleaseUnpackerError(
                'Tmp dir {} doesn\'t exist'.format(self.tmp_dir))
        elif not self.tmp_dir.isdir():
            raise ReleaseUnpackerError(
                'Tmp dir {} is not a dir'.format(
                    self.tmp_dir))
        elif not self.unpack_dir.exists():
            raise ReleaseUnpackerError(
                'Unpack dir {} doesn\'t exist'.format(self.unpack_dir))
        elif not self.unpack_dir.isdir():
            raise ReleaseUnpackerError(
                'Unpack dir {} is not a dir'.format(
                    self.unpack_dir))

    def __repr__(self):
        return '<ReleaseUnpacker: {} ({}) ({})>'.format(
            self.release_search_dir_abs, self.tmp_dir, self.unpack_dir)

    def file_exists_size_match(self, unpack_file_path, size_in_rar):
        """Returns True if unpack_file_path exists and size is a match"""
        if (unpack_file_path.exists()
                and unpack_file_path.size() == size_in_rar):
            log.info('{} already exists and size match'.format(
                unpack_file_path))
            return True
        else:
            return False

    def scan_rars(self):
        """Find all folders in release_search_dir and return the first RAR
        file if one is found in a dir.
        """
        rar_files = []
        scan_dirs = [dir for dir
                     in self.release_search_dir_abs.walk(filter=DIRS)]
        scan_dirs.append(self.release_search_dir_abs)
        for dir in scan_dirs:
            rar_files_found = dir.listdir(pattern='*.rar', filter=FILES)
            if rar_files_found:
                # Only the first RAR per dir — multi-part volumes share it.
                rar_files.append(rar_files_found[0])
        return rar_files

    def unpack_release_dir_rars(self):
        """Run the unpacker.

        Find the first RAR file in dirs found in release_search_dir.
        Returns False when nothing was found, otherwise self.
        """
        # Scan for RAR files
        self.rar_files = self.scan_rars()
        if not self.rar_files:
            log.debug('No RARs found in {}'.format(
                self.release_search_dir_abs))
            return False

        # Process the RAR files in any were found
        for rar_file_path in self.rar_files:
            log.debug('Found RAR file {}'.format(rar_file_path))
            release_unpacker_rar_file = ReleaseUnpackerRarFile(rar_file_path)
            # Subs RARs get special nested handling.
            if release_unpacker_rar_file.subs_dir:
                self.unpack_subs_rar(release_unpacker_rar_file)
            else:
                self.unpack_rar(release_unpacker_rar_file)

        # Remove release dirs when unpack is done
        self.remove_release_dirs()
        return self

    def remove_release_dirs(self):
        """Remove all release dirs from rar_files list"""
        for rar_file_path in self.rar_files:
            release_dir = rar_file_path.parent
            if release_dir.exists():
                if self.no_remove:
                    log.info('No remove active, not removing {}'.format(
                        release_dir))
                else:
                    log.info(
                        'Unpack complete, removing {}'.format(release_dir))
                    release_dir.rmtree()

    def unpack_subs_rar(self, release_unpacker_rar_file):
        """Unpack a RAR in a Subs folder (recurses into nested RARs)"""
        log.debug('Processing subs RAR file {}'.format(
            release_unpacker_rar_file.rar_file_path_abs))
        for rarfile_file in release_unpacker_rar_file.file_list:
            # File in RAR is not a RAR file, extract
            if rarfile_file['name'].ext != '.rar':
                unpack_filename = '{}{}'.format(release_unpacker_rar_file.name,
                                                rarfile_file['name'].ext)
                unpack_file_path_abs = Path(self.unpack_dir, unpack_filename)

                # File exists and size match
                if self.file_exists_size_match(unpack_file_path_abs,
                                               rarfile_file['size']):
                    continue

                self.unpack_move_rar_file(release_unpacker_rar_file,
                                          rarfile_file['name'],
                                          unpack_file_path_abs)
            # RAR file in RAR, extract to Subs folder and extract RAR
            else:
                log.debug('RAR file {} in {}'.format(
                    rarfile_file['name'],
                    release_unpacker_rar_file.rar_file_path_abs))

                # Extract the RAR to Subs folder
                subs_dir = release_unpacker_rar_file.rar_file_path_abs.parent
                log.debug('Extracting {} to {}'.format(rarfile_file['name'],
                                                       subs_dir))
                extracted_file_path = release_unpacker_rar_file.extract_file(
                    rarfile_file['name'], subs_dir)

                # Extract the extracted Subs RAR file
                self.unpack_subs_rar(ReleaseUnpackerRarFile(
                    extracted_file_path))

                # Remove RAR file in Subs folder
                log.debug('Removing extracted Subs RAR {}'.format(
                    extracted_file_path))
                extracted_file_path.remove()

    def unpack_rar(self, release_unpacker_rar_file):
        """List all files in a RAR and determine if it should be extracted
        or not.
        """
        for rarfile_file in release_unpacker_rar_file.file_list:
            # Check file ext
            if rarfile_file['name'].ext not in ('.avi', '.mkv', '.img',
                                                '.iso', '.mp4'):
                log.info('Skipping {}, unwanted ext'.format(
                    rarfile_file['name']))
                continue

            unpack_filename = '{}{}'.format(release_unpacker_rar_file.name,
                                            rarfile_file['name'].ext)
            unpack_file_path_abs = Path(self.unpack_dir, unpack_filename)

            # File exists and size match
            if self.file_exists_size_match(unpack_file_path_abs,
                                           rarfile_file['size']):
                continue

            # Unpack file in RAR
            self.unpack_move_rar_file(release_unpacker_rar_file,
                                      rarfile_file['name'],
                                      unpack_file_path_abs)
        return True

    def unpack_move_rar_file(self, release_unpacker_rar_file,
                             rarfile_file_name, unpack_file_path):
        """Extract an individual file from release_unpacker_rar_file to
        unpack_file_path
        """
        # Extract file to tmp_dir
        log.debug('Extracting {} to {}'.format(rarfile_file_name,
                                               self.tmp_dir))
        log.info('{} unpack started'.format(unpack_file_path.name))
        unpack_start = datetime.now().replace(microsecond=0)
        extracted_file_path = release_unpacker_rar_file.extract_file(
            rarfile_file_name, self.tmp_dir)
        unpack_end = datetime.now().replace(microsecond=0)
        # human() renders the elapsed time; empty string means "instant".
        unpack_time = human(unpack_end - unpack_start, past_tense='{}')
        if not unpack_time:
            log.info('{} unpack done'.format(unpack_file_path.name))
        else:
            log.info('{} unpack done, {}'.format(unpack_file_path.name,
                                                 unpack_time))

        # Move file and rename to unpack_dir
        log.debug('Moving {} to {}'.format(extracted_file_path,
                                           unpack_file_path))
        extracted_file_path.move(unpack_file_path)
class ReleaseSorter(object):
    """Sort downloaded TV-release files in a directory into
    series/season folders.

    NOTE(review): this class uses Python 2 constructs (``unicode``,
    ``dict.iterkeys().next()``) and a Unipath-style ``Path`` API
    (``isdir()``, ``listdir(filter=...)``, ``move()``, ``rmtree()``) —
    it is not Python 3 compatible as written.
    """

    def __init__(self, sort_dir):
        # sort_dir: directory containing the files to sort.
        self.sort_dir = Path(sort_dir)

        # Make sure the sort dir is a dir and cd into it
        if not self.sort_dir.isdir():
            raise ReleaseSorterError('Invalid sort-dir {}'.format(sort_dir))
        os.chdir(sort_dir)

        # Mapping: series name -> {unicode(sorter_file): SorterFile}
        self.files_to_sort = {}

    def relative_path(self, path, root_path):
        """Return path with the root_path prefix (and a leading '/') removed.

        NOTE(review): relies on string replace, so root_path occurring
        elsewhere in the path would also be stripped — assumes it only
        appears as the prefix.
        """
        relative_path = path.replace(root_path, '')
        if relative_path[0:1] == '/':
            return relative_path[1:]
        else:
            return relative_path

    def check_extension(self, extension):
        """True if extension is a media extension we sort (.mkv/.avi)."""
        if extension in ('.mkv', '.avi'):
            return True
        else:
            return False

    def check_modified_time(self, time_since_modified):
        """True if the file is old enough (untouched for >= 20 minutes)."""
        if time_since_modified < timedelta(minutes=20):
            return False
        else:
            return True

    def create_series_folders(self, sorter_file):
        """Create the series and season directories for a file if missing."""
        if sorter_file.series_dir and not sorter_file.series_dir.exists():
            log.info('Creating series dir {}'.format(
                sorter_file.relative_path(sorter_file.series_dir)))
            sorter_file.series_dir.mkdir()

        if sorter_file.season_dir and not sorter_file.season_dir.exists():
            log.info('Creating season dir {}'.format(
                sorter_file.relative_path(sorter_file.season_dir)))
            sorter_file.season_dir.mkdir()

    def move_subtitle_files(self, sorter_file):
        """Check for existing subtitle files matching media file and move
        them to sort folder too.
        """
        for ext in ('.srt', '.sub', '.idx'):
            # Sibling file with the same stem and a subtitle extension.
            subtitle_path = Path(sorter_file.path.parent, '{}{}'.format(
                sorter_file.path.stem, ext))
            if subtitle_path.exists():
                log.info('Moving subtitle file {} to {}'.format(
                    self.relative_path(subtitle_path, self.sort_dir),
                    sorter_file.season_dir))
                subtitle_path.move(Path(self.sort_dir,
                                        sorter_file.season_dir))

    def move_sorter_file(self, sorter_file):
        """Move the media file itself into its season directory."""
        log.info('Moving {} to {}'.format(sorter_file.relative_path(),
                                          sorter_file.season_dir))
        sorter_file.path.move(Path(self.sort_dir, sorter_file.season_dir))

    def get_sorter_files(self):
        """List sort dir and find all files to sort"""
        log.debug('Sorting dir {}'.format(self.sort_dir))
        file_list = self.sort_dir.listdir(filter=FILES)
        for file in file_list:
            sorter_file = SorterFile(file, self.sort_dir)

            # Skip files with non-media extensions.
            if not self.check_extension(sorter_file.extension):
                log.debug('Skipping {}, wrong file extension'.format(
                    sorter_file.relative_path()))
                continue

            # Modified time: only process files that haven't been modified
            # in the last 20 min (i.e. are probably done downloading).
            time_since_modified = datetime.now() - sorter_file.mtime
            if not self.check_modified_time(time_since_modified):
                log.debug('Skipping {}, has been modified in the last 20 min '
                          '({})'.format(sorter_file.relative_path(),
                                        human(time_since_modified)))
                continue

            # Skip if file is not a TV release
            if not sorter_file.release.tv_release:
                log.debug('Skipping {}, not a TV release'.format(
                    sorter_file.relative_path()))
                continue

            # Group the file under its series name.
            series_name = sorter_file.release.tv_series_data['series_name']
            series_episodes = self.files_to_sort.get(series_name)
            if not series_episodes:
                series_episodes = {}
            series_episodes[unicode(sorter_file)] = sorter_file
            self.files_to_sort[series_name] = series_episodes

    def sort_files(self):
        # If a season dir already exist use that when sorting. Else if there
        # is only one file found for the series skip processing and moving.
        #
        # NOTE(review): deleting from self.files_to_sort while iterating
        # .keys() is only safe on Python 2, where keys() returns a list copy.
        for series in self.files_to_sort.keys():
            series_episodes = self.files_to_sort[series]
            for episode_file in series_episodes:
                sorter_file = series_episodes[episode_file]

                # Episode already has a season dir
                if sorter_file.season_dir.exists():
                    log.info('Season dir for {} already exists {}'.format(
                        episode_file, sorter_file.season_dir))
                # No season dir for episode. Skip if only one episode was found
                else:
                    # Skip if only one episode was found
                    if len(series_episodes) < 2:
                        log.debug('Skipping {}, only one episode found'.format(
                            series_episodes.iterkeys().next()))
                        del(self.files_to_sort[series])

        # Loop remaining files for folder creating and moving
        for series in self.files_to_sort:
            series_episodes = self.files_to_sort[series]
            for episode_file in series_episodes:
                sorter_file = series_episodes[episode_file]

                # Create series folder if needed
                self.create_series_folders(sorter_file)

                # Move the file
                self.move_sorter_file(sorter_file)

                # Move subtitle files
                self.move_subtitle_files(sorter_file)

    def sort(self):
        """Scan the sort dir and move everything that qualifies."""
        self.get_sorter_files()
        self.sort_files()

    def cleanup_empty_folders(self):
        """Remove empty directories under sort_dir, except those under '_'."""
        log.debug('Cleanup empty folders in {}'.format(self.sort_dir))
        dirs_to_check_for_removal = []
        # top_down=False so children are seen before their parents.
        for dir in self.sort_dir.walk(filter=DIRS, top_down=False):
            # Skip all dirs in _ dir
            if '/_' in dir:
                log.debug('Skipping cleanup on {}, _ dir'.format(dir))
                continue

            dirs_to_check_for_removal.append(dir)

        for dir in dirs_to_check_for_removal:
            # If dir is empty, remove it
            if dir.isdir() and len(dir.listdir()) == 0:
                log.info('Removing empty dir {}'.format(self.relative_path(
                    dir, self.sort_dir)))
                dir.rmtree()
print(Path("/home/luke/..").norm()) # Expands .. and . notation print(Path("$HOME/..").expand()) # Expands system variables, ~ and also .. # Expands system variable and ~. Will also normalise the path ( remove redundant # .. . incorrect slashes and correct capitalisation on case sensitive file systems. Calls os.path.normpath # File Attributes and permissions print("\n*** File Attributes and permissions") # noinspection PyArgumentList print(here.atime()) # Last access time; seconds past epcoh # noinspection PyArgumentList print(here.ctime()) # Last permission or ownership modification; windows is creation time; # noinspection PyArgumentList print(here.isfile()) # Is a file; symbolic links are followed. print(here.isdir()) # Is a directory; symbolic links are followed. # noinspection PyArgumentList print(here.islink()) # Is a symbolic link # noinspection PyArgumentList print(here.ismount()) # Is a mount point; ie the parent is on a different device. # noinspection PyArgumentList print(here.exists()) # File exists; symbolic links are followed. # noinspection PyArgumentList print(here.lexists()) # Same as exists but symbolic links are not followed. # noinspection PyArgumentList print(here.size()) # File size in bytes. print(Path("/foo").isabsolute()) # Is absolute and not relative path # Epoch? print("\n*** gmtime") print(gmtime(0))
def test_get_path_with_local_storage(self): with app.app_context(): backup = Backup() path = Path(backup.path) self.assertTrue(path.exists()) self.assertTrue(path.isdir())
def deluge(torrent_id, torrent_name, save_path):
    """Deluge "torrent finished" hook: classify a download and sort it.

    Parameters mirror the Deluge Execute-plugin callback: the torrent id,
    the torrent name and the directory it was saved in.

    Exit codes: 0 after a TV episode was sorted, 2 when TMDB does not know
    the name (likely audio), 3 when too many TMDB candidates remain.  A
    sorted movie falls through and returns normally.
    """
    # Set up logging
    logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG)
    logging.info('Processing torrent from deluge {}: {} in {}'.format(
        torrent_id, torrent_name, save_path))

    # Derive the raw title: for a directory torrent use the name as-is,
    # for a single file drop the extension.
    torrent_path = Path(save_path, torrent_name)
    if torrent_path.isdir():
        raw_name = torrent_name
    else:
        raw_name = torrent_path.stem

    # Test if this is a TV series: match "Name.S01E02" or "Name.1x02".
    # BUGFIX: the second alternative was '\d{1,2}x{\d{1,2}', whose stray '{'
    # after 'x' is treated as a literal brace by re, so "1x02"-style names
    # could never match.  Corrected to '\d{1,2}x\d{1,2}'.
    serie_re = re.compile(
        r'([\w.]+)(?=([sS]\d{2}[eE]\d{2}|\d{1,2}x\d{1,2}))(\2)*')
    series_match = serie_re.match(raw_name)
    if series_match:
        series_name = series_match.group(1).replace('.', ' ').strip()
        episode = series_match.group(2)

        # Get information from TMDB about the series
        try:
            search, name = find_media(series_name, TV_MEDIA)
            # Reduce the results from the TMDB search to one final answer;
            # use the newest series found.
            results = filter_results_by_name(search.results, raw_name)
            latest = select_newest_result_by_air_date(results)
            logging.debug('TMDB returned: {}, original was: {}'.format(
                latest['name'], series_name))
            name = latest['name']
        except MediaNotFoundInTMDBException:
            # Fall back to the name parsed from the file.
            search = None
            name = series_name

        # Normalise the episode tag to the canonical SxxEyy form,
        # zero-padding single-digit season/episode numbers.
        episode_re = re.compile(r'\w?(\d+)\w?(\d+)')
        episode_match = episode_re.match(episode)
        if episode_match:
            serie_nr = episode_match.group(1)
            episode_nr = episode_match.group(2)
            episode = 'S'
            if len(serie_nr) < 2:
                # Series number is a single digit
                episode += '0'
            episode += serie_nr + 'E'
            if len(episode_nr) < 2:
                # Episode number is a single digit
                episode += '0'
            episode += episode_nr
            logging.debug('Series number updated to {}'.format(episode))

        logging.info('Torrent is TV series: {}, episode {}'.format(
            name, episode))
        sort_episode(name, episode, torrent_path)
        logging.info('Done processing torrent')
        sys.exit(0)

    # If it is not a single episode from a TV series then we can check
    # The Movie Database to see if it is a known series or movie.
    # Cut the name at the first release-tag separator (e.g. "720p").
    likely_name = raw_name.lower()
    for sep in VIDEO_INDICATORS:
        likely_name = likely_name.split(sep, 1)[0]
    logging.debug('Reduced name from "{}" to "{}"'.format(raw_name,
                                                          likely_name))

    logging.info('Determining if movie/series based on file name')
    try:
        search, name = find_media(likely_name)
        results = filter_results_by_name(search.results, raw_name)

        # If there are still too many titles remove those with dates that
        # don't match with those in the filename
        if len(results) > 1:
            # No need to pre-compile for a single search call.
            date_match = re.search(r'(\d{4})', raw_name)
            if date_match:
                date = date_match.group(1)
                results = filter_results_by_year(results, date)

        # There are no more methods to reduce the options, so if there are
        # still too many we have failed.
        if len(results) > 1:
            # BUGFIX: previously logged the literal text 'torrent_name'
            # instead of the actual torrent name.
            logging.error('There are too many video results for {}'.format(
                torrent_name))
            sys.exit(3)

        result = results[0]
        if result['media_type'] == 'movie':
            logging.info('Media is a movie')
            sort_movie(result['title'], result['release_date'][:4],
                       torrent_path)
        elif result['media_type'] == 'tv':
            # NOTE(review): the TV branch only logs — no sort call here.
            # Looks unfinished; confirm intended behavior before changing.
            logging.info('Media is a TV series')
    except MediaNotFoundInTMDBException:
        logging.info('Media not found in TMDB, it is likely audio')
        sys.exit(2)
def _clear_cache_for_report(report): report_dir = Path(report).parent cache_dir = Path(report_dir, '.cache') if cache_dir.isdir(): print 'removing cache dir:\n\t{}'.format(cache_dir) cache_dir.rmtree()
def test_mkdir_and_rmdir_with_parents(self): abc = Path(self.d, "a", "b", "c") abc.mkdir(parents=True) assert abc.isdir() abc.rmdir(parents=True) assert not Path(self.d, "a").exists()