def __init__(self, master_file_path, output_dir_path=None, adapter_str="rasa",
             base_filepath=None, local=False, seed=None, force_overwriting=False):
    if local:
        self.output_dir_path = os.path.dirname(master_file_path)
    else:
        self.output_dir_path = getcwd()
    if output_dir_path is None:
        self.output_dir_path = os.path.join(self.output_dir_path, "output")
    else:
        self.output_dir_path = os.path.join(self.output_dir_path, output_dir_path)
    self.force_overwriting = force_overwriting

    # Initialize the random number generator
    if seed is None:
        seed = random_string()
        print("Executing Chatette with random seed '" + seed + "'.")
    else:
        print("Executing Chatette with seed '" + seed + "'.")
    random_seed(seed)

    self.adapter = adapter_factory.create_adapter(adapter_str, base_filepath)

    self.parser = Parser(master_file_path)
    self.generator = None
def wrapper(*args, **kwargs):
    if 'out_file' not in kwargs or kwargs['out_file'] is None:
        kwargs['out_file'] = 'MDTF_NCO_temp.nc'
        move_back = True
    else:
        move_back = False
    if 'cwd' not in kwargs:
        kwargs['cwd'] = None
    if 'in_file' not in kwargs:
        print("nchelper didn't get in_file: {}".format(kwargs))
        raise AssertionError()
    # only pass func the keyword arguments it accepts
    named_args = six.get_function_code(function).co_varnames
    fkwargs = dict((k, kwargs[k]) for k in named_args if k in kwargs)
    result = function(*args, **fkwargs)
    if move_back:
        # manually move file back
        if kwargs.get('dry_run', False):
            print('DRY_RUN: move {} to {}'.format(kwargs['out_file'], kwargs['in_file']))
        else:
            if kwargs['cwd']:
                cwd = getcwd()
                os.chdir(kwargs['cwd'])
            os.remove(kwargs['in_file'])
            shutil.move(kwargs['out_file'], kwargs['in_file'])
            if kwargs['cwd']:
                os.chdir(cwd)
    return result
def _clone_git_plugin(git_url, rev=None):
    """Clone a plugin into a temporary destination directory.

    :param git_url: Plugin's Git URL
    :param rev: git branch/tag/revision
    :return: Path to the cloned plugin directory (str)
    """
    plugin_git_name = os.path.split(git_url)[-1].split('.')[0]
    tmpdir = tempfile.mkdtemp(prefix="ir-")
    cwd = getcwd()
    os.chdir(tmpdir)
    try:
        repo = git.Repo.clone_from(
            url=git_url, to_path=os.path.join(tmpdir, plugin_git_name))
        if rev is not None:
            repo.git.checkout(rev)
    except git.exc.GitCommandError as e:
        shutil.rmtree(tmpdir)
        raise IRFailedToAddPlugin(
            "Cloning git repo {} failed: {}".format(git_url, e))
    plugin_tmp_source = os.path.join(tmpdir, plugin_git_name)
    os.chdir(cwd)
    return plugin_tmp_source
def restart(exit=False):
    log.info('Restarting')
    if exit:
        # Exit current process and restart a new one with the same args
        # Get executable and args
        popen_list = [sys.executable, autosubliminal.EXECUTABLE]
        popen_list += autosubliminal.ARGS
        # Stop without exit
        stop(exit=False)
        log.info('Restarting application with command and arguments: %s', popen_list)
        log.info('Exiting application with PID: %s', autosubliminal.PID)
        # Shutdown
        _shutdown()
        # Start new process
        subprocess.Popen(popen_list, cwd=getcwd())
        # Exit current process
        _exit()
    else:
        # Stop without killing current process and restart
        stop(exit=False)
        autosubliminal.initialize()
        start_server(True)
        start()
        log.info('Restarted')
def main(): description = "Organize files in your directory instantly,by classifying them into different folders" parser = argparse.ArgumentParser(description=description) parser.add_argument("-st", "--specific-types", type=str, nargs='+', help="Move all file extensions, given in the args list, in the current directory into the Specific Folder") parser.add_argument("-sf", "--specific-folder", type=str, help="Folder to move Specific File Type") parser.add_argument("-o", "--output", type=str, help="Main directory to put organized folders") parser.add_argument("-dt", "--date", action='store_true', help="Organize files by creation date") parser.add_argument("-rd", "--redo", action='store_true', help="Redo the last action") args = parser.parse_args() formats = { 'Music': ['.mp3', '.aac', '.flac', '.ogg', '.wma', '.m4a', '.aiff', '.wav'], 'Videos': ['.flv', '.ogv', '.avi', '.mp4', '.mpg', '.mpeg', '.3gp', '.mkv', '.ts'], 'Pictures': ['.png', '.jpeg', '.gif', '.jpg', '.bmp', '.svg', '.webp', '.psd'], 'Archives': ['.rar', '.zip', '.7z', '.gz', '.bz2', '.tar', '.dmg', '.tgz', '.xz'], 'Documents': ['.txt', '.pdf', '.doc', '.docx', '.xls', '.xlsv', '.xlsx', '.ppt', '.pptx', '.ppsx', '.odp', '.odt', '.ods', '.md', '.json', '.csv'], 'Books': ['.mobi', '.epub', '.chm'], 'DEBPackages': ['.deb'], 'RPMPackages': ['.rpm'] } if bool(args.specific_folder) ^ bool(args.specific_types): print( 'Specific Folder and Specific Types need to be specified together') sys.exit() if args.specific_folder and args.specific_types: specific_folder = _format_arg(args.specific_folder) formats = {specific_folder: args.specific_types} if args.output is None: output = getcwd() else: output = _format_arg(args.output) if args.date: classify_by_date('DD-MM-YYYY', output) elif args.redo: classify_redo(output) else: classify(formats, output) sys.exit()
def __init__(self):
    self.rasa_adapter = RasaAdapter()
    self.jsonl_adapter = JsonListAdapter
    self.cwd = getcwd()
    self.output_dirpath = \
        os.path.join(self.cwd, "tests/system-testing/output")
    self.generator = None
    self.train_examples = None
    self.test_examples = None
def classify_by_date(date_format, output_dir):
    print("Scanning Files")
    directory = getcwd()
    files = [x for x in os.listdir(directory) if not x.startswith(".")]
    creation_dates = map(lambda x: (x, arrow.get(os.path.getctime(x))), files)
    for file, creation_date in creation_dates:
        folder = creation_date.format(date_format)
        moveto(file, directory, folder)
    print("Done!")
def main(): description = "Generate a simple report for header files in your directory" parser = argparse.ArgumentParser(description=description) parser.add_argument("-i", "--input", type=str, help="directory that header(.h) files in. ") parser.add_argument("-o", "--output", type=str, help="file (or directory) path to put txt report in.") parser.add_argument( "-s", "--sortby", type=str, help="prefix or count . Means sort by prefix or count.") parser.add_argument("-d", "--order", type=str, help="desc or asc.") parser.add_argument("-p", "--prefixlength", type=int, help="prefix length for classify , default to 2.") parser.add_argument("-u", "--updatedb", help="force update cocoapods database.") args = parser.parse_args() output = _get_param(args.output, None) input = _get_param(args.input, getcwd()) sortby = _get_param(args.sortby, "prefix") is_asc = _get_param(args.order, "asc") == "asc" prefixlength = int(_get_param(args.prefixlength, "2")) forceupdate = args.updatedb if forceupdate is not None: force_update_db() sys.exit() check_cocoapods_db() global checker checker = Check() checker.load() classify(input, output, sortby=sortby, asc=is_asc, prefix_length=prefixlength) sys.exit()
def chdir(goto):
    """chdir to a directory and back, guaranteed.

    This contextmanager ensures that after changing directory you change
    back, using a try/finally block.
    """
    returnto = getcwd()
    os.chdir(goto)
    try:
        yield
    finally:
        os.chdir(returnto)
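# Illustrative usage sketch (not part of the original snippet). It assumes the
# chdir() helper above is wrapped with contextlib.contextmanager, as its
# docstring and the bare yield imply; the paths shown are examples only.
import os

with chdir('/tmp'):
    print('inside:', os.getcwd())   # the temporary working directory
print('back in:', os.getcwd())      # original directory restored, even if the body raised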
def test_piglit_root():
    """core.get_config() finds "piglit root"/piglit.conf"""
    with open('piglit.conf', 'w') as f:
        f.write(_CONF_FILE)
    return_dir = getcwd()
    try:
        os.chdir('..')
        core.get_config()
    finally:
        os.chdir(return_dir)
        os.unlink('piglit.conf')
    nt.ok_(core.PIGLIT_CONFIG.has_section('nose-test'),
           msg='$PIGLIT_ROOT not found')
def classify_by_date(date_format, output_dir):
    print("Scanning Files")
    directory = getcwd()
    obj = []
    files = [x for x in os.listdir(directory) if not x.startswith('.')]
    creation_dates = map(lambda x: (x, arrow.get(os.path.getctime(x))), files)
    for file, creation_date in creation_dates:
        folder = creation_date.format(date_format)
        moveto(file, directory, folder)
        obj.append({"file": file, "directory": directory, "folder": folder})
    # record info to json
    dump_json(0, obj)
    print("Done!")
def classify(formats, output):
    print("Scanning Files")
    directory = getcwd()
    for file in os.listdir(directory):
        filename, file_ext = os.path.splitext(file)
        file_ext = file_ext.lower()
        for folder, ext_list in list(formats.items()):
            folder = os.path.join(output, folder)
            if file_ext in ext_list:
                moveto(file, directory, folder)
    print("Done!")
class ClassifierTest(unittest.TestCase):
    __location = os.path.realpath(
        os.path.join(getcwd(), os.path.dirname(__file__), '.unittest'))
    __tmp_files = [u'test_file', u'test_file_中文']
    __tmp_dirs = [u'test_dir', u'test_dir_中文']

    def setUp(self):
        if not os.path.exists(self.__location):
            os.mkdir(self.__location)
        os.chdir(self.__location)
        for file_ in self.__tmp_files:
            open(file_, 'w').close()
        for dir_ in self.__tmp_dirs:
            if not os.path.exists(dir_):
                os.mkdir(dir_)
        super(ClassifierTest, self).setUp()

    def tearDown(self):
        shutil.rmtree(self.__location)
        super(ClassifierTest, self).tearDown()

    def test_moveto(self):
        target_dir = os.path.abspath(os.path.join(self.__location, 'moveto'))
        for file_ in self.__tmp_files:
            clf.moveto(file_, self.__location, target_dir)
        for file_ in self.__tmp_files:
            final_file_path = os.path.join(target_dir, file_)
            self.assertTrue(os.path.exists(final_file_path))

    def test_classify_bydate(self):
        date_format = 'DD-MM-YYYY'
        target_files = []
        for file_ in self.__tmp_files:
            target_dir = arrow.get(os.path.getctime(file_)).format(date_format)
            final_file_path = os.path.join(target_dir, file_)
            target_files.append(final_file_path)
        clf.classify_by_date(date_format, self.__location)
        # check every expected target path, not just the last one built above
        for file_ in target_files:
            self.assertTrue(os.path.exists(file_))
        for dir_ in self.__tmp_dirs:
            self.assertTrue(os.path.exists(dir_))
def test_in_tempdir(func):
    """Decorator that moves to a new directory to run a test.

    This decorator ensures that the test moves to a new directory, and then
    returns to the original directory after the test completes.
    """
    original_dir = getcwd()

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        with tempdir() as tdir:
            try:
                os.chdir(tdir)
                func(*args, **kwargs)
            finally:
                os.chdir(original_dir)

    return wrapper
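# Illustrative usage sketch (not part of the original snippet): a test wrapped
# with test_in_tempdir executes inside a fresh temporary directory, and the
# decorator chdirs back to the original directory afterwards. The test body
# below is hypothetical.
import os

@test_in_tempdir
def test_writes_scratch_file():
    with open('scratch.txt', 'w') as f:  # created inside the temp dir
        f.write('data')
    assert os.path.exists('scratch.txt')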
def classify(formats, output):
    print("Scanning Files")
    directory = getcwd()
    for file in os.listdir(directory):
        filename, file_ext = os.path.splitext(file)
        file_ext = file_ext.lower()
        for folder, ext_list in list(formats.items()):
            folder = os.path.join(output, folder)
            if file_ext in ext_list:
                try:
                    moveto(file, directory, folder)
                except Exception as e:
                    print('Cannot move file - {} - {}'.format(file, str(e)))
    print("Done!")
def classify(formats, output):
    print("Scanning Files")
    directory = getcwd()
    obj = []
    for file in [x for x in os.listdir(directory) if not x.startswith('.')]:
        filename, file_ext = os.path.splitext(file)
        file_ext = file_ext.lower()
        for folder, ext_list in list(formats.items()):
            folder = os.path.join(output, folder)
            if file_ext in ext_list:
                try:
                    moveto(file, directory, folder)
                    obj.append({"file": file, "directory": directory, "folder": folder})
                except Exception as e:
                    print('Cannot move file - {} - {}'.format(file, str(e)))
    # record info to json
    dump_json(0, obj)
    print("Done!")
def resolve_path(path, root_path="", env=None):
    """Abbreviation to resolve relative paths.

    Args:
        path (:obj:`str`): path to resolve.
        root_path (:obj:`str`, optional): root path to resolve `path` with.
            If not given, resolves relative to `cwd`.

    Returns:
        Absolute version of `path`, relative to `root_path` if given,
        otherwise relative to `os.getcwd`.
    """
    def _expandvars(path, env_dict):
        """Expand quoted variables of the form $key and ${key} in path,
        where key is a key in env_dict, similar to os.path.expandvars.

        See `<https://stackoverflow.com/a/30777398>`__; specialize to not
        skipping escaped characters and not changing unrecognized variables.
        """
        return re.sub(
            r'\$(\w+|\{([^}]*)\})',
            lambda m: env_dict.get(m.group(2) or m.group(1), m.group(0)),
            path)

    if path == '':
        return path  # default value set elsewhere
    path = os.path.expanduser(path)  # resolve '~' to home dir
    path = os.path.expandvars(path)  # expand $VAR or ${VAR} for shell envvars
    if isinstance(env, dict):
        path = _expandvars(path, env)
    if '$' in path:
        print("Warning: couldn't resolve all env vars in '{}'".format(path))
        return path
    if os.path.isabs(path):
        return path
    if root_path == "":
        root_path = getcwd()
    assert os.path.isabs(root_path)
    return os.path.normpath(os.path.join(root_path, path))
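# Illustrative usage sketch (not part of the original snippet), showing how
# resolve_path() layers '~' expansion, environment variables and a root path.
# The values in the comments assume HOME=/home/user; the env dict below is a
# hypothetical stand-in for a caller-supplied variable table.
print(resolve_path('~/models'))                                    # /home/user/models
print(resolve_path('input.nc', root_path='/work/run1'))            # /work/run1/input.nc
print(resolve_path('${DATA_DIR}/obs', env={'DATA_DIR': '/data'}))  # /data/obs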
def initialize():
    global CONFIGFILE, CONFIGVERSION, CONFIGUPGRADED, \
        CACHEDIR, DEREFERURL, GITHUBURL, VERSIONURL, USERAGENT, SYSENCODING, TIMEOUT, WANTEDQUEUE, WANTEDQUEUELOCK, \
        WEBSOCKETMESSAGEQUEUE, WEBSOCKETBROADCASTER, SCHEDULERS, SCANDISK, SCANLIBRARY, CHECKSUB, CHECKVERSION, \
        DEVELOPER, \
        TVDBAPIKEY, TVDBURL, IMDBURL, SHOWINDEXER, MOVIEINDEXER, \
        DBFILE, DBVERSION, DBTIMESTAMPFORMAT, \
        PYTHONVERSION, DAEMON, STARTED, PID, UUID, \
        PATH, VIDEOPATHS, DEFAULTLANGUAGE, DEFAULTLANGUAGESUFFIX, ADDITIONALLANGUAGES, MANUALSEARCHWITHSCORING, \
        SCANDISKINTERVAL, CHECKSUBINTERVAL, CHECKSUBDEADLINE, CHECKSUBDELTA, CHECKVERSIONINTERVAL, \
        CHECKVERSIONAUTOUPDATE, SCANEMBEDDEDSUBS, SCANHARDCODEDSUBS, SKIPHIDDENDIRS, DETECTINVALIDSUBLANGUAGE, \
        DETECTEDLANGUAGEPROBABILITY, MINVIDEOFILESIZE, MAXDBRESULTS, TIMESTAMPFORMAT, \
        LIBRARYMODE, LIBRARYPATHS, SCANLIBRARYINTERVAL, \
        LOGFILE, LOGLEVEL, LOGSIZE, LOGNUM, LOGHTTPACCESS, LOGEXTERNALLIBS, LOGDETAILEDFORMAT, LOGREVERSED, \
        LOGLEVELCONSOLE, \
        WEBSERVERIP, WEBSERVERPORT, WEBROOT, USERNAME, PASSWORD, LAUNCHBROWSER, \
        SHOWMINMATCHSCORE, SHOWMINMATCHSCOREDEFAULT, SHOWMATCHSOURCE, SHOWMATCHQUALITY, SHOWMATCHCODEC, \
        SHOWMATCHRELEASEGROUP, \
        MOVIEMINMATCHSCORE, MOVIEMINMATCHSCOREDEFAULT, MOVIEMATCHSOURCE, MOVIEMATCHQUALITY, MOVIEMATCHCODEC, \
        MOVIEMATCHRELEASEGROUP, \
        SUBLIMINALPROVIDERMANAGER, SUBLIMINALPROVIDERS, SUBLIMINALPROVIDERCONFIGS, \
        SUBTITLEUTF8ENCODING, MANUALREFINEVIDEO, REFINEVIDEO, PREFERHEARINGIMPAIRED, \
        ADDIC7EDUSERNAME, ADDIC7EDPASSWORD, OPENSUBTITLESUSERNAME, OPENSUBTITLESPASSWORD, \
        SHOWNAMEMAPPING, ADDIC7EDSHOWNAMEMAPPING, ALTERNATIVESHOWNAMEMAPPING, \
        MOVIENAMEMAPPING, ALTERNATIVEMOVIENAMEMAPPING, \
        SKIPSHOW, SKIPMOVIE, \
        NOTIFY, NOTIFYMAIL, MAILSRV, MAILFROMADDR, MAILTOADDR, MAILUSERNAME, MAILPASSWORD, MAILSUBJECT, MAILAUTH, \
        MAILENCRYPTION, NOTIFYTWITTER, TWITTERKEY, TWITTERSECRET, NOTIFYPUSHALOT, PUSHALOTAPI, \
        NOTIFYPUSHOVER, PUSHOVERKEY, PUSHOVERAPI, PUSHOVERDEVICES, \
        NOTIFYGROWL, GROWLHOST, GROWLPORT, GROWLPASS, GROWLPRIORITY, NOTIFYPROWL, PROWLAPI, PROWLPRIORITY, \
        NOTIFYPUSHBULLET, PUSHBULLETAPI, NOTIFYTELEGRAM, TELEGRAMBOTAPI, TELEGRAMCHATID, \
        POSTPROCESS, POSTPROCESSINDIVIDUAL, POSTPROCESSUTF8ENCODING, SHOWPOSTPROCESSCMD, SHOWPOSTPROCESSCMDARGS, \
        MOVIEPOSTPROCESSCMD, MOVIEPOSTPROCESSCMDARGS

    # Fake some entry points to get libraries working without installation
    _fake_entry_points()

    # Check python version
    PYTHONVERSION = get_python_version_strict()
    python_version_changed = _check_python_version_change()

    # System settings
    PATH = os.path.abspath(getcwd())
    CACHEDIR = os.path.abspath(os.path.join(PATH, 'cache'))
    DEREFERURL = 'http://www.dereferer.org/?'
    GITHUBURL = 'https://github.com/h3llrais3r/Auto-Subliminal'
    VERSIONURL = 'https://raw.github.com/h3llrais3r/Auto-Subliminal/master/autosubliminal/version.py'
    USERAGENT = 'Auto-Subliminal/' + version.RELEASE_VERSION
    TIMEOUT = 300

    # Wanted queue settings
    WANTEDQUEUE = []
    WANTEDQUEUELOCK = False

    # Websocket settings
    WEBSOCKETMESSAGEQUEUE = []

    # Scheduler settings
    SCHEDULERS = {}

    # Developer settings
    DEVELOPER = False

    # Indexer settings
    TVDBAPIKEY = '76F2D5362F45C5EC'
    TVDBURL = 'http://thetvdb.com/?tab=series&id='
    IMDBURL = 'http://www.imdb.com/title/'
    SHOWINDEXER = ShowIndexer()
    MOVIEINDEXER = MovieIndexer()

    # Startup settings
    STARTED = False
    UUID = uuid.uuid4()  # Generate random uuid each time we initialize the application

    # Webserver settings
    LAUNCHBROWSER = True

    # Score settings
    SHOWMINMATCHSCOREDEFAULT = 330
    MOVIEMINMATCHSCOREDEFAULT = 90

    # Cache settings
    _init_cache(python_version_changed)

    # Guessit settings
    _init_guessit()

    # Subliminal settings
    SUBLIMINALPROVIDERMANAGER = _init_subliminal(python_version_changed)
    SUBLIMINALPROVIDERCONFIGS = {}

    # Langdetect settings
    _init_langdetect()

    # Config settings
    CONFIGUPGRADED = False
    if CONFIGFILE is None:
        CONFIGFILE = 'config.properties'
    config.read_config(True)
    if CONFIGUPGRADED:
        print('INFO: Config seems to be upgraded. Writing config.')
        config.write_config()
        print('INFO: Writing config done.')

    # Change to the new work directory
    if os.path.exists(PATH):
        os.chdir(PATH)
    else:
        print('ERROR: PATH does not exist, check config.')
        os._exit(1)

    # Database settings
    DBFILE = 'database.db'
    DBTIMESTAMPFORMAT = '%Y-%m-%d %H:%M:%S'
    db.initialize()

    # Logging settings
    logger.initialize()
iPhone 6 Plus (5.5 Inch) 1242 x 2208
You need the screenshot in this resolution, the phone scales them down to 1080 x 1920
iPad (Air and Mini Retina) 1536 x 2048
Apple Watch 312 x 390 pixels (only one orientation)
iPad Pro 2048 x 2732
"""

OUTPUT_DIR = getcwd() + '/FramedAppScreens - %s/' % strftime("%d-%m-%Y AT %H.%M")


def get_script_dir(follow_symlinks=True):
    if getattr(sys, 'frozen', False):  # py2exe, PyInstaller, cx_Freeze
        path = os.path.abspath(sys.executable)
    else:
        path = inspect.getabsfile(get_script_dir)
    if follow_symlinks:
        path = os.path.realpath(path)
    return os.path.dirname(path)


class FrameScreenshots:
    def __init__(self, screenshot_location, title_text, desc_text,
                 title_color=None, desc_color=None):
        self.screenshot_location = screenshot_location
        self.title_text = title_text
def classify(input=None, output=None, sortby="prefix", asc=True, prefix_length=2):
    """Classify .h files.

    :param input: directory that the .h files are in
    :param output: directory (or file path) for the txt report
    :param sortby: prefix=sort by prefix; count=sort by file count; anything else defaults to prefix
    :param asc: True for ascending order, otherwise descending
    :param prefix_length: prefix length used for classification, default 2
    :return:
    """
    if input is None:
        input = getcwd()
    sortindex = 0
    if sortby == "count":
        sortindex = 1
    print('Start analyzing directory:' + input)
    cocoapods = {}
    podsdummys = []
    mapper = {}
    for root, dirs, files in os.walk(input):
        for f in files:
            if not f.endswith('.h'):
                continue
            path = os.path.join(root, f)
            filename = os.path.relpath(path, input)
            prefix = f[:prefix_length]
            # mapper
            if prefix in mapper:
                mapper[prefix].append(filename)
            else:
                mapper[prefix] = [filename]
            # pod dummy
            if filename.startswith('PodsDummy'):
                podsdummys.append(filename)
            # cocoapods
            podname = checker.check(filename)
            if podname is not None:
                cocoapods[podname] = 1
    counter = {}
    for prefix in mapper:
        files = mapper[prefix]
        counter[prefix] = len(files)
    if six.PY2:
        ordered = sorted(counter.iteritems(), key=lambda d: d[sortindex], reverse=not asc)
    else:
        ordered = sorted(counter.items(), key=lambda d: d[sortindex], reverse=not asc)

    # console output ######################################
    # - cocoa pods
    if len(cocoapods.keys()) > 0:
        print('CocoaPods (%d) : ' % len(cocoapods.keys()))
        for pod in cocoapods:
            print(' ' + pod)
            link, info = checker.fetch_cocoapods_link_and_info(pod)
            print(' ' + link)
            print(' ' + info)
    # - pods dummy
    if len(podsdummys) > 0:
        print('Pods Dummy Files (%d):' % len(podsdummys))
        for pod in podsdummys:
            print(' ' + pod)
    # - file mapper
    total_file_count = 0
    for tuple in ordered:
        prefix = tuple[0]
        files = mapper[prefix]
        print("%s (%d)" % (prefix, len(files)))
        total_file_count += len(files)
        for file in files:
            print(" " + file)
    if total_file_count == 0:
        print('No header (.h) files found, please specify the target directory with the -i option.')
        print('Or try --help for more options.')
    else:
        print("--- Total %d files ---" % total_file_count)

    # file output ########################################
    if output is not None:
        if os.path.isdir(output):
            output = os.path.join(output, "result.txt")
        if os.path.isfile(output):
            print('!!!! output file already exists: %s !!!!' % output)
            return
        with open(output, mode='a+') as f:
            # - CocoaPods
            if len(cocoapods.keys()) > 0:
                f.write('CocoaPods (%d) : \n' % len(cocoapods.keys()))
                for pod in cocoapods:
                    f.write(' ' + pod + '\n')
                    link, info = checker.fetch_cocoapods_link_and_info(pod)
                    f.write(' ' + link + '\n')
                    f.write(' ' + info + '\n')
            # - pods dummy
            if len(podsdummys) > 0:
                f.write('Pods Dummy Files (%d):\n' % len(podsdummys))
                for pod in podsdummys:
                    f.write(' ' + pod + '\n')
            f.write('\n')
            for tuple in ordered:
                prefix = tuple[0]
                files = mapper[prefix]
                f.write("%s (%d)\n" % (prefix, len(files)))
                for file in files:
                    if six.PY2:
                        line = file.encode('utf-8')
                    else:
                        line = file
                    f.write(" " + line + "\n")
            f.write("--- Total %d files ---" % total_file_count)
    print('Finish analyzing directory:' + input)
def test_copy_static():
    """summary.html_._copy_static: puts status content in correct locations"""
    html_._copy_static_files(getcwd())
    nt.ok_(os.path.exists('index.css'), msg='index.css not created correctly')
    nt.ok_(os.path.exists('result.css'), msg='result.css not created correctly')
from os import chdir, environ
from time import time
try:
    import seaborn
except:
    pass
# from histutils.fortrandates import datetime2yd
from gridaurora.solarangle import solarzenithangle
#################################
# TODO hack for module data path issue
chdir(environ['HOME'])
import glowaurora
from glowaurora import glowfort
chdir(glowaurora.__path__[0])
print('loaded glow from ' + getcwd())
#################################
# %% demo the solar zenith angle calculation vs AstroPy


def demosolzen(dtime, glat, glon):
    # %% SZA with glow
    yd, utsec = datetime2yd(dtime)[:2]
    sza_glow = empty_like(dtime, dtype=float)
    for j, (d, s) in enumerate(zip(yd, utsec)):
        sza_glow[j] = glowfort.solzen(d, s, glat, glon)
    return DataFrame(index=dtime, data=sza_glow, columns=['glow'])


def demosuncor(T, glat, glon, alt_m):
    # %% Solar location with GLOW
    yd, utsec = datetime2yd(T)[:2]
def main(argv=None):
    """Command line app main function.

    :param list | None argv: Overrides command options (for lib use or testing)
    """
    parser = create_parser()
    args = parser.parse_args() if argv is None else parser.parse_args(argv)
    schemas = ('xsd',) if not args.schemas else tuple(args.schemas)

    if args.debug:
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s - %(levelname)s - %(name)s - %(message)s'
        )
        print('DEBUG logging enabled.')
        try:
            import win_unicode_console
            win_unicode_console.enable()
            log.debug('Running with win-unicode-console patch')
        except Exception:
            pass

    log.debug('TYPE of path: %s' % type(args.path))

    # validate current working dir
    if not args.infile and not args.path:
        args.path = getcwd()
        log.debug('NEW TYPE of path: %s' % type(args.path))

    all_valid = True

    if args.infile:
        log.debug('TYPE of infile.name: %s' % type(args.infile.name))
        print('Validating: %s' % args.infile.name)
        messages = validate(args.infile, schemas)
        is_valid = messages == []
        if is_valid:
            print('VALID - No errors found')
        else:
            print('INVALID - errors found:', file=sys.stderr)
            all_valid = False
            for msg in messages:
                if args.debug:
                    print(msg.__str__(), file=sys.stderr)
                else:
                    print(msg.short, file=sys.stderr)

    if args.path:
        tree_or_dir = 'tree' if args.recursive else 'dir'
        print()
        print('Validating all files in %s %s' % (tree_or_dir, args.path))
        for onix_file_path in iter_files(args.path, args.ext, args.recursive):
            print()
            print('Validating: %s' % onix_file_path)
            with open(onix_file_path, 'rb') as onix_file:
                messages = validate(onix_file, schemas)
            is_valid = messages == []
            if is_valid:
                print('VALID - No errors found')
            else:
                print('INVALID - errors found:', file=sys.stderr)
                all_valid = False
                for msg in messages:
                    if args.debug:
                        print(msg.__str__(), file=sys.stderr)
                    else:
                        print(msg.short, file=sys.stderr)

    if all_valid:
        return 0
    else:
        return 1
def main():
    description = "Organize files in your directory instantly, by classifying them into different folders"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("-st", "--specific-types", type=str, nargs='+',
                        help="Move all file extensions, given in the args list, "
                             "in the current directory into the Specific Folder")
    parser.add_argument("-sf", "--specific-folder", type=str,
                        help="Folder to move Specific File Type")
    parser.add_argument("-o", "--output", type=str,
                        help="Main directory to put organized folders")
    parser.add_argument("-d", "--directory", type=str,
                        help="The directory whose files to classify")
    parser.add_argument("-dt", "--date", action='store_true',
                        help="Organize files by creation date")
    parser.add_argument("-c", "--config", type=str, help="Config file")
    args = parser.parse_args()

    if args.config:
        conf_file_name = os.path.expanduser(args.config)
    else:
        conf_file_name = os.getenv("HOME") + "/.config/classifier"
    if os.path.exists(conf_file_name):
        formats = _load_config(conf_file_name)
    else:
        formats = default_formats
        _save_config(conf_file_name, formats)

    if bool(args.specific_folder) ^ bool(args.specific_types):
        print('Specific Folder and Specific Types need to be specified together')
        sys.exit()

    if args.specific_folder and args.specific_types:
        specific_folder = _format_arg(args.specific_folder)
        formats = {specific_folder: args.specific_types}

    if args.output is None:
        output = getcwd()
    else:
        output = _format_arg(args.output)

    if args.directory is None:
        directory = getcwd()
    else:
        directory = _format_arg(args.directory)
        if args.output is None:
            '''
            if -d arg given without the -o arg, keeping the files of -d
            in the -d path only after classifying
            '''
            output = directory

    if args.date:
        classify_by_date('YYYY-MM-DD', output, directory)
    else:
        classify(formats, output, directory)

    sys.exit()
def main():
    description = "Organize files in your directory instantly, by classifying them into different folders"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("-st", "--specific-types", type=str, nargs='+',
                        help="Move all file extensions, given in the args list, "
                             "in the current directory into the Specific Folder")
    parser.add_argument("-sf", "--specific-folder", type=str,
                        help="Folder to move Specific File Type")
    parser.add_argument("-o", "--output", type=str,
                        help="Main directory to put organized folders")
    parser.add_argument("-d", "--directory", type=str,
                        help="The directory whose files to classify")
    parser.add_argument("-dt", "--date", action='store_true',
                        help="Organize files by creation date")
    args = parser.parse_args()

    formats = {
        'Music': ['.mp3', '.aac', '.flac', '.ogg', '.wma', '.m4a', '.aiff',
                  '.wav', '.amr'],
        'Videos': ['.flv', '.ogv', '.avi', '.mp4', '.mpg', '.mpeg', '.3gp',
                   '.mkv', '.ts', '.webm', '.vob', '.wmv'],
        'Pictures': ['.png', '.jpeg', '.gif', '.jpg', '.bmp', '.svg', '.webp',
                     '.psd', '.tiff'],
        'Archives': ['.rar', '.zip', '.7z', '.gz', '.bz2', '.tar', '.dmg',
                     '.tgz', '.xz', '.iso', '.cpio'],
        'Documents': ['.txt', '.pdf', '.doc', '.docx', '.odf', '.xls', '.xlsv',
                      '.xlsx', '.ppt', '.pptx', '.ppsx', '.odp', '.odt', '.ods',
                      '.md', '.json', '.csv'],
        'Books': ['.mobi', '.epub', '.chm'],
        'DEBPackages': ['.deb'],
        'Programs': ['.exe', '.msi'],
        'RPMPackages': ['.rpm']
    }

    if bool(args.specific_folder) ^ bool(args.specific_types):
        print('Specific Folder and Specific Types need to be specified together')
        sys.exit()

    if args.specific_folder and args.specific_types:
        specific_folder = _format_arg(args.specific_folder)
        formats = {specific_folder: args.specific_types}

    if args.output is None:
        output = getcwd()
    else:
        output = _format_arg(args.output)

    if args.directory is None:
        directory = getcwd()
    else:
        directory = _format_arg(args.directory)
        if args.output is None:
            '''
            if -d arg given without the -o arg, keeping the files of -d
            in the -d path only after classifying
            '''
            output = directory

    if args.date:
        classify_by_date('DD-MM-YYYY', output, directory)
    else:
        classify(formats, output, directory)

    sys.exit()
def main():
    description = "Organize files in your directory instantly, by classifying them into different folders"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "-st",
        "--specific-types",
        type=str,
        nargs="+",
        help="Move all file extensions, given in the args list, in the current directory into the Specific Folder",
    )
    parser.add_argument("-sf", "--specific-folder", type=str, help="Folder to move Specific File Type")
    parser.add_argument("-o", "--output", type=str, help="Main directory to put organized folders")
    parser.add_argument("-dt", "--date", action="store_true", help="Organize files by creation date")
    args = parser.parse_args()

    formats = {
        "Music": [".mp3", ".aac", ".flac", ".ogg", ".wma", ".m4a", ".aiff", ".wav"],
        "Videos": [".flv", ".ogv", ".avi", ".mp4", ".mpg", ".mpeg", ".3gp", ".mkv", ".ts"],
        "Pictures": [".png", ".jpeg", ".gif", ".jpg", ".bmp", ".svg", ".webp", ".psd"],
        "Archives": [".rar", ".zip", ".7z", ".gz", ".bz2", ".tar", ".dmg", ".tgz", ".xz"],
        "Documents": [
            ".txt", ".pdf", ".doc", ".docx", ".xls", ".xlsv", ".xlsx",
            ".ppt", ".pptx", ".ppsx", ".odp", ".odt", ".ods", ".md", ".json", ".csv",
        ],
        "Books": [".mobi", ".epub"],
        "RPMPackages": [".rpm"],
    }

    if bool(args.specific_folder) ^ bool(args.specific_types):
        print("Specific Folder and Specific Types need to be specified together")
        sys.exit()

    if args.specific_folder and args.specific_types:
        specific_folder = _format_arg(args.specific_folder)
        formats = {specific_folder: args.specific_types}

    if args.output is None:
        output = getcwd()
    else:
        output = _format_arg(args.output)

    if args.date:
        classify_by_date("DD-MM-YYYY", output)
    else:
        classify(formats, output)

    sys.exit()
""" from __future__ import print_function from __future__ import unicode_literals import logging, time # both for logging # Configure TUF to use DER format instead of Python dictionaries / JSON. import tuf.conf tuf.conf.METADATA_FORMAT = 'json' # FIXME: I actually think other modules rely on the `os` imported here and # not just for getcwd import os # for getcwd only from six.moves import getcwd WORKING_DIR = getcwd() # When True, the reference implementation's primary.py code displays banners # when firmware images are rejected, to make the successful defense visible. DEMO_MODE = False ### Exceptions class Error(Exception): """ Base class for all Uptane-specific exceptions. """ pass class UnknownVehicle(Error):