def get_img_and_txt_dirs(g4bl, inc_phase, inc_sin, fit_type, exec_d4_d5, l_bound, bin_width):
    """Build (and create if necessary) the image and text output directories.

    The directory stem encodes every enabled analysis option plus the lower
    bound, bin width and fit type.  Returns a ``(img_dir, txt_dir)`` tuple of
    relative paths, each ending with '/'.
    """
    # Compose the directory stem from the enabled analysis options.
    stem = "analysis_"
    if g4bl:
        stem += "g4bl_"
    if inc_sin:
        stem += "sin_"
    if inc_phase:
        stem += "phase_"
    if exec_d4_d5:
        stem += "exec_d4_d5_"
    stem += "l_bound" + str(l_bound) + "bin_" + str(bin_width) + fit_type

    img_dir = "images/" + stem + "/"
    txt_dir = "output_txt/" + stem + "/"

    # Create any missing image sub-directories (makedirs works recursively).
    for sub_dir in ("fits", "rates", "counts", "zoom"):
        if not path_exists(img_dir + sub_dir):
            makedirs(img_dir + sub_dir)
    if not path_exists(txt_dir):
        makedirs(txt_dir)
    return img_dir, txt_dir
def test_jqpc_simple_bar_chart_ok2():
    """ Tests: test_jqpc_simple_bar_chart_ok2

    End-to-end check of jqpc_simple_bar_chart(): generate a bar chart into a
    scratch directory, then verify every expected js/css resource is referenced
    with the correct tag in the returned resources header.
    """
    print('::: TEST: test_jqpc_simple_bar_chart_ok2()')
    scripts_bar_chart_path = path_join(SCRIPT_PATH, 'scripts_bar_chart_test_jqpc_simple_bar_chart_ok2')
    # Start from a clean scratch directory so stale resources can't mask failures.
    if path_exists(scripts_bar_chart_path):
        shutil_rmtree(scripts_bar_chart_path)
    js_css_resources_header, jqplotchart_script, html_chart_insert_tag = jqpc_simple_bar_chart(
        absolute_source_dir_path=scripts_bar_chart_path,
        script_src_tag_dir_path=path_relpath(scripts_bar_chart_path),
        chart_id='example_id',
        class_str='whatever',
        chart_title='JqPyCharts simple_bar_chart',
        chart_x_label='',
        chart_x_label_fontdict=None,
        chart_ticks_fontdict=None,
        chart_data_matrix=[
            ('Fat', 200, '#EAA228', ''),
            ('Protein', 21, '#4bb2c5', ''),
            ('Carbohydrate', 10, '#c5b47f', '')
        ],
        highlighter_prefix='Gram',
        background='#fffdf6',
        horizontal=False,
        draw_grid_lines=False,
        width_px=550,
        height_px=300,
        margin_top_px=0,
        margin_bottom_px=0,
        margin_right_px=0,
        margin_left_px=0)
    # Every resource must appear in the header with the tag matching its type.
    for resource_name in [
            'jquery.min.js',
            'jquery.jqplot.min.js',
            'jqplot.canvasAxisLabelRenderer.min.js',
            'jqplot.categoryAxisRenderer.min.js',
            'jqplot.canvasTextRenderer.min.js',
            'jqplot.barRenderer.min.js',
            'jqplot.pointLabels.min.js',
            'jquery.jqplot.min.css']:
        resource_dir_path__abspath = path_join(scripts_bar_chart_path, resource_name)
        resource_dir_path__relpath = path_relpath(resource_dir_path__abspath)
        if resource_dir_path__abspath[-2:] == 'js':
            check_line = '<script type="text/javascript" src="{}"></script>'.format(resource_dir_path__relpath)
            ok_(check_line in js_css_resources_header, msg=None)
        elif resource_dir_path__abspath[-3:] == 'css':
            check_line = '<link rel="stylesheet" type="text/css" href="{}">'.format(resource_dir_path__relpath)
            ok_(check_line in js_css_resources_header, msg=None)
        else:
            raise Err('test_jqpc_simple_bar_chart_ok2', [
                '`resource_name`: <{}> must end with <.js> or <.css>'.format(resource_name)
            ])
    # Remove the scratch directory created by the chart generator.
    if path_exists(scripts_bar_chart_path):
        shutil_rmtree(scripts_bar_chart_path)
def desinstall(self):
    """Uninstall: delete the product's file, then its (now empty) base directory.

    Both attributes are optional on a partially initialised instance, so each
    is checked with hasattr before touching the filesystem.
    """
    if hasattr(self, 'filename') and path_exists(self.filename):
        remove(self.filename)
    if hasattr(self, 'basedir') and path_exists(self.basedir):
        rmdir(self.basedir)
def test_path_exists(self):
    """ Test collecting migrations when a previous collection exists """
    self.tempdir = tempfile.mkdtemp()
    custom_dir = os.path.join(self.tempdir, 'migrations')
    # Exercise both the default output directory and a custom one.
    for output_dir in (DEFAULT_DIR, custom_dir):
        expected_file = os.path.join(output_dir, 'blog_0001_project.py')
        try:
            # Seed the output directory with a full collection first.
            call_command('collectmigrations', output_dir=output_dir, verbosity=0)
            self.assertTrue(path_exists(output_dir))
            self.assertTrue(path_exists(expected_file))
            # Fully migrate 'blog'; a re-collection must then drop its file
            # since the app no longer needs any migrations collected.
            call_command('migrate', 'blog', verbosity=0)
            call_command('collectmigrations', output_dir=output_dir, verbosity=0)
            self.assertTrue(path_exists(output_dir))
            self.assertFalse(path_exists(expected_file))
        finally:
            # Clean up immediately to prevent dangling temp dirs.
            if path_exists(output_dir):
                shutil.rmtree(output_dir)
            # Revert the migration of the 'blog' test app.
            call_command('migrate', 'blog', 'zero', verbosity=0)
def test_alt_database(self):
    """ Test collecting migrations with an alternate database selected """
    # Bring the default DB fully forward and the 'other' DB fully back.
    call_command('migrate', 'blog', verbosity=0)
    call_command('migrate', 'cookbook', verbosity=0)
    call_command('migrate', 'blog', 'zero', database='other', verbosity=0)
    call_command('migrate', 'cookbook', 'zero', database='other', verbosity=0)
    # Default DB is fully migrated, so nothing should be collected.
    captured = six.StringIO()
    call_command('collectmigrations', stdout=captured, verbosity=3)
    self.assertFalse(path_exists(DEFAULT_DIR))
    output = captured.getvalue().lower()
    self.assertNotIn("migrations collected", output)
    self.assertNotIn("optimizing", output)
    # The other DB has nothing applied, so migrations must be collected.
    captured = six.StringIO()
    call_command('collectmigrations', database='other', stdout=captured, verbosity=3)
    self.assertTrue(path_exists(DEFAULT_DIR))
    output = captured.getvalue().lower()
    self.assertIn("migrations collected", output)
    self.assertIn("optimizing", output)
def test_all_imports_pyx():
    """ Tests: test_all_imports_pyx: for rebuild, syntax correctness and internal imports """
    print('::: TEST: test_all_imports_pyx()')
    files_to_remove = []
    dirs_to_remove = []
    # Gather every .pyx module found anywhere under the package root.
    pyx_module_paths = []
    for root, dirnames, filenames in walk(ROOT_PACKAGE_PATH):
        pyx_module_paths.extend(glob(root + '/*.pyx'))
    for pyx_path in pyx_module_paths:
        module_name = path_splitext(path_basename(pyx_path))[0]
        # Force a rebuild so syntax errors surface on every run.
        ext_module_path, c_file_path, build_dir_path = build_cython_extension(
            pyx_path,
            cython_force_rebuild=True
        )
        # Importing the built extension verifies its internal imports resolve.
        loader = ExtensionFileLoader(module_name, ext_module_path)
        loader.load_module(module_name)
        # Remember generated artifacts for cleanup.
        files_to_remove.append(c_file_path)
        dirs_to_remove.append(build_dir_path)
    # Cleanup
    try:
        for file_ in files_to_remove:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in dirs_to_remove:
            if path_exists(dir_):
                rmtree(dir_)
    except Exception as err:
        raise Exception('test_all_imports_pyx', 'Could not cython_clean_up: Exception: <{}>'.format(err))
def init(self, *args, **kwargs):
    """Initialise the plugin: validate configuration, prepare the remote-plugins
    package directory and register the command handlers."""
    super(RPlugins, self).init(*args, **kwargs)
    if "rplugins" not in self.config:
        raise ConfigError("Remote Plugins not configured!")
    for param in ("path",):
        if param not in self.config["rplugins"]:
            raise ConfigError("Remote Plugins not configured! Missing: {0}".format(repr(param)))
    # Seed the persistent plugin state buckets.
    self.data.init(
        {
            "rplugins": {
                "allowed": {},
                "pending": {},
                "enabled": {},
            }
        }
    )
    plugins_dir = self.config["rplugins"]["path"]
    if not path_exists(plugins_dir):
        mkdir(plugins_dir)
    # The plugins directory must be an importable package, so ensure __init__.py.
    init_py = path_join(plugins_dir, "__init__.py")
    if not path_exists(init_py):
        with open(init_py, "w") as f:
            f.write("")
    if plugins_dir not in module_search_path:
        module_search_path.append(plugins_dir)
    Commands().register(self)
    RPluginsCommands().register(self)
def copy_release():
    """Rebuild the static release output tree from templates and release files."""
    def _copy_creating_dirs(srcfile, dstfile):
        # Make sure the destination directory exists before copying.
        dst_dir = dirname(dstfile)
        if dst_dir != "" and not path_exists(dst_dir):
            makedirs(dst_dir)
        shutil_copy(srcfile, dstfile)

    # Start from a clean output tree.
    if path_exists(STATIC_OUTPUT_PATH):
        rmtree(STATIC_OUTPUT_PATH)
    # Copy template-relative page files, preserving their relative layout.
    for pattern in RELEASE_PAGE_FILES:
        for f in glob(path_join(STATIC_TEMPLATE_PATH, pattern)):
            _copy_creating_dirs(
                normpath(f),
                normpath(path_join(STATIC_OUTPUT_PATH, relpath(f, STATIC_TEMPLATE_PATH))))
    # Copy the top-level release files verbatim.
    for f in RELEASE_FILES:
        _copy_creating_dirs(normpath(f), normpath(path_join(STATIC_OUTPUT_PATH, f)))
    shutil_copy("benchmark.canvas.js", normpath(path_join(STATIC_OUTPUT_PATH, "benchmark.canvas.js")))
    shutil_copytree(normpath('staticmax'), path_join(STATIC_OUTPUT_PATH, 'staticmax'))
    copy_release_capture(config_name=DEFAULT_CAPTURE_NAME)
def getAdvancedAttrs(self):
    """Fetch and cache OMDb metadata and the poster image for this video.

    Downloads are best-effort: a network failure leaves the cache untouched
    and we continue with whatever is already cached.  Updates ``self.attrs``
    in place and registers the attribute names with ``Video``.
    """
    cache_file = self._getCacheFile()
    if not path_exists(self._getCacheFolder()):
        os.mkdir(self._getCacheFolder())
    if not path_exists(cache_file):
        try:
            print("Fetching cache of imdb data!")
            urllib.request.urlretrieve(
                'http://www.omdbapi.com/?i=%s&plot=full&r=json&tomatoes=true' % self.imdbID,
                cache_file)
        except Exception:
            # Best-effort download.  (Was a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit.)
            pass
    # Merge the cached OMDb JSON over/under the existing attributes.
    with open(cache_file) as f:
        self.attrs = override_join(json.loads(f.read()), self.attrs)
    # Fetch the poster when it was never cached or the cached file vanished.
    if 'PosterFile' not in self.attrs or not path_exists(self.attrs['PosterFile']):
        cache_poster_path = self._getCachePosterFile()
        success = True
        if not path_exists(cache_poster_path):
            try:
                print("Fetching cache of imdb poster!")
                urllib.request.urlretrieve(self.attrs['Poster'], cache_poster_path)
            except Exception:
                # Poster URL may be missing or unreachable; skip the attribute.
                success = False
        if success:
            self.attrs['PosterFile'] = relpath(cache_poster_path, self.folder)
    Video.addAttrs(self.attrs.keys())
def run(self):
    """Custom distutils 'clean' command: collect then delete build artifacts.

    Two phases: first gather candidate files/dirs into remove_files and
    remove_dirs, then delete everything in one best-effort pass at the end.
    """
    need_normal_clean = True
    exclude_files = []
    remove_files = []
    remove_dirs = []
    # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
    # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
    if self.all:
        need_normal_clean = True
        for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
                remove_dirs.append(dir_path)
        # pyximport build dirs and setuptools egg-info can appear anywhere.
        for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs:
                if '_pyxbld' in dir_ or 'egg-info' in dir_:
                    remove_dirs.append(path_join(root, dir_))
        # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        for root, dirs, files in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.so', '.c'}:
                        remove_files.append(path_join(root, file_))
                    tmp_name, tmp_ext = path_splitext(file_)
                    if tmp_ext == '.pyx':
                        # Check if we have a html with the same name
                        # (cython --annotate output sits next to the .pyx).
                        check_html_path = path_join(root, tmp_name + '.html')
                        if isfile(check_html_path):
                            remove_files.append(check_html_path)
    # do the general clean
    if need_normal_clean:
        for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
                remove_files.append(file_)
        for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                        remove_files.append(path_join(root, file_))
            for dir_ in dirs:
                if '__pycache__' in dir_:
                    remove_dirs.append(path_join(root, dir_))
    # REMOVE ALL SELECTED
    # Best-effort deletion: a clean must never fail the build.
    # noinspection PyBroadException
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                rmtree(dir_)
    except Exception:
        pass
def check_args(self):
    """Validate that both the source and destination directories exist.

    Logs an error for every missing path and returns True only when both
    paths are present on disk.
    """
    ok = True
    if not path_exists(self.src_path):
        log.error('source directory "%s" does not exist' % self.src_path)
        ok = False
    if not path_exists(self.dst_path):
        log.error('destination directory %s does not exist' % self.dst_path)
        ok = False
    return ok
def test_cli_with_namespace(tmpfolder):
    """Running the CLI with --namespace must create the nested namespace packages."""
    # Given the command line with the namespace option,
    sys.argv = ["pyscaffold", "proj", "--namespace", "ns"]
    # when pyscaffold runs,
    run()
    # then the namespace packages should exist.
    for init_file in ("proj/src/ns/__init__.py", "proj/src/ns/proj/__init__.py"):
        assert path_exists(init_file)
def test_create_project_without_no_skeleton(tmpfolder):
    """Default create_project (extension not enabled) must generate skeleton files."""
    # Given options without the tox extension,
    opts = dict(project="proj")
    # when the project is created,
    create_project(opts)
    # then both skeleton files should exist.
    for skeleton_file in ("proj/src/proj/skeleton.py", "proj/tests/test_skeleton.py"):
        assert path_exists(skeleton_file)
def test_cli_without_no_skeleton(tmpfolder):
    """A plain CLI run (no --no-skeleton) must generate the skeleton files."""
    # Given the command line without the tox option,
    sys.argv = ["pyscaffold", "proj"]
    # when pyscaffold runs,
    run()
    # then both skeleton files should exist.
    for skeleton_file in ("proj/src/proj/skeleton.py", "proj/tests/test_skeleton.py"):
        assert path_exists(skeleton_file)
def test_cli_without_pre_commit(tmpfolder):
    """A plain CLI run (no --pre-commit) must not create pre-commit config files."""
    # Given the command line without the pre-commit option,
    sys.argv = ["pyscaffold", "proj"]
    # when pyscaffold runs,
    run()
    # then no pre-commit configuration files should exist.
    for cfg_file in ("proj/.pre-commit-config.yaml", "proj/.isort.cfg"):
        assert not path_exists(cfg_file)
def test_create_project_without_pre_commit(tmpfolder):
    """create_project without the pre-commit extension must not emit its configs."""
    # Given options without the pre-commit extension,
    opts = dict(project="proj")
    # when the project is created,
    create_project(opts)
    # then no pre-commit configuration files should exist.
    for cfg_file in ("proj/.pre-commit-config.yaml", "proj/.isort.cfg"):
        assert not path_exists(cfg_file)
def test_create_project_wit_no_skeleton(tmpfolder):
    """With the no-skeleton extension enabled the skeleton files must be omitted."""
    # Given options with the tox extension,
    opts = dict(project="proj", extensions=[no_skeleton.NoSkeleton('no-skeleton')])
    # when the project is created,
    create_project(opts)
    # then neither skeleton file should exist.
    for skeleton_file in ("proj/src/proj/skeleton.py", "proj/tests/test_skeleton.py"):
        assert not path_exists(skeleton_file)
def validate_exists(self, value):
    """Raise ValidationError when ``value`` does not exist on the filesystem.

    Skipped entirely when ``self.force_check`` is falsy.

    NOTE(review): the isdir()/isfile() branches look unreachable as written --
    isdir/isfile can only return True for paths that exist, so their inner
    ``not path_exists(value)`` checks can never fire.  In practice only the
    'not_exists' message is ever raised (for paths that do not exist at all).
    Confirm the intended per-type messages before changing this.
    """
    if not self.force_check:
        return
    if isdir(value):
        if not path_exists(value):
            raise ValidationError(self.error_messages['dir_not_exists'].format(filepath=value))
    elif isfile(value):
        if not path_exists(value):
            raise ValidationError(self.error_messages['file_not_exists'].format(filepath=value))
    else:
        raise ValidationError(self.error_messages['not_exists'].format(filepath=value))
def test_jqpc_write__resource_dict__expect_failure1():
    """ Tests: test_jqpc_write__resource_dict__expect_failure1 """
    print('::: TEST: test_jqpc_write__resource_dict__expect_failure1()')
    target_dir = path_join(SCRIPT_PATH, 'out_dir_test_jqpc_write__resource_dict__expect_failure1')
    # Start from a clean slate.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
    # Expected to fail: 'wrong_name' is not a known resource dict.
    jqpc_write__resource_dict('wrong_name', target_dir, force=True)
    # Remove anything the call may have created.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
def main_create(args: Namespace) -> None:
    'create command entry point.'
    original = args.original
    modified = args.modified
    patch = args.patch
    # The original must exist; the two outputs must not be clobbered.
    if not is_file(original):
        exit("pat create error: 'original' is not an existing file: " + original)
    for label, target in (('modified', modified), ('patch', patch)):
        if path_exists(target):
            exit("pat create error: '" + label + "' file already exists: " + target)
    # Write the patch header (version line + original path), then seed the
    # working copy from the original.
    with open(patch, 'w') as f:
        f.write('pat v' + pat_version + '\n')
        f.write(original + '\n')
    copyfile(original, modified)
def test_initial_collect(self):
    """ Test collecting migrations when test apps have none applied """
    self.tempdir = tempfile.mkdtemp()
    custom_dir = os.path.join(self.tempdir, 'migrations')
    # Test outputting to both the default directory and a custom directory
    for output_dir in (DEFAULT_DIR, custom_dir):
        out = six.StringIO()
        try:
            # output_dir is only passed explicitly for the custom directory;
            # the default-directory case exercises the command's default.
            if output_dir == custom_dir:
                call_command('collectmigrations', stdout=out, output_dir=output_dir, verbosity=3)
            else:
                call_command('collectmigrations', stdout=out, verbosity=3)
            self.assertTrue(path_exists(output_dir))
            # Load the generated migration modules before the directory is removed.
            blog_migrations, cookbook_migrations = self.load_migrations(
                dir=output_dir)
        finally:
            # Clean up immediately to prevent dangling temp dirs
            if path_exists(output_dir):
                shutil.rmtree(output_dir)
        # Check that the human visible output looks as expected
        self.assertIn("migrations collected", out.getvalue().lower())
        self.assertIn("optimizing", out.getvalue().lower())
        self.assertIn("optimized from", out.getvalue().lower())
        # Check that migrations have the correct number of operations
        self.assertEqual(len(blog_migrations[0].Migration.operations),
                         BLOG_FULL_MIGRATION_OPERATION_COUNT[0])
        self.assertEqual(len(cookbook_migrations[0].Migration.operations),
                         COOKBOOK_FULL_MIGRATION_OPERATION_COUNT[0])
        self.assertEqual(len(cookbook_migrations[1].Migration.operations),
                         COOKBOOK_FULL_MIGRATION_OPERATION_COUNT[1])
        # Check the migration dependencies
        self.assertEqual(
            sorted(blog_migrations[0].Migration.dependencies),
            [('auth', '__first__'), ('cookbook', '__first__')]
        )
        self.assertEqual(cookbook_migrations[0].Migration.dependencies, [])
        self.assertEqual(
            sorted(cookbook_migrations[1].Migration.dependencies),
            [('blog', '0001_project'), ('cookbook', '0001_project')]
        )
        # Check the migration replaces count
        self.assertEqual(len(blog_migrations[0].Migration.replaces), 3)
        self.assertEqual(len(cookbook_migrations[0].Migration.replaces), 1)
        self.assertEqual(len(cookbook_migrations[1].Migration.replaces), 5)
def test_single_app_migration(self):
    """ Test collecting migrations when a single app needs a migration """
    # Migrate 'blog' fully so only 'cookbook' remains unmigrated.
    call_command('migrate', 'blog', verbosity=0)
    call_command('collectmigrations', verbosity=0)
    blog_file = os.path.join(DEFAULT_DIR, 'blog_0001_project.py')
    cookbook_file = os.path.join(
        DEFAULT_DIR, 'cookbook_0001_project.py')
    # Only the unmigrated app should have produced a collected migration.
    self.assertTrue(path_exists(DEFAULT_DIR))
    self.assertTrue(path_exists(cookbook_file))
    self.assertFalse(path_exists(blog_file))
def test_jqpc_write__selected_resources__expect_failure1():
    """ Tests: test_jqpc_write__selected_resources__expect_failure1 """
    print('::: TEST: test_jqpc_write__selected_resources__expect_failure1()')
    target_dir = path_join(SCRIPT_PATH, 'out_dir_test_jqpc_write__selected_resources__expect_failure1')
    # Start from a clean slate.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
    # 'wrong_resource_name' is not a valid resource: the call should fail.
    selected_resources = ['excanvas.min.js', 'jqplot.highlighter.min.js', 'wrong_resource_name']
    jqpc_write__selected_resources('jqplot_scripts', selected_resources, target_dir, force=False)
    # Remove anything the call may have created.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
def test_pretend_create_project_with_django(tmpfolder, caplog):
    """--pretend with --django must log activities without touching the disk."""
    # Given options with the django extension,
    opts = parse_args([PROJ_NAME, '--pretend', '--django'])
    # when the project is created,
    create_project(opts)
    # then no files may be created in pretend mode ...
    assert not path_exists(PROJ_NAME)
    for path in DJANGO_FILES:
        assert not path_exists(path)
    # ... but the django activity must still be logged.
    assert re.search(r'run\s+django', caplog.text)
def test_create_project_with_namespace(tmpfolder):
    """The namespace extension nests the package under the namespace path."""
    # Given options with the namespace extension,
    opts = dict(project="my-proj", namespace="ns.ns2",
                extensions=[namespace.Namespace('namespace')])
    # when the project is created,
    create_project(opts)
    # then the nested namespace structure should exist ...
    for nested_init in ("my-proj/src/ns/__init__.py",
                        "my-proj/src/ns/ns2/__init__.py",
                        "my-proj/src/ns/ns2/my_proj/__init__.py"):
        assert path_exists(nested_init)
    # ... and the plain (non-namespaced) layout should not.
    assert not path_exists("my-proj/src/my_proj/__init__.py")
def test_jqpc_write__resource_dict_ok1():
    """ Tests: test_jqpc_write__resource_dict_ok1 """
    print('::: TEST: test_jqpc_write__resource_dict_ok1()')
    target_dir = path_join(SCRIPT_PATH, 'out_dir_path_test_jqpc_write__resource_dict_ok1')
    # Start from a clean slate.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
    # Initial write, a forced overwrite, then a no-force rewrite: all must succeed.
    for force_flag in (False, True, False):
        jqpc_write__resource_dict('jqplot_scripts', target_dir, force=force_flag)
    # Remove the generated output.
    if path_exists(target_dir):
        shutil_rmtree(target_dir)
def install(self):
    """Install product"""
    # Collects a human-readable installation log in `out` and returns it.
    out = StringIO()
    Layers = []
    # Determine the Plone version so a version-specific skin layer can be used.
    mtool = getToolByName(self, 'portal_migration')
    plone_version = mtool.getFileSystemVersion()
    product_path = package_home(qSEO_globals)
    versioned_skin = path_join(product_path, 'skins', 'qSEOptimizer', plone_version)
    # add exposeDCMetaTags property to Plone 2.0.x
    props = getToolByName(self, 'portal_properties').site_properties
    if not hasattr(props, 'exposeDCMetaTags'):
        props._setProperty('exposeDCMetaTags', True, 'boolean')
    Layers.append('qSEOptimizer')
    out.write(' Searching for %s... ' % versioned_skin)
    if path_exists(versioned_skin):
        out.write('found.\n')
        # Register the version-specific skin layer on top of the base one.
        Layers.append('qSEOptimizer/%s' % plone_version)
    else:
        out.write("""not found.\nLimited functionality mode. Upgrade qSEOptimizer product or report to [email protected] if uprade not available.\n\n""")
    out.write('Call setupSkin... \n')
    setupSkin(self, out, Layers)
    out.write('Call setupActions... \n')
    setupActions(self, out)
    # Register control-panel configlets when the tool is available.
    configTool = getToolByName(self, 'portal_controlpanel', None)
    if configTool:
        for conf in configlets:
            configTool.registerConfiglet(**conf)
            out.write('Added configlet %s\n' % conf['id'])
    return out.getvalue()
def standard_output_version(version, dependencies, output_file=None):
    """Print '<tool> <version> (<dep> <ver>, ...)' and optionally persist it.

    NOTE: Python 2 module (print statements, list-returning dict.keys()).
    Recursively imports each dependency to read its __version__ and
    __dependencies__; exits with status 1 on an import failure.
    """
    main_module_name = path_basename(sys.argv[0])
    version_string = None
    if dependencies:
        deps = { }

        def get_dependencies_set(this_module_name, deps_list):
            # Depth-first walk of the dependency graph; `deps` doubles as the
            # visited set so shared dependencies are imported only once.
            for module_name in deps_list:
                if module_name not in deps:
                    m = None
                    try:
                        m = __import__(module_name, globals(), locals(),
                                       ['__version__', '__dependencies__'])
                    except ImportError:
                        print "Failed to import %s, listed in dependencies " \
                              "for %s" % (module_name, this_module_name)
                        exit(1)
                    else:
                        # Test is the module actually has a version attribute
                        try:
                            version_ = m.__version__
                        except AttributeError as e:
                            print 'No __version__ attribute for tool %s' \
                                  % m.__name__
                            print ' >> %s' % str(e)
                        else:
                            deps[module_name] = m
                    if m is not None:
                        # Recurse into the module's own dependencies, if any.
                        try:
                            get_dependencies_set(module_name, m.__dependencies__)
                        except AttributeError:
                            pass

        get_dependencies_set(main_module_name, dependencies)
        module_names = deps.keys()
        module_names.sort()
        module_list = ', '.join(['%s %s' % (deps[m].__name__, deps[m].__version__)
                                 for m in module_names])
        version_string = '%s %s (%s)' % (main_module_name, version, module_list)
    else:
        version_string = '%s %s' % (main_module_name, version)
    # If we are given an output file, write the versions info there if
    # either:
    #   the file doesn't exist already, or
    #   the file contains different data
    # If we are given no output file, just write to stdout.
    print version_string
    if output_file is not None:
        if path_exists(output_file):
            with open(output_file, "rb") as f:
                old_version = f.read()
            if old_version == version_string:
                return
        with open(output_file, "wb") as f:
            f.write(version_string)
def path_checksum(paths):
    """
    Recursively calculates a checksum representing the contents of all files
    found with a sequence of file and/or directory paths.

    http://code.activestate.com/recipes/576973-getting-the-sha-1-or-md5-hash-of-a-directory/

    Returns the hex digest of a SHA-1 over the file contents, visited in a
    deterministic (sorted) order.  Raises TypeError for non-iterable input.
    """
    if not hasattr(paths, '__iter__'):
        # FIX: previously incremented `self.failed_builds_counter` here, but
        # this is a free function -- `self` was undefined, so a NameError was
        # raised instead of the intended TypeError.
        raise TypeError('sequence or iterable expected not %r!' % type(paths))

    def _update_checksum(checksum, dirname_, filenames):
        """Fold the contents of each regular file in `filenames` into `checksum`."""
        for filename in sorted(filenames):
            path = path_join(dirname_, filename)
            if isfile(path):
                with open(path, 'rb') as file_handler:
                    while 1:
                        buf = file_handler.read(4096)
                        if not buf:
                            break
                        checksum.update(buf)

    chksum = hashlib.sha1()
    for path in sorted([normpath(f) for f in paths]):
        if path_exists(path):
            if isdir(path):
                # NOTE(review): `walk` here matches the Python 2
                # os.path.walk callback API -- confirm this should be
                # migrated to os.walk on Python 3.
                walk(path, _update_checksum, chksum)
            elif isfile(path):
                # FIX: the filenames argument must be a list; passing the
                # bare string made the helper iterate over its characters
                # and silently skip the file's contents.
                _update_checksum(chksum, dirname(path), [basename(path)])
    return chksum.hexdigest()
def git_versions_from_vcs(tag_prefix_, root, verbose=False):
    """Derive version info by running git in the source tree rooted at `root`.

    Only used when the git-archive 'subst' keywords were *not* expanded and
    _version.py has not been rewritten -- i.e. we are inside a checkout.
    Returns {'version': ..., 'full': ...} on success, or {} on any failure.
    """
    if not path_exists(path_join(root, '.git')):
        if verbose:
            print('no .git in {}'.format(root))
        return {}

    # noinspection PyPep8Naming
    GITS = ['git']
    described = run_command(GITS, ['describe', '--tags', '--dirty', '--always'], cwd=root)
    if described is None:
        return {}
    if not described.startswith(tag_prefix_):
        if verbose:
            print('tag <{}> does not start with prefix <{}>'.format(described, tag_prefix_))
        return {}
    tag = described[len(tag_prefix_):]

    head = run_command(GITS, ['rev-parse', 'HEAD'], cwd=root)
    if head is None:
        return {}
    full = head.strip()
    # Propagate the dirty marker from `git describe` onto the full sha.
    if tag.endswith('-dirty'):
        full += '-dirty'
    return {'version': tag, 'full': full}
def load(self):
    """Populate this datashare's fields from its YAML file on disk.

    Raises NotFound when the file is missing; logs and re-raises on
    read, parse or missing-key errors.
    """
    path = self.get_path()
    if not path_exists(path):
        raise NotFound('No data share with id "%s"' % self.datashare_id)
    try:
        with open(path, 'r') as f:
            # safe_load: plain yaml.load without an explicit Loader can
            # construct arbitrary objects and is deprecated/unsafe.
            yaml_data = yaml.safe_load(f)
        self.owner = yaml_data['owner']
        self.created = yaml_data['created']
        self.users = yaml_data['users']
        self.store = yaml_data['store']
        self.joinable = yaml_data['joinable']
    except (IOError, KeyError, yaml.YAMLError) as e:
        # Log with the local `path`: `self.path` is not guaranteed to be set
        # here and referencing it could mask the original error.
        LOG.error('Failed loading datashare file "%s": %s', path, str(e))
        raise
def __init__(self, path, assets_paths, old_hash=None):
    """Resolve `path` against the candidate asset roots and record hash state.

    Raises IOError when the asset is not found under any root.  When an
    `old_hash` is supplied it is trusted until explicitly re-checked;
    otherwise the hash is computed now and the asset is marked changed.
    """
    self.path = path
    # Probe each root in order; the first hit wins.
    for root in assets_paths:
        self.asset_path = path_join(root, path)
        if path_exists(self.asset_path):
            break
    else:
        raise IOError('Source asset path not found for %s' % self.path)
    if old_hash:
        # Trust the previously recorded hash until something verifies it.
        self.hash = old_hash
        self.hash_checked = False
        self.changed = None
    else:
        self.hash = self.calculate_hash()
        self.hash_checked = True
        self.changed = True
    self.built = False
def file_spinlock(file_name, timeout, step=0.01):
    """
    Busy-wait until a file named `file_name` appears on disk.

    Polls every `step` seconds for at most `timeout` seconds (measured by
    accumulating the step intervals, not wall-clock time).

    :return True if spinlock was unlocked; False if timeout occurred
    """
    waited = 0.0
    while waited < timeout:
        if path_exists(file_name):
            return True
        waited += step
        sleep(step)
    return False
class Settings: """ Globally accessible settings throughout whole project """ # locations log_location = localize_path("logs") database_location = localize_path("db", "instapy.db") specific_chromedriver = "chromedriver_{}".format(OS_ENV) chromedriver_location = localize_path("assets", specific_chromedriver) if (not chromedriver_location or not path_exists(chromedriver_location)): chromedriver_location = localize_path("assets", "chromedriver") # minimum supported version of chromedriver chromedriver_min_version = 2.36 # set a logger cache outside the InstaPy object to avoid # re-instantiation issues loggers = {} logger = None # set current profile credentials for DB operations profile = {"id": None, "name": None} # hold live Quota Supervisor configuration for global usage QS_config = {} # specify either connected locally or through a proxy connection_type = None # store user-defined delay time to sleep after doing actions action_delays = {} # store configuration of text analytics meaningcloud_config = {} yandex_config = {} # store the parameter for global access show_logs = None # store what browser the user is using, if they are using firefox it is # true, chrome if false. use_firefox = None # state of instantiation of InstaPy InstaPy_is_running = False
def locateMO(project, command, mo_filename):
    """
    Locate full path of a MO file used by a command using strace program.

    NOTE: Python 2 module (print >> statement).  Runs the command under
    `strace -e open`, then greps the trace log for a successful open() of a
    path ending in `mo_filename`.  Exits with status 1 when nothing matches.
    """
    command = ' '.join(command)
    log = '/tmp/strace'
    # Remove a stale trace log so we only see this run's open() calls.
    if path_exists(log):
        unlink(log)
    system(project, "%s -e open -o %s %s >/dev/null 2>&1" % (STRACE, log, command))
    # Match only successful opens (return value >= 0) of the wanted MO file.
    regex = re.compile('open\("([^"]+%s)", [^)]+\) = [0-9]+' % mo_filename)
    for line in open(log):
        match = regex.match(line.rstrip())
        if match:
            return match.group(1)
    print >>stderr, "Unable to locate MO file (%s) used by command %r" \
        % (mo_filename, command)
    exit(1)
def test_clean_created_dirs():
    """ Tests: test_clean_created_dirs: this is just a helper to clean the dirs created in the tests """
    print('::: TEST: test_clean_created_dirs()')
    # Every scratch directory the chart tests may have left behind.
    leftover_dirs = (
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok1',
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok2',
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok3',
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok4',
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok5',
        'scripts_bar_chart_test_jqpc_simple_bar_chart_ok6',
        'scripts_bar_chart_test_jqpc_simple_bar_chart__expect_failure1',
        'scripts_pie_chart_test_jqpc_simple_bar_chart__not_absolute_source_path_expect_failure',
    )
    for dir_name in leftover_dirs:
        target = path_join(SCRIPT_PATH, dir_name)
        if path_exists(target):
            shutil_rmtree(target)
def save_data(inputdata=None, file='data.txt'):
    """Merge `inputdata` into the data already stored in `file` and write it back.

    Args:
        inputdata: mapping of values to merge into the stored data; None/empty
            means "just rewrite the existing data".  (The previous `{}` default
            was a mutable default argument.)
        file: path of the text data file.

    Returns:
        The merged data dict.

    Raises:
        CustomError: when merging or writing fails.
    """
    # concatenate data to data already in the data file
    if path_exists(file):
        #data = loadmat(mat_file,struct_as_record=True)
        data = txt_to_dict(file=file)
    else:
        data = {}
    if inputdata:
        try:
            data.update(inputdata)
        except Exception:
            # Narrowed from a bare `except:` which also caught SystemExit etc.
            raise CustomError('trying to update', data, 'with', inputdata)
    try:
        dict_to_txt(data, file=file)
        #print "DEBUG, saved",data,'to',mat_file
    except Exception:
        raise CustomError('trying to write', data, 'to', file)
    return data
def __init__(self, url, user=None, pwd=None, trust_me=False):
    """Set up a wiki API client: settings folder, API sanity check, optional login."""
    self.url = self._fix_url(url)
    # initialise some stuff
    self.api_url = self.url + '/api.php'
    self.logged_in = []
    self.active = None
    # Per-user settings folder (~/.mwbot), created on first use.
    self.folder = expanduser('~') + path_sep + '.mwbot' + path_sep
    if not path_exists(self.folder):
        makedirs(self.folder)
    # Unless the caller vouches for the wiki, probe the API endpoint first.
    if not trust_me:
        if self.api('query') != []:
            raise ValueError('can\'t access wiki API at \'{0}\''.format(
                self.api_url))
    # log in if asked
    if user is not None:
        if self.login(user, pwd):
            self.active = self.logged_in[0]
def build_asset(asset_info, source_list, tools, build_path, verbose):
    """Build one asset when any of its inputs changed; return True if rebuilt."""
    src = asset_info.path
    asset_tool = tools.get_asset_tool(src)
    dst_path = path_join(build_path, tools.get_asset_destination(src))
    asset_info.build_path = dst_path
    source = source_list.get_source(src)
    deps = [source_list.get_source(path) for path in asset_info.deps]
    # Rebuild when a dependency or the tool changed, the output is missing,
    # or the tool's own external dependency check demands it.  The list comp
    # inside any() deliberately evaluates every dep's has_changed().
    needs_build = (any([dep.has_changed() for dep in deps])
                   or asset_tool.has_changed()
                   or not path_exists(dst_path)
                   or asset_tool.check_external_deps(source.asset_path, dst_path, asset_info.args))
    if needs_build:
        stdout.write('[%s] %s\n' % (asset_tool.name.upper(), src))
        asset_tool.run(source.asset_path, dst_path, verbose, asset_info.args)
        source.built = True
        return True
    source.built = True
    return False
def get_scss_paths():
    """
    Return a set of SCSS import paths from all apps that provide `website.scss`.

    If `$BENCH_PATH/apps/frappe/frappe/public/scss/website.scss` exists,
    the returned set will contain 'frappe/public/scss/website'.
    """
    bench_path = frappe.utils.get_bench_path()
    found_paths = []
    for app in frappe.get_installed_apps():
        scss_relative = join_path(app, 'public/scss/website.scss')
        # Only apps that actually ship a website.scss contribute a path.
        if path_exists(get_path('apps', app, scss_relative, base=bench_path)):
            found_paths.append(splitext(scss_relative)[0])
    return found_paths
def __init__(self, root_directory, directories, settings):
    """Create any missing output directories, collect pictures and start the UI.

    `directories` maps names to sub-paths relative to `root_directory`.
    """
    self.picture = 0
    self.root_directory = root_directory
    self.directories = directories
    self.settings = settings
    # Ensure every configured output sub-directory exists.
    for key in self.directories:
        target = join_path(self.root_directory, self.directories[key])
        if not path_exists(target):
            makedirs(target)
    self.pictures = find_pictures(root_directory, settings['extentions'],
                                  level=settings['level'])
    self.initialise_tk()
def __init__(self, game):
    """Load and validate the game's leaderboards.yaml.

    NOTE: Python 2 module (`unicode`).  Raises LeaderboardsUnsupported when
    the file is absent, LeaderboardError on read/parse failure, and
    ValidationException when any entry carries validation errors.
    """
    self.leaderboards = {}
    self.ordered_leaderboards = []
    self.leaderboard_path = None
    self.issues = []
    yaml_path = unicode(get_absolute_path(join_path(game.path, 'leaderboards.yaml')))
    total_yaml_errors = 0
    if path_exists(yaml_path):
        try:
            f = open(yaml_path, 'r')
            try:
                file_meta = yaml.load(f)
                for (i, m) in enumerate(file_meta):
                    key = m['key']
                    leaderboard = Leaderboard(game, key, m, i)
                    # Collect per-entry errors/warnings; errors accumulate
                    # into total_yaml_errors and fail the whole load below.
                    num_errors = len(leaderboard.errors)
                    if num_errors > 0:
                        total_yaml_errors += num_errors
                        self.issues.append((key, {
                            'errors': leaderboard.errors,
                            'warnings': leaderboard.warnings
                        }))
                    elif len(leaderboard.warnings) > 0:
                        self.issues.append((key, {
                            'errors': leaderboard.errors,
                            'warnings': leaderboard.warnings
                        }))
                    # Entries are registered even when they have issues.
                    self.leaderboards[key] = leaderboard
                    self.ordered_leaderboards.append(leaderboard)
            finally:
                f.close()
        except (IOError, yaml.YAMLError) as e:
            LOG.error('Failed loading leaderboards: %s', str(e))
            raise LeaderboardError('Failed loading leaderboards.yaml file: %s' % str(e))
    else:
        raise LeaderboardsUnsupported()
    if total_yaml_errors > 0:
        raise ValidationException(self.issues)
def get_log_files_local(options, files_list, enc_key):
    """Retrieve local event-log files, decrypt them and store them as JSON.

    Skips files whose output already exists unless --overwrite is set.
    Exits with -1 on an unexpected URL entry or any IO error.
    """
    verbose = options.verbose
    silent = options.silent
    overwrite = options.overwrite
    output_dir = options.outputdir
    filename_prefix = options.project + '-'
    try:
        for filename in files_list:
            # This routine only handles local paths; URLs are not expected here.
            if filename.startswith('http'):
                error('Unexpected file to retrieve')
                exit(-1)
            # Format v1: 'eventlogspath/gamefolder/events-yyyy-mm-dd.json.gz'
            # Format v2: 'eventlogspath/gamefolder/events-yyyy-mm-dd.bin'
            # Convert to 'gameslug-events-yyyy-mm-dd.json'
            filename_patched = filename_prefix + filename.rsplit('/', 1)[-1].split('.', 1)[0] + '.json'
            output_path = normpath(path_join(output_dir, filename_patched))
            if not overwrite and path_exists(output_path):
                if not silent:
                    warning('Skipping existing file: %s' % output_path)
                continue
            if verbose:
                log('Retrieving file: %s' % filename_patched)
            if filename.endswith('.bin'):
                # v2: encrypted, zlib-compressed binary blob.
                with open(filename, 'rb') as fin:
                    file_content = fin.read()
                file_content = decrypt_data(file_content, enc_key)
                file_content = zlib_decompress(file_content)
            else:  # if filename.endswith('.json.gz'):
                # v1: gzip archive whose payload is encrypted.
                gzip_file = GzipFile(filename=filename, mode='rb')
                file_content = gzip_file.read()
                gzip_file.close()
                file_content = decrypt_data(file_content, enc_key)
            write_to_file(options, file_content, filename=filename_patched, output_path=output_path)
    except (IOError, OSError) as e:
        error(e)
        exit(-1)
def setupProject(project):
    """Configure a fuzzing project that runs a Python module and scores
    its stdout for crash/error indicators."""
    # Validate the hard-coded input files up front.
    for filename in FILENAMES:
        if not path_exists(filename):
            raise ValueError("File doesn't exist: %s! Fix FILENAMES constant" % filename)

    module_name = project.application().getInputFilename(
        'Module name (use "ALL" to test all modules)')
    project.error("Use python interpreter: %s" % PYTHON)
    project.error("Use filenames: %s" % ', '.join(FILENAMES))

    # Constructors register themselves with the project as a side effect.
    source = PythonSource(project, module_name)
    process = PythonProcess(project, [PYTHON, '-u', '<source.py>'],
                            timeout=10.0, stdin='null')
    WatchProcess(process, exitcode_score=0)

    watcher = WatchStdout(process)
    watcher.max_nb_line = (1000, 1.0)
    # Disable dummy error messages
    watcher.words = {
        'oops': 0.30,
        'bug': 0.30,
        'memory': 0.40,
        'overflow': 0.40,
        'fatal': 1.0,
        'assert': 1.0,
        'assertion': 1.0,
        'critical': 1.0,
        'panic': 1.0,
        'glibc detected': 1.0,
        'segfault': 1.0,
        'segmentation fault': 1.0,
    }
    # PyPy messages
    watcher.addRegex("Fatal RPython error", 1.0)
    if DEBUG:
        watcher.show_matching = True
        watcher.show_not_matching = True
def prepare(files, output):
    """Convert excel files to the oocytes format.

    Arguments:
        files: iterable of file paths; directories are skipped, a
            non-existent path aborts.
        output: directory convert_all() writes results into.

    Raises:
        SystemExit: a path does not exist, or no usable file remains.
    """
    from vartools.oo_prepare import convert_all
    abs_files = []
    for f in files:
        abs_f = abspath(f)
        if not path_exists(abs_f):
            print('Invalid path name supplied')
            raise SystemExit
        if not isfile(abs_f):
            # Silently skip directories.
            continue
        abs_files.append(abs_f)
    if len(abs_files) == 0:
        # Bug fix: previously only printed here and fell through to
        # convert_all([]) anyway; abort like the bad-path case does.
        print('Invalid path name supplied')
        raise SystemExit
    install_dir = dirname(__file__)
    header_abs = path_join(install_dir, 'blank.oo')
    convert_all(abs_files, header=header_abs, outdir=output)
def load(self):
    """Restore persisted statistics counters from the JSON stats file.

    When the file is missing, unreadable, or corrupt, the current
    in-memory values are left untouched.
    """
    if not path_exists(self.conf.stats_path):
        return
    try:
        with open(self.conf.stats_path) as f:
            d: dict = json.load(f)
    except (OSError, JSONDecodeError):
        # OSError: the file vanished/became unreadable between the
        # exists() check and open() (previously crashed uncaught).
        # JSONDecodeError: corrupt stats file.
        print("Can't load stats")
        return
    self.stats_created.set(d.get('stats_created', 0))
    self.program_started.set(d.get('program_started', 0))
    self.song_played.set(d.get('song_played', 0))
    self.song_skipped.set(d.get('song_skipped', 0))
    self.paused.set(d.get('paused', 0))
    self.song_replayed.set(d.get('song_replayed', 0))
    self.song_selected.set(d.get('song_selected', 0))
    self.playlist_completed.set(d.get('playlist_completed', 0))
    self.total_time.set(d.get('total_time', 0))
    self.h_volume_max.set(d.get('h_volume_max', 0))
    self.h_playlist_count.set(d.get('h_playlist_count', 0))
def get(self, url, file_name=None):
    """Fetch *url* with the shared browser.

    With no file_name, return the parsed page as a string; otherwise
    save the raw response body under self.directory + file_name
    (skipped when the file exists and overwriting is disabled).

    Raises DagrException on a non-OK HTTP status.
    """
    skip_existing = (file_name
                     and not self.overwrite
                     and path_exists(self.directory + file_name))
    if skip_existing:
        print(file_name + " exists - skipping")
        return

    self.browser.open(url)
    status = self.browser.response.status_code
    if status != req_codes.ok:
        raise DagrException("incorrect status code - " + str(status))

    if file_name is None:
        return str(self.browser.parsed)
    # Write the raw response body to the local file.
    with open(self.directory + file_name, "wb") as local_file:
        local_file.write(self.browser.response.content)
def get(self, url, file_name=None):
    """Fetch *url*; return the page text when file_name is None,
    otherwise save the response body to file_name and return file_name.

    Returns None without fetching when file_name already exists and
    overwriting is disabled.  Raises DagrException on a non-OK status.
    """
    if file_name and not self.overwrite and path_exists(file_name):
        print(file_name + " exists - skipping")
        return None

    response = self.browser.open(url)
    if response.status_code != req_codes.ok:
        raise DagrException("incorrect status code - " + str(response.status_code))

    if file_name is None:
        return response.text

    # Persist the raw body to disk, then hand back the path.
    with open(file_name, "wb") as local_file:
        local_file.write(response.content)
    return file_name
def pack(target, sources, no_compress, verbose):
    """Bundle `sources` into the single asset file `target`.

    .js sources are minified (unless no_compress, a ':concat' suffix, or a
    '.min.' name says otherwise), .html sources become JS templates, and
    everything else is concatenated with a banner comment.
    NOTE(review): byte/unicode handling (text_type with decode args,
    .encode on write) is Python 2 / six style -- confirm target runtime.
    """
    from six import StringIO
    # Output type is taken from the target extension (e.g. 'js', 'css').
    outtype, outtxt = target.split(".")[-1], ''
    jsm = JavascriptMinify()
    for f in sources:
        suffix = None
        # A 'path:suffix' entry carries a per-file directive (e.g. 'concat').
        if ':' in f:
            f, suffix = f.split(':')
        if not path_exists(f) or isdir(f):
            print("did not find " + f)
            continue
        # Record mtime so a watcher can detect changes later.
        # NOTE(review): `timestamps` is a module-level dict defined
        # outside this view -- confirm.
        timestamps[f] = os.path.getmtime(f)
        try:
            with open(f, 'r') as sourcefile:
                data = text_type(sourcefile.read(), 'utf-8', errors='ignore')
            extn = f.rsplit(".", 1)[1]
            # Minify plain .js unless explicitly opted out.
            if outtype=="js" and extn=="js" and (not no_compress) and suffix!="concat" and (".min." not in f):
                tmpin, tmpout = StringIO(data.encode('utf-8')), StringIO()
                jsm.minify(tmpin, tmpout)
                minified = tmpout.getvalue()
                if minified:
                    # ';' guards against files missing a trailing terminator.
                    outtxt += text_type(minified or '', 'utf-8').strip('\n') + ';'
                if verbose:
                    print("{0}: {1}k".format(f, int(len(minified) / 1024)))
            elif outtype=="js" and extn=="html":
                # add to frappe.templates
                outtxt += html_to_js_template(f, data)
            else:
                # Plain concatenation with a banner naming the source file.
                outtxt += ('\n/*\n *\t%s\n */' % f)
                outtxt += '\n' + data + '\n'
        except Exception:
            # Best-effort bundling: report the failing file and continue.
            print("--Error in:" + f + "--")
            print(frappe.get_traceback())
    with open(target, 'w') as f:
        f.write(outtxt.encode("utf-8"))
    print("Wrote %s - %sk" % (target, str(int(os.path.getsize(target)/1024))))
def _empty_leaderboard(self):
    """Reset in-memory scores and truncate the on-disk leaderboard file.

    Does nothing on disk when the file does not exist.  Raises
    LeaderboardError when the file cannot be truncated.
    """
    # Drop all cached score state first.
    self.scores = []
    self.user_scores = {}
    self.aggregate_score = 0

    self._set_path()
    target = unicode(self.path)
    if not path_exists(target):
        return

    with self.lock:
        try:
            # Opening in 'wt' mode truncates the file to zero length.
            open(target, 'wt').close()
        except IOError as e:
            LOG.error('Failed emptying leaderboard file "%s": %s' % (self.path, str(e)))
            raise LeaderboardError('Failed emptying leaderboard file %s' % self.path)
def combine_prog():
    """Interactively merge a new import list into an existing master list.

    Prompts for the master list path and an import list path (or picks
    the lone file in ./input_data/), merges them, then writes a new
    master file after user confirmation.

    Raises:
        InputDirectoryError: bad path, or 'input_data' does not hold
            exactly one file when the default is requested.
        ListTypeError: the two lists are of different types.
    """
    print('*' * 10, 'Combine program', '*' * 10)
    current_filename = input("Enter the filepath of current master list to "
                             "add entries to:\n>>> ")
    [current_list_type, df_current] = lr.list_read(current_filename)
    input_filename = input(
        "\nEnter the filepath of new import list or press "
        "Enter to use default\n (whatever lone file is in the 'input_files' "
        "directory):\n>>> ")
    if input_filename == "":
        avail_input_files = listdir('./input_data/')
        if len(avail_input_files) == 1:
            [input_list_type, df_input] = lr.list_read('./input_data/'
                                                       + avail_input_files[0])
        else:
            raise InputDirectoryError(
                "There must be exactly one file in the 'input_data' dir.")
    elif path_exists(input_filename):
        [input_list_type, df_input] = lr.list_read(input_filename)
    else:
        raise InputDirectoryError("Invalid response. Start over.")
    if not input_list_type == current_list_type:
        raise ListTypeError("Input list type and master don't match.")
    df_out = lc.list_combine(df_current, df_input)
    while True:
        confirm = input("Review pending changes. Press Enter to output "
                        "a new master file or Q to quit:\n>>> ")
        if confirm == "":
            new_file = lw.list_write(df_out, current_list_type + "_master")
            print(
                "New master list (in SOURCE format) %s written to output_data "
                "directory. Consider copying to master directory." % new_file)
            break
        elif confirm.lower() == "q":
            # Bug fix: the prompt advertises 'Q' but only lowercase 'q'
            # was accepted; accept either case.
            quit()
        else:
            print("Invalid response. Try again.\n")
def update_breakpoints(self, target, hard_update=False):
    # pylint: disable=too-many-branches
    """ Decorates buffer with signs corresponding to breakpoints in target. """
    # Rebuild the (bufnr, line) -> [breakpoints] map from scratch.
    self.bp_list = {}
    if target is None or not target.IsValid():
        # No usable target: hide every visible breakpoint sign and bail.
        for (key, sign) in self.bp_signs.items():
            if not sign.hidden:
                sign.hide()
        return
    # Collect every (bufnr, line) that should currently carry a sign.
    needed_bps = set()
    source_map = llu.settings_target_source_map(self.ctrl.get_command_result)
    for bp in target.breakpoint_iter():
        bplocs = llu.get_bploc_tuples(bp, source_map)
        for (filepath, line) in bplocs:
            # Only files that exist locally can be shown in a buffer.
            if filepath and path_exists(filepath):
                bufnr = self.vimx.buffer_add(filepath)
                key = (bufnr, line)
                needed_bps.add(key)
                # Multiple breakpoints may resolve to the same location.
                if key in self.bp_list:
                    self.bp_list[key].append(bp)
                else:
                    self.bp_list[key] = [bp]
    # Hide all (outdated) breakpoint signs
    new_bps = needed_bps
    # Iterate over a copy since entries are deleted during the loop.
    bp_signs = self.bp_signs.copy()
    for (key, sign) in bp_signs.items():
        if hard_update or key not in new_bps:
            sign.hide()
            del self.bp_signs[key]
        else:
            # Sign already exists: re-show it if hidden, and drop it from
            # the "to create" set.
            if bp_signs[key].hidden:
                bp_signs[key].show()
            new_bps.discard(key)
    # Show all (new) breakpoint signs
    for (bufnr, line) in new_bps:
        # Last arg flags whether the program counter sits on this line too.
        self.bp_signs[(bufnr, line)] = BPSign(self.vimx, bufnr, line, (bufnr, line) in self.pc_signs)
def organize(args, size):
    """Organizes the target directory to match the file structure of the source directory

    Arguments:
        args {Namespace} -- args.source {str} and args.target {str} are the source and
            target directories to copy the file structure from and to, respectively
        size {bool} -- If true -> id files by their size. Else -> id files by their md5 hash
    """
    source_files = {}
    for folder in [args.source, args.target]:
        for subdir, dirs, files in walk(folder):
            for f in files:
                # Identify the file either by size or by hash.
                # NOTE(review): `hash` is assumed to be a content-hash
                # helper defined elsewhere in this file; the builtin
                # hash() of the *path* would never match between the two
                # trees -- confirm.
                if size:
                    file_id = getsize(join(subdir, f))
                else:
                    file_id = hash(join(subdir, f))
                if folder == args.source:
                    # Remember the path relative to the source root.
                    # Bug fix: strip the leading separator left by the
                    # slice -- os.path.join discards args.target when its
                    # second component is absolute, so the old code broke
                    # unless args.source ended with a separator.
                    rel_dir = subdir[len(args.source):].lstrip("/\\")
                    source_files[file_id] = {'path': join(rel_dir, f)}
                elif file_id in source_files:
                    # Known file: move/rename it to mirror the source layout.
                    current_path = join(subdir, f)
                    new_path = join(args.target, source_files[file_id]['path'])
                    print(current_path + ' --> ' + new_path)
                    if current_path != new_path:
                        # A different file already occupies the slot -> keep it as .old
                        if isfile(new_path):
                            move(new_path, new_path + '.old')
                        # Make sure the destination directory exists.
                        new_dir = dirname(new_path)
                        if not path_exists(new_dir):
                            makedirs(new_dir)
                        # Move the file
                        move(current_path, new_path)
                        # If the old directory is now empty -> delete it
                        if len(listdir(subdir)) == 0:
                            rmdir(subdir)
def sub_devs(self):
    """Return the set of underlying device paths for this md resource.

    When the md uuid is not yet known (e.g. before provisioning), fall
    back to the 'devs' configuration keyword so a peer co-resource can
    still reserve the devices.
    """
    if self.uuid == "" or self.uuid is None:
        # try to get the info from the config so pr co-resource can reserv
        # during provision
        try:
            self.devs = self.oget("devs")
            devs = self.devs
            if devs is None:
                return set()
            return set([os.path.realpath(dev) for dev in devs])
        except ex.OptNotFound:
            return set()
    try:
        devpath = self.md_devpath()
    except ex.Error:
        # No device path available: report the inactive view.
        # (The exception value was previously bound but never used.)
        return self.sub_devs_inactive()
    if path_exists(devpath):
        return self.sub_devs_active()
    else:
        return self.sub_devs_inactive()
def _compact_directory(path):
    # Recursively compact every '*.src_type' file under `path`, writing
    # content-hash-named outputs into the release tree.
    # NOTE(review): relies on names from an enclosing scope (src_type,
    # rel_path, dev_path, compactor_fn, new_versions, _join, path_join,
    # _posixpath, os_listdir, path_isdir, iglob, hash_for_file) -- their
    # semantics are assumed, confirm against the surrounding function.
    # Search for folders and recurse.
    for p in [ f for f in os_listdir(path) if path_isdir(path_join(path, f)) ]:
        _compact_directory(_join(path, p))
    # Search the development path for all src files.
    for dev_filename in iglob(_join(path, '*.%s' % src_type)):
        dev_filename = _posixpath(dev_filename)
        # The content hash names the output, so an unchanged file maps to
        # an already-existing artifact and is not recompacted.
        current_hash = hash_for_file(dev_filename)
        # Build a suitable output filename - hash.ext
        rel_filename = _join(rel_path, src_type, '%s.%s' % (current_hash, src_type))
        if not path_exists(rel_filename):
            compactor_fn(dev_filename, rel_filename)
        # Update the list of compact files, so it can be reused when generating script tags.
        new_versions[ dev_filename[len(dev_path):]] = rel_filename[len(rel_path):]
def download(self):
    # Fetch self.di.source from the FTP host into self.di.destination,
    # signalling the GUI on completion or failure.  Always clears the
    # worker slot and releases the shared download semaphore.
    try:
        # Ensure the destination directory exists before opening the file.
        if not path_exists(get_dirname(self.di.destination)):
            makedirs(get_dirname(self.di.destination))
        self.fileptr = open(self.di.destination, "wb")
        self.ftp = FTP()
        self.ftp.connect(self.di.host, self.di.port)
        self.ftp.login()  # anonymous login (no credentials passed)
        self.running = True
        # NOTE(review): self.callback presumably writes each received
        # chunk to self.fileptr -- confirm.
        self.ftp.retrbinary("RETR " + self.di.source, self.callback)
    except Exception as e:
        # Broad catch: any connection/IO failure is reported to the GUI.
        print("download:", self.di.filename, e)
        self.di.guisignal.raiseError()
    else:
        self.di.guisignal.complete.emit()
    finally:
        print(self.di.filename, "completed by", self)
        self.di.worker = None
        self.cleanup()  # NOTE(review): assumed to close fileptr/ftp -- confirm
        self.sharedSem.release()
def oo_upload(files, force):
    """Upload prepared oocyte .csv files to the linked database.

    Arguments:
        files: iterable of file paths; directories are skipped, a
            non-existent path aborts.
        force: passed through to oocytes_upload_all().

    Raises:
        SystemExit: a path does not exist, or no usable file remains.
    """
    from vartools.database import oocytes_upload_all
    abs_files = []
    for f in files:
        abs_f = abspath(f)
        if not path_exists(abs_f):
            print('Invalid path name supplied')
            raise SystemExit
        if not isfile(abs_f):
            # Silently skip directories.
            continue
        abs_files.append(abs_f)
    if len(abs_files) == 0:
        # Bug fix: previously only printed here and then called
        # oocytes_upload_all([]) anyway; abort like the bad-path case.
        print('Invalid path name supplied')
        raise SystemExit
    oocytes_upload_all(abs_files, force)
    return None
def _read_leaderboard(self):
    """Reload scores from the leaderboard file on disk.

    Resets the in-memory score state, then replays every stored score.
    When the file is absent, the default scores are loaded instead
    (copied, so a later reset leaves the defaults untouched).

    Raises:
        LeaderboardError: the file exists but cannot be read or parsed.
    """
    self._set_path()
    with self.lock:
        self.user_scores = {}
        self.scores = []
        self.aggregate_score = 0
        unicode_path = unicode(self.path)
        if path_exists(unicode_path):
            try:
                # Bug fix: the old try/finally called f.close() even when
                # open() itself raised, turning an IOError into a
                # NameError on the unbound `f`; `with` closes safely.
                with open(unicode_path, 'rt') as f:
                    # NOTE(review): yaml.load without a Loader; the file
                    # is assumed to be trusted, locally written data.
                    file_leaderboard = yaml.load(f)
                if file_leaderboard:
                    for s in file_leaderboard:
                        self._add_score(
                            UserScore(s['user'], s['score'], s['time']))
            except (IOError, KeyError, yaml.YAMLError) as e:
                LOG.error('Failed loading leaderboards file "%s": %s' % (self.path, str(e)))
                raise LeaderboardError(
                    'Failed loading leaderboard file "%s": %s' % (self.path, str(e)))
        else:
            self.user_scores = {}
            self.scores = []
            for s in self.default_scores:
                username = s.user
                if username not in self.user_scores:
                    # copy the score so that if the scores are reset then
                    # the default is left unchanged
                    self._add_score(s.copy())
        # Keep the score list ordered after either load path.
        self._sort_scores()
def save_files():
    """CLI entry point: copy and tag recordings from plugged SD cards
    ('all'), the Audio Hijack folder ('hijack'), or an explicit source
    directory (which requires a src_name argument).

    Expected argv: [prog, 'save', src_dir|all|hijack, country_code,
    city, (src_name)].
    """
    # erikpyado save
    # check plugged memories, if name available in SD_SRC_NAME
    # erikpyado videos
    # 2019/
    # month (1-12)
    # year(19)month(01-12)day(01-31)_micname_duration(mmm-ss)_
    # 191131-2355_H6LR_20-02_JP-TOKYO[optional(place,mood)].WAV
    # get_external_storage
    help_error = '''erikpyado save [src_dir|all] jp tag [src_name]\n\t all - saves configured sd card names\n\t src_dir - requires src_name'''
    # 5 args when src_name is omitted, 6 when it is given.
    if len(sys.argv) != 6 and len(sys.argv) != 5:
        logger.error(help_error)
        return
    src_dir = sys.argv[2]
    country_code = sys.argv[3]
    city_name = sys.argv[4]
    if src_dir == 'all':
        # look for sds
        # Save from every configured SD card that is currently mounted.
        for sd in SD_SRC_NAME:
            src_path = join_path(SD_ROOT_DIR, sd)
            if path_exists(src_path):
                logger.info('{} found. Saving...'.format(src_path))
                save_tag_files(src_path, SD_SRC_NAME[sd], country_code, city_name)
    elif src_dir == 'hijack':
        # Fixed source: the configured Audio Hijack capture directory.
        src_name = 'hijack'
        src_dir = AUDIO_HIJACK_DIR
        save_tag_files(src_dir, src_name, country_code, city_name)
    else:
        # Explicit source directory: the src_name argument is mandatory.
        if len(sys.argv) != 6:
            logger.error(help_error)
            return
        src_name = sys.argv[5]
        save_tag_files(src_dir, src_name, country_code, city_name)
def mount(self, dev, mount_path):
    """Mount dev['devpath'] read-only at mount_path, creating the mount
    point directory when needed.

    No-op when the device already appears in get_mounts().

    Raises:
        Error: the mount point path is occupied by a non-directory,
            cannot be created, or mount(8) fails.
    """
    mounts = self.get_mounts()
    if dev['devpath'] in mounts:
        return
    if not isdir(mount_path):
        if path_exists(mount_path):
            raise Error('A non-directory filesystem entry with pathname'
                        ' \'%s\' already exists' % mount_path)
        try:
            makedirs(mount_path)
        except OSError as err:
            raise Error('Could not create mount point directory'
                        ' \'%s\': %s' % (mount_path, err))
    try:
        # Bug fix: check_call never captures output, so err.output was
        # always None in the message below; check_output with stderr
        # merged makes the failure report meaningful.
        subprocess.check_output(
            ['mount', '-t', dev['fstype'], '-r', dev['devpath'], mount_path],
            stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        raise Error('Could not mount filesytem on \'%s\''
                    ' to mount point at \'%s\''
                    ' (returncode: %s, output: %s)' % (
                        dev['devpath'], mount_path, err.returncode, err.output))