def request_cover(self, end_callback=None):
    """Resolve the cover image for this item.

    Uses the on-disk cover cache when a matching file already exists;
    otherwise downloads ``self.remote_thumbnail`` in a background thread.

    :param end_callback: optional callable invoked with the local cover
        path (or None when the download failed) once the cover is known.
    """
    thumb = os.path.join(
        get_cover_path(),
        "%s - %s.jpg" % (normalize_path(self.artist), normalize_path(self.title)))
    # Fast path: cover already cached on disk.
    if os.path.exists(thumb):
        self.thumb = self.cover = thumb
        if end_callback:
            end_callback(self.cover)
        return
    # No remote source to fetch from: nothing more we can do.
    if not self.remote_thumbnail:
        return

    def refresh(remote_url, local_path):
        # Best-effort download; report the path only if the file landed.
        try:
            urllib.urlretrieve(remote_url, local_path)
        except Exception:
            # Was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt; keep the deliberate best-effort behaviour
            # but let process-control exceptions propagate.
            pass
        if os.path.exists(local_path):
            return local_path
        return None

    def refresh_finished(exception, retval):
        # Runs when the threaded download completes; retval may be None.
        self.thumb = self.cover = retval
        if end_callback:
            end_callback(retval)

    log.warning("requesting thumb %s" % self.remote_thumbnail)
    ThreadedFunction(refresh_finished, refresh, self.remote_thumbnail, thumb).start()
def apply(self):
    """Apply every patch in ``self.patches_folder`` to a fresh clone of
    the project; failed builds are moved into ``self.failures_path``
    along with a log of the failure output.
    """
    # Mapping used by normalize_path() to rewrite paths inside a patch.
    with open(self.normalize_paths, 'r') as data:
        normalize_dict = load(data)
    for patch_file in os.listdir(self.patches_folder):
        patch_path = os.path.join(self.patches_folder, patch_file)
        normalize_path(patch_path, normalize_dict)
        repo_path = os.path.join(self.builds_path, patch_file)
        logger.info('Cloning into %s' % repo_path)
        # Clone side effect only; the wrapper object itself was unused.
        GitWrapper(repo_path, self.project)
        try:
            # TODO: Add internal path as parameter
            work_path = os.path.join(repo_path, 'hv-rhel7.x/hv')
            logger.info('Applying patch on %s' % work_path)
            apply_patch(work_path, patch_path)
            logger.info('Successfully applied patch')
        except RuntimeError as exc:
            logger.error('Unable to apply patch %s on %s' % (patch_file, repo_path))
            # exc[1] / exc[2] indexing is Python-2-only; exc.args works
            # on both Python 2 and 3.
            log_path = os.path.join(self.failures_path, '%s.log' % patch_file)
            with open(log_path, 'w') as log_file:
                log_file.write(exc.args[1])
                log_file.write(exc.args[2])
            move(repo_path, os.path.join(self.failures_path, '%s-build' % patch_file))
            # Previously logged failures_path+patch_file+'.log' (missing a
            # path separator) which did not match the file written above.
            logger.error('Logs can be found at %s' % log_path)
def apply(self):
    """Apply every patch in ``self.patches_folder`` to a fresh clone of
    the project; failed builds are moved into ``self.failures_path``
    along with a log of the failure output.
    """
    # Mapping used by normalize_path() to rewrite paths inside a patch.
    with open(self.normalize_paths, 'r') as data:
        normalize_dict = load(data)
    for patch_file in os.listdir(self.patches_folder):
        patch_path = os.path.join(self.patches_folder, patch_file)
        normalize_path(patch_path, normalize_dict)
        repo_path = os.path.join(self.builds_path, patch_file)
        logger.info('Cloning into %s' % repo_path)
        # Clone side effect only; the wrapper object itself was unused.
        GitWrapper(repo_path, self.project)
        try:
            # TODO: Add internal path as parameter
            work_path = os.path.join(repo_path, 'hv-rhel7.x/hv')
            logger.info('Applying patch on %s' % work_path)
            apply_patch(work_path, patch_path)
            logger.info('Successfully applied patch')
        except RuntimeError as exc:
            logger.error('Unable to apply patch %s on %s' % (patch_file, repo_path))
            # exc[1] / exc[2] indexing is Python-2-only; exc.args works
            # on both Python 2 and 3.
            log_path = os.path.join(self.failures_path, '%s.log' % patch_file)
            with open(log_path, 'w') as log_file:
                log_file.write(exc.args[1])
                log_file.write(exc.args[2])
            move(repo_path, os.path.join(self.failures_path, '%s-build' % patch_file))
            # Previously logged failures_path+patch_file+'.log' (missing a
            # path separator) which did not match the file written above.
            logger.error('Logs can be found at %s' % log_path)
def main():
    """Demo script: compare os.path and pathlib for existence checks and
    for resolving relative paths to absolute ones, then exercise the
    project's normalize_path() helper on absolute paths."""
    rel_path_file = '../../test_data/relative_path/data.txt'
    rel_path_file_bad = '../../test_data/relative_path/bad/data.txt'
    # abs_path assumes mac OSX
    abs_path = '/tmp'
    abs_path_bad = '/junk'
    # use os version just till we figure out how pathlib works
    assert os.path.exists(
        rel_path_file), f"file does not exist: {rel_path_file}"
    assert os.path.exists(
        rel_path_file_bad) is False, f"file does exist: {rel_path_file_bad}"
    # What do we want to know:
    #  - is a path or file relative or absolute
    #  - does it exist
    # start with relative paths and move to absolute
    p = Path(rel_path_file)
    assert p.exists(), f"file does not exist: {rel_path_file}"
    # because this is a file not a dir
    assert p.is_dir() is False, f"dir does not exist: {rel_path_file}"
    # get absolute path
    p_abs = p.resolve()
    assert p.exists(), f"absolute file does not exist: {rel_path_file}"
    print(f"absolute path for {rel_path_file}: {p_abs}")
    # RELATIVE PATHS
    # what happens with p.resolve() with a path that does not exist?
    p_bad = Path(rel_path_file_bad)
    # this does not throw an error (resolve() is non-strict by default)
    p_bad_abs = p_bad.resolve()
    print(f"absolute path for {rel_path_file_bad}: {p_bad_abs}")
    assert p_bad_abs.exists(
    ) is False, f"{rel_path_file_bad} does exist, it should not"
    # ABSOLUTE PATHS
    print("results of normalize_path() with good absolute path")
    # normalize_path() appears to return (exists, absolute_path) -- TODO
    # confirm against its definition.
    p_exists, p_abs_ = normalize_path(abs_path)
    assert p_exists, f"path {abs_path} should exist"
    print(p_exists, p_abs_)
    print("results of normalize_path() with bad absolute path")
    p_exists, p_abs_ = normalize_path(abs_path_bad)
    assert p_exists is False, f"path {abs_path_bad} should not exist"
    print(p_exists, p_abs_)
    # can we read a file using pathlib.PosixPath as the file str?
    print(
        f"contents of {rel_path_file}\n {get_file_pathlib_contents(rel_path_file)}"
    )
    assert get_file_pathlib_contents(
        rel_path_file
    ) == '# this is content in a relative path', "contents of file not correct"
def __init__(self):
    """Initialise module bookkeeping: locate the cache and registry files
    under modules/ and load both from disk."""
    # Confirm the modules/ directory is actually present before use.
    with utils.ok_directory():
        utils.ping_directory('modules')
    self._cache_file, self._registry_file = (
        utils.normalize_path('modules/.cache.json'),
        utils.normalize_path('modules/.registry.json'),
    )
    self._load_cache()
    self._load_registry()
    # Modules loaded during THIS run (in-memory only).
    self._loaded_modules = {}
def main():
    """Demonstrate the usage of the list comprehension to clean up a commented file

    The cleanup:
        removes blank lines
        removes empty lines after strip()
        removes commented lines with the specified comment delimiter
        returns all remaining lines strip()ped

    :return: N/A
    """
    # remove comments and strip whitespace from lines in file
    commented_file = '../../test_data/commented_data.txt'
    commented_file_semicolon = '../../test_data/commented_data_different_comment.txt'
    file_exists, commented_file_abs = normalize_path(commented_file)
    assert file_exists, f"commented file {commented_file_abs} does not exist"
    file_exists, commented_file_semicolon_abs = normalize_path(
        commented_file_semicolon)
    assert file_exists, f"commented file {commented_file_semicolon_abs} does not exist"
    # get file contents
    with open(commented_file_abs, 'r') as f:
        lines = f.readlines()
    with open(commented_file_semicolon_abs, 'r') as f:
        lines_semicolon = f.readlines()
    # fixed typo in message: "fi;e" -> "file"
    assert len(lines) > 0, "file contents does not exist"
    print("original lines from file\n=====================")
    for line in lines:
        print(line, end='')
    print("list comprehensions")
    new_lines = clean_commented_lines(lines)
    print(
        "process content to remove all unwanted lines with #\n====================="
    )
    for line in new_lines:
        print(line)
    print("=====================")
    print("===== test with ; comment delimiter =====")
    new_lines = clean_commented_lines(lines_semicolon, comment_delimiter=';')
    print(
        "process content to remove all unwanted lines with ;\n====================="
    )
    for line in new_lines:
        print(line)
    print("=====================")
def do_GET(self):
    """Dispatch a GET request to the handler registered for its
    (normalized) path; map routing and handler errors to HTTP errors."""
    path = normalize_path(self.path)
    # Second-to-last segment, e.g. "/img/<name>/" -> "<name>"; used to
    # build the dynamic image route below.
    up_path = path.split("/")[-2]
    handlers = {
        "/": [self.handle_root, []],
        "/hello/": [self.handle_hello, []],
        "/number/": [self.handle_number, []],
        f"/img/{up_path}/": [
            # split() already yields str, so the redundant str() is gone.
            self.handle_uploader, ["images", up_path, "rb", "image/png"]
        ],
        "/style/": [
            self.handle_uploader,
            ["static/styles", "style.css", "r", "text/css"]
        ],
    }
    # NOTE: removed a leftover debug print(up_path).
    try:
        handler, args = handlers[path]
        handler(*args)
    except (NotFound, KeyError):
        self.handle_404()
    except MethodNotAllowed:
        self.handle_405()
    except Exception:
        self.handle_500()
def update_referrer(self, referrer):
    """Rewrite the *referrer* file in place to reference the current
    version of this asset.

    Two modes (Python 2 I/O: content is decoded/encoded manually):
      * placeholder mode: dump this asset, run it through compile_csjs()
        via a temp file, and splice the compiled output into the spot the
        placeholder regex matches;
      * otherwise: substitute the versioned-name regex matches with the
        current version name.
    """
    # Choose which pattern to search for in the referrer's content.
    pattern = self.get_placeholder_re() if self.placeholder else \
        self.get_version_name_re()
    referrer = normalize_path(referrer)
    with open(referrer) as handler:
        content = handler.read().decode('utf-8')
    if self.placeholder:
        logger.info('replace placeholder: ' + self.filename)
        spawn = ''.join(self.dump())
        # compile_csjs() works on a file path, so stage the dump in a
        # temp file with this asset's extension.
        temp = tempfile.mkstemp('.' + self.extension, text=True)
        handle, abspath = temp
        os.write(handle, spawn)
        spawn = compile_csjs(abspath)
        # Groups 1 and 3 are the text surrounding the placeholder; the
        # compiled output replaces the middle group.
        content = re.sub(pattern, '\g<1>' + spawn + '\g<3>', content)
        os.close(handle)
        os.remove(abspath)
    else:
        content = re.sub(pattern, self.get_version_name(), content)
    # Write the rewritten content back over the original file.
    with open(referrer, 'w') as handler:
        handler.write(content.encode('utf-8'))
def push(self, destination, extension=None):
    """Write this asset's dump to *destination* under its versioned name.

    :param destination: directory the versioned file is written into
    :param extension: optional extras forwarded to dump(); defaults to an
        empty list. (Was a mutable default argument ``extension=[]``,
        which is shared across calls; ``None`` sentinel fixes that while
        keeping the behaviour for callers identical.)
    :return: the path the content was written to
    """
    if extension is None:
        extension = []
    content = self.dump(extension=extension)
    path = normalize_path(destination, self.get_version_name())
    with open(path, 'w') as handler:
        handler.write(''.join(content))
    return path
def main():
    """Test console logging by capturing the console log stream and
    checking that expected content appears in it."""
    log_location = './logs'
    print(__file__)
    # pycharm is stuck and thinks I'm in a different folder: chdir to this
    # script's own directory so the relative ./logs path resolves.
    # os.path.dirname is portable, unlike splitting __file__ on '/'.
    new_cwd = os.path.dirname(__file__)
    print(new_cwd)
    os.chdir(new_cwd)
    # this is necessary to make it work with pycharm
    log_location_exists, log_location_abs = normalize_path(log_location)
    # cwd was previously fetched and printed twice; once is enough.
    print(f"cwd: {os.getcwd()}")
    print(log_location)
    print(log_location_abs)
    log_debug = True
    log_console = True
    assert log_location_exists, f"log location needs to exist: {log_location_abs}"
    print("setup logger")
    log_stream = StringIO()
    setup_logger(log_debug, log_location, log_console,
                 console_log_to=log_stream)
    print("setup root logger to use")
    log_root = logging.getLogger()
    log_root.info("log to root logger")
    log_root.info("XYZZY should be in stderr because we are logging to the console")
    content_ = log_stream.getvalue()
    print(f"len of log content: {len(content_)}")
    print("\nlog_stream contents before assertions below\n=================")
    sys.stdout.write(content_)
    print("end of content\n=================")
    # messages below had f-prefixes with no placeholders; plain strings now
    assert "INFO" in content_, "INFO text not found in console logging output"
    # do DEBUG messages appear in root - expect not to find them yet
    log_root.info("We do not expect D E B U G messages in root log file")
    content_ = log_stream.getvalue()
    assert ("DEBUG" in content_) is False, "DEBUG text found in console logging output"
    # add DEBUG message to root
    log_root.debug("we expect D E B U G message in root log file")
    content_ = log_stream.getvalue()
    assert "DEBUG" in content_, "DEBUG text NOT found in root console logging output"
def _download_lectures(self, lectures, modulenum, secnum, section_dir):
    """Download all resources for the lectures of one section.

    Applies the configured lecture/resource filters, ensures section_dir
    exists, and records any URL that fails in self.failed_urls.
    """
    lecture_filter = self._args.lecture_filter
    file_formats = self._args.file_formats
    resource_filter = self._args.resource_filter
    combined_section_lectures_nums = self._args.combined_section_lectures_nums
    overwrite = self._args.overwrite
    resume = self._args.resume
    skip_download = self._args.skip_download

    # section_dir is loop-invariant: create it once up front instead of
    # re-checking on every lecture iteration. (Minor behaviour note: the
    # directory is now created even when every lecture is filtered out.)
    if not os.path.exists(section_dir):
        mkdir_p(normalize_path(section_dir))

    for lecnum, (lecname, lecture) in enumerate(lectures):
        if lecture_filter and not re.search(lecture_filter, lecname):
            logging.debug('Skipping b/c of lf: %s %s',
                          lecture_filter, lecname)
            continue

        resources_to_get = find_resources_to_get(lecture, file_formats,
                                                 resource_filter,
                                                 self._ignored_formats)

        # write lecture resources
        for fmt, url, title in resources_to_get:
            lecture_filename = get_lecture_filename(
                combined_section_lectures_nums, section_dir, modulenum,
                secnum, lecnum, lecname, title, fmt)
            lecture_filename = normalize_path(lecture_filename)
            try:
                self._last_update = handle_resource(
                    self._downloader, lecture_filename, fmt, url,
                    overwrite, resume, skip_download, self.skipped_urls,
                    self._last_update)
            except requests.exceptions.RequestException as e:
                logging.error('The following error has occurred while '
                              'downloading URL %s: %s', url, str(e))
                self.failed_urls.append(url)
def test_normalize_path():
    """Table-driven check that normalize_path() produces the expected
    trailing-slash form for a set of representative inputs."""
    dataset = {
        "": "/",
        "/": "/",
        "/xxx": "/xxx/",
        # NOTE(review): expected value "/xx//" drops an 'x' from the
        # input -- looks like a typo for "/xxx//"; confirm against
        # normalize_path() before changing.
        "/xxx//": "/xx//",
        "xxx": "xxx/",
    }
    for path, expected in dataset.items():
        got = normalize_path(path)
        # fixed unbalanced quotes in the failure message
        assert got == expected, \
            f"path '{path}' normalized to '{got}', while '{expected}' expected"
def do_GET(self):
    """Route a GET request to the page handler that matches its
    normalized path, falling back to the 404 handler."""
    routes = {
        "/": self.handle_root,
        "/hello/": self.handle_hello,
        "/style/": self.handle_style,
        "/image/": self.handle_image,
    }
    handler = routes.get(normalize_path(self.path), self.handle_404)
    handler()
def test_normalize_path():
    """normalize_path() should always yield the trailing-slash form."""
    cases = (
        ("", "/"),
        ("/", "/"),
        ("hello", "hello/"),
        ("hello/", "hello/"),
    )
    for input_data, expected_data in cases:
        output_data = normalize_path(input_data)
        message = (f"path `{input_data}` normalized to `{output_data}`,"
                   f" while `{expected_data}` expected")
        assert output_data == expected_data, message
def parse_import_path(self, path):
    """Resolve an import *path* to a local file, transparently compiling
    JSX and SCSS sources into temp files when the respective compilers
    are available.

    A path containing '!' is treated as '<prefix>!<rest>' where the
    prefix is looked up in self.url_map; otherwise the path is relative
    to self.url_base. Returns the (possibly temp-file) path to use.
    Temp files are tracked in self._tempfile for later cleanup.
    """
    url_base = self.url_base
    url_map = self.url_map
    path = normalize_path(path)
    if path.find('!') >= 0:
        # '<prefix>!<rest>' form: the prefix selects a mapped base URL.
        path = path.split('!')
        prefix = path[0]
        if prefix in url_map:
            path = normalize_path(url_base + url_map[prefix] + path[1])
        else:
            raise Exception('prefix not found ! (' + prefix + ')')
    else:
        path = url_base + path
    if os.path.isfile(path):
        fileext = os.path.splitext(path)[1]
        if fileext == '.js' and jsx:
            # Python-2 style: read bytes and decode manually.
            with open(path) as handler:
                content = handler.read().decode('utf-8')
            # Only run the JSX transform when the file actually contains
            # JSX notation.
            if self._match_jsx_notation(content):
                temp = tempfile.mkstemp(fileext)
                self._tempfile.append(temp)
                jsx.transform(path, temp[1])
                path = temp[1]
        if fileext == '.scss' and SCSSCompiler:
            # Compile SCSS to CSS into a tracked temp file.
            compiler = SCSSCompiler(search_path=(self.scss_root,))
            temp = tempfile.mkstemp('.css')
            self._tempfile.append(temp)
            with open(path) as handler:
                content = handler.read().decode('utf-8')
            content = compiler.compile_string(content)
            os.write(temp[0], content.encode('utf-8'))
            path = temp[1]
    return path
def main():
    """Demonstrate file logging: set up root and app-named loggers, emit
    messages, and assert the expected records land in ./logs/root.log."""
    log_location = './logs'
    log_location_exists, log_location_abs = normalize_path(log_location)
    log_debug = True
    log_console = True
    # TODO create example script without console logging turned on
    app_name = "App logging example 1"
    print(f"abs log location: {log_location_abs}")
    # The original asserted existence and THEN had an if/else whose
    # raise branch was unreachable (and raised FileExistsError, the wrong
    # exception for a missing folder). One explicit check instead.
    if not log_location_exists:
        raise FileNotFoundError(f"folder does not exist: {log_location_abs}")
    remove_folder_files(log_location_abs)
    print("setup logger")
    setup_logger(log_debug, log_location, log_console)
    print("setup root logger to use")
    log_root = logging.getLogger()
    print("set up app name logger")
    log_app = logging.getLogger(app_name)
    log_root.info("log to root logger")
    log_app.info("log to app name logger")
    some_func()
    # Assert that we are seeing log files in the folder
    log_loc_path = Path(log_location_abs)
    root_file = log_loc_path / "root.log"
    assert root_file.exists(), f"root logging file does not exist: {root_file}"
    # do info messages appear in root
    content_ = get_file_pathlib_contents(root_file)
    assert "INFO" in content_, f"INFO text not found in root log file: {root_file}"
    # do DEBUG messages appear in root - expect not to find them yet
    log_root.info("We do not expect D E B U G messages in root log file")
    assert ("DEBUG" in content_
            ) is False, f"DEBUG text found in root log file: {root_file}"
    # add DEBUG message to root
    log_root.debug("we expect D E B U G message in root log file")
    content_ = get_file_pathlib_contents(root_file)
    assert "DEBUG" in content_, f"DEBUG text NOT found in root log file: {root_file}"
def test_normalize_path():
    """normalize_path() should return the path with a trailing slash."""
    test_data = [
        "/",
        "hello",
        "hello/",
    ]
    expected = [
        "/",
        "hello/",
        "hello/",
    ]
    # zip the parallel lists instead of indexing with a hard-coded
    # range(3), which silently skips entries if the tables grow.
    for t, e in zip(test_data, expected):
        g = normalize_path(t)
        assert g == e, f"path {t} normalized to {g}, while {e} expected"
def do_GET(self):
    """Dispatch a GET request by its normalized path; translate routing
    and handler failures into the matching HTTP error responses."""
    route_table = {
        "/": self.handle_root,
        "/hello/": self.handle_hello,
        "/style/": self.handle_style,
        "/logo/": self.handle_logo,
        "/0/": self.handle_zde,
    }
    try:
        # Unknown path raises KeyError, which is handled as 404 below.
        route_table[normalize_path(self.path)]()
    except (NotFound, KeyError):
        self.handle_404()
    except MethodNotAllowed:
        self.handle_405()
    except Exception:
        self.handle_500()
def get_s2_material_path(mat_name, s1_materials):
    """Return the resolved Source2 .vmat path for *mat_name*, or None
    (implicitly) when the material is not in *s1_materials*."""
    for name, rel_dir, _ in s1_materials:
        if name != mat_name:
            continue
        vmat = (Path('materials') / rel_dir / name).with_suffix('.vmat')
        return normalize_path(vmat).resolve()
def convert_model(s1_model, s2fm_addon_folder):
    """Convert a Source1 model into a Source2 .vmdl inside the SFM addon
    folder: decompile the MDL to DMX meshes, emit a KV3 .vmdl describing
    render meshes, jiggle bones, bodygroups and skins, then convert the
    model's materials. Returns the path of the written .vmdl file.
    """
    print(f'\033[94mWorking on {s1_model.stem} model\033[0m')
    s1_mdl = Mdl(s1_model)
    s1_mdl.read()
    eye_conv = EyeConverter()
    content_manager = ContentManager()
    content_manager.scan_for_content(s1_model)
    # Model path relative to its mod root; reused for all output paths.
    mod_path = get_mod_path(s1_model)
    rel_model_path = normalize_path(s1_model.relative_to(mod_path))
    print('\033[94mCollecting materials\033[0m')
    s1_materials = collect_materials(s1_mdl)
    os.makedirs(s2fm_addon_folder / rel_model_path.with_suffix(''),
                exist_ok=True)
    # Eyes are extracted separately and removed from the decompile below.
    eyes = eye_conv.process_mdl(
        s1_mdl, s2fm_addon_folder / rel_model_path.with_suffix(''))
    print('\033[94mDecompiling model\033[0m')
    model_decompiler = ModelDecompiler(s1_model)
    model_decompiler.decompile(remove_eyes=True)
    model_decompiler.save(s2fm_addon_folder / rel_model_path.with_suffix(''))
    s2_vmodel = (s2fm_addon_folder / rel_model_path.with_suffix('.vmdl'))
    os.makedirs(s2_vmodel.parent, exist_ok=True)
    print('\033[94mWriting VMDL\033[0m')
    vmdl = KV3mdl()
    # Register one render mesh per decompiled DMX model.
    for dmx_model in model_decompiler.dmx_models:
        vmdl.add_render_mesh(
            sanitize_name(dmx_model.mdl_model.name),
            normalize_path(
                rel_model_path.with_suffix('') /
                f'{Path(dmx_model.mdl_model.name).stem}.dmx'))
    # Eye meshes were written by the eye converter; reference them too.
    for eyeball_name, eyeball_path in eyes:
        vmdl.add_render_mesh(
            sanitize_name(eyeball_name),
            normalize_path(eyeball_path.relative_to(s2fm_addon_folder)))
    # Translate Source1 jiggle-bone procedural rules into vmdl jiggle
    # bone entries.
    for bone in s1_mdl.bones:
        if bone.procedural_rule_type == ProceduralBoneType.JIGGLE:
            procedural_rule = bone.procedural_rule  # type:JiggleRule
            # 0=rigid, 1=flexible, 2=base-spring (defaults to 0).
            jiggle_type = 0
            if procedural_rule.flags & JiggleRuleFlags.IS_RIGID:
                jiggle_type = 0
            elif procedural_rule.flags & JiggleRuleFlags.IS_FLEXIBLE:
                jiggle_type = 1
            elif procedural_rule.flags & JiggleRuleFlags.HAS_BASE_SPRING:
                jiggle_type = 2
            jiggle_data = {
                "name": f"{bone.name}_jiggle",
                "jiggle_root_bone": bone.name,
                "jiggle_type": jiggle_type,
                'length': procedural_rule.length,
                'tip_mass': procedural_rule.tip_mass,
                'has_yaw_constraint':
                    bool(procedural_rule.flags & JiggleRuleFlags.HAS_YAW_CONSTRAINT),
                'has_pitch_constraint':
                    bool(procedural_rule.flags & JiggleRuleFlags.HAS_PITCH_CONSTRAINT),
                'has_angle_constraint':
                    bool(procedural_rule.flags & JiggleRuleFlags.HAS_ANGLE_CONSTRAINT),
                # NOTE(review): key carries a trailing space
                # ('allow_flex_length ') -- possibly a typo; confirm what
                # KV3mdl.add_jiggle_bone expects before changing.
                'allow_flex_length ':
                    bool(procedural_rule.flags & JiggleRuleFlags.HAS_LENGTH_CONSTRAINT),
                'invert_axes': bone.position[0] < 0,
                'angle_limit': math.degrees(procedural_rule.angle_limit),
                'max_yaw': procedural_rule.max_yaw,
                'min_yaw': procedural_rule.min_yaw,
                'yaw_bounce': procedural_rule.yaw_bounce,
                # 'or 10'/'or 15' fallbacks replace zero values with
                # defaults.
                'yaw_damping': procedural_rule.yaw_damping or 10,
                'yaw_stiffness': procedural_rule.yaw_stiffness or 10,
                'yaw_friction': procedural_rule.yaw_friction or 10,
                'max_pitch': procedural_rule.max_pitch,
                'min_pitch': procedural_rule.min_pitch,
                'pitch_bounce': procedural_rule.pitch_bounce or 10,
                'pitch_damping': procedural_rule.pitch_damping or 10,
                'pitch_stiffness': procedural_rule.pitch_stiffness or 10,
                'pitch_friction': procedural_rule.pitch_friction or 10,
                'base_left_max': procedural_rule.base_max_left,
                'base_left_min': procedural_rule.base_min_left,
                'base_left_friction': procedural_rule.base_left_friction,
                'base_up_max': procedural_rule.base_max_up,
                'base_up_min': procedural_rule.base_min_up,
                'base_up_friction': procedural_rule.base_up_friction,
                # NOTE(review): base_forward_max reads base_min_forward
                # (same source as base_forward_min) -- looks like it
                # should be base_max_forward; confirm before changing.
                'base_forward_max': procedural_rule.base_min_forward,
                'base_forward_min': procedural_rule.base_min_forward,
                'base_forward_friction': procedural_rule.base_forward_friction,
                'along_stiffness': procedural_rule.along_stiffness / 10,
                'along_damping': procedural_rule.along_damping or 15,
            }
            vmdl.add_jiggle_bone(jiggle_data)
    # Bodygroups: empty/blank meshes become an empty choice.
    for s1_bodygroup in s1_mdl.body_parts:
        if 'clamped' in s1_bodygroup.name:
            continue
        bodygroup = vmdl.add_bodygroup(sanitize_name(s1_bodygroup.name))
        for mesh in s1_bodygroup.models:
            if len(mesh.meshes) == 0 or mesh.name == 'blank':
                vmdl.add_bodygroup_choice(bodygroup, [])
                continue
            vmdl.add_bodygroup_choice(bodygroup, sanitize_name(mesh.name))
    # Skins: the first skin group is the reference; each later group
    # becomes a remap of the materials that differ from the reference.
    reference_skin = s1_mdl.skin_groups[0]
    for n, skin in enumerate(s1_mdl.skin_groups[1:]):
        vmdl_skin = vmdl.add_skin(f'skin_{n}')
        for ref_mat, skin_mat in zip(reference_skin, skin):
            if ref_mat != skin_mat:
                ref_mat = get_s2_material_path(normalize_path(ref_mat),
                                               s1_materials)
                skin_mat = get_s2_material_path(normalize_path(skin_mat),
                                                s1_materials)
                if ref_mat and skin_mat:
                    vmdl.add_skin_remap(vmdl_skin, ref_mat, skin_mat)
                else:
                    print(
                        '\033[91mFailed to create skin!\nMissing source or destination material!\033[0m'
                    )
    with s2_vmodel.open('w') as f:
        f.write(vmdl.dump())
    print('\033[94mConverting materials\033[0m')
    for mat in s1_materials:
        mat_name = normalize_path(mat[0])
        print('\033[92mConverting {}\033[0m'.format(mat_name))
        result, shader = convert_material(mat, s2fm_addon_folder)
        if result:
            pass
        else:
            print(f'\033[91mUnsupported Source1 shader "{shader}"!\033[0m')
    return s2_vmodel