def encode(str, key):
    """Encrypt *str* with the Hill cipher using the numeric matrix *key*.

    Spaces are removed, the text lower-cased, and it is padded with 'x'
    so its length divides len(key).  Returns the upper-cased ciphertext,
    or False if an illegal character (not in the module-level alphabet
    `chars`) is present.
    """
    str = str.replace(" ", "").lower()
    # Validate up front.  The original checked only the first character of
    # each group, and did so *after* building wordMatrix, where an illegal
    # character had already raised ValueError in the index() lookup.
    for ch in str:
        if ch not in split(chars):
            print("Illegal character found: " + ch)
            return False
    # Add arbitrary letters if necessary so length is a multiple of the
    # key dimension.
    if (len(str) % len(key) != 0):
        for i in range(len(key) - len(str) % len(key)):
            str += "x"
    # Transform the word into a matrix of numbers (a=1 .. z=26).
    wordMatrix = np.array(
        [[split(chars).index(char.lower()) + 1 for char in str]])
    res = ""
    for i in range(0, len(split(str)), len(key)):
        # Get group of letters
        tmpM = np.array([wordMatrix[0][i + j] for j in range(len(key))])
        # Multiply group with key
        multM = np.matmul(key, tmpM)
        # Add encoded chars: reduce mod 26, shift back to 0-based index.
        for j in range(len(key)):
            res += chars[int(round(multM[j] % 26 - 1))]
    return (res.upper())
def decode(str, key):
    """Decrypt Hill-cipher text *str* with matrix *key*.

    Returns the upper-cased plaintext, or False when the key is not
    invertible modulo 26.
    """
    # Normalize first.  The original built wordMatrix from the raw input
    # and only stripped spaces afterwards, so any space or uppercase
    # letter crashed the chars lookup (and the matrix length disagreed
    # with the loop bound below).
    str = str.replace(" ", "").lower()
    # Get determinant of the key matrix
    A = round(np.linalg.det(key))
    if (A == 0):
        print("Invalid determinant")
        return False
    # adjugate = inverse * determinant
    invKey = np.linalg.inv(key) * A

    # Find A^-1 (mod 26) by brute force over the units of Z/26.
    def bruteKMod26(k):
        primeNumbers = [1, 3, 5, 7, 9, 11, 15, 17, 19, 21, 23, 25]
        for m in primeNumbers:
            if (k * m) % 26 == 1:
                return m

    invA = bruteKMod26(A)
    if not invA:
        print("Invalid key matrix")
        return False
    decodeKey = (np.round(invKey) * invA) % 26
    wordMatrix = np.array(
        [[split(chars).index(char.lower()) + 1 for char in str]])
    res = ""
    for i in range(0, len(split(str)), len(decodeKey)):
        tmpM = np.array([wordMatrix[0][i + j]
                         for j in range(len(decodeKey))])
        multM = np.matmul(decodeKey, tmpM)
        for j in range(len(decodeKey)):
            res += chars[int(round(multM[j] % 26 - 1))]
    return (res.upper())
def write_no_ok(module):
    # Report failure to the build driver, drop *module* from the
    # application list, mark it not-ok in build_info, and abort.
    print "no"
    # applist is a module-level path to the application list file.
    apps= tools.split(open(applist, "r").read(), "\n")
    apps= [a for a in apps if a != module]
    tools.rewrite(applist, "\n".join(apps))
    # Persist the failure so later build stages skip this module.
    tools.rewrite(os.path.join("data", "build_info", "IMP."+module),
                  "ok=False\n")
    sys.exit(1)
def main():
    # Entry point: configure one module, or tear down its build outputs
    # when it is disabled or its setup fails.
    (options, args) = parser.parse_args()
    disabled= tools.split(open("data/build_info/disabled", "r").read(), "\n")
    if options.name in disabled:
        print options.name, "is disabled"
        write_no_ok(options.name)
        # NOTE(review): write_no_ok calls sys.exit, so the cleanup below
        # appears unreachable — confirm intent.
        tools.rmdir(os.path.join("module_bin", options.name))
        tools.rmdir(os.path.join("benchmark", options.name))
        # Under scons a disabled module is not treated as an error.
        if options.scons=="yes":
            sys.exit(0)
        else:
            sys.exit(1)
    if setup_module(options.name, options.source, options.datapath):
        make_header(options)
        make_cpp(options)
        make_version_check(options)
        link_bin(options)
        link_benchmark(options)
        sys.exit(0)
    else:
        # Setup failed: remove any partially created outputs.
        tools.rmdir(os.path.join("module_bin", options.name))
        tools.rmdir(os.path.join("benchmark", options.name))
        if options.scons=="yes":
            sys.exit(0)
        sys.exit(1)
def main():
    """Configure one module; remove its build outputs and fail otherwise."""
    options, apps = parser.parse_args()

    def remove_outputs(name):
        # Everything this script may have generated for the module.
        tools.rmdir(os.path.join("module_bin", name))
        tools.rmdir(os.path.join("benchmark", name))
        tools.rmdir(os.path.join("lib", "IMP", name))

    disabled = tools.split(open("data/build_info/disabled", "r").read(), "\n")
    if options.name in disabled:
        print("%s is disabled" % options.name)
        write_no_ok(options.name)
        remove_outputs(options.name)
        sys.exit(1)
    success, modules = setup_module(options.name, options.source,
                                    options.datapath)
    if not success:
        remove_outputs(options.name)
        sys.exit(1)
    make_header(options)
    make_doxygen(options, modules)
    make_overview(options, apps)
    link_bin(options)
    link_py_apps(options)
    link_benchmark(options)
    sys.exit(0)
def main():
    """Set up build outputs for one module; tear them down on failure."""
    options, apps = parser.parse_args()
    disabled = tools.split(open("data/build_info/disabled", "r").read(), "\n")
    # Paths that must be purged whenever the module cannot be built.
    doomed = [os.path.join("module_bin", options.name),
              os.path.join("benchmark", options.name),
              os.path.join("lib", "IMP", options.name)]
    if options.name in disabled:
        print("%s is disabled" % options.name)
        write_no_ok(options.name)
        for path in doomed:
            tools.rmdir(path)
        sys.exit(1)
    success, modules = setup_module(
        options.name, options.source, options.datapath)
    if success:
        make_header(options)
        make_doxygen(options, modules)
        make_overview(options, apps)
        link_bin(options)
        link_py_apps(options)
        link_benchmark(options)
        sys.exit(0)
    for path in doomed:
        tools.rmdir(path)
    sys.exit(1)
def main():
    """Configure the named module using ModulesFinder metadata."""
    options, apps = parser.parse_args()

    def discard_outputs():
        # Remove any build products already generated for this module.
        for top in ("module_bin", "benchmark"):
            tools.rmdir(os.path.join(top, options.name))
        tools.rmdir(os.path.join("lib", "IMP", options.name))

    disabled = tools.split(open("build_info/disabled", "r").read(), "\n")
    if options.name in disabled:
        print("%s is disabled" % options.name)
        write_no_ok(options.name)
        discard_outputs()
        sys.exit(1)
    mf = tools.ModulesFinder(source_dir=options.source,
                             external_dir=options.build_dir,
                             module_name=options.name)
    module = mf[options.name]
    success, modules = setup_module(module, mf)
    if not success:
        discard_outputs()
        sys.exit(1)
    make_header(options, module)
    make_doxygen(options, module, modules, mf)
    make_overview(module, apps)
    link_bin(options, module)
    link_py_apps(module)
    link_benchmark(options, module)
    sys.exit(0)
def write_no_ok(module):
    # Tell the build driver this application failed: remove it from the
    # application list, record ok=False in build_info, and abort.
    print "no"
    apps = tools.split(open(applist, "r").read(), "\n")
    apps = [a for a in apps if a != module]
    tools.rewrite(applist, "\n".join(apps))
    tools.rewrite(os.path.join("data", "build_info", "IMP." + module),
                  "ok=False\n")
    sys.exit(1)
def get_dep_merged(finder, modules, name, extra_data_path):
    """Merge the *name* field over every dependency of *modules*.

    Returns a sorted list with duplicates removed.
    """
    merged = set()
    for dep in finder.get_all_dependencies(modules):
        info = tools.get_dependency_info(dep, extra_data_path)
        # cmake lists are semicolon-separated
        merged.update(tools.split(info[name], ';'))
    return sorted(merged)
def get_dep_merged(modules, name, ordered):
    """Return the sorted, de-duplicated union of the *name* field over
    all dependencies of *modules*."""
    collected = set()
    for dep in tools.get_all_dependencies(".", modules, "", ordered):
        info = tools.get_dependency_info(dep, ".")
        # cmake lists are semicolon-separated
        collected.update(tools.split(info[name], ';'))
    return sorted(collected)
def get_dep_merged(modules, name, ordered):
    """Merge the *name* field across all dependencies of *modules*.

    Returns a sorted list with duplicates removed (same contract as the
    sibling implementation of this function elsewhere in the tree).
    """
    ret = []
    alldeps = tools.get_all_dependencies(".", modules, "", ordered)
    for d in alldeps:
        info = tools.get_dependency_info(d, ".")
        # cmake lists are semicolon-separated
        ret.extend(tools.split(info[name], ';'))
    # sorted(set(...)) replaces the list(set())/sort() two-step.
    return sorted(set(ret))
def main():
    """Generate the swig dependency list for a module via `swig -MM`."""
    (options, args) = parser.parse_args()
    info = tools.get_module_info(options.name, "/")
    if not info["ok"]:
        # Module disabled: leave an empty deps file so the build proceeds.
        tools.rewrite("src/%s_swig.deps" % options.name, "")
        return
    cmd = [options.swig, '-MM', '-Iinclude', '-Iswig', '-ignoremissing'] \
        + ["-I" + x for x in tools.split(options.swigpath)] \
        + ["-I" + x for x in tools.split(options.includepath)] \
        + ["swig/IMP_%s.i" % options.name]
    # Context managers guarantee the handles are closed; the original
    # leaked the output file if subprocess.call raised, and never closed
    # the file it read back.
    with open("src/%s_swig.deps.in" % options.name, "w") as outfile:
        ret = subprocess.call(cmd, stdout=outfile)
    if ret != 0:
        raise OSError("subprocess failed with return code %d: %s"
                      % (ret, " ".join(cmd)))
    with open("src/%s_swig.deps.in" % options.name, "r") as infile:
        lines = infile.readlines()
    # First line is the make target; the rest are "dep \" continuation
    # lines, so strip the trailing " \" from each.
    names = [x[:-2].strip() for x in lines[1:]]
    final_names = [_fix(x, options.build_system) for x in names]
    final_list = "\n".join(final_names)
    tools.rewrite("src/%s_swig.deps" % options.name, final_list)
def decryptCBC(key, data, iv=b'\000' * 16):
    """AES-CBC decrypt *data*, built on the ECB primitive; strips padding."""
    cipher = AES.new(key, AES.MODE_ECB)
    plain = b''
    previous = iv
    for block in tools.split(data, 16, False):
        # CBC: plaintext = D(block) XOR previous ciphertext block.
        plain += xor(cipher.decrypt(block), previous)
        previous = block
    return tools.stripPadding(plain)
def decryptCBC(key, data, iv=b'\000' * 16):
    """Decrypt CBC-mode ciphertext and strip the trailing padding."""
    chunks = tools.split(data, 16, False)
    cipher = AES.new(key, AES.MODE_ECB)
    pieces = []
    for chunk in chunks:
        pieces.append(xor(cipher.decrypt(chunk), iv))
        iv = chunk  # each ciphertext block chains into the next
    return tools.stripPadding(b''.join(pieces))
def encryptCBC(key, data, iv=b'\000' * 16):
    """AES-CBC encrypt *data*, padding it to a block multiple first."""
    cipher = AES.new(key, AES.MODE_ECB)
    out = b''
    chain = iv
    for block in tools.split(tools.addPadding(data), 16, False):
        # CBC: ciphertext = E(plaintext XOR previous ciphertext).
        chain = cipher.encrypt(xor(block, chain))
        out += chain
    return out
def encryptCBC(key, data, iv=b'\000' * 16):
    """Encrypt *data* in CBC mode built on an ECB primitive."""
    padded = tools.addPadding(data)
    cipher = AES.new(key, AES.MODE_ECB)
    encrypted = []
    for piece in tools.split(padded, 16, False):
        # The fresh ciphertext block becomes the next chaining value.
        iv = cipher.encrypt(xor(piece, iv))
        encrypted.append(iv)
    return b''.join(encrypted)
def dnn_factory_from_model_dir(model_dir, **kwargs):
    """Creates a DNN factory from the :model_dir: argument.  Any extra
    keyword arguments override the hyperparameters recovered from the
    directory name."""
    # Drop a single trailing path separator, if present.
    if model_dir[-1] in ('/', '\\'):
        model_dir = model_dir[:-1]
    dir_name = tools.split(['/', '\\'], model_dir)[-1]
    # I suspect that we should be able to restore the DNN just from the
    # information saved in the model directory, without needing to know
    # its structure from the directory name...
    dnn_details, uuid = _dnn_hyperparameters_from_dir(dir_name)
    dnn_details['compile_kwargs']['model_dir'] = model_dir
    dnn_details.update(kwargs)
    return fac.DNNFactory(**dnn_details)
def write_ok(module, modules, unfound_modules, dependencies,
             unfound_dependencies):
    # Record a successful configuration for *module*: write its build
    # info file and ensure it appears in the application list.
    print "yes"
    config=["ok=True"]
    # Only non-empty lists are written, as colon-separated quoted strings.
    if len(modules) > 0:
        config.append("modules = \"" + ":".join(modules)+"\"")
    if len(unfound_modules) > 0:
        config.append("unfound_modules = \""+ ":".join(unfound_modules)+"\"")
    if len(dependencies) > 0:
        config.append("dependencies = \"" + ":".join(dependencies)+"\"")
    if len(unfound_dependencies) > 0:
        config.append("unfound_dependencies = \"" + ":".join(unfound_dependencies)+"\"")
    apps= tools.split(open(applist, "r").read(), "\n")
    if module not in apps:
        apps.append(module)
    tools.rewrite(applist, "\n".join(apps))
    tools.rewrite(os.path.join("data", "build_info", "IMP."+module),
                  "\n".join(config))
    sys.exit(0)
def main():
    """Decrypt the base64-encoded CBC ciphertext in file F and print it."""
    with open(F, 'r') as handle:
        encrypted = tools.fromB64(handle.read())
    cipher = AES.new(KEY, AES.MODE_ECB)
    previous = IV
    plain = b''
    for block in tools.split(encrypted, 16, False):
        # CBC chaining: previous ciphertext block feeds the next XOR.
        plain += crypto.xor(cipher.decrypt(block), previous)
        previous = block
    print(tools.toStr(tools.stripPadding(plain)))
def encryptCTR(key, nonce, data, littleEndian=False):
    '''Perform CTR encryption.

    Cipher input is a 128-bit key and 64-bit nonce; the 64-bit counter
    half is generated big endian (usual) or little endian.
    '''
    counter = bytearray(nonce + (b'\000' * 8))
    cipher = AES.new(key, AES.MODE_ECB)
    out = []
    for plainBlock in tools.split(data, 16):
        keystream = cipher.encrypt(bytes(counter))
        out.append(xor(plainBlock, keystream))
        _nextBlock(counter, littleEndian)  # bump the counter half
    return b''.join(out)
def encryptCTR(key, nonce, data, littleEndian=False):
    '''Encrypt *data* in CTR mode (128-bit key, 64-bit nonce).

    The trailing 64-bit counter is incremented big- or little-endian.
    '''
    state = bytearray(nonce + (b'\000' * 8))
    aes = AES.new(key, AES.MODE_ECB)
    result = b''
    for segment in tools.split(data, 16):
        # Keystream block is the encrypted nonce||counter state.
        result += xor(segment, aes.encrypt(bytes(state)))
        _nextBlock(state, littleEndian)
    return result
def main():
    """Read base64 ciphertext from F, CBC-decrypt with KEY/IV, print it."""
    with open(F, 'r') as src:
        blob = tools.fromB64(src.read())
    aes = AES.new(KEY, AES.MODE_ECB)
    chain = IV
    pieces = []
    for cblock in tools.split(blob, 16, False):
        pieces.append(crypto.xor(aes.decrypt(cblock), chain))
        chain = cblock  # CBC chaining value
    print(tools.toStr(tools.stripPadding(b''.join(pieces))))
def buildBlocks(prefixlen, data):
    ''' Builds blocks for hashing from given data. '''
    # Total message length in bits, counting a prefix of prefixlen bytes
    # that is not part of *data* (e.g. a secret key in a length-extension
    # setting).
    ml = (prefixlen + len(data)) * 8
    blocks = tools.split(data, BLOCK_LEN)
    # If the final block is already full, the 0x80 marker needs a fresh
    # empty block to live in.
    if len(blocks[-1]) == BLOCK_LEN:
        blocks.append(b'')
    blocks[-1] = blocks[-1] + b'\x80'
    remSpace = BLOCK_LEN - len(blocks[-1])
    if remSpace < 8:
        # Not enough room for the 64-bit length field: zero-fill this
        # block and add one more, leaving exactly 8 bytes free.
        blocks[-1] = blocks[-1] + b'\x00' * remSpace
        blocks.append(b'\x00' * (BLOCK_LEN - 8))
        remSpace = 8
    if remSpace > 8:
        # Zero-fill so exactly 8 bytes remain for the length field.
        blocks[-1] = blocks[-1] + b'\x00' * (remSpace - 8)
    # Append the big-endian 64-bit message bit length (MD-style padding).
    blocks[-1] = blocks[-1] + struct.pack(">q", ml)
    return blocks
def write_ok(module, modules, unfound_modules, dependencies,
             unfound_dependencies):
    # Record a successful configuration: emit the build-info file for
    # *module* and make sure it is listed in the application list.
    print "yes"
    config = ["ok=True"]
    # Only non-empty lists are written, as colon-separated quoted strings.
    if len(modules) > 0:
        config.append("modules = \"" + ":".join(modules) + "\"")
    if len(unfound_modules) > 0:
        config.append("unfound_modules = \"" + ":".join(unfound_modules) + "\"")
    if len(dependencies) > 0:
        config.append("dependencies = \"" + ":".join(dependencies) + "\"")
    if len(unfound_dependencies) > 0:
        config.append("unfound_dependencies = \"" + ":".join(unfound_dependencies) + "\"")
    apps = tools.split(open(applist, "r").read(), "\n")
    if module not in apps:
        apps.append(module)
    tools.rewrite(applist, "\n".join(apps))
    tools.rewrite(os.path.join("data", "build_info", "IMP." + module),
                  "\n".join(config))
    sys.exit(0)
def model_from_model_dir(model_dir, **kwargs):
    """Creates a model for a DNN from the :model_dir: argument. Any
    additional keyword arguments provided override the details of the DNN
    found.
    """
    if not os.path.isdir(model_dir):
        raise RuntimeError(f'Model dir {model_dir} does not exist')
    # Drop a single trailing separator so the last path component is the
    # directory name itself.
    if model_dir[-1] in ('/', '\\'):
        model_dir = model_dir[:-1]
    dir_name = tools.split(['/', '\\'], model_dir)[-1]
    # I suspect that we should be able to restore the DNN just from the
    # information saved in the model directory, without needing to know
    # its structure from the directory name...
    details, uuid = _dnn_hyperparameters_from_dir(dir_name)
    details.update(kwargs)
    # NOTE(review): *details* is mutated like a dict above but read via
    # attribute access below — presumably an attribute-dict type returned
    # by _dnn_hyperparameters_from_dir; confirm.
    model = ds.Network.define_dnn(hidden_units=details.hidden_units,
                                  logits=details.logits,
                                  activation=details.activation,
                                  drop_rate=0.0,
                                  processor=details.processor,
                                  model_dir=model_dir)
    return model
def send_text_and_photos(self):
    # Deliver the post: text is pre-split into message-sized chunks, the
    # final chunk carries the reply markup; photos go out as an album
    # (several) or a single photo, captioned when the text fits.
    text = split(self.post.text)
    if self.post.text and self.post.photos:
        if len(self.post.photos) > 1:
            # Multiple photos: send the text first, then a media group.
            send_splitted_message(self.bot, text, self.chat_id)
            self.bot.send_message(self.chat_id, text[-1],
                                  parse_mode='HTML',
                                  reply_markup=self.post.reply_markup,
                                  disable_web_page_preview=True)
            self.bot.send_media_group(self.chat_id, self.post.photos)
        elif len(self.post.photos) == 1:
            if len(self.post.text) > 1024:
                # Too long for a photo caption: send text and photo
                # separately.
                send_splitted_message(self.bot, text, self.chat_id)
                self.bot.send_message(self.chat_id, text[-1],
                                      parse_mode='HTML',
                                      reply_markup=self.post.reply_markup,
                                      disable_web_page_preview=True)
                self.bot.send_photo(self.chat_id,
                                    self.post.photos[0]['media'],
                                    parse_mode='HTML')
            else:
                # Short enough: use the last text chunk as the caption.
                send_splitted_message(self.bot, text, self.chat_id)
                self.bot.send_photo(self.chat_id,
                                    self.post.photos[0]['media'],
                                    text[-1], parse_mode='HTML',
                                    reply_markup=self.post.reply_markup,
                                    disable_web_page_preview=True)
    elif not self.post.text and self.post.photos:
        self.send_photos()
    elif self.post.text and not self.post.photos:
        send_splitted_message(self.bot, text, self.chat_id)
        self.bot.send_message(self.chat_id, text[-1], parse_mode='HTML',
                              reply_markup=self.post.reply_markup,
                              disable_web_page_preview=True)
def make_header(options):
    """Write include/IMP/<name>/<name>_config.h from the header template."""
    dir = os.path.join("include", "IMP", options.name)
    file = os.path.join(dir, "%s_config.h" % options.name)
    header_template = open(
        os.path.join(options.source, "tools", "build",
                     "config_templates", "header.h"), "r").read()
    try:
        os.makedirs(dir)
    except OSError:
        # Directory already exists.  (Was a bare except:, which also
        # silenced unrelated errors such as permission failures.)
        pass
    data = {}
    data["name"] = options.name
    data["filename"] = "IMP/%s/%s_config.h" % (options.name, options.name)
    data["cppprefix"] = "IMP%s" % options.name.upper().replace("_", "")
    # Every module except base re-exports base's Showable helpers.
    if data["name"] != "base":
        data["showable"] = """#if !defined(IMP_DOXYGEN) && !defined(SWIG)
#include <IMP/base/Showable.h>
#include <IMP/base/hash.h>
namespace IMP { namespace %(name)s { using ::IMP::base::Showable; using ::IMP::base::operator<<; using ::IMP::base::hash_value; } } // namespace
namespace IMP { namespace %(name)s { namespace internal { using ::IMP::base::Showable; using ::IMP::base::operator<<; using ::IMP::base::hash_value; } } } // namespace
#endif // !defined(SWIG) && !defined(IMP_DOXYGEN)
""" % data
    else:
        data["showable"] = ""
    cppdefines = []
    if options.defines != "":
        for define in tools.split(options.defines):
            parts = define.split("=")
            if len(parts) == 2:
                cppdefines.append("#define %s %s" % (parts[0], parts[1]))
            else:
                cppdefines.append("#define %s" % parts[0])
    # Defaults for the per-module dependencies.py executed below.
    d = {'required_modules': "",
         'lib_only_required_modules': "",
         'required_dependencies': "",
         'optional_dependencies': ""}
    exec(open(os.path.join(options.source, "modules", data["name"],
                           "dependencies.py"), "r").read(), d)
    info = tools.get_module_info(data["name"], options.datapath)
    optional_modules = [
        x for x in info["modules"]
        if x not in tools.split(d['required_modules']) and x != ""]
    unfound_modules = [x for x in info["unfound_modules"] if x != ""]
    optional_dependencies = [
        x for x in info["dependencies"]
        if x not in tools.split(d['required_dependencies']) and x != ""]
    add_list_to_defines(cppdefines, data, "USE", 1,
                        ["imp_" + x for x in optional_modules])
    add_list_to_defines(cppdefines, data, "NO", 0,
                        ["imp_" + x for x in unfound_modules])
    add_list_to_defines(cppdefines, data, "USE", 1, optional_dependencies)
    # NOTE: uses the raw info list, as before; a filtered local variable
    # existed previously but was never used, so it has been removed.
    add_list_to_defines(cppdefines, data, "NO", 0,
                        info["unfound_dependencies"])
    data["cppdefines"] = "\n".join(cppdefines)
    tools.rewrite(file, header_template % data)
def attack(data):
    """Return *data* with its second and third 16-byte blocks swapped."""
    first, second, third = tools.split(data, 16)[:3]
    return first + third + second
def make_header(options, module):
    """Write the module's config header (skipped for python-only modules)."""
    if module.python_only:
        return
    # The kernel's header lives directly under include/IMP.
    if module.name == 'kernel':
        dir = os.path.join("include", "IMP")
    else:
        dir = os.path.join("include", "IMP", module.name)
    file = os.path.join(dir, "%s_config.h" % module.name)
    header_template = tools.CPPFileGenerator(
        os.path.join(TOPDIR, "config_templates", "header.h"))
    try:
        os.makedirs(dir)
    except OSError:
        # Directory already exists.  (Was a bare except:, which also
        # silenced unrelated errors such as permission failures.)
        pass
    data = {}
    data["name"] = module.name
    if module.name == 'kernel':
        data["namespace"] = "IMP"
        data["begin_ns"] = "namespace IMP{"
        data["end_ns"] = "}"
        data["filename"] = "IMP/%s_config.h" % module.name
    else:
        data["namespace"] = "IMP::%s" % module.name
        data["begin_ns"] = "namespace IMP{ namespace %s {" % module.name
        data["end_ns"] = "} }"
        data["filename"] = "IMP/%s/%s_config.h" % (module.name, module.name)
    data["cppprefix"] = "IMP%s" % module.name.upper().replace("_", "")
    # Every module except the kernel re-exports IMP's Showable helpers.
    if data["name"] != "kernel":
        data["showable"] = """#if !defined(IMP_DOXYGEN) && !defined(SWIG)
#include <IMP/Showable.h>
#include <IMP/hash.h>
%(begin_ns)s using ::IMP::Showable; using ::IMP::operator<<; using ::IMP::hash_value; %(end_ns)s // namespace
%(begin_ns)s namespace internal { using ::IMP::Showable; using ::IMP::operator<<; using ::IMP::hash_value; } %(end_ns)s // namespace
#endif // !defined(SWIG) && !defined(IMP_DOXYGEN)
""" % data
    else:
        data["showable"] = ""
    cppdefines = []
    if options.defines != "":
        for define in tools.split(options.defines):
            parts = define.split("=")
            if len(parts) == 2:
                cppdefines.append("#define %s %s" % (parts[0], parts[1]))
            else:
                cppdefines.append("#define %s" % parts[0])
    cf = module.configured
    optional_modules = [x for x in cf.modules
                        if x not in module.required_modules]
    optional_dependencies = [x for x in cf.dependencies
                             if x not in module.required_dependencies]
    add_list_to_defines(cppdefines, data, "USE", 1,
                        ["imp_" + x.name for x in optional_modules])
    add_list_to_defines(cppdefines, data, "NO", 0,
                        ["imp_" + x.name for x in cf.unfound_modules])
    add_list_to_defines(cppdefines, data, "USE", 1, optional_dependencies)
    add_list_to_defines(cppdefines, data, "NO", 0, cf.unfound_dependencies)
    data["cppdefines"] = "\n".join(cppdefines)
    header_template.write(file, data)
def split(self, selector=None):
    """Split this feature set into train/test halves via tools.split."""
    train_part, test_part = tools.split(self.to_dict(), selector)
    return from_dict(train_part), from_dict(test_part)
def make_header(options):
    """Write include/IMP/<name>/<name>_config.h from the header template."""
    dir = os.path.join("include", "IMP", options.name)
    file = os.path.join(dir, "%s_config.h" % options.name)
    header_template = open(
        os.path.join(options.source, "tools", "build", "config_templates",
                     "header.h"), "r").read()
    try:
        os.makedirs(dir)
    except OSError:
        # Directory already exists.  (Was a bare except:, which also
        # silenced unrelated errors such as permission failures.)
        pass
    data = {}
    data["name"] = options.name
    data["filename"] = "IMP/%s/%s_config.h" % (options.name, options.name)
    data["cppprefix"] = "IMP%s" % options.name.upper().replace("_", "")
    # Every module except base re-exports base's Showable helpers.
    if data["name"] != "base":
        data["showable"] = """#if !defined(IMP_DOXYGEN) && !defined(SWIG)
#include <IMP/base/Showable.h>
#include <IMP/base/hash.h>
namespace IMP { namespace %(name)s { using ::IMP::base::Showable; using ::IMP::base::operator<<; using ::IMP::base::hash_value; } } // namespace
namespace IMP { namespace %(name)s { namespace internal { using ::IMP::base::Showable; using ::IMP::base::operator<<; using ::IMP::base::hash_value; } } } // namespace
#endif // !defined(SWIG) && !defined(IMP_DOXYGEN)
""" % data
    else:
        data["showable"] = ""
    cppdefines = []
    if options.defines != "":
        for define in tools.split(options.defines):
            parts = define.split("=")
            if len(parts) == 2:
                cppdefines.append("#define %s %s" % (parts[0], parts[1]))
            else:
                cppdefines.append("#define %s" % parts[0])
    # Defaults for the per-module dependencies.py executed below.
    d = {
        'required_modules': "",
        'lib_only_required_modules': "",
        'required_dependencies': "",
        'optional_dependencies': ""
    }
    exec(
        open(
            os.path.join(options.source, "modules", data["name"],
                         "dependencies.py"), "r").read(), d)
    info = tools.get_module_info(data["name"], options.datapath)
    optional_modules = [
        x for x in info["modules"]
        if x not in tools.split(d['required_modules']) and x != ""
    ]
    unfound_modules = [x for x in info["unfound_modules"] if x != ""]
    optional_dependencies = [
        x for x in info["dependencies"]
        if x not in tools.split(d['required_dependencies']) and x != ""
    ]
    add_list_to_defines(cppdefines, data, "USE", 1,
                        ["imp_" + x for x in optional_modules])
    add_list_to_defines(cppdefines, data, "NO", 0,
                        ["imp_" + x for x in unfound_modules])
    add_list_to_defines(cppdefines, data, "USE", 1, optional_dependencies)
    # NOTE: uses the raw info list, as before; a filtered local variable
    # existed previously but was never used, so it has been removed.
    add_list_to_defines(cppdefines, data, "NO", 0,
                        info["unfound_dependencies"])
    data["cppdefines"] = "\n".join(cppdefines)
    tools.rewrite(file, header_template % data)
def extractKey(message):
    """XOR the first and third 16-byte blocks of *message* and return it."""
    pieces = tools.split(message, 16)
    return crypto.xor(pieces[0], pieces[2])
from dense_net import *
from tools import load_jpgs, split

# Load the training images and labels from disk.
X_tr, Y_tr = load_jpgs()
#Y_tr = onehot()

# print an image
index = 10
# NOTE(review): X_train is referenced here before it is assigned below —
# presumably this should be X_tr; confirm.  plt/flowtools are assumed to
# come from the dense_net star import.
plt.imshow(X_train[index])
plt.show()

X_train, X_test, y_train, y_test = split(X_tr, Y_tr)

#reshape X and y
# Flatten each image to a column vector (features x samples).
X_train = X_train.reshape(X_train.shape[0], -1).T
X_test = X_test.reshape(X_test.shape[0], -1).T
#y_train = y_train.reshape(y_train.shape[0], -1).T
#y_test = y_test.reshape(y_test.shape[0], -1).T

ft = flowtools()
# One-hot encode the two classes.
y_train = ft.one_hot(y_train, 2)
y_test = ft.one_hot(y_test,2)

params = ft.model(X_train, y_train, X_test, y_test,
                  learning_rate=.0003, num_epochs=10)
def count(folder_path='.', hidden_files=False, hidden_folders=False,
          print_result=True, include_zero=False, add_subfolders=True,
          print_files=False, print_folders=True):
    """ Counts the number of lines of code in a folder.
    :str folder_path: The path to the folder. Defaults to the current folder.
    :bool hidden_files: Optional, whether to count hidden files. Defaults to False.
    :bool hidden_folders: Optional, whether to count hidden folders. Defaults to False.
    :bool print_result: Optional, whether to print out the results in a pretty format at the end. Defaults to True.
    :bool include_zero: Optional, whether to include files and folders containing zero lines of code. Defaults to False.
    :bool add_subfolders: Optional, whether to include the amount of code in subfolders when stating the amount of
        lines of code/comment/whitespace in a folder. Defaults to True.
    :bool print_files: Optional. Whether or not to print the counts for each file as well as each folder. Defaults to
        False.
    :bool print_folders: Optional. Whether or not to print the counts for each folder. Defaults to True.
    :return: If return_val is truthy, then it is a dictionary, with the keys being the paths to folders, and the
        values being 'Folder' objects as above.
    Note that the specific combination of :print_result: True, :print_files: False, :print_folders: False will raise
    a RuntimeError as that's probably not desired. (Asking to print, and then giving it nothing to print out.)
    """
    # Normalize the path: accept either separator, rebuild natively.
    folder_path = tools.split(['/', '\\'], folder_path)
    folder_path = os.path.join(*folder_path)
    if print_result is True and print_files is False and print_folders is False:
        raise RuntimeError(
            'The argument print_results was True, but print_files and print_folders are both False.'
        )
    folders = {}
    for dirpath, subdirnames, filenames in os.walk(folder_path):
        unhidden_subdirnames = []
        for subdirname in subdirnames:
            if not hidden_folders and subdirname.startswith('.'):
                # Hidden folder
                continue
            if subdirname == '__pycache__':
                continue
            unhidden_subdirnames.append(subdirname)
        # Prune in place so os.walk does not descend into skipped dirs.
        subdirnames[:] = unhidden_subdirnames
        files = []
        for filename in filenames:
            if not hidden_files and filename.startswith('.'):
                # Hidden file
                continue
            # Only Python sources and notebooks are counted.
            if filename.endswith('.py') or filename.endswith('.ipynb'):
                file_path = os.path.join(dirpath, filename)
                file_lines = file_count(file_path)
                file = File(filename, file_lines)
                files.append(file)
        folders[dirpath] = Folder(dirpath, files, subdirnames)
    if add_subfolders:
        # Go through in order of length of path, as a string, from longest
        # to shortest. This guarantees that we evaluate all deeper folders
        # before we evaluate shallower ones.
        for folder_name, folder in sorted(folders.items(),
                                          key=lambda x: len(x[0]))[::-1]:
            folder.add_lines_from_subfolders(folders)
    if print_result:
        if print_files and print_folders:
            first_heading_str = "File/Folder location"
        elif print_files and not print_folders:
            first_heading_str = "File location"
        elif print_folders and not print_files:
            first_heading_str = "Folder location"
        # First pass: column widths — widest entry per column, at least
        # as wide as the heading.
        max_folder_loc = len(first_heading_str)
        max_code = len("Code")
        max_comment = len("Comment")
        max_whitespace = len("Whitespace")
        max_all = len("Total")
        for folder_loc, folder in folders.items():
            if include_zero or folder.code_lines != 0:
                if print_folders:
                    max_folder_loc = max(max_folder_loc, len(folder_loc))
                    max_code = max(max_code,
                                   tools.num_digits(folder.code_lines))
                    max_comment = max(max_comment,
                                      tools.num_digits(folder.comment_lines))
                    max_whitespace = max(
                        max_whitespace,
                        tools.num_digits(folder.whitespace_lines))
                    max_all = max(
                        max_all,
                        tools.num_digits(folder.code_lines +
                                         folder.comment_lines +
                                         folder.whitespace_lines))
                if print_files:
                    for file in folder.files:
                        if include_zero or file.code_lines != 0:
                            file_loc = os.path.join(folder_loc, file.name)
                            max_folder_loc = max(max_folder_loc,
                                                 len(file_loc))
                            max_code = max(max_code,
                                           tools.num_digits(file.code_lines))
                            max_comment = max(
                                max_comment,
                                tools.num_digits(file.comment_lines))
                            max_whitespace = max(
                                max_whitespace,
                                tools.num_digits(file.whitespace_lines))
                            max_all = max(
                                max_all,
                                tools.num_digits(file.code_lines +
                                                 file.comment_lines +
                                                 file.whitespace_lines))
        # Header row and separator line.
        print_str = ("{:<%s} | {:%s} | {:%s} | {:%s} | {:%s}" %
                     (max_folder_loc, max_code, max_comment, max_whitespace,
                      max_all)).format(first_heading_str, "Code", "Comment",
                                       "Whitespace", "Total")
        print(print_str)
        print("-" * (max_folder_loc + 1) + "+" + "-" * (max_code + 2) + "+" +
              "-" * (max_comment + 2) + "+" + "-" * (max_whitespace + 2) +
              "+" + "-" * (max_all + 1))
        # Starting to look a bit spaghettified this!
        # Second pass: emit one row per folder and, optionally, per file.
        for folder_loc, folder in folders.items():
            if include_zero or folder.code_lines != 0:
                if print_folders:
                    print_str = ("{:<%s} | {:%s} | {:%s} | {:%s} | {:%s}" %
                                 (max_folder_loc, max_code, max_comment,
                                  max_whitespace, max_all)).format(
                                      folder_loc, folder.code_lines,
                                      folder.comment_lines,
                                      folder.whitespace_lines,
                                      folder.code_lines +
                                      folder.comment_lines +
                                      folder.whitespace_lines)
                    print(print_str)
                if print_files:
                    for file in folder.files:
                        if include_zero or file.code_lines != 0:
                            file_loc = os.path.join(folder_loc, file.name)
                            print_str = (
                                "{:<%s} | {:%s} | {:%s} | {:%s} | {:%s}" %
                                (max_folder_loc, max_code, max_comment,
                                 max_whitespace, max_all)).format(
                                     file_loc, file.code_lines,
                                     file.comment_lines,
                                     file.whitespace_lines,
                                     file.code_lines + file.comment_lines +
                                     file.whitespace_lines)
                            print(print_str)
    return folders
import pylab as pl
import scipy
import reader as rd
import filters
import tools

# Read the recording and locate syllable boundaries using the envelope
# filter; idx holds the split indices, minima the detected minima.
s = rd.read('canary.wav')
minima, idx = filters.ceci(s, freq_fast=250, freq_slow=40, freq_env=700,
                           thr=0.2)
# Cut the signal into individual syllables at the detected indices.
syl = tools.split(s, idx)
for i, s in enumerate(syl):
    print i
    fig, axarr = pl.subplots(2, 1, figsize=(5, 8))
    # Waveform on top, spectrogram below (5 ms Tukey window, 44800 Hz
    # sample rate assumed — TODO confirm against reader).
    axarr[0].plot(s[:, 0], s[:, 1])
    axarr[1].specgram(s[:, 1], NFFT=int(0.005 * 44800),
                      window=scipy.signal.tukey(int(44800 * 0.005)),
                      Fs=44800, noverlap=int(44800 * 0.004),
                      cmap='jet')
    pl.savefig("syl_{0}.png".format(i))
    #pl.close()
def normalize(self, img):
    """Split the image, then defer to the base OCR normalization."""
    return OCR.normalize(self, split(img))