def show(self, args):
    """
    Display the help for the requested service/api.

    Join the keyword inputs to locate the help file.
    Example:
        jcs compute help
            This would point to help_topics/compute.txt
        jcs compute describe-instances help
            This would point to help_topics/compute/describe-instances.txt
    """
    help_file = self.help_dir
    for arg in args:
        if arg != 'help':
            help_file = utils.join_path(help_file, arg)
        else:
            if help_file == self.help_dir:
                # Display jcs help
                help_file = utils.join_path(help_file, 'jcs')
            help_file += '.txt'
    # Handle errors like jcs compute help describe-instances
    if 'help' != args[-1]:
        raise IndexError()
    self.process_help_file(help_file)
def parse_organism_seq(org, in_path=cs.STRING_PATH, out_path=cs.JSON_PATH):
    seq_name = '{}.protein.sequences.v10.5.fa'.format(org)
    pseq_name = '{}_parsed_sequences.json'.format(org)
    seq_path = utils.join_path(in_path, seq_name)
    pseq_path = utils.join_path(out_path, pseq_name)
    with open(seq_path, 'r') as index:
        # list of all protein sequences
        proteins = []
        prt = ''
        seq = ''
        while True:
            line = index.readline()
            line = line.strip()
            if line == '':
                if prt:
                    proteins.append({'code': prt, 'sequence': seq})
                break
            elif line[0] == '>':
                # only store a record once a header has been seen,
                # so no empty placeholder entry is written before the first '>'
                if prt:
                    proteins.append({'code': prt, 'sequence': seq})
                prt = line[1:]
                seq = ''
            else:
                seq += line
    utils.write_json(proteins, pseq_path)
def parse_organism(org, in_path=cs.STRING_PATH, out_path=cs.JSON_PATH,
                   check=True):
    ppi_name = '{}.protein.links.v10.5.txt'.format(org)
    node_name = '{}_parsed_nodes.json'.format(org)
    edge_name = '{}_parsed_edges.json'.format(org)
    ppi_path = utils.join_path(in_path, ppi_name)
    node_path = utils.join_path(out_path, node_name)
    edge_path = utils.join_path(out_path, edge_name)
    if (check and utils.files_exist([node_name, edge_name], out_path)):
        message = 'using existing parsed jsons for {}'.format(org)
        utils.print_log(message)
    else:
        message = 'parsing ppi information of {}'.format(org)
        utils.print_log(message)
        parse_organism_ppi(org, ppi_path, node_path, edge_path)
        message = 'ppi parsing finished for {}'.format(org)
        utils.print_log(message)
    return organism.Organism(nodes_file=node_path, edges_file=edge_path,
                             org_id=org)
def align_save_side(side):
    os.makedirs(
        join_path(output_folder, "{}_aligned".format(type_data), side),
        exist_ok=True,
    )
    idx_list = events[
        np.logical_and(events[:, 1] == " {}".format(side),
                       events[:, 2] == " FootStrike"),
        0,
    ].astype(int)
    cycle_data = []
    for idx in range(len(idx_list) - 1):
        cycle_angle = []
        for n_angle in range(data.shape[0]):
            x = np.linspace(0, 101, num=idx_list[idx + 1] - idx_list[idx])
            cycle_cord = []
            for cord in range(3):
                f = interp1d(
                    x,
                    data[n_angle, idx_list[idx]:idx_list[idx + 1], cord],
                    kind="cubic",
                )
                cycle_cord.append(f(np.arange(0, 101)))
            cycle_angle.append(np.array(cycle_cord).T)
        cycle_data.append(np.array(cycle_angle))
    if len(cycle_data):
        np.save(
            join_path(output_folder, "{}_aligned".format(type_data), side, file_),
            np.array(cycle_data),
        )
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    rootflags = "rootflags=sync"
    if self.info.distro.name == 'LliureX-Live':
        isopath = unix_path(self.info.iso_path)
        dic = dict(
            custom_installation_dir = unix_path(self.info.custominstall),
            iso_path = isopath,
            keyboard_variant = self.info.keyboard_variant,
            keyboard_layout = self.info.keyboard_layout,
            locale = self.info.locale,
            accessibility = self.info.accessibility,
            kernel = unix_path(self.info.kernel),
            initrd = unix_path(self.info.initrd),
            rootflags = rootflags,
            title1 = "Booting the LliureX installation.",
            title2 = "For more boot options, press `ESC' now...",
            lliurex_mode_title = "LliureX",
            normal_mode_title = "LliureX Live",
            #pae_mode_title = "PAE mode",
            #safe_graphic_mode_title = "Safe graphic mode",
            #intel_graphics_workarounds_title = "Intel graphics workarounds",
            #nvidia_graphics_workarounds_title = "Nvidia graphics workarounds",
            #acpi_workarounds_title = "ACPI workarounds",
            #verbose_mode_title = "Verbose mode",
            #demo_mode_title = "Demo mode",
            )
    else:
        isopath = ""
        kernel = ''
        initrd = ''
        dic = dict(
            lliurex_mode_title = "LliureX",
            title1 = "Booting the LliureX installation.",
            title2 = "For more boot options, press `ESC' now...",
            rootflags = rootflags,
            custom_installation_dir = unix_path(self.info.custominstall),
            iso_path = isopath,
            keyboard_variant = self.info.keyboard_variant,
            keyboard_layout = self.info.keyboard_layout,
            locale = self.info.locale,
            accessibility = self.info.accessibility,
            )
    ## TBD at the moment we are extracting the ISO, not the CD content
    #~ elif self.info.cd_path:
    #~     isopath = unix_path(self.info.cd_path)
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    if self.info.run_task == "cd_boot":
        content = content.replace(" automatic-ubiquity", "")
        content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    write_file(grub_config_file, content)
def create_preseed(self):
    template_file = join_path(self.info.data_dir, "preseed." + self.info.distro.name)
    if not os.path.exists(template_file):
        template_file = join_path(self.info.data_dir, "preseed.lupin")
    template = read_file(template_file)
    partitioning = ""
    partitioning += "d-i partman-auto/disk string LIDISK\n"
    partitioning += "d-i partman-auto/method string loop\n"
    partitioning += "d-i partman-auto-loop/partition string LIPARTITION\n"
    partitioning += "d-i partman-auto-loop/recipe string \\\n"
    disks_dir = unix_path(self.info.disks_dir) + "/"
    if self.info.root_size_mb:
        partitioning += (
            " %s 3000 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ / } . \\\n"
            % (disks_dir + "root.disk", self.info.root_size_mb, self.info.root_size_mb)
        )
    if self.info.swap_size_mb:
        partitioning += " %s 100 %s %s linux-swap method{ swap } format{ } . \\\n" % (
            disks_dir + "swap.disk",
            self.info.swap_size_mb,
            self.info.swap_size_mb,
        )
    if self.info.home_size_mb:
        partitioning += (
            " %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /home } . \\\n"
            % (disks_dir + "home.disk", self.info.home_size_mb, self.info.home_size_mb)
        )
    if self.info.usr_size_mb:
        partitioning += (
            " %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /usr } . \\\n"
            % (disks_dir + "usr.disk", self.info.usr_size_mb, self.info.usr_size_mb)
        )
    partitioning += "\n"
    safe_host_username = self.info.host_username.replace(" ", "+")
    user_directory = self.info.user_directory.replace("\\", "/")[2:]
    host_os_name = "Windows XP Professional"  # TBD
    password = md5_password(self.info.password)
    dic = dict(
        timezone=self.info.timezone,
        password=password,
        user_full_name=self.info.user_full_name,
        distro_packages=self.info.distro.packages,
        host_username=self.info.host_username,
        username=self.info.username,
        partitioning=partitioning,
        user_directory=user_directory,
        safe_host_username=safe_host_username,
        host_os_name=host_os_name,
    )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    preseed_file = join_path(self.info.custominstall, "preseed.cfg")
    write_file(preseed_file, content)
def apply_plugins_config(config, app_root_dst):
    nameClassPairs = []
    activities = []
    permissions = []
    dependencies = []
    plugins = config_get_plugins(config)
    for p in plugins:
        plugin_json = join_path(PLUGINS_DIR, 'src/' + p + '/plugin.json')
        with open(plugin_json, 'r') as f:
            plugin_config = json.load(f)
            name = plugin_config['name']
            if 'android' in plugin_config:
                android_config = plugin_config['android']
                if 'class' in android_config:
                    className = android_config['class']
                    nameClassPairs.append((name, className))
                if 'permissions' in android_config:
                    permissions += android_config['permissions']
                if 'activities' in android_config:
                    activities += android_config['activities']
                if 'dependencies' in android_config:
                    dependencies += android_config['dependencies']
    activities = sorted(set(activities))
    permissions = sorted(set(permissions))
    dependencies = sorted(set(dependencies))
    activities = '\n  '.join(activities)
    permissions = '\n  '.join(permissions)
    dependencies = '\n  '.join(dependencies)
    filename = join_path(app_root_dst, 'app/build.gradle')
    file_replace(filename, 'EXTRA_DEPENDENCIES', dependencies)
    filename = join_path(app_root_dst, 'app/src/main/AndroidManifest.xml')
    file_replace(filename, 'EXTRA_ACTIVITIES', activities)
    file_replace(filename, 'EXTRA_PERMISSION', permissions)
    imports = ''
    registers = ''
    for iter in nameClassPairs:
        name, className = iter
        registers += 'PluginManager.register(\"'
        registers += name.lower() + '\", new ' + className + '(PluginManager.activity, id++));\n'
    filename = join_path(app_root_dst, 'app/src/main/java/org/zlgopen/plugins/common/PluginManager.java')
    file_replace(filename, 'EXTRA_IMPORTS', imports)
    file_replace(filename, 'EXTRA_REGISTERS', registers)
def show_version_manifest():
    num_version = gv.cdn_version()
    cdn_path = utils.join_path(gv.cdn_path(), project_manifest_name)
    client_path = utils.join_path(gv.client_path(), project_manifest_name)
    cdn_config = inout.read_json(cdn_path)
    client_config = inout.read_json(client_path)
    L.debug("version manifest:")
    L.debug("dev.json => %s", num_version)
    L.debug("client/project.json => %s", client_config['version'])
    L.debug("cdn/project.json => %s", cdn_config['version'])
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    #if self.info.run_task == "cd_boot":
    #    isopath = ""
    if self.info.iso_path:
        isopath = unix_path(self.info.iso_path)
    if self.info.target_drive.is_fat():
        rootflags = "rootflags=sync"
    else:
        rootflags = "rootflags=syncio"
    if self.info.run_task == "cd_boot":
        title = "StartOS LiveCD"
    elif self.info.flag:
        title = "StartOS"
    else:
        title = "StartOS LiveCD"
    if self.info.run_task == "cd_boot":
        mode = ""
    elif self.info.flag:
        mode = "install-automatic"
    else:
        mode = ""
    dic = dict(
        title1 = "Completing the StartOS installation.",
        title2 = "For more installation boot options, press `ESC' now...",
        normal_mode_title = title,
        kernel = unix_path(self.info.kernel),
        iso_path = isopath,
        install_mode = mode,
        locale = self.info.locale,
        keyboard_layout = self.info.keyboard_layout,
        keyboard_variant = self.info.keyboard_variant,
        rootflags = rootflags,
        initrd = unix_path(self.info.initrd),
        )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        log.debug("%s,%s" % (k, v))
        content = content.replace(k, v)
    #if self.info.run_task == "cd_boot":
    #    content = content.replace(" automatic-ubiquity", "")
    #    content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    log.debug("grub_config_file === %s" % grub_config_file)
    write_file(grub_config_file, content)
def create_preseed(self):
    template_file = join_path(self.info.data_dir, 'preseed.' + self.info.distro.name)
    if not os.path.exists(template_file):
        template_file = join_path(self.info.data_dir, 'preseed.lupin')
    template = read_file(template_file)
    if self.info.distro.packages:
        distro_packages_skip = ''
    else:
        distro_packages_skip = '#'
    partitioning = ""
    partitioning += "d-i partman-auto/disk string LIDISK\n"
    partitioning += "d-i partman-auto/method string loop\n"
    partitioning += "d-i partman-auto-loop/partition string LIPARTITION\n"
    partitioning += "d-i partman-auto-loop/recipe string \\\n"
    disks_dir = unix_path(self.info.disks_dir) + '/'
    if self.info.root_size_mb:
        partitioning += ' %s 3000 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ / } . \\\n' \
            % (disks_dir + 'root.disk', self.info.root_size_mb, self.info.root_size_mb)
    if self.info.swap_size_mb:
        partitioning += ' %s 100 %s %s linux-swap method{ swap } format{ } . \\\n' \
            % (disks_dir + 'swap.disk', self.info.swap_size_mb, self.info.swap_size_mb)
    if self.info.home_size_mb:
        partitioning += ' %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /home } . \\\n' \
            % (disks_dir + 'home.disk', self.info.home_size_mb, self.info.home_size_mb)
    if self.info.usr_size_mb:
        partitioning += ' %s 100 %s %s $default_filesystem method{ format } format{ } use_filesystem{ } $default_filesystem{ } mountpoint{ /usr } . \\\n' \
            % (disks_dir + 'usr.disk', self.info.usr_size_mb, self.info.usr_size_mb)
    partitioning += "\n"
    safe_host_username = self.info.host_username.replace(" ", "+")
    user_directory = self.info.user_directory.replace("\\", "/")[2:]
    host_os_name = "Windows XP Professional"  # TBD
    password = md5_password(self.info.password)
    dic = dict(
        timezone=self.info.timezone,
        password=password,
        user_full_name=self.info.user_full_name,
        distro_packages_skip=distro_packages_skip,
        distro_packages=self.info.distro.packages,
        host_username=self.info.host_username,
        username=self.info.username,
        partitioning=partitioning,
        user_directory=user_directory,
        safe_host_username=safe_host_username,
        host_os_name=host_os_name,
    )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    preseed_file = join_path(self.info.custominstall, "preseed.cfg")
    write_file(preseed_file, content)
def gen_folders(json_result, path, parent):
    path = abs_path(path)
    files = os.listdir(path)
    # L.info("+ dir: %s", path)
    for name in files:
        full_path = join_path(path, name)
        if os.path.isdir(full_path):
            json_result = gen_folders(json_result, full_path,
                                      join_path(parent, name))
        elif os.path.isfile(full_path):
            json_result = gen_files(json_result, parent, name, full_path)
    return json_result
def create_diskimage_dirs(self, associated_task=None):
    self.info.disks_dir = join_path(self.info.target_dir, "disks")
    self.info.disks_boot_dir = join_path(self.info.disks_dir, "boot")
    dirs = [
        self.info.target_dir,
        self.info.disks_dir,
        self.info.disks_boot_dir,
        join_path(self.info.disks_boot_dir, "grub"),
    ]
    for d in dirs:
        if not os.path.isdir(d):
            log.debug("Creating dir %s" % d)
            os.mkdir(d)
def apply_features(config, app_root_dst):
    if is_fullscreen(config):
        activityPreferTheme = 'android:theme="@android:style/Theme.NoTitleBar.Fullscreen"'
        appPreferTheme = 'android:theme="@android:style/Theme.NoTitleBar"'
        filename = join_path(app_root_dst, 'app/src/main/java/org/libsdl/app/SDLActivity.java')
        file_replace(filename, '//setWindowStyle(false);', "setWindowStyle(true);")
    else:
        activityPreferTheme = 'android:theme="@style/AppTheme"'
        appPreferTheme = ''
    filename = join_path(app_root_dst, 'app/src/main/AndroidManifest.xml')
    file_replace(filename, 'APP_PREFER_THEME', appPreferTheme)
    file_replace(filename, 'ACTIVITY_PREFER_THEME', activityPreferTheme)
def create_project(config, app_root_src):
    app_name = config_get_app_name(config)
    app_full_name = config_get_app_full_name(config)
    app_root_dst = join_path(BUILD_DIR, app_name)
    copy_folder(TEMPLATE_DIR, app_root_dst)
    rename_files_content(app_root_dst, app_full_name, app_name)
    copy_awtk_files(join_path(app_root_dst, 'src/awtk'))
    copy_app_sources(config, join_path(app_root_dst, 'src/app'), app_root_src)
    copy_app_assets(config, join_path(app_root_dst, 'assets/default/raw'), app_root_src)
    set_cmake_includes(config, join_path(app_root_dst, "awtk_source.mk"))
    show_result(app_name)
def extract_all_organism_GO(go_file, out_path=cs.JSON_PATH):
    in_file = utils.join_path(cs.STRING_PATH, go_file)
    out_file = utils.join_path(out_path, '{}-GO.json'.format(go_file))
    if utils.file_exists(out_file, ''):
        message = 'GO json already exists for {}'.format(go_file)
        utils.print_log(message)
        return utils.load_json(out_file)
    else:
        with open(in_file, 'r') as index:
            # list of all gene annotations for all proteins
            go_dict = {}
            # start reading GO file
            message = 'Extracting GO information for {}'.format(go_file)
            utils.print_log(message)
            line_count = 1
            while True:
                if line_count % cs.GO_REPORT_FREQ == 0:
                    message = 'reached line #{}'.format(line_count)
                    utils.print_log(message, mode='progress')
                line_count += 1
                line = index.readline()
                if line == '':
                    break
                line = line.strip()
                words = line.split('\t')
                org = words[0]
                prot = words[1]
                go = words[3]
                evidence = words[6]
                # score = words[7]
                prot_id = '{}.{}'.format(org, prot)
                # if int(score) > INTERACTION_THR:
                if evidence in ['EXP', 'IDA', 'IMP', 'IGI', 'IEP', 'IPI']:
                    go_dict[prot_id] = go_dict.get(prot_id, []) + [go]
        message = 'Extracting GO for {} finished!'.format(go_file)
        utils.print_log(message, mode='end_progress')
        utils.write_json(go_dict, out_file)
        return go_dict
def __init__(self, network, batch_env, args):
    logging.debug('PAAC init is started')
    self.checkpoint_dir = join_path(args.debugging_folder, self.CHECKPOINT_SUBDIR)
    ensure_dir(self.checkpoint_dir)

    checkpoint = self._load_latest_checkpoint(self.checkpoint_dir)
    self.last_saving_step = checkpoint['last_step'] if checkpoint else 0

    self.global_step = self.last_saving_step
    self.network = network
    self.batch_env = batch_env
    self.optimizer = optim.RMSprop(
        self.network.parameters(),
        lr=args.initial_lr,
        eps=args.e,
    )  # RMSprop defaults: momentum=0., centered=False, weight_decay=0

    if checkpoint:
        logging.info('Restoring agent variables from previous run')
        self.network.load_state_dict(checkpoint['network_state_dict'])
        self.optimizer.load_state_dict(checkpoint['optimizer_state_dict'])

    self.lr_scheduler = LinearAnnealingLR(self.optimizer, args.lr_annealing_steps)
    # pytorch documentation says:
    # In most cases it's better to use the CUDA_VISIBLE_DEVICES environment variable.
    # Therefore to specify a particular gpu one should use CUDA_VISIBLE_DEVICES.
    self.device = self.network._device

    self.gamma = args.gamma  # future rewards discount factor
    self.entropy_coef = args.entropy_regularisation_strength
    self.loss_scaling = args.loss_scaling  # 5.
    self.critic_coef = args.critic_coef  # 0.25
    self.total_steps = args.max_global_steps
    self.rollout_steps = args.rollout_steps
    self.clip_norm = args.clip_norm
    self.num_emulators = batch_env.num_emulators

    self.evaluate = None
    self.reshape_r = lambda r: np.clip(r, -1., 1.)
    self.compute_returns = n_step_returns
    if args.clip_norm_type == 'global':
        self.clip_gradients = nn.utils.clip_grad_norm_
    elif args.clip_norm_type == 'local':
        self.clip_gradients = utils.clip_local_grad_norm
    elif args.clip_norm_type == 'ignore':
        self.clip_gradients = lambda params, _: utils.global_grad_norm(params)
    else:
        raise ValueError('Norm type({}) is not recognized'.format(
            args.clip_norm_type))

    logging.debug('Paac init is done')
    self.curr_learning = True
    self.starting_length = [[5, 10], [5, 10], [5, 10], [5, 10],
                            [15, 20], [15, 20], [15, 20], [15, 20]]
    # 1. 5-10; 2. 15-20; 3. 40-50; 4. 90-100
    self.checking_length = [15, 20]
def backup_instance(request, instance_id_list, image_path):
    for instance_id in instance_id_list:
        # Create a snapshot of an instance
        snapshot_name = 'snapshot_of_' + instance_id
        image_id = nova.snapshot_create(request, instance_id, snapshot_name)
        # Wait for the snapshot to become active
        image = glance.image_get(request, image_id)
        while image.status != 'active':
            time.sleep(5)
            image = glance.image_get(request, image_id)
        # Download image data
        image_data = glance.glanceclient(request).images.data(image.id)
        image_filename = u.join_path(image_path, instance_id + '.raw')
        with io.open(image_filename, 'wb') as f:
            for chunk in image_data:
                f.write(chunk)
        glance.image_delete(request, image.id)
        # TODO: Transform image format from raw to qcow2
        os.system('qemu-img convert -f raw -O qcow2 ' + image_filename +
                  ' ' + image_filename + '.qcow2')
def copy_iso(self, iso_path, associated_task):
    if not iso_path:
        return
    iso_name = os.path.basename(iso_path)
    dest = join_path(self.info.install_dir, "installation.iso")
    check_iso = associated_task.add_subtask(
        self.check_iso,
        description = _("Checking installation files"))
    if check_iso(iso_path):
        if os.path.dirname(iso_path) == dest \
        or os.path.dirname(iso_path) == self.info.backup_dir:
            move_iso = associated_task.add_subtask(
                shutil.move,
                description = _("Copying installation files"))
            log.debug("Moving %s > %s" % (iso_path, dest))
            move_iso(iso_path, dest)
        else:
            copy_iso = associated_task.add_subtask(
                copy_file,
                description = _("Copying installation files"))
            log.debug("Copying %s > %s" % (iso_path, dest))
            copy_iso(iso_path, dest)
        self.info.cd_path = None
        self.info.iso_path = dest
        return True
def _iter_items(cls, repo, common_path = None):
    if common_path is None:
        common_path = cls._common_path_default
    rela_paths = set()

    # walk loose refs
    # Currently we do not follow links
    for root, dirs, files in os.walk(join_path_native(repo.git_dir, common_path)):
        if 'refs/' not in root: # skip non-refs subfolders
            refs_id = [ i for i,d in enumerate(dirs) if d == 'refs' ]
            if refs_id:
                dirs[0:] = ['refs']
        # END prune non-refs folders

        for f in files:
            abs_path = to_native_path_linux(join_path(root, f))
            rela_paths.add(abs_path.replace(to_native_path_linux(repo.git_dir) + '/', ""))
        # END for each file in root directory
    # END for each directory to walk

    # read packed refs
    for sha, rela_path in cls._iter_packed_refs(repo):
        if rela_path.startswith(common_path):
            rela_paths.add(rela_path)
        # END relative path matches common path
    # END packed refs reading

    # return paths in sorted order
    for path in sorted(rela_paths):
        try:
            yield cls.from_path(repo, path)
        except ValueError:
            continue
def gephi_network_aligned_comp(alignment, bio_net, file_path=cs.GEPHI_PATH):
    file_name = '{}_{}-{}{}_comparative_alignment_{}.gdf'.format(
        bio_net.org1.org_id, bio_net.org2.org_id, bio_net.similarity_mode,
        bio_net.status, alignment.method)
    with open(utils.join_path(file_path, file_name), 'w') as gdf:
        gdf.write('nodedef>name VARCHAR,label VARCHAR,level INT\n')
        for pair in alignment.pairs:
            gdf.write('a{}b{},{}/{},1\n'.format(
                pair[0],
                pair[1],
                bio_net.org1.id_to_node[pair[0]],
                bio_net.org2.id_to_node[pair[1]],
            ))
        for nid in bio_net.org1.id_to_node:
            gdf.write('a{},{},0\n'.format(nid, bio_net.org1.id_to_node[nid]))
        for nid in bio_net.org2.id_to_node:
            gdf.write('b{},{},2\n'.format(nid, bio_net.org2.id_to_node[nid]))
        gdf.write('edgedef>node1 VARCHAR,node2 VARCHAR,'
                  'directed BOOLEAN,weight DOUBLE,visible BOOLEAN\n')
        for edge in alignment.pair_edges:
            gdf.write('a{}b{},a{}b{},false,1.0,true\n'.format(
                edge[0][0], edge[0][1], edge[1][0], edge[1][1]))
        for edge in bio_net.org1.edges:
            gdf.write('a{},a{},false,1.0,true\n'.format(edge[0], edge[1]))
        for edge in bio_net.org2.edges:
            gdf.write('b{},b{},false,1.0,true\n'.format(edge[0], edge[1]))
        for pair in alignment.pairs:
            gdf.write('a{}b{},a{},false,1000.0,false\n'.format(
                pair[0], pair[1], pair[0]))
            gdf.write('a{}b{},b{},false,1000.0,false\n'.format(
                pair[0], pair[1], pair[1]))
def calculate_rel_blast_matrix(self):
    file_name = '{}-{}-{}_scores.npy'.format(self.org1.org_id,
                                             self.org2.org_id,
                                             'rel_blast')
    if utils.file_exists(file_name, path_name=cs.NP_PATH):
        message = 'using saved relative blast from {}'.format(file_name)
        utils.print_log(message)
        self.blast_sim_n_rel = utils.load_np(self.np_file)
        self.blast_sim = utils.load_np(self.raw_np_file)
        # self.blast_sim = interface.blast_xml_to_matrix(self)
    else:
        # blast similarity measure
        blast_sim = interface.blast_xml_to_matrix(self)
        self.blast_sim = blast_sim
        blast_1 = interface.self_blast_xml_to_vec(self.org1)
        blast_1[blast_1 == 0] = 1
        blast_1 = np.array([blast_1]).repeat(self.org2.node_count, axis=0).T
        blast_2 = interface.self_blast_xml_to_vec(self.org2)
        blast_2[blast_2 == 0] = 1
        blast_2 = np.array([blast_2]).repeat(self.org1.node_count, axis=0)
        blast_sim = blast_sim.reshape(self.org1.node_count,
                                      self.org2.node_count)
        blast_sim = blast_sim / np.power((blast_1 * blast_2), 0.5)
        blast_sim = blast_sim.reshape(self.dim_sim)
        # normalize blast matrix
        self.blast_sim_n_rel = utils.normalize(blast_sim)
        np_file = utils.join_path(cs.NP_PATH, file_name)
        utils.write_np(self.blast_sim_n_rel, np_file)
def check_cd(self, cd_path, associated_task=None):
    associated_task.description = _("Checking CD %s") % cd_path
    if not self.info.distro.is_valid_cd(cd_path, check_arch=False):
        return False
    self.set_distro_from_arch(cd_path)
    #if self.info.skip_md5_check: return True
    md5sums_file = join_path(cd_path, self.info.distro.md5sums)
    for rel_path in self.info.distro.get_required_files():
        if rel_path == self.info.distro.md5sums:
            continue
        check_file = associated_task.add_subtask(self.check_file)
        file_path = join_path(cd_path, rel_path)
        if not check_file(file_path, rel_path, md5sums_file):
            return False
    return True
def use_cd(self, associated_task):
    if self.cd_path:
        extract_iso = associated_task.add_subtask(
            copy_file,
            description=_("Extracting files from %s") % self.cd_path)
        self.info.iso_path = join_path(self.info.install_dir, "installation.iso")
        try:
            extract_iso(self.cd_path, self.info.iso_path)
        except Exception, err:
            log.error(err)
            self.info.cd_path = None
            self.info.iso_path = None
            return False
        self.info.cd_path = self.cd_path
        #This will often fail before release as the CD might not match the latest daily ISO
        check_iso = associated_task.add_subtask(
            self.check_iso,
            description=_("Checking installation files"))
        if not check_iso(self.info.iso_path):
            subversion = self.info.cd_distro.get_info(self.info.cd_path)[2]
            if subversion.lower() in ("alpha", "beta", "release candidate"):
                log.error(
                    "CD check failed, but ignoring because CD is %s" % subversion)
            else:
                self.info.cd_path = None
                self.info.iso_path = None
                return False
        return True
def is_file_exist(path):
    if not u.is_path_exist(path):
        return False
    for name in file_name:
        if not u.is_path_exist(u.join_path(path, name)):
            return False
    return True
def get_uniprot_map(org, in_path=cs.STRING_PATH):
    file_name = '{}.uniprot.tsv'.format(org)
    # use the in_path argument so callers can override the default STRING path
    with open(utils.join_path(in_path, file_name), 'r') as mapping:
        # uniprot to string id mapping dict
        uni2sdb = {}
        while True:
            line = mapping.readline()
            line = line.strip()
            if line == '':
                break
            elif line[0] == '#':
                continue
            else:
                words = line.split('\t')
                org_id = words[0]
                uniprot = words[1]
                uniprot_ac, uniprot_id = uniprot.split('|')
                str_id = words[2]
                if org != org_id:
                    raise Exception('org id does not match for mapping!')
                uni2sdb[uniprot_ac] = '{}.{}'.format(org, str_id)
    return uni2sdb
def data(self):
    if self._data:
        return self._data
    self._data = BaseGame.from_file(
        join_path(DATA_FOLDER, self._dataset_path))
    return self._data
def sim_degree(bio_net, file_path=cs.SVG_PATH):
    file_name = '{}-{}-sim<{}>-degree.svg'.format(bio_net.org1.org_id,
                                                  bio_net.org2.org_id,
                                                  bio_net.similarity_mode)
    data = {}
    data["degree geometric average"] = []
    data["normal sim score"] = []
    for i in range(bio_net.org1.node_count):
        for j in range(bio_net.org2.node_count):
            if (bio_net.similarity[bio_net.v_ind(i, j)] > cs.MIN_VIS_CUT):
                data["degree geometric average"].append(
                    (bio_net.org1.degree[i] * bio_net.org2.degree[j])**0.5)
                data["normal sim score"].append(
                    bio_net.similarity[bio_net.v_ind(i, j)])
    df = pd.DataFrame(data)
    im = sns.lmplot(x="degree geometric average", y="normal sim score",
                    data=df, scatter_kws={"s": 5}, fit_reg=False)
    mx = max(bio_net.similarity)
    im.set(ylim=((-mx * cs.NORM_MARGIN), (mx * (1 + cs.NORM_MARGIN))))
    svg_file = utils.join_path(file_path, file_name)
    sns.plt.savefig(svg_file)
def check_cd(self, cd_path, associated_task=None):
    associated_task.description = _("Checking CD %s") % cd_path
    if not self.info.distro.is_valid_cd(cd_path, check_arch=False):
        return False
    self.set_distro_from_arch(cd_path)
    if self.info.skip_md5_check:
        return True
    md5sums_file = join_path(cd_path, self.info.distro.md5sums)
    for rel_path in self.info.distro.get_required_files():
        if rel_path == self.info.distro.md5sums:
            continue
        check_file = associated_task.add_subtask(self.check_file)
        file_path = join_path(cd_path, rel_path)
        if not check_file(file_path, rel_path, md5sums_file):
            return False
    return True
def find_any_iso(self):
    ''' look for USB keys with ISO or pre specified ISO '''
    #Use pre-specified ISO
    if self.info.iso_path and os.path.exists(self.info.iso_path):
        log.debug("Checking pre-specified ISO %s" % self.info.iso_path)
        for distro in self.info.distros:
            if distro.is_valid_iso(self.info.iso_path, self.info.check_arch):
                self.info.cd_path = None
                return self.info.iso_path, distro
    #Search USB devices
    log.debug("Searching ISOs on USB devices")
    for path in self.get_usb_search_paths():
        path = join_path(path, '*.iso')
        isos = glob.glob(path)
        #Sort by modification time, newest first
        def my_cmp(E1, E2):
            return -cmp(os.path.getmtime(E1), os.path.getmtime(E2))
        isos.sort(my_cmp)
        #The following syntax is only supported from Python 2.4 onwards
        #isos.sort(key=lambda x: os.path.getmtime(x))
        for iso in isos:
            for distro in self.info.distros:
                if distro.is_valid_iso(iso, self.info.check_arch):
                    return iso, distro
    return None, None
def find_iso(self, associated_task=None):
    log.debug("Searching for local ISO")
    for path in self.get_iso_search_paths():
        path = join_path(path, '*.iso')
        isos = glob.glob(path)
        for iso in isos:
            if self.info.distro.is_valid_iso(iso, self.info.check_arch):
                return iso
def save_images_tag(request, image_ids, path):
    path = u.join_path(path, "images.json")
    images = []
    for image_id in image_ids:
        image_info = sahara.image_get(request, image_id).to_dict()
        images.append(image_info)
    images_info = {"images": images}
    u.save_to_json(path, images_info)
def load_config_project():
    global project_config
    if os.path.isfile(os.path.abspath("./dev.json")):
        project_config = inout.read_json("./dev.json")
    else:
        project_config = inout.read_json(
            utils.join_path(ROOT_DIR, "res/config.json"))
    print(json.dumps(project_config, indent=4))
def save_flavors_info(request, flavors_ids, path):
    path = u.join_path(path, "flavors.json")
    flavors = []
    for f_id in flavors_ids:
        flavor_info = nova.flavor_get(request, f_id).to_dict()
        flavors.append(flavor_info)
    flavors_info = {"flavors": flavors}
    u.save_to_json(path, flavors_info)
def __init__(self, application):
    self.application = application
    self.info = application.info
    #~ if hasattr(sys,'frozen') and sys.frozen:
    #~     root_dir = dirname(abspath(sys.executable))
    #~ else:
    #~ root_dir = ''
    #~ self.info.root_dir = abspath(root_dir)
    self.info.temp_dir = join_path(self.info.root_dir, 'temp')
    self.info.data_dir = join_path(self.info.root_dir, 'data')
    self.info.bin_dir = join_path(self.info.root_dir, 'bin')
    self.info.image_dir = join_path(self.info.data_dir, 'images')
    self.info.translations_dir = join_path(self.info.root_dir, 'translations')
    self.info.trusted_keys = join_path(self.info.data_dir, 'trustedkeys.gpg')
    self.info.application_icon = join_path(self.info.image_dir,
                                           self.info.application_name.capitalize() + ".ico")
    self.info.icon = self.info.application_icon
    self.info.iso_md5_hashes = {}
    log.debug('data_dir=%s' % self.info.data_dir)
    log.debug('bin_dir=%s' % self.info.bin_dir)
    log.debug('image_dir=%s' % self.info.image_dir)
    log.debug('application_icon=%s' % self.info.application_icon)
    if self.info.locale:
        locale.setlocale(locale.LC_ALL, self.info.locale)
        log.debug('user defined locale = %s' % self.info.locale)
    gettext.install(self.info.application_name,
                    localedir=self.info.translations_dir,
                    unicode=True)
def __init__(self, application):
    self.application = application
    self.info = application.info
    #~ if hasattr(sys,'frozen') and sys.frozen:
    #~ root_dir = dirname(abspath(sys.executable))
    #~ else:
    #~ root_dir = ''
    #~ self.info.root_dir = abspath(root_dir)
    self.info.temp_dir = join_path(self.info.root_dir, 'temp')
    self.info.data_dir = join_path(self.info.root_dir, 'data')
    self.info.bin_dir = join_path(self.info.root_dir, 'bin')
    self.info.image_dir = join_path(self.info.data_dir, 'images')
    self.info.translations_dir = join_path(self.info.root_dir, 'translations')
    self.info.trusted_keys = join_path(self.info.data_dir, 'trustedkeys.gpg')
    self.info.application_icon = join_path(
        self.info.image_dir,
        self.info.application_name.capitalize() + ".ico")
    self.info.icon = self.info.application_icon
    self.info.iso_md5_hashes = {}
    log.debug('data_dir=%s' % self.info.data_dir)
    if self.info.locale:
        locale.setlocale(locale.LC_ALL, self.info.locale)
        log.debug('user defined locale = %s' % self.info.locale)
    gettext.install(self.info.application_name,
                    localedir=self.info.translations_dir,
                    unicode=True,
                    names=['ngettext'])
def save_cluster_template_info(request, ct_id, path):
    ct_info = sahara.cluster_template_get(request, ct_id).to_dict()
    path = u.join_path(path, "clusterTemplate.json")
    node_groups_template_ids = []
    _node_groups = ct_info['node_groups']
    for ng in _node_groups:
        node_groups_template_ids.append(ng['node_group_template_id'])
    u.save_to_json(path, ct_info)
    return node_groups_template_ids
def create_dir_structure(self, associated_task=None):
    self.info.disks_dir = join_path(self.info.target_dir, "disks")
    self.info.backup_dir = self.info.target_dir + "-backup"
    self.info.install_dir = join_path(self.info.target_dir, "install")
    self.info.install_boot_dir = join_path(self.info.install_dir, "boot")
    #self.info.disks_boot_dir = join_path(self.info.disks_dir, "boot")
    dirs = [
        self.info.target_dir,
        self.info.disks_dir,
        self.info.install_dir,
        self.info.install_boot_dir,
        #self.info.disks_boot_dir,
        #join_path(self.info.disks_boot_dir, "grub"),
        join_path(self.info.install_boot_dir, "grub"),]
    for d in dirs:
        if not os.path.isdir(d):
            log.debug("Creating dir %s" % d)
            os.mkdir(d)
        else:
            log.info("%s exists, will not be created" % d)
def modify_grub_configuration(self):
    template_file = join_path(self.info.data_dir, 'grub.install.cfg')
    template = read_file(template_file)
    if self.info.run_task == "cd_boot":
        isopath = ""
    ## TBD at the moment we are extracting the ISO, not the CD content
    #~ elif self.info.cd_path:
    #~     isopath = unix_path(self.info.cd_path)
    elif self.info.iso_path:
        isopath = unix_path(self.info.iso_path)
    if self.info.target_drive.is_fat():
        rootflags = "rootflags=sync"
    else:
        rootflags = "rootflags=syncio"
    dic = dict(
        custom_installation_dir = unix_path(self.info.custominstall),
        iso_path = isopath,
        keyboard_variant = self.info.keyboard_variant,
        keyboard_layout = self.info.keyboard_layout,
        locale = self.info.locale,
        accessibility = self.info.accessibility,
        kernel = unix_path(self.info.kernel),
        initrd = unix_path(self.info.initrd),
        rootflags = rootflags,
        title1 = "Completing the Linux Mint installation.",
        title2 = "For more installation boot options, press `ESC' now...",
        normal_mode_title = "Normal mode",
        safe_graphic_mode_title = "Safe graphic mode",
        acpi_workarounds_title = "ACPI workarounds",
        verbose_mode_title = "Verbose mode",
        demo_mode_title = "Demo mode",
        )
    content = template
    for k, v in dic.items():
        k = "$(%s)" % k
        content = content.replace(k, v)
    if self.info.run_task == "cd_boot":
        content = content.replace(" automatic-ubiquity", "")
        content = content.replace(" iso-scan/filename=", "")
    grub_config_file = join_path(self.info.install_boot_dir, "grub", "grub.cfg")
    write_file(grub_config_file, content)
def create_preseed_diskimage(self):
    source = join_path(self.info.data_dir, 'preseed.disk')
    template = read_file(source)
    password = md5_password(self.info.password)
    dic = dict(
        timezone = self.info.timezone,
        password = password,
        keyboard_variant = self.info.keyboard_variant,
        keyboard_layout = self.info.keyboard_layout,
        locale = self.info.locale,
        user_full_name = self.info.user_full_name,
        username = self.info.username)
    for k, v in dic.items():
        k = "$(%s)" % k
        template = template.replace(k, v)
    preseed_file = join_path(self.info.install_dir, "preseed.cfg")
    write_file(preseed_file, template)
    source = join_path(self.info.data_dir, "wubildr-disk.cfg")
    target = join_path(self.info.install_dir, "wubildr-disk.cfg")
    copy_file(source, target)
def download_diskimage(self, diskimage, associated_task=None):
    proxy = self.info.web_proxy
    save_as = join_path(self.info.disks_dir, diskimage.split("/")[-1])
    if os.path.isfile(save_as):
        os.unlink(save_as)
    try:
        download = associated_task.add_subtask(downloader.download,
                                               is_required=False)
        self.dimage_path = download(diskimage, save_as, web_proxy=proxy)
        return self.dimage_path is not None
    except Exception:
        log.exception("Cannot download disk image file %s:" % diskimage)
        return False
def extract_kernel(self):
    bootdir = self.info.install_boot_dir
    # Extract kernel, initrd, md5sums
    if self.info.cd_path:
        log.debug("Copying files from CD %s" % self.info.cd_path)
        for src in [
                join_path(self.info.cd_path, self.info.distro.md5sums),
                join_path(self.info.cd_path, self.info.distro.kernel),
                join_path(self.info.cd_path, self.info.distro.initrd),]:
            shutil.copy(src, bootdir)
    elif self.info.iso_path:
        log.debug("Extracting files from ISO %s" % self.info.iso_path)
        self.extract_file_from_iso(self.info.iso_path, self.info.distro.md5sums, output_dir=bootdir)
        self.extract_file_from_iso(self.info.iso_path, self.info.distro.kernel, output_dir=bootdir)
        self.extract_file_from_iso(self.info.iso_path, self.info.distro.initrd, output_dir=bootdir)
    else:
        raise Exception("Could not retrieve the required installation files")
    # Check the files
    log.debug("Checking kernel, initrd and md5sums")
    self.info.kernel = join_path(bootdir, os.path.basename(self.info.distro.kernel))
    self.info.initrd = join_path(bootdir, os.path.basename(self.info.distro.initrd))
    md5sums = join_path(bootdir, os.path.basename(self.info.distro.md5sums))
    paths = [
        (self.info.kernel, self.info.distro.kernel),
        (self.info.initrd, self.info.distro.initrd),]
    for file_path, rel_path in paths:
        if not self.check_file(file_path, rel_path, md5sums):
            raise Exception("File %s is corrupted" % file_path)
def find_iso(self, associated_task=None):
    log.debug("Searching for local ISO")
    for path in self.get_iso_search_paths():
        path = join_path(path, '*.iso')
        #glob(path) finds the ISO files under this path
        isos = glob.glob(path)
        #Sort by modification time, newest first
        def my_cmp(E1, E2):
            return -cmp(os.path.getmtime(E1), os.path.getmtime(E2))
        isos.sort(my_cmp)
        #The following syntax is only supported from Python 2.4 onwards
        #isos.sort(key=lambda x: os.path.getmtime(x))
        for iso in isos:
            if self.info.distro.is_valid_iso(iso, self.info.check_arch):
                return iso
def check_updates(self, path = None):
    if path is None:
        path = utils.join_path("", "data")
    if os.path.isdir(path):
        for f in os.listdir(path):
            self.check_updates(os.path.join(path, f))
    else:
        resname = path.partition("/data/")[2]
        mtime = os.stat(path).st_mtime
        if resname not in self.update_times:
            return
        if self.update_times[resname] < mtime:
            logging.notice("Reloading resource '%s'" % resname)
            self.resources["bitmap"][resname]._reload()
            self.update_times[resname] = mtime
def download_iso(self, associated_task=None):
    log.debug("Could not find any ISO or CD, downloading one now")
    self.info.cd_path = None
    if not self.info.distro.metalink:
        get_metalink = associated_task.add_subtask(
            self.get_metalink,
            description=_("Downloading information on installation files"))
        get_metalink()
        if not self.info.distro.metalink:
            raise Exception("Cannot download the metalink and therefore the ISO")
    file = self.info.distro.metalink.files[0]
    save_as = join_path(self.info.install_dir, file.name)
    urls = self.select_mirrors(file.urls)
    for url in urls[:5]:
        if url.type == 'bittorrent':
            if self.info.no_bittorrent:
                continue
            if os.path.exists(save_as):
                try:
                    os.unlink(save_as)
                except OSError:
                    logging.exception('Could not remove: %s' % save_as)
            btdownload = associated_task.add_subtask(
                btdownloader.download,
                is_required = False)
            iso_path = btdownload(url.url, save_as)
        else:
            if os.path.exists(save_as):
                try:
                    os.unlink(save_as)
                except OSError:
                    logging.exception('Could not remove: %s' % save_as)
            download = associated_task.add_subtask(
                downloader.download,
                is_required = True)
            iso_path = download(url.url, save_as, web_proxy=self.info.web_proxy)
        if iso_path:
            check_iso = associated_task.add_subtask(
                self.check_iso,
                description = _("Checking installation files"))
            if check_iso(iso_path):
                self.info.iso_path = iso_path
                return True
            else:
                os.unlink(iso_path)
def find_any_iso(self):
    """ look for USB keys with ISO or pre specified ISO """
    # Use pre-specified ISO
    if self.info.iso_path and os.path.exists(self.info.iso_path):
        log.debug("Checking pre-specified ISO %s" % self.info.iso_path)
        for distro in self.info.distros:
            if distro.is_valid_iso(self.info.iso_path, self.info.check_arch):
                self.info.cd_path = None
                return self.info.iso_path, distro
    # Search USB devices
    log.debug("Searching ISOs on USB devices")
    for path in self.get_usb_search_paths():
        path = join_path(path, "*.iso")
        isos = glob.glob(path)
        for iso in isos:
            for distro in self.info.distros:
                if distro.is_valid_iso(iso, self.info.check_arch):
                    return iso, distro
    return None, None
def find_any_iso(self):
    ''' look for local ISOs or pre specified ISO '''
    #Use pre-specified ISO
    if self.info.iso_path \
    and os.path.exists(self.info.iso_path):
        log.debug("Checking pre-specified ISO %s" % self.info.iso_path)
        for distro in self.info.distros:
            if distro.is_valid_iso(self.info.iso_path, self.info.check_arch):
                self.info.cd_path = None
                return self.info.iso_path, distro
    #Search local ISOs
    log.debug("Searching for local ISOs")
    for path in self.get_iso_search_paths():
        path = join_path(path, '*.iso')
        isos = glob.glob(path)
        for iso in isos:
            for distro in self.info.distros:
                if distro.is_valid_iso(iso, self.info.check_arch):
                    return iso, distro
    return None, None
def get_template_path(self):
    template_path = self.application.settings.get("template_path")
    return join_path(template_path, 'admin')
def __init__(self):
    curr_path = utils.get_dir_path(__file__)
    self.help_dir = utils.join_path(curr_path, HELP_TOPICS_DIRNAME)
def get_distros(self):
    isolist_path = join_path(self.info.data_dir, 'isolist.ini')
    distros = self.parse_isolist(isolist_path)
    return distros
def create_preseed_cdboot(self):
    source = join_path(self.info.data_dir, 'preseed.cdboot')
    target = join_path(self.info.custominstall, "preseed.cfg")
    copy_file(source, target)