def generate_project(self):
    # Generate the <project>.kdev4 file.
    with open(join_paths(self.mesoninfo.builddir,
                         f'{self.projectinfo.descriptive_name}.kdev4'), 'w') as file:
        file.write('[Project]\n')
        file.write('CreatedFrom=meson.build\n')
        file.write('Manager=KDevMesonManager\n')
        file.write(f'Name={self.projectinfo.descriptive_name}\n')

    # Make the .kdev/ directory.
    if not os.path.exists(join_paths(self.mesoninfo.builddir, '.kdev')):
        os.mkdir(join_paths(self.mesoninfo.builddir, '.kdev'))

    # Generate the .kdev/<project>.kdev4 file.
    with open(join_paths(self.mesoninfo.builddir, '.kdev',
                         f'{self.projectinfo.descriptive_name}.kdev4'), 'w') as file:
        file.write('[Buildset]\n')
        file.write(f'BuildItems=@Variant({self._variant()})\n\n')
        file.write('[MesonManager]\n')
        file.write('Current Build Directory Index=0\n')
        file.write('Number of Build Directories=1\n\n')
        file.write('[MesonManager][BuildDir 0]\n')
        file.write('Additional meson arguments=\n')
        file.write(f'Build Build Path={self.mesoninfo.builddir}\n')
        file.write(f'Meson Generator Backend={self.buildoptions.combo("backend").value}\n')
        file.write(f'Meson executable={self.meson.exe}\n')
def handle(self, *args, **options):
    hero_info_path = join_paths(settings.ASSETS_PATH, 'characters_info.csv')
    hero_stats_path = join_paths(settings.ASSETS_PATH, 'characters_stats.csv')
    info_fields = ['id', 'name', 'alignment', 'gender', 'eye_color', 'race',
                   'hair_color', 'publisher', 'skin_color', 'height', 'weight']
    stat_fields = ['name', 'alignment', 'intelligence', 'strength', 'speed',
                   'durability', 'power', 'combat', 'total']

    def update_hero(hero):
        # TODO: Check for dirty field values such as empty strings and dashes.
        hero.pop('id', None)
        hero.pop('total', None)
        exists = Hero.objects.filter(name=hero['name']).update(**hero)
        if exists:
            return Hero.objects.filter(name=hero['name']).first()
        # update_or_create returns an (object, created) tuple; keep the object.
        hero_obj, _created = Hero.objects.update_or_create(**hero)
        return hero_obj

    try:
        with open(hero_info_path) as file:
            heroes_data = csv.DictReader(file, fieldnames=info_fields)
            heroes = list(map(update_hero, heroes_data))
            self.stdout.write(self.style.SUCCESS(f'Successfully created {len(heroes)} heroes'))
        with open(hero_stats_path) as file:
            heroes_data = csv.DictReader(file, fieldnames=stat_fields)
            heroes = list(map(update_hero, heroes_data))
            self.stdout.write(self.style.SUCCESS(f'Successfully updated {len(heroes)} heroes'))
    except (CommandError, TypeError, ValueError, KeyError) as exc:
        raise exc
def main(input_dir, output_dir):
    """
    The main function of this script.

    Parameters
    ----------
    input_dir : str
        The location of the input directory
    output_dir : str
        The location of the output directory

    Returns
    -------
    None
    """
    # bfactors_exp = pd.read_csv(join_paths("data/external", "bfactors.exp.csv"))
    bfactors_md = import_md_bfactors(
        join_paths("data/05-analysis", "bfactors.CA.agr"))
    bfactors_enm = import_enm_bfactors(
        join_paths("data/external", "mode.bfactors"))

    # Renumber residues.
    # For the future, use PDB files processed for all-atom MD
    # when running ENM simulations.
    bfactors = bfactors_enm[[
        'residue_number', 'bfactor_exp', 'bfactor_scaled'
    ]].copy()
    bfactors.rename(columns={'bfactor_scaled': 'bfactor_enm'}, inplace=True)
    bfactors['bfactor_md'] = bfactors_md['bfactor'][
        bfactors_md['residue_number'].isin(bfactors['residue_number'])]

    bfactors.to_csv(join_paths(output_dir, "bfactors.csv"), index=None)
    return None
def create_input_files(self, weak_scaling=False):
    input_dir_path = join_paths(self.output_dir_path, 'input_data')
    os.mkdir(input_dir_path)
    for n_points_ in self.run_params['n_points']:
        for img_file in self.run_params['img_files']:
            for scale in (self.run_params['n_cores'] if weak_scaling else [1]):
                if weak_scaling:
                    n_points = n_points_ * scale
                else:
                    n_points = n_points_
                print('\nCreating points for {np} points using {img}.'.format(
                    np=n_points, img=img_file))
                point_file_name = self.create_input_filename(
                    n_points, img_file, 'dat')
                point_creator = ImagePointsCreator(n_points, img_file,
                                                   point_file_name,
                                                   input_dir_path)
                point_creator.create_points_pipeline(
                    save_png=self.run_config['save_png_input'],
                    clusters=self.run_params['clusters'])
            shutil.copy(join_paths(project_path, 'Input', img_file),
                        input_dir_path)
def create_docker_file(self):
    directory = join_paths(self._workspace, "template", "server")
    makedirs(directory)
    path = join_paths(directory, "Dockerfile")
    with open(path, "w") as docker_file:
        docker_file.write("FROM debian:jessie\n"
                          "mem=XXX")
def main(input_dir, output_dir):
    """
    The main function of this script.

    Parameters
    ----------
    input_dir : str
        The location of the input directory
    output_dir : str
        The location of the output directory

    Returns
    -------
    None
    """
    summary_filepaths = glob.glob(join_paths(input_dir, "summary.*"))
    summary_data = import_summary_data(summary_filepaths)
    summary_data['rmsd'] = 0

    rmsd_path = join_paths("data/05-analysis", "rmsd.CA.agr")
    rmsd_data = import_rmsd(rmsd_path)
    rmsd_data.index += 250  # Heating and equilibration time
    summary_data.loc[rmsd_data.index, 'rmsd'] = rmsd_data['rmsd'].to_numpy()

    summary_data.to_csv(join_paths(output_dir, "summary_mdout.csv"))
    return None
def get_file_paths(self):
    # >>> import glob
    # >>> glob.glob('./[0-9].*')
    # ['./1.gif', './2.txt']
    # >>> glob.glob('*.gif')
    # ['1.gif', 'card.gif']
    # >>> glob.glob('?.gif')
    # ['1.gif']
    for p in self.paths:
        only_files = [
            join_paths(p, f) for f in listdir(p) if isfile(join_paths(p, f))
        ]
        only_videos = list(
            filter(lambda f: '.mp4' in f.lower() or '.mov' in f.lower(),
                   only_files))
        only_images = list(
            filter(lambda f: '.jpg' in f.lower() or '.jpeg' in f.lower(),
                   only_files))
        only_wavs = list(filter(lambda f: '.wav' in f.lower(), only_files))
        self.loaded_wavs += only_wavs
        # self.loaded_images += only_images
        # self.loaded_videos += only_videos
    # self.all_files = self.loaded_images + self.loaded_videos
    self.all_files = self.loaded_wavs
    shuffle(self.all_files)
def batch_transform_videos(self, output_path, seconds=5, resize=False, zoom=1.35):
    for p in self.loaded_videos:
        file_name = p.split('/')[-1].split('.')[0]
        self.log('progress', 'working on: {}'.format(file_name))
        self.loaded_video = cv.VideoCapture(p)
        self.total_video_frames = int(
            self.loaded_video.get(cv.CAP_PROP_FRAME_COUNT))
        frame_count = 0
        written_frame_count = 0
        frame, current_frame = self.frame_queue.get_frame()
        segment_count = 0
        output_name = '{}_{}.mp4'.format(file_name, segment_count)
        segment_frames = seconds * self.fps
        if (self.total_video_frames / self.fps) / 60 > 4:
            segment_frames = 30 * self.fps
        self.video_writer = cv.VideoWriter(
            join_paths(output_path, output_name), self.fourcc, self.fps,
            (frame.shape[1], frame.shape[0]), self.color_video)
        while frame is not None:
            if resize and zoom:
                shape = frame.shape
                if shape[0] > shape[1]:
                    ratio = self.master_width / shape[1]
                else:
                    ratio = self.master_height / shape[0]
                (h, w) = frame.shape[:2]
                center = (w / 2, h / 2)
                # Upscale, then scale about the center (rotation angle is 0).
                frame = cv.resize(
                    frame, (self.master_width * 3, 3 * self.master_height),
                    interpolation=cv.INTER_LINEAR)
                M = cv.getRotationMatrix2D(center, 0, ratio)
                upscale = cv.warpAffine(frame, M, (w, h))
                (r_h, r_w) = upscale.shape[:2]
                frame = cv.resize(upscale,
                                  (self.master_width, self.master_height),
                                  interpolation=cv.INTER_LINEAR)
            # Was `if f_c == 0:` (undefined name); print the shape of the
            # first frame only, and keep the counter current.
            if frame_count == 0:
                print(frame.shape)
            self.video_writer.write(frame)
            frame_count += 1
            if current_frame % segment_frames == 0 and current_frame + (
                    segment_frames / 2) < self.total_video_frames:
                self.destroy_video_writer()
                segment_count += 1
                output_name = '{}_{}.mp4'.format(file_name, segment_count)
                self.video_writer = cv.VideoWriter(
                    join_paths(output_path, output_name), self.fourcc,
                    self.fps, (frame.shape[1], frame.shape[0]),
                    self.color_video)
            frame, current_frame = self.frame_queue.get_frame()
        self.destroy_video_writer()
        if self.loaded_video:
            self.loaded_video.release()
def set_output_dir(self, custom_dir):
    if custom_dir is not None:
        if os.path.exists(custom_dir):
            return join_paths(custom_dir, self.output_dir_name)
        else:
            print("Custom dir {} does not exist!".format(custom_dir))
    else:
        return join_paths(project_path, 'Output', self.output_dir_name)
def run(cls, programs: set[str], overrides: str) -> None:
    bindir = join_paths(overrides, "bin")
    makedirs(bindir, 0o0755, exist_ok=True)
    for program in programs:
        if which(program):
            logging.debug("Create symlink for %s", program)
            symlink(FIREJAIL, join_paths(bindir, program))
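# Hedged usage sketch for run() above (the wrapper class name, program set,
# and overrides path below are made up): after the call, prepending
# <overrides>/bin to PATH makes the symlinked programs resolve to FIREJAIL
# instead of the real binaries.
#
#     SomeWrapper.run({"firefox", "curl"}, "/tmp/overrides")
#     environ["PATH"] = "/tmp/overrides/bin:" + environ["PATH"]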
def _copy_orchestration_files(self):
    count = 0
    for any_file in listdir(self._template_folder):
        path_to_file = join_paths(self._template_folder, any_file)
        if isfile(path_to_file):
            count += 1
            destination = join_paths(self._output_directory, any_file)
            copyfile(path_to_file, destination)
def _file_for(self, instance, resource):
    normalized_path = normpath(resource)
    parts = normalized_path.split(path_separator)
    if instance.definition.name in parts:
        path = relpath(resource, instance.definition.name)
        return join_paths(self._directory_for(instance), path)
    else:
        return join_paths(self._output_directory, resource)
def brute_force_scan(pdb_filepath, output_dir, start_cutoff_radius=5.0):
    """
    Brute-force ENM scan to find an optimal ENM.
    """
    # DDPT flags in the order they appear in the GENENMM source code.
    mass_flag = ['', '-mass']
    ca_flag = ['-ca']    # Always present
    het_flag = ['-het']  # Always present
    lig1_flag = ['', '-lig1']  # Has no effect on apo form
    res_flag = ['', '-res']
    cutoff_radii = np.arange(start_cutoff_radius, 15.5, 0.5)

    # Combine all flags without cutoff lists.
    flags = [mass_flag, ca_flag, het_flag, lig1_flag, res_flag]
    # Create tuples with all flag permutations (non-repeating).
    flag_combos = list(itertools.product(*flags))
    # Convert the list of tuples into a list of lists.
    flag_combos = list(map(list, flag_combos))

    pdb_filename = os.path.splitext(os.path.basename(pdb_filepath))[0]

    # ANM (with cutoff radius)
    for cutoff_radius in cutoff_radii:
        for flag_combo in flag_combos:
            cutoff_flag = "-c {}".format(cutoff_radius)
            cutoff_flag_lbl = "-c{:05.2f}".format(cutoff_radius)
            output_subdir = join_paths(output_dir, cutoff_flag_lbl,
                                       "".join(flag_combo).replace(" ", ""),
                                       pdb_filename)
            os.makedirs(output_subdir, exist_ok=True)
            appended_flag_combo = flag_combo.copy()
            appended_flag_combo.append(cutoff_flag)
            with open(join_paths(output_subdir, "main.log"), 'w') as log_file:
                # Usage: run_enm.sh <pdb-filepath> <results-filepath> <GENENMM-flags>
                subprocess.call(['bash', 'src/simulation/run_enm.sh',
                                 pdb_filepath, output_subdir,
                                 " ".join(appended_flag_combo)],
                                stdout=log_file)

    # pfENM
    for flag_combo in flag_combos:
        pf_flag = "-pf"
        output_subdir = join_paths(output_dir, pf_flag,
                                   "".join(flag_combo).replace(" ", ""),
                                   pdb_filename)
        os.makedirs(output_subdir, exist_ok=True)
        appended_flag_combo = flag_combo.copy()
        appended_flag_combo.append(pf_flag)
        with open(join_paths(output_subdir, "main.log"), 'w') as log_file:
            # Usage: run_enm.sh <pdb-filepath> <results-filepath> <GENENMM-flags>
            subprocess.call(['bash', 'src/simulation/run_enm.sh',
                             pdb_filepath, output_subdir,
                             " ".join(appended_flag_combo)],
                            stdout=log_file)

    return None
def create_template_file(self, component_name, path, content):
    resource = join_paths(self._path, self.TEMPLATE_FOLDER, path)
    if component_name:
        resource = join_paths(self._path, self.TEMPLATE_FOLDER,
                              component_name, path)
    folder = dirname(resource)
    if not isdir(folder):
        makedirs(folder)
    with open(resource, "w") as stream:
        stream.write(content)
def __init__(self, path):
    self._temporary_directory = mkdtemp(prefix="camp_")
    workspace = join_paths(self._temporary_directory, "acceptance")
    self._source = join_paths("samples", path)
    self._input = InputDirectory(self._copy(self._source, workspace), YAML())
    self._output = OutputDirectory(join_paths(self._input.path, "out"), YAML())
    self._model = None
def clean_junk_locks(self):
    for path, dirnames, filenames in walk_on_path(self.path):
        filenames = filenames or []
        for dirname in dirnames:
            folder_path = join_paths(path, dirname)
            for filename in get_dir_filenames(folder_path):
                if not filename.startswith('.'):
                    filenames.append(join_paths(dirname, filename))

        for filename in filenames:
            filename = to_string(filename)
            if filename.startswith('.'):
                continue

            file_path = join_paths(path, filename)
            if '.' in filename:
                # Delete inactive positions locks
                binary = get_file_binary(file_path, mode='r')
                if binary:
                    info = binary.split()
                    if len(info) >= 2 and info[0] == DOMAIN_NAME and maybe_integer(info[1]):
                        try:
                            getpgid(int(info[1]))
                        except OSError as error:
                            if error.errno == errno.ESRCH:
                                remove_file_quietly(
                                    file_path,
                                    retries=self.retries,
                                    retry_errno=self.retry_errno)
            else:
                # Clean locks wait list
                # Get last modified time, to check if the file has been updated in the process
                modified_time = file_modified_time(file_path)
                if modified_time:
                    binary = get_file_binary(file_path, mode='r')
                    if binary:
                        # Find alive locks
                        keep_codes = binary.splitlines()
                        for i, line in enumerate(keep_codes):
                            info = line.split()
                            if len(info) >= 2 and info[0] == DOMAIN_NAME and maybe_integer(info[1]):
                                try:
                                    getpgid(int(info[1]))
                                except OSError as error:
                                    if error.errno == errno.ESRCH:
                                        # Add an empty line to keep the position number
                                        keep_codes[i] = ''

                        # Check if the file has been updated in the process
                        last_modified_time = file_modified_time(file_path)
                        if last_modified_time and modified_time == last_modified_time:
                            if not any(keep_codes):
                                remove_file_quietly(file_path)
                            else:
                                with open(file_path, 'w') as f:
                                    f.write(NEW_LINE.join(keep_codes))
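# For orientation, a hedged sketch of the two on-disk layouts the method
# above distinguishes (file names and contents are illustrative, inferred
# from the code, not taken from the project's docs): position locks carry a
# '.' plus a position number in the file name and hold "<domain> <pid>",
# while wait-list files have no '.' and hold one "<domain> <pid>" line per
# waiting process. A lock whose PID no longer answers getpgid() is treated
# as dead and cleaned up.
#
#     <path>/<sha>.0    ->  "mydomain 4242"            (position lock)
#     <path>/<sha>      ->  "mydomain 4242\n..."       (wait list)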
def _create_inner_configuration_file(self):
    path = join_paths(self._workspace, "template", "server", "src", "config")
    makedirs(path)
    resource = join_paths(path, "settings.ini")
    with open(resource, "w") as config_file:
        config_file.write("parameter=XYZ")
def parse_run_config(self):
    """Loads the json run_config file in the output folder into a dictionary."""
    config_file_name = self.output_dir_name + '_config.json'
    if not os.path.isfile(join_paths(self.output_dir_path, config_file_name)):
        raise IOError('Make sure the run exists. Maybe a typo in the run name?')
    with open(join_paths(self.output_dir_path, config_file_name), 'r') as infile:
        run_config = json.load(infile)
    return run_config
def _scan(self, group: str) -> any:
    if group == 'testlog.json':
        subdir = 'meson-logs'
        if not Path(join_paths(self._builddir, subdir, group)).exists():
            return None
    else:
        subdir = 'meson-info'
    with open(join_paths(self._builddir, subdir, group)) as loaded_json:
        info: any = json.loads(loaded_json.read())
    return info
def generate_output_name(self, name, directory=None):
    if directory:
        Path(join_paths(self.output_path, directory)).mkdir(parents=True,
                                                            exist_ok=True)
    else:
        Path(self.output_path).mkdir(parents=True, exist_ok=True)
    name += '_' + str(self.master_width) + 'x' + str(
        self.master_height) + '.mp4'  # only supporting mp4; update VideoWriter
    # join_paths raises TypeError on a None component, so only include the
    # directory when one was given.
    if directory:
        return join_paths(self.output_path, directory, name)
    return join_paths(self.output_path, name)
def test_qtcreator_backend(self):
    #
    # Setting up tmp test directory
    source = Path(join_paths('test-cases', 'backends', '03-qtcreator'))
    build = Path(
        join_paths('test-cases', 'backends', '03-qtcreator', 'builddir'))
    #
    # Running Meson command
    meson: Meson = Meson(sourcedir=source, builddir=build)
    meson.setup()
    api = MesonAPI(sourcedir=source, builddir=build)
    ide = QtCreatorBackend(api)
    ide.generator()
    #
    # Run asserts to check it is working
    assert os.path.exists(join_paths(source, 'meson.build'))
    assert os.path.exists(join_paths(build, 'build.ninja'))
    assert os.path.exists(
        join_paths(build, 'meson-info', 'intro-projectinfo.json'))
    assert os.path.exists(join_paths(build, 'compile_commands.json'))
    assert os.path.exists(join_paths(build, 'basic.creator'))
    assert os.path.exists(join_paths(build, 'basic.includes'))
    assert os.path.exists(join_paths(build, 'basic.files'))
def existing_configurations(self, model):
    if not isdir(self._path):
        folder = sub(r"out[\\\/]?$", "", self._path)
        raise NoConfigurationFound(folder)
    for each_file in listdir(self._path):
        path = join_paths(self._path, each_file)
        if search(self.CONFIGURATION_FOLDER, path) and isdir(path):
            yaml_file = join_paths(path, self.YAML_CONFIGURATION)
            with open(yaml_file, "r") as stream:
                configuration = self._codec.load_configuration_from(model, stream)
                yield path, configuration
def _iter_folder(import_name, folder_path):
    """
    Iterates over a folder's import names.

    Parameters
    ----------
    import_name : `None`, `str`
        The name of the extension if we would import it.
    folder_path : `str`
        Path to the folder.

    Yields
    ------
    import_name : `None`, `str`
        Detected import names for each applicable file in the folder.
    path : `str`
        Path of the file.
    """
    for python_extension_name in PYTHON_EXTENSION_NAMES:
        file_path = join_paths(folder_path, f'__init__{python_extension_name}')
        if exists(file_path) and is_file(file_path):
            yield import_name, file_path
            return

    for file_name in list_directory(folder_path):
        if file_name.startswith('.') or (file_name == '__pycache__'):
            continue

        path = join_paths(folder_path, file_name)

        if is_file(path):
            for python_extension_name in PYTHON_EXTENSION_NAMES:
                if file_name.endswith(python_extension_name):
                    if import_name is None:
                        import_name_value = None
                    else:
                        import_name_value = f'{import_name}.{file_name[:-len(python_extension_name)]}'
                    yield import_name_value, path
                    break
            continue

        if is_folder(path):
            if import_name is None:
                import_name_value = None
            else:
                import_name_value = f'{import_name}.{file_name}'
            yield from _iter_folder(import_name_value, path)
            continue

        # no more cases
        continue
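# A hedged usage sketch for _iter_folder above (the plugin folder and dotted
# name are hypothetical): the generator yields a dotted import name plus a
# file path for every importable module or package under the folder, or the
# folder itself when it is a package with an __init__ file.
#
#     for name, path in _iter_folder('my_plugin', join_paths('plugins', 'my_plugin')):
#         print(name, '->', path)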
def _create_configuration_2(self):
    directory = join_paths(self.OUTPUT_DIRECTORY, "config_2")
    makedirs(directory)
    path = join_paths(directory, "configuration.yml")
    with open(path, "w") as configuration_1:
        configuration_1.write("instances:\n"
                              "  server_0:\n"
                              "    name: server_0\n"
                              "    definition: server\n"
                              "    feature_provider: null\n"
                              "    services_providers: []\n"
                              "    configuration:\n"
                              "      memory: 2GB\n")
def clear_unused_files(self):
    with open(join_paths(self.output_folder, 'out_n_graham_subs.dat'), 'r') as infile:
        n_graham_subs = infile.readline()
        self.n_graham_subs = int(float(n_graham_subs))
    all_files = glob(join_paths(self.output_folder, 'out_*'))
    for outfile in all_files:
        last_letter_before_point = outfile.split('.')[0].split('_')[-1]
        if self.isinteger(last_letter_before_point):
            if int(last_letter_before_point) > (self.n_graham_subs - 1):
                os.remove(outfile)
def save_checkpoint(epoch):
    model_out_path = "model_epoch_{}.pth".format(epoch)
    model_out_path = join_paths(config.OUTPUT_DIR, model_out_path)
    model_latest_path = join_paths(config.OUTPUT_DIR, 'model_epoch_latest.pth')
    if not exists(config.OUTPUT_DIR):
        makedirs(config.OUTPUT_DIR)
    torch.save(model.cpu().state_dict(), model_out_path)
    if exists(model_latest_path):
        remove(model_latest_path)
    link(model_out_path, model_latest_path)
    print("Checkpoint saved to {}".format(model_out_path))
    if device.type != 'cpu':
        model.cuda()
def get_ipc_path(pipe):
    ipc = f'discord-ipc-{pipe}'
    for path in (None, 'snap.discord', 'app/com.discordapp.Discord'):
        if path is None:
            full_path = TEMPORARY_DIRECTORY
        else:
            # The loop variable was being dropped; join it onto the base dir.
            full_path = join_paths(TEMPORARY_DIRECTORY, path)
        for node_name in list_directory(full_path):
            if node_name.startswith(ipc):
                return join_paths(full_path, node_name)
    return None
def _generate_docker_compose_file(self, configuration):
    template = join_paths(self._template_folder, "docker-compose.yml")
    if exists(template):
        with open(template, "r") as source:
            content = source.read()
            for each_instance in configuration.instances:
                content = sub(
                    r"build:\s*\./" + each_instance.definition.name,
                    "build: ./images/" + each_instance.name,
                    content)
            destination = join_paths(self._output_directory, "docker-compose.yml")
            with open(destination, "w") as target:
                target.write(content)
def from_sample(relative_path):
    scenario = Scenario()
    sample_directory = join_paths(SAMPLE_DIRECTORY, relative_path)
    working_directory = scenario.directory

    # Copy all the content
    for item in listdir(sample_directory):
        source = join_paths(sample_directory, item)
        destination = join_paths(working_directory, item)
        if isdir(source):
            copytree(source, destination)
        else:
            copy2(source, destination)

    return scenario
def evaluate_save_show(self, save, show, file_name):
    if save:
        assert file_name is not None
        # Create the post-processing folder.
        post_proc_dir = join_paths(project_path, 'Output',
                                   self.run_config['run_name'],
                                   'post_processing')
        if not os.path.isdir(post_proc_dir):
            os.mkdir(post_proc_dir)
        plt.savefig(join_paths(post_proc_dir, file_name + '.png'))
    if show:
        plt.show()
def set_run_config_file(self, custom_config):
    print(custom_config)
    # `is not` compared identity with a string literal; compare values instead.
    if custom_config != 'run_config.py':
        custom_config = join_paths(project_path, custom_config)
        print(custom_config)
        if not os.path.isfile(custom_config):
            print("Config file {} does not exist!".format(custom_config))
            exit(1)
        print("Choosing config: {}".format(custom_config))
        return custom_config
    else:
        print("Choosing config: {} {}".format(
            os.path.dirname(self.this_path), 'run_config.py'))
        return join_paths(os.path.dirname(self.this_path), 'run_config.py')
def register_shop(package, name):
    base_path = '../%s/%s' % (package, name)
    # Register shop
    shops.append(name)
    # Import project
    exec('import %s.%s' % (package, name))
    # Get config
    config_path = get_abspath('%s/setup.conf' % base_path)
    config = ro_database.get_handler(config_path, ITWSHOPConfig)
    # Register skin
    register_shop_skin(u'Skin %s' % name, package, '%s/ui/' % name, name, config)
    # Register domain for i18n
    register_domain(name, get_abspath('%s/locale' % base_path))
    # Register modules
    project_modules = []
    modules_path = get_abspath(join_paths(base_path, 'modules'))
    if exists(modules_path) or name == 'ecox':
        project_modules = [f for f in listdir(modules_path)
                           if isdir(get_abspath('%s/%s' % (modules_path, f)))]
        for m in project_modules:
            exec('import %s.%s.modules.%s' % (package, name, m))
    # Print
    print('Name: ', name)
    print('URL: ', config.get_value('url'))
    print('Modules: [%s], %s' % (len(project_modules), project_modules))
def lookup(self):
    """ Look for backup folders on PATH """
    folders = listdir(self.path)
    for dirname in folders:
        path = join_paths(self.path, dirname)
        if isdir(path):
            backup = Backup(path)
            self.backups[backup.id] = backup
def __init__(self, *args, **kwargs):
    super(BaseMailerSessionManager, self).__init__(*args, **kwargs)

    pyramid_mailer = lazy_import_module('pyramid_mailer')
    self.mailer = pyramid_mailer.mailer_factory_from_settings(self.settings, prefix='')

    pyramid_mailer_message = lazy_import_module('pyramid_mailer.message')
    self.message_cls = pyramid_mailer_message.Message
    self.attachment_cls = pyramid_mailer_message.Attachment

    if self.settings.get('queue_path'):
        make_dir(self.settings['queue_path'])
        make_dir(join_paths(self.settings['queue_path'], 'cur'))
        make_dir(join_paths(self.settings['queue_path'], 'tmp'))
        make_dir(join_paths(self.settings['queue_path'], 'new'))

        sendmail_queue = lazy_import_module('repoze.sendmail.queue')
        self.queue_processor = sendmail_queue.QueueProcessor

        self.transaction = lazy_import_module('transaction')
        self.__dict__.setdefault('__middlewares__', []).append(RepozeTMMiddleware)
def __call__(self, header):
    """
    Returns the transformation file corresponding to the given header.
    """
    fits, packed = self.__db.red_frame(header['STID'], header['NIGHT'],
                                       header['FNUM'], header['CMPOS'],
                                       add_packed=True)
    if packed:
        fits = splitext(fits)[0]
    trans = (splitext(basename(fits))[0] +
             self.__db.extensions['astrom']['trans'])
    return join_paths(self.__astrom_template.substitute(header), trans)
def mailer_queue_send(self):
    queue_path = self.settings.get('queue_path')
    if queue_path:
        subdir_new = join_paths(queue_path, 'new')
        subdir_cur = join_paths(queue_path, 'cur')

        while True:
            for f in get_dir_filenames(subdir_new):
                if not f.startswith('.'):
                    break
            else:
                for f in get_dir_filenames(subdir_cur):
                    if not f.startswith('.'):
                        break
                else:
                    break  # Break while

            qp = self.api_session_manager.queue_processor(
                self.api_session_manager.mailer.smtp_mailer,
                self.settings['queue_path'])
            qp.send_messages()
def __init__( self ): project_path = get_path( get_path(get_realpath(__main__.__file__)) ) asset_path = join_paths( project_path, "assets" ) page_handlers = [ ( r"/", VizHandler ), ] app_settings = { # URL Settings # "project_path" : project_path, "asset_path" : asset_path, "static_path" : join_paths( asset_path, "static" ), "template_path" : join_paths( asset_path, "template" ), # Module/Render Settings # "ui_modules" : { "SourceAuth" : SourceAuthModule, }, # Miscellaneous Settings # "debug" : True, } tornado.web.Application.__init__( self, page_handlers, **app_settings )
def get_file(self, filename, handler=False):
    """
    Returns given file path
        - handler (bool) - Returns handler instead of path
    """
    result = None
    if self.exists(filename):
        file_path = join_paths(self.path, filename)
        if handler:
            result = open(file_path, 'rb')
        else:
            result = file_path
    return result
def __init__(self, *args, **kwargs):
    super(BaseJobsManager, self).__init__(*args, **kwargs)
    self.save_reports = asbool(self.settings.get('save_reports', True))

    self.server_domain_name = self.settings.get('server_domain_name')
    self.active = bool(
        not self.server_domain_name or self.server_domain_name == DOMAIN_NAME)

    self.domain_names = set(self.settings.get('domain_names', ''))
    self.domain_names.add(DOMAIN_NAME)

    try:
        self.transaction = lazy_import_module('transaction')
    except ImportError:
        self.transaction = None

    if self.active:
        temporary_dir = gettempdir()
        domain_start_file_path = join_paths(
            temporary_dir, 'jobs domain %s started' % DOMAIN_NAME)

        lock_key = 'jobs monitor start check'
        self.config.cache.lock(lock_key, timeout=10)
        try:
            start_thread = not isfile(domain_start_file_path)
            if not start_thread:
                try:
                    with open(domain_start_file_path, 'r') as f:
                        process_id = int(f.read())
                except (IOError, ValueError):
                    start_thread = True
                else:
                    try:
                        getpgid(process_id)
                    except OSError as error:
                        if error.errno == errno.ESRCH:
                            start_thread = True
                        else:
                            raise

            if start_thread:
                with open(domain_start_file_path, 'w') as f:
                    f.write(str(PROCESS_ID))
        finally:
            self.config.cache.unlock(lock_key)

        # Start only one Thread for each domain
        if start_thread:
            start_system_thread('jobs_monitor', self.run_monitor)
            print('Running jobs monitor on PID %s' % PROCESS_ID)
def delete_file_paths(self, *ids):
    if not ids:
        return False
    ids = maybe_set(ids)

    # Get existing files blocks
    blocks_ids = set(
        f.file_id_block
        for f in (
            self.session
            .query(FileBlock.file_id_block)
            .filter(FileBlock.file_id_path.in_(ids))
            .all()))

    # Check if we can delete some file block relations
    delete_block_ids = blocks_ids.difference(
        f.file_id_block
        for f in (
            self.session
            .query(FileBlock.file_id_block)
            .filter(FileBlock.file_id_block.in_(blocks_ids))
            .filter(FileBlock.file_id_path.notin_(ids))
            .all()))

    delete_paths = None
    if delete_block_ids:
        # Get paths to delete
        delete_paths = set(
            b.path
            for b in (
                self.session
                .query(BlockPath.path)
                .filter(BlockPath.id.in_(delete_block_ids))
                .all()))

    # Delete blocks relations
    self.direct_delete(FileBlock, FileBlock.file_id_path.in_(ids))
    # Delete files paths from DB
    self.direct_delete(FilePath, FilePath.id.in_(ids))

    if delete_block_ids:
        # Delete blocks paths from DB
        self.direct_delete(BlockPath, BlockPath.id.in_(delete_block_ids))
        # Delete blocks paths from storage
        for path in delete_paths:
            remove_file_quietly(join_paths(self.storage_path, path))

    return True
def __init__(self, *args, **kwargs):
    super(BasePolicySessionManager, self).__init__(*args, **kwargs)

    if 'token.session_reference_path' not in self.settings:
        self.settings['token.session_reference_path'] = join_paths(
            self.settings['token.path'], 'reference')

    # Jobs settings
    authorization = self.settings.get('authorization_session')
    if authorization:
        self.authorization_session = get_object_on_path(authorization)
    else:
        self.authorization_session = AuthenticatedSession
def unlock(self, name):
    name_256 = make_sha256(name)
    pattern_name = name_256 + '.'
    folder_path = join_paths(self.path, name_256[0])

    # Lookup for locked positions
    files = []
    for filename in get_dir_filenames(folder_path):
        if filename.startswith(pattern_name):
            position = int(filename.split('.', 1)[1])
            files.append((position, filename))

    if files:
        files.sort()
        for position, filename in files:
            file_path = join_paths(folder_path, filename)
            if remove_file(file_path, retries=self.retries,
                           retry_errno=self.retry_errno):
                return True

    # If no position found, delete base lock
    return remove_file_quietly(
        self.get_file_path(name),
        retries=self.retries,
        retry_errno=self.retry_errno)
def get_info(self):
    """ Get all the basic info for the backup """
    self.id = basename(self.path)

    # Check all files
    for filename in listdir(self.path):
        if isfile(join_paths(self.path, filename)):
            self.files.append(filename)
            # Check handlers
            for match in self._file_handlers.keys():
                if match in filename:
                    handler = getattr(self, self._file_handlers[match])
                    handler(filename)
def init(schema, endpoint, root, config_filename):
    if isfile(config_filename):
        click.confirm(f'{config_filename} already exists. Are you sure you want to continue?',
                      abort=True)
    if endpoint == 'same as schema':
        endpoint = schema
    config = Config(
        schema=schema,
        endpoint=endpoint,
        documents=join_paths(root, '**/*.graphql'))
    config.save(config_filename)
    click.echo(f"Config file generated at {click.style(config_filename, fg='bright_white')}\n\n")
def mk_dir_recursive(dir_path):
    """
    Create a directory and, if needed, all the directory tree. Differently
    from os.mkdir, this function does not raise exception when the directory
    already exists.

    Args:
        dir_path (str): the path of the directory to create.
    """
    if os.path.isdir(dir_path):
        return
    h, t = os.path.split(dir_path)  # head/tail
    if not os.path.isdir(h):
        mk_dir_recursive(h)
    new_path = join_paths(h, t)
    if not os.path.isdir(new_path):
        os.mkdir(new_path)
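# A minimal usage sketch for mk_dir_recursive above (the target path is made
# up): on Python 3 the helper matches os.makedirs(dir_path, exist_ok=True),
# building every missing level and returning silently when the tree exists.
mk_dir_recursive(join_paths('results', '2020', 'run_01'))  # creates all levels
mk_dir_recursive(join_paths('results', '2020', 'run_01'))  # repeat call is a no-op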
def read(self, size=-1):
    if size == 0:
        return b''

    try:
        open_block = self.blocks[self.block_position]
    except IndexError:
        return b''

    if isinstance(open_block, str):
        open_block = self.blocks[self.block_position] = get_open_file(
            join_paths(self.storage_path, open_block))

    binary = open_block.read(size)
    if size > 0:
        size -= len(binary)
        if size <= 0:
            return binary

    self.block_position += 1
    binary += self.read(size)
    return binary
def __init__(
        self, path, expire=None, retry_errno=None, retries=3,
        **lock_settings):
    self.expire = maybe_integer(expire)
    self.path = make_dir(path)
    self.retries = maybe_integer(retries) or 3
    self.retry_errno = maybe_set(retry_errno)
    self.retry_errno.update(DEFAULT_RETRY_ERRNO)

    # Lock settings
    settings = {}
    for key, value in list(lock_settings.items()):
        if key.startswith('lock_'):
            settings[key.split('lock_', 1)[1]] = value

    lock_path = settings.pop('path', None) or join_paths(self.path, 'locks')
    self.lockme = LockMe(lock_path, **settings)
def create_file_path(self, file_date=None):
    file_date = maybe_date(file_date or TODAY_DATE())
    base_folder_path = file_date.strftime('%Y%m/%d')

    last_folder = 0
    full_base_folder_path = join_paths(self.storage_path, base_folder_path)
    folders = sorted(
        int(i) for i in get_dir_filenames(full_base_folder_path) if i.isdigit())
    if folders:
        last_folder = folders[-1]

    # join_paths needs strings; last_folder is an int.
    folder_path = join_paths(base_folder_path, str(last_folder))
    full_folder_path = join_paths(self.storage_path, folder_path)
    if len(get_dir_filenames(full_folder_path)) >= self.max_blocks_per_folder:
        folder_path = join_paths(base_folder_path, str(last_folder + 1))

    while True:
        filename = make_unique_hash(length=80)
        path = join_paths(folder_path, filename)
        full_path = join_paths(self.storage_path, path)
        if not isfile(full_path):
            return full_path, path
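# A hedged illustration of the layout create_file_path above produces (all
# values made up): block files land under <storage>/<YYYYMM>/<DD>/<n>/,
# where <n> is bumped by one once the current folder reaches
# max_blocks_per_folder entries, so no single directory grows unbounded.
#
#     storage/202005/17/0/<80-char unique hash>
#     storage/202005/17/1/<80-char unique hash>   # after folder 0 fills up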
from os import getcwd
from os.path import join as join_paths

# Main
ROOT_PATH = getcwd()

# Backups folder name
BACKUPS_PATH = join_paths(ROOT_PATH, '_backups')

# Backup default settings
BACKUP_DEFAULT_SETTINGS = {
    "cache.enabled": False,
    "cache": {}
}
intersphinx_mapping = {}

# -- Options for extensions ----------------------------------------------------

# Enable this if you want TODOs to show up in the generated documentation.
todo_include_todos = True

#
# Monkeypatch pygments so it will know about the Swift lexers
#

# Pull in the Swift lexers
from os.path import dirname, abspath, join as join_paths
sys.path = [
    join_paths(dirname(dirname(abspath(__file__))), 'utils', 'pygments')
] + sys.path
import swift as swift_pygments_lexers
sys.path.pop(0)

# Monkeypatch pygments.lexers.get_lexer_by_name to return our lexers
from pygments.lexers import get_lexer_by_name as original_get_lexer_by_name

def swift_get_lexer_by_name(_alias, *args, **kw):
    if _alias == 'swift':
        return swift_pygments_lexers.SwiftLexer()
    elif _alias == 'swift-console':
        return swift_pygments_lexers.SwiftConsoleLexer()
    else:
        # Fall back to the original lookup for every other alias.
        return original_get_lexer_by_name(_alias, *args, **kw)
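# The excerpt stops at the fallback branch; presumably the conf.py then
# installs the patched lookup, as the comment above says. A hedged sketch of
# that final step:
import pygments.lexers
pygments.lexers.get_lexer_by_name = swift_get_lexer_by_name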
from os.path import join as join_paths
from tempfile import gettempdir

from ines import lazy_import_module, OPEN_BLOCK_SIZE, TODAY_DATE
from ines.api.database.sql import (
    BaseSQLSession, BaseSQLSessionManager, new_lightweight_named_tuple,
    sql_declarative_base)
from ines.api.jobs import job
from ines.convert import maybe_date, maybe_integer, maybe_set, maybe_string, to_bytes, to_string
from ines.exceptions import Error
from ines.i18n import _
from ines.mimetype import find_mimetype
from ines.url import get_url_file, open_json_url
from ines.utils import (
    file_unique_code, get_dir_filenames, get_open_file, make_unique_hash,
    make_dir, put_binary_on_file, remove_file_quietly, string_unique_code)

FilesDeclarative = sql_declarative_base('ines.storage')
FILES_TEMPORARY_DIR = join_paths(gettempdir(), 'ines-tmp-files')


class BaseStorageSessionManager(BaseSQLSessionManager):
    __api_name__ = 'storage'
    __database_name__ = 'ines.storage'

    def __init__(self, *args, **kwargs):
        super(BaseStorageSessionManager, self).__init__(*args, **kwargs)
        make_dir(self.settings['folder_path'])

        if issubclass(self.session, BaseStorageWithImageSession):
            self.image_cls = lazy_import_module('PIL.Image')
            self.resize_quality = self.image_cls.ANTIALIAS
            self.resizes = {}
def get_file_path(self, name):
    name_256 = make_sha256(name)
    return join_paths(self.path, name_256[0], name_256)
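# A self-contained sketch of the sharding scheme get_file_path above relies
# on, assuming make_sha256 returns a 64-character hex digest (the lock name
# below is hypothetical): the first hex character buckets lock files into
# sixteen single-character subdirectories, keeping any one directory small.
from hashlib import sha256
from os.path import join as join_paths

def sharded_lock_path(base, name):
    digest = sha256(name.encode('utf-8')).hexdigest()
    return join_paths(base, digest[0], digest)  # e.g. <base>/a/a3f0...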
def __init__( self ): project_path = get_path( get_path(get_realpath(__main__.__file__)) ) asset_path = join_paths( project_path, "assets" ) page_handlers = [ # User Login/Registration Handlers # ( r"/", LoginHandler ), ( r"/register", RegistrationHandler ), ( r"/verify/(.*)", VerifyHandler), # User Information Handlers # ( r"/main", UserMainHandler ), ( r"/profile", UserProfileHandler ), ( r"/group/([0-9]+)", UserGroupHandler ), # Asynchronous Request Handlers # ( r"/update-user-name", UpdateNameHandler ), ( r"/update-user-email", UpdateEmailHandler ), ( r"/add-member", AddMemberHandler ), ( r"/add-subgroup", AddSubgroupHandler), ( r"/add-deadline", AddDeadlineHandler), ( r"/add-course", AddCourseHandler), ( r"/schedule", ScheduleHandler), ( r"/get-courses", GetCoursesHandler), ( r"/get-deadlines", GetDeadlinesHandler), ( r"/get-users", GetUsersHandler), ( r"/delete-deadline", DeleteDeadlineHandler), ( r"/update-deadline-notes", EditMetadataNotesHandler), ( r"/update-deadline-name", EditMetadataNameHandler), ( r"/update-deadline-time", EditMetadataTimeHandler), #@TODO(halstea2) # ( r"/delete-deadline", DeleteMetadataHandler), ( r"/send-email", SendEmailHandler), # @TODO(halstea2) Remove test async handler ( r"/async-request", TestHandler ), # Google authentication handlers # ( r"/google-auth-request", GoogleAuthHandler), ( r"/oauth2callback", GoogleResponseHandler), # Miscellaneous Handlers # ( r"/logout", LogoutHandler ), ( r"/group-leave/([0-9]+)", GroupLeaveHandler ), ] app_settings = { # URL Settings # "project_path" : project_path, "asset_path" : asset_path, "static_path" : join_paths( asset_path, "static" ), "template_path" : join_paths( asset_path, "template" ), # Security Settings # "cookie_secret" : "datbigcuke", "login_url" : "/", # Module/Render Settings # "ui_modules" : { "RenderTemplate" : RenderTemplateModule, "SimpleModal" : SimpleModalModule, }, # Miscellaneous Settings # "debug" : True, } tornado.web.Application.__init__( self, page_handlers, **app_settings )
def init_person_groups(self):
    """Populate dicts with a person's group information."""
    timer = make_timer(self.logger, 'Processing person groups...')
    # Was `pickle.load(file(...))`; the `file` builtin is Python 2 only.
    with open(join_paths(ldapconf(None, 'dump_dir'),
                         "personid2group.pickle"), 'rb') as f:
        self.person2group = pickle.load(f)
    timer("...person groups done.")
def init_person_course(self):
    """Populate dicts with a person's course information."""
    timer = make_timer(self.logger, 'Processing person courses...')
    # Was `pickle.load(file(...))`; the `file` builtin is Python 2 only.
    with open(join_paths(ldapconf(None, 'dump_dir'),
                         "ownerid2urnlist.pickle"), 'rb') as f:
        self.ownerid2urnlist = pickle.load(f)
    timer("...person courses done.")
def __init__(self, *args, **kwargs):
    super(SaveMeWithReference, self).__init__(*args, **kwargs)
    self.reference_path = make_dir(join_paths(self.path, 'references'))
def get_reference_path(self, name):
    first_name = name.split(' ', 1)[0]
    first_name_256 = make_sha256(first_name)
    return join_paths(self.reference_path, first_name_256[0], first_name_256)
def create_temporary_file(mode='wb'):
    temporary_path = join_paths(FILES_TEMPORARY_DIR, make_unique_hash(64))
    open_file = get_open_file(temporary_path, mode=mode)
    return temporary_path, open_file
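# Hedged usage sketch for create_temporary_file above: write a payload, then
# discard the file with remove_file_quietly (among this module's ines.utils
# imports); the payload bytes are made up.
path, stream = create_temporary_file(mode='wb')
try:
    stream.write(b'work in progress')
finally:
    stream.close()
    remove_file_quietly(path)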
# `makedirs` lives in `os`, not `os.path`; the original import could never work.
from os import makedirs as make_dirs
from os.path import join as join_paths, isfile as is_file, exists as path_exists
import sys
from shutil import copy2 as clone_file


def file_exists(p):
    return is_file(p) and path_exists(p)


input_path = sys.argv[1]
output_path = sys.argv[2]

nim_binary = "nim" + (".exe" if sys.platform == "win32" else "")
nimrod_binary = "nimrod" + (".exe" if sys.platform == "win32" else "")

nim_input_path = join_paths(input_path, nim_binary)
nim_output_path = join_paths(output_path, nim_binary)
nimrod_input_path = join_paths(input_path, nimrod_binary)
nimrod_output_path = join_paths(output_path, nimrod_binary)

print("Working Variables:")
print("\t input_path: {0}".format(input_path))
print("\t output_path: {0}".format(output_path))
print("\t nim_binary: {0}".format(nim_binary))
print("\t nimrod_binary: {0}".format(nimrod_binary))
print("\t nim_input_path: {0}".format(nim_input_path))
print("\t nim_output_path: {0}".format(nim_output_path))
print("\t nimrod_input_path: {0}".format(nimrod_input_path))
print("\t nimrod_output_path: {0}".format(nimrod_output_path))
print("")
def render( self, template_name, **kwargs ):
    template_url = join_paths( "html", "modules", template_name + ".html" )
    return self.render_string( template_url, **kwargs )