def versionthis(filetoversion):
    """Move *filetoversion* into the repository and leave a symlink in its place.

    Refuses files carrying ACL entries unless --ignoreacl was given; on any
    failure, replays the undo log to roll the filesystem back.

    :param filetoversion: path of the file to bring under version control
    :raises SyncherException: if the file has an ACL and --ignoreacl is unset
    """
    global options  # parsed command-line options, set elsewhere in this module
    try:
        # Bail out early on ACL-protected files: moving them may silently fail.
        if accesscontrollist.hasacl(filetoversion) and not options.ignoreacl:
            err = "filetoversion has a 'deny' in ACL permissions (ls -lde %s: %s) \n \
This program is currently not clever enough to check if you have permission to move/delete this file. \n \
To avoid this problem remove deny permissions from the access control entries \n \
or rerun this command with --ignoreacl" % (filetoversion, accesscontrollist.getacl(filetoversion))
            raise SyncherException(err)
        # TODO: verify that this file is not already added
        logging.info("should: check for dups")
        filetoversionpath, repospathofversionedfile, repospathtoputnewfilein = settings.getFileToVersionPathes(filetoversion)
        util.makedirs(repospathtoputnewfilein)
        acl = None
        if options.ignoreacl:
            # Strip the ACL so the move can succeed; it is restored below.
            acl = accesscontrollist.removeacl(filetoversion)
        util.move(filetoversionpath, repospathofversionedfile)  # repospathtoputnewfilein)
        if acl is not None:
            # Re-apply the ACL onto the file's new home in the repository.
            accesscontrollist.setacl(repospathofversionedfile, acl)
        # Leave a symlink at the original location pointing into the repo.
        util.symlink(repospathofversionedfile, filetoversionpath)
        syncdb.add(filetoversionpath)
    except Exception as e:
        # Any failure undoes every recorded filesystem operation.
        logging.warn("ROLLING BACK because of %s" % e)
        undo.rollback()
        raise
def setWorkingDirTractometry(workingDir, sourceBundles=None, sourceMetrics=None):
    """Prepare the 'raw' directory layout for scilpy's scil_run_tractometry.

    Rebuilds a fresh ``raw/`` tree (removing any previous one), creates
    ``raw/bundles`` and ``raw/metrics`` as needed, and symlinks the given
    source files into them.

    :param workingDir: current working folder (absolute base for link targets)
    :param sourceBundles: iterable of bundle file paths (usually 17-tractquerier),
        or None to skip bundle linking
    :param sourceMetrics: list of (sourcePath, linkName) pairs, or None to skip
    :return: Nothing
    """
    rawDir = 'raw'
    # Start from a clean slate: wipe any previous 'raw' tree.
    if os.path.exists(rawDir):
        rmtree(rawDir)
    os.mkdir(rawDir)
    bundlesDir = os.path.join(rawDir,'bundles')
    metricsDir = os.path.join(rawDir,'metrics')
    # Trailing separator marks the link targets as directory destinations.
    targetBundlesDir = os.path.join(workingDir, bundlesDir) + os.path.sep
    targetMetricsDir = os.path.join(workingDir, metricsDir) + os.path.sep
    if sourceBundles is not None:
        os.mkdir(bundlesDir)
        for bundle in sourceBundles:
            util.symlink(bundle, targetBundlesDir)
    if sourceMetrics is not None:
        os.mkdir(metricsDir)
        # NOTE(review): a non-list sourceMetrics silently leaves an empty
        # metrics directory — confirm this is intended.
        if type(sourceMetrics) is list:
            for metric in sourceMetrics:
                util.symlink(metric[0], targetMetricsDir, metric[1])
def link(conf, args):
    '''Link all files in the repo directory to their configured locations.'''
    # figure out which machine this is so we only link its files
    machine_id = config.get_machine_id()

    # collect the destination config of every eligible repo file
    links = {}
    for entry in os.listdir(constants.REPO_DIR):
        entry = util.normalize_to_root(entry, constants.REPO_DIR)

        # skip hidden files and anything explicitly ignored
        if util.is_hidden(entry) or entry in conf['ignore']:
            continue

        file_config = config.get_file_config(entry, conf['destination'])

        # keep only files whose machine list matches this machine
        if config.machine_matches(machine_id, file_config['machines']):
            links[entry] = file_config

    # width of the longest basename, for right-justified pretty output
    max_src_width = 0
    if len(links) > 0:
        max_src_width = max(len(os.path.basename(k)) for k in links.keys())

    link_symbol = ' -> '
    for src, info in links.iteritems():
        msg = os.path.basename(src).rjust(max_src_width) + color.grey(link_symbol)

        for i, dest in enumerate(info['paths']):
            # green: brand-new link; cyan: replacing a link; yellow: replacing
            # a regular file.
            if os.path.lexists(dest):
                dest_color = 'cyan' if os.path.islink(dest) else 'yellow'
            else:
                dest_color = 'green'

            # only touch the filesystem when this isn't a dry run
            if not args.test:
                # --force overwrites anything; by default only links are replaced
                util.symlink(dest, src, overwrite=(True if args.force else None))

            # align extra destinations under the shared name and arrow
            if i > 0:
                msg += os.linesep
                msg += ' ' * (max_src_width + len(link_symbol))
            msg += color.colored(dest, dest_color)

        print(msg)

    # return the created links for good measure
    return links
def save(simulation):
    """Snapshot *simulation* at its current iteration under <name>.case/.

    Writes the mesh, a pickled case file and the solver history into a
    per-iteration directory, then points the 'current.case' symlink at it.
    """
    log( 'Saving ',simulation.name+'_iter'+str(simulation.pde.time.iteration) )
    case_dir = simulation.name+'.case'
    # zero-padded iteration number keeps directory listings in time order
    case_iter_dir = case_dir+'/iter'+str(simulation.pde.time.iteration).zfill(8)+'.case'
    mkdir(case_dir)
    mkdir(case_iter_dir)
    simulation.write_mesh( case_iter_dir+'/mesh.cf3mesh' )
    write( simulation, case_iter_dir+'/case.p' )
    simulation.solver.children.history.write(URI(case_iter_dir+'/history.tsv'))
    # relative link target so the whole .case tree stays relocatable
    symlink( 'iter'+str(simulation.pde.time.iteration).zfill(8)+'.case', case_dir+'/current.case' )
def create_links(self):
    """Maintain the 'latest' and 'stable' convenience links inside self.dir."""
    latest_path = os.path.join(self.dir, 'latest')
    stable_path = os.path.join(self.dir, 'stable')

    if self.version:
        # point 'latest' at the current version
        util.symlink(latest_path, self.version)
        self._callback('repo_link_set', 'latest', self.version)

        if self.stable:
            # point 'stable' at the stable version
            util.symlink(stable_path, self.stable)
            self._callback('repo_link_set', 'stable', self.stable)
        elif os.path.lexists(stable_path):
            # no stable release any more: drop the stale link
            os.unlink(stable_path)
    else:
        # no version at all: remove whichever links exist
        if os.path.lexists(latest_path):
            os.unlink(latest_path)
        if os.path.lexists(stable_path):
            os.unlink(stable_path)
def setup_directories(self):
    """Create or refresh the package directories according to self.link_type."""
    use_symlink = ('symlink' == self.link_type)

    if self.local_dir and use_symlink:
        # replace a real directory with a link to the local working copy
        if not os.path.islink(self.package_dir) and os.path.isdir(self.package_dir):
            shutil.rmtree(self.package_dir)
        util.symlink(self.package_dir, self.local_dir)
    else:
        # ensure package_dir is a real directory, not a leftover link
        if os.path.islink(self.package_dir):
            os.unlink(self.package_dir)
        util.make_dir(self.package_dir)

    if self.version_dir:
        # clear whatever currently occupies the versioned location
        if os.path.islink(self.version_package_dir) or os.path.isfile(self.version_package_dir):
            os.unlink(self.version_package_dir)
        elif os.path.isdir(self.version_package_dir):
            shutil.rmtree(self.version_package_dir)

        if use_symlink:
            # relative target keeps the tree relocatable
            util.symlink(self.version_package_dir,
                         os.path.relpath(self.package_dir, self.version_dir))
        else:
            # hardlink mode: just make the directory
            util.make_dir(self.version_package_dir)
def make_filepath(app, page, overwrite=False):
    '''
    :return: symlink name (used just like normal file)
    side effect -> creates the symlink, and optionally the file
    will raise FileExistsError if overwrite is False
    '''
    # The backing file on disk gets a random name; a symlink named after the
    # page address points at it. The indirection lets any page address point
    # to any file.
    name_parts = [
        safe_string(page.Bookname),
        safe_string(page.Program),
        safe_string(page.HotkeyHumanReadable),
        safe_string(page.Profile),
    ]
    symlink_name = ''.join(name_parts).replace(' ', '_')
    dest = os.path.join(app.symlinkdir, symlink_name)

    # random backing file in the note directory
    fname = _rand_name_in_dir(app.notedir) + '.txt'
    src = os.path.join(app.notedir, fname)

    symlink(src, dest)
    app.uac_bypass(src, create=True, overwrite=overwrite)
    return symlink_name
def setWorkingDirTractometry(workingDir, sourceBundles=None, sourceMetrics=None):
    """Prepare input/output directory layout for scilpy's scil_run_tractometry.

    Rebuilds fresh ``input`` and ``output`` trees (removing previous ones),
    creates ``input/subject/{bundles,metrics}`` as needed, and symlinks the
    given source files into them.

    :param workingDir: current working folder (absolute base for link targets)
    :param sourceBundles: iterable of bundle file paths (usually 17-tractquerier),
        or None to skip bundle linking
    :param sourceMetrics: list of (sourcePath, linkName) pairs, or None to skip
    :return: Nothing
    """
    inputDir = 'input'
    outputDir = 'output'
    subjectDir = 'subject'
    # Start from a clean slate: wipe any previous trees.
    if os.path.exists(inputDir):
        rmtree(inputDir)
    if os.path.exists(outputDir):
        rmtree(outputDir)
    os.mkdir(inputDir)
    os.mkdir(outputDir)
    os.mkdir(os.path.join(inputDir, subjectDir))
    bundlesDir = os.path.join(inputDir, subjectDir, 'bundles')
    metricsDir = os.path.join(inputDir, subjectDir, 'metrics')
    # Trailing separator marks the link targets as directory destinations.
    targetBundlesDir = os.path.join(workingDir, bundlesDir) + os.path.sep
    targetMetricsDir = os.path.join(workingDir, metricsDir) + os.path.sep
    if sourceBundles is not None:
        os.mkdir(bundlesDir)
        for bundle in sourceBundles:
            util.symlink(bundle, targetBundlesDir)
    if sourceMetrics is not None:
        os.mkdir(metricsDir)
        # NOTE(review): a non-list sourceMetrics silently leaves an empty
        # metrics directory — confirm this is intended.
        if type(sourceMetrics) is list:
            for metric in sourceMetrics:
                util.symlink(metric[0], targetMetricsDir, metric[1])
def makesymlinks(repospath):
    """Replace every path listed in the syncher DB with a symlink into the repo.

    Reads one absolute path per line from the DB file inside *repospath*.
    ACL-protected paths are skipped with a warning (unless --ignoreacl);
    existing links are verified to point into the repository. Any failure
    triggers a rollback of all recorded operations.

    :param repospath: repository directory containing the syncher DB file
    """
    global options  # parsed command-line options, set elsewhere in this module
    reposfilepath = os.path.abspath(repospath)
    with open(os.path.join(repospath, SYNCHER_DB_FILENAME)) as db:
        try:
            for line in db:
                line = line.strip()  # each DB line is an absolute target path
                if not os.path.islink(line) and accesscontrollist.hasacl(line) and not options.ignoreacl:
                    # ACL-protected and no --ignoreacl: warn and skip this entry.
                    err = "filetoversion has a 'deny' in ACL permissions (ls -lde %s: %s) \n \
This program is currently not clever enough to check if you have permission to move/delete this file. \n \
To avoid this problem remove deny permissions from the access control entries \n \
or rerun this command with --ignoreacl" % (line, accesscontrollist.getacl(line))
                    logging.warn(err)
                elif not os.path.islink(line):
                    acl = None
                    if not options.dry:
                        logging.info("creating symlink from %s to %s", reposfilepath + line, line)
                        if os.path.exists(line):
                            # keep the original file around as a backup
                            if options.ignoreacl:
                                acl = accesscontrollist.removeacl(line)
                            util.move(line, line + "-beforesyncher")  # repospathtoputnewfilein)
                        elif not os.path.exists(os.path.dirname(line)):
                            # target parent directory is missing: create it
                            util.makedirs(os.path.dirname(line))
                        util.symlink(reposfilepath + line, line)
                        if acl is not None:
                            # restore the ACL we stripped before moving
                            accesscontrollist.setacl(line, acl)
                else:
                    # already a link: make sure it resolves into our repository
                    # if not os.path.realpath(line) == reposfilepath + line:
                    if not os.path.samefile(os.path.realpath(line), reposfilepath + line):
                        logging.warn("%s is already a symbolic link to %s not %s. it will not be followed and linked properly to repository" % (line, os.path.realpath(line), reposfilepath + line))
        except Exception as e:
            logging.warn("ROLLING BACK because of %s" % e)
            undo.rollback()
            logging.warn("ROLLED BACK because of %s" % e)
            raise
def create_link(app, uncpath, filepath, db=None): ''' point a uncpath to a specifc file this is done by reading the symlink and modifying it TODO if the uncpath doesn't exist it will be created ''' # profile, book, program, specific = parse_address( # filepath, db=db) get_my_link = name_from_address(filepath) update_my_link = name_from_address(uncpath) cur = dbtools.loadItColumn( 'Symlink', table='Pages', condition="where Name == '{}'".format(get_my_link)) # new_link_path = readlink(os.path.join(app.symlinkdir, # cur.fetchone()[0])) name = cur.fetchone()[0] new_link_path = readlink(os.path.join(app.symlinkdir, name)) cur = dbtools.loadItColumn( 'Symlink', table='Pages', condition="where Name == '{}'".format(update_my_link)) old_link_name = cur.fetchone()[0] update_me = os.path.join(app.symlinkdir, old_link_name) print('from ', get_my_link) print('from ', filepath) print('updating {} from {} -> {}'.format(old_link_name, name, new_link_path)) print('the symlink from the other file ', readlink(os.path.join(app.symlinkdir, old_link_name))) os.remove(update_me) symlink(new_link_path, update_me)
def setup_server4(hostname=None, domain=None, pc="1", forge_modules=["puppetlabs/stdlib", "puppetlabs/concat", "puppetlabs/firewall", "puppetlabs/apt"]):
    """Setup Puppet 4 server

    :param hostname: server hostname; autodetected via util.get_hostname() when None
    :param domain: server domain; autodetected via util.get_domain() when None
    :param pc: Puppet Collection number used for the release package
    :param forge_modules: Puppet Forge modules to install on the server
    """
    import package, util, git, service
    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir+"/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir+"/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir+"/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir+"/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir+"/.gitignore"
    modules_dir = basedir+"/code/environments/production/modules"
    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)
    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()
    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()
    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)
    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)
    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)
    # Git setup
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir "+modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")
    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir+"/environments/production/hieradata", remote_codedir+"/environments/testing/hieradata")
    util.symlink(remote_codedir+"/environments/production/manifests", remote_codedir+"/environments/testing/manifests")
    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
#!/usr/bin/env python
# Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
"""
gn can only run python scripts. This launches a subprocess Node process.
The working dir of this program is out/Debug/ (AKA root_build_dir)
Before running node, we symlink js/node_modules to out/Debug/node_modules.
"""
import sys
from os import path
from util import symlink, root_path, run

# Expose the shared third_party node_modules in the build dir so the Node
# process can resolve its dependencies from here (relative link target).
if not path.exists("node_modules"):
    modules_target = path.relpath(path.join(root_path, "third_party/node_modules"))
    symlink(modules_target, "node_modules", True)

# Forward every script argument straight to node.
run(["node"] + sys.argv[1:], quiet=True)
def manage(conf, args):
    '''
    Move a file to the base directory and leave a link pointing to its new
    location in its place.

    :param conf: loaded configuration dict (uses 'destination')
    :param args: parsed CLI args (uses path, force, save)
    :raises ValueError: if the path is already a link, isn't a descendant of
        the destination, is already managed without --force, or the repo has
        uncommitted changes when --save is given
    '''
    # bail if the file is already a link
    if os.path.islink(args.path):
        raise ValueError('Unable to manage ' + color.cyan(args.path) +
                         " since it's already a link!")

    # make sure the path is a descendant of the destination directory
    if not util.is_descendant(args.path, conf['destination']):
        raise ValueError("Unable to manage files that aren't descendants of " +
                         'the destination directory (' +
                         color.cyan(conf['destination']) + ')')

    # mark files that aren't direct descendants of the root as such
    unrooted = os.path.dirname(args.path) != conf['destination']

    # get the path of the file if it will be copied into the repo directory
    dest_path = os.path.join(constants.REPO_DIR, os.path.basename(args.path))

    # rename the file as appropriate to to its original name
    dest_path, config_file_path = config.configify_file_name(dest_path)

    # give unrooted files a config file path so they'll go to the correct place
    if unrooted and config_file_path is None:
        config_file_path = util.toggle_hidden(dest_path, True)

    # bail if the file is already managed and we're not overwriting
    dest_exists = os.path.exists(dest_path)
    config_exists = (config_file_path is not None and
                     os.path.exists(config_file_path))
    if (dest_exists or config_exists) and not args.force:
        raise ValueError("Can't manage " + color.cyan(args.path) +
                         " since it already appears to be managed (use --force to override)")

    # replace any existing dest file with a copy of the new one
    util.rm(dest_path, force=True)
    util.cp(args.path, dest_path, recursive=True)

    # create a file config if necessary (guarded: config_file_path may be None
    # for rooted files, and open(None) would raise TypeError)
    if config_file_path is not None:
        # replace any existing config file with our new one
        util.rm(config_file_path, force=True)

        # build a config for this file
        file_config = config.normalize_file_config({
            'paths': [args.path],
        }, conf['destination'])

        # create a config file from our config dict
        with open(config_file_path, 'w') as f:
            json.dump(file_config, f, indent=2)

    # create a link to the new location, overwriting the old file
    util.symlink(args.path, dest_path, overwrite=True)

    print(color.cyan(args.path), 'copied and linked')

    # add and commit the file to the repo if --save is specified
    if args.save:
        files = [color.cyan(os.path.basename(dest_path))]
        if config_file_path:
            files.append(color.cyan(os.path.basename(config_file_path)))

        # BUG FIX: lists have no .join method — join on the separator string
        files = ' and '.join(files)

        print('Adding', files, 'to the repository...')

        # move us to the current repo directory so all git commands start there
        os.chdir(constants.REPO_DIR)

        # alert the user if we have uncommitted changes (git exits non-0 in this case)
        if git.diff(exit_code=True, quiet=True, _ok_code=(0, 1)).exit_code != 0:
            raise ValueError('The repository has uncommitted changes - the '
                             'newly-managed file will have to be added to the repo manually.')

        # add the new files to the staging area
        git.add(dest_path)
        if config_file_path is not None:
            git.add(config_file_path)

        print('Successfully added', files, 'to the repository')
        print('Committing changes...')

        # commit the file to the repository
        commit_message = 'Manage %s' % os.path.basename(args.path)
        git.commit(m=commit_message, quiet=True)

        print('Commit successful!')
        print('Pushing committed changes...')

        # pull any changes down from upstream, then push our new addition
        git.pull(rebase=True, quiet=True)
        git.push(quiet=True)

        print('Push successful!')
def setup_server4(hostname=None, domain=None, pc="1", forge_modules=[ "puppetlabs/stdlib", "puppetlabs/concat", "puppetlabs/firewall", "puppetlabs/apt" ]): """Setup Puppet 4 server""" import package, util, git, service # Local files to copy over basedir = "/etc/puppetlabs" local_master_conf = "files/puppet-master.conf" remote_master_conf = basedir + "/puppet/puppet.conf" local_hiera_yaml = "files/hiera.yaml" remote_hiera_yaml = basedir + "/code/hiera.yaml" local_fileserver_conf = "files/fileserver.conf" remote_fileserver_conf = basedir + "/puppet/fileserver.conf" local_environments = "files/environments" remote_codedir = basedir + "/code" local_gitignore = "files/gitignore" remote_gitignore = basedir + "/.gitignore" modules_dir = basedir + "/code/environments/production/modules" # Verify that all the local files are in place try: open(local_master_conf) open(local_hiera_yaml) except IOError: print "ERROR: some local config files were missing!" sys.exit(1) # Autodetect hostname and domain from env.host, if they're not overridden # with method parameters if not hostname: hostname = util.get_hostname() if not domain: domain = util.get_domain() # Ensure that clock is correct before doing anything else, like creating SSL # certificates. util.set_clock() # Start the install install_puppetlabs_release_package(pc) package.install("puppetserver") util.put_and_chown(local_master_conf, remote_master_conf) util.put_and_chown(local_hiera_yaml, remote_hiera_yaml) util.put_and_chown(local_fileserver_conf, remote_fileserver_conf) util.put_and_chown(local_gitignore, remote_gitignore) util.add_to_path("/opt/puppetlabs/bin") util.set_hostname(hostname + "." + domain) # "facter fqdn" return a silly name on EC2 without this util.add_host_entry("127.0.1.1", hostname, domain) # Copy over template environments util.put_and_chown(local_environments, remote_codedir) # Add modules from Puppet Forge. These should in my experience be limited to # those which provide new types and providers. 
In particular puppetlabs' # modules which control some daemon (puppetdb, postgresql, mysql) are # extremely complex, very prone to breakage and nasty to debug. for module in forge_modules: add_forge_module(module) # Git setup git.install() git.init(basedir) if not exists(modules_dir): sudo("mkdir " + modules_dir) git.init(modules_dir) git.add_submodules(basedir=modules_dir) git.add_all(basedir) git.commit(basedir, "Initial commit") # Link hieradata and manifests from production to testing. This keeps the # testing environment identical to the production environment. The modules # directory in testing is separate and may (or may not) contain modules that # override or complement those in production. util.symlink(remote_codedir + "/environments/production/hieradata", remote_codedir + "/environments/testing/hieradata") util.symlink(remote_codedir + "/environments/production/manifests", remote_codedir + "/environments/testing/manifests") # Start puppetserver to generate the CA and server certificates/keys service.start("puppetserver") run_agent(noop="False")