def _install_apm_and_npm_os_x(atom_app_dir):
    '''Appends the versions of apm and npm bundled with Atom to the $PATH.

    Args:
        atom_app_dir: Absolute path to the Atom.app directory.

    Returns:
        The (mutated) os.environ mapping, with $PATH extended and ATOM_PATH set.
    '''
    # https://github.com/atom/apm/issues/312 needs to get fixed so we can pull the version
    # of Node from Atom.app. Once it is fixed, 'Contents/Resources/app/apm/bin/node' should
    # appear in this list.
    #
    # More importantly, rather than try to find these paths under /Applications/Atom.app,
    # it would be better to get the realpath of `which apm` and find the paths under there, which
    # should work in a cross-platform way. (The current solution is too specific to OS X.)
    paths = [
        APM_PATH,
        NPM_PATH,
    ]
    # List comprehensions instead of map()/lambda: clearer, and eagerly
    # evaluated under both Python 2 and 3 (map() returns a lazy iterator in 3).
    paths = [os.path.join(atom_app_dir, path) for path in paths]
    dirs = [os.path.dirname(path) for path in paths]
    path_env_dirs = list(set(dirs))
    path_elements = os.pathsep.join(path_env_dirs)
    # Add to the end of the $PATH because /Applications/Atom.app/Contents/Resources/app/apm/bin
    # includes a version of node that is not >=0.12.0, so we don't want that version to shadow the
    # "good" version of Node.
    logging.info('Adding the following to $PATH: %s', path_elements)
    env = os.environ
    env['PATH'] = env['PATH'] + os.pathsep + path_elements
    # `apm` relies on `atom` to run tests (https://fburl.com/66859218),
    # so we need to install it as well.
    logging.info('Install atom.sh from Atom.app directory...')
    atom_sh_path = os.path.join(atom_app_dir, ATOM_SH_PATH)
    symlink(atom_sh_path, os.path.join(atom_app_dir, 'bin', 'atom'))
    env['PATH'] += os.pathsep + os.path.join(atom_app_dir, 'bin')
    # `atom.sh` needs ATOM_PATH to locate Atom.app (https://fburl.com/66863817).
    env['ATOM_PATH'] = os.path.dirname(atom_app_dir)
    return env
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    '''Installs the node/apm dependencies for a single package.

    Args:
        package_config: Dict describing the package (name, root path, local
            dependencies, isNodePackage flag).
        npm: Wrapper object used to run `npm install`.
        copy_local_dependencies: When True, local dependencies are copied into
            node_modules instead of symlinked (useful for packaging tools).
    '''
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)
    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config['localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # Third positional arg is symlinks=True: copy symlinks as symlinks
            # rather than following them. (Stray trailing semicolon removed.)
            shutil.copytree(src_dir, dest_dir, True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)
    # Install other public node dependencies.
    npm.install(src_path, local_packages=package_config['localDependencies'],
                include_dev_dependencies=True)
    logging.info('Done installing dependencies for %s', name)
    # Atom (non-Node) packages must also be registered with apm so Atom can
    # load them. is_node_package was already read above; no need to re-fetch.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
def install(): """Called by install_features() if listed in structure.FEATURES""" # etc_area.require_directory(structure.NGINX_SITES_ENABLED) # etc_area.require_directory(structure.NGINX_SITES_AVAILABLE) # is this needed? fs.symlink(structure.SCRIPT_PATH,'/usr/local/bin/maestro',replace=True) nginx_installed() print 'installing nginx feature', settings.DEVELOPING if settings.DEVELOPING: #TODO link in User Sites directory from os.path import join,exists,expanduser fs.symlink(structure.PROJECT_DIR,join(expanduser("~/Sites"),structure.PROJECT_NAME)) dev_name = join(structure.CONF_DIR,"dev.nginx.conf") if not exists(dev_name): servers_contents = nginx_enabled(cluster_name="dev",release_project_dir=False) with open(dev_name, 'w') as nginx_conf: nginx_conf.writelines(servers_contents) dev_name = join(structure.CONF_DIR,"staged.nginx.conf") if not exists(dev_name): servers_contents = nginx_enabled(cluster_name="staged",release_project_dir=False) with open(dev_name, 'w') as nginx_conf: nginx_conf.writelines(servers_contents) symlink_local_nginx()
def install_dependencies(package_config, npm):
    """Install node_modules for one package, wiring up local deps first.

    Local (in-repo) dependencies are symlinked into node_modules and their
    executables exposed under node_modules/.bin; everything else is delegated
    to `npm install`. Non-Node (Atom) packages are finally registered via
    `apm link`.
    """
    pkg_name = package_config['name']
    node_pkg = package_config['isNodePackage']
    logging.info('Installing dependencies for %s package %s...',
                 'Node' if node_pkg else 'Atom', pkg_name)
    # Link private node dependencies.
    root = package_config['packageRootAbsolutePath']
    modules_dir = os.path.join(root, 'node_modules')
    fs.mkdirs(modules_dir)
    local_deps = package_config['localDependencies']
    for dep_name, dep_config in local_deps.items():
        symlink(dep_config['packageRootAbsolutePath'],
                os.path.join(modules_dir, dep_name),
                relative=True)
        link_dependencys_executable(modules_dir, dep_name)
    # Install other public node dependencies.
    npm.install(root,
                local_packages=local_deps,
                include_dev_dependencies=package_config['includeDevDependencies'])
    logging.info('Done installing dependencies for %s', pkg_name)
    # Atom packages additionally need to be registered with apm.
    if not package_config.get('isNodePackage'):
        logging.info('Running `apm link %s`...', root)
        fs.cross_platform_check_output(['apm', 'link', root])
        logging.info('Done linking %s', pkg_name)
def link_dependencys_executable(node_modules_path, dependency_name):
    '''Expose a dependency's "bin" entries under node_modules/.bin.

    package.json's "bin" field is either a mapping of command name ->
    relative script path, or a bare string, in which case the command name
    defaults to the package name. On Windows (no reliable symlink support)
    the scripts are copied instead of linked.
    '''
    dependency_root = os.path.join(node_modules_path, dependency_name)
    dependency_config = json_load(os.path.join(dependency_root, 'package.json'))
    bin_config = dependency_config.get('bin')
    if not bin_config:
        return
    if isinstance(bin_config, dict):
        symlinks_to_create = bin_config
    else:
        # A bare string maps the package's own name to the single script.
        symlinks_to_create = {dependency_name: bin_config}
    dot_bin_path = os.path.join(node_modules_path, '.bin')
    if platform_checker.is_windows():
        fs.mkdirs(dot_bin_path)
    for dst_name, relative_src_path in symlinks_to_create.items():
        src = os.path.join(dependency_root, relative_src_path)
        dst = os.path.join(dot_bin_path, dst_name)
        if platform_checker.is_windows():
            shutil.copyfile(src, dst)
        else:
            symlink(src, dst, relative=True)
def _install_apm_and_npm_os_x(atom_app_dir):
    '''Inserts the versions of apm, npm, and node bundled with Atom to the front of the $PATH.

    Args:
        atom_app_dir: Absolute path to the Atom.app directory.

    Returns:
        The (mutated) os.environ mapping, with $PATH prefixed and ATOM_PATH set.
    '''
    paths = [
        APM_PATH,
        NODE_PATH,
        NPM_PATH,
    ]
    # List comprehensions instead of map()/lambda: clearer, and eagerly
    # evaluated under both Python 2 and 3 (map() returns a lazy iterator in 3).
    paths = [os.path.join(atom_app_dir, path) for path in paths]
    dirs = [os.path.dirname(path) for path in paths]
    path_env_dirs = list(set(dirs))
    path_prefix = os.pathsep.join(path_env_dirs)
    logging.info('Adding the following to $PATH: %s', path_prefix)
    env = os.environ
    # Prepend so the bundled tools shadow any system-installed versions.
    env['PATH'] = path_prefix + os.pathsep + env['PATH']
    # `apm` relies on `atom` to run tests (https://fburl.com/66859218),
    # so we need to install it as well.
    logging.info('Install atom.sh from Atom.app directory...')
    atom_sh_path = os.path.join(atom_app_dir, ATOM_SH_PATH)
    symlink(atom_sh_path, os.path.join(atom_app_dir, 'bin', 'atom'))
    env['PATH'] += os.pathsep + os.path.join(atom_app_dir, 'bin')
    # `atom.sh` needs ATOM_PATH to locate Atom.app (https://fburl.com/66863817).
    env['ATOM_PATH'] = os.path.dirname(atom_app_dir)
    return env
def link_dependencys_executable(node_modules_path, dependency_name):
    """Create node_modules/.bin entries for *dependency_name*'s scripts.

    The "bin" field of package.json may be a dict (command -> script path)
    or a single string, which implicitly names the command after the
    package itself. Windows gets copies instead of symlinks.
    """
    dep_root = os.path.join(node_modules_path, dependency_name)
    pkg = json_load(os.path.join(dep_root, "package.json"))
    bin_field = pkg.get("bin")
    if not bin_field:
        return
    if isinstance(bin_field, dict):
        commands = bin_field
    else:
        # String form: the command is named after the package.
        commands = {dependency_name: bin_field}
    bin_dir = os.path.join(node_modules_path, ".bin")
    on_windows = platform_checker.is_windows()
    if on_windows:
        fs.mkdirs(bin_dir)
    for command_name, script_rel_path in commands.items():
        target = os.path.join(dep_root, script_rel_path)
        link_path = os.path.join(bin_dir, command_name)
        if on_windows:
            shutil.copyfile(target, link_path)
        else:
            symlink(target, link_path, relative=True)
def process_package(pkg, copy_local_dependencies, queue, package_manager, npm_directory):
    '''Install the dependencies of one package into its node_modules directory.

    Args:
        pkg: PackageNeedsDepsInstalled-like record (name, package_json path,
            include_dev_dependencies flag).
        copy_local_dependencies: Copy local deps instead of symlinking them.
        queue: Deque of packages still needing their deps installed; newly
            installed packages are pushed onto the left.
        package_manager: Resolves dependency metadata and local package roots.
        npm_directory: Offline npm cache directory used by install_package().
    '''
    logging.info('OfflineInstaller is installing %s', pkg.name)
    package_json = pkg.package_json
    all_deps = package_manager.get_deps(package_json,
                                        pkg.include_dev_dependencies,
                                        include_local_dependencies=True)
    if not all_deps:
        return
    package_root = os.path.dirname(package_json)
    node_modules = os.path.join(package_root, 'node_modules')
    bin_dir = os.path.join(node_modules, '.bin')
    mkdirs(node_modules)
    for name, version in all_deps.items():
        package_dir = os.path.join(node_modules, name)
        if package_manager.is_local_dependency(name):
            if copy_local_dependencies:
                # A packaging tool may want the option to copy rather than symlink dependencies.
                shutil.copytree(package_manager.get_local_package_root(name), package_dir)
            else:
                # Prefer local symlink if it is an option.
                symlink(package_manager.get_local_package_root(name), package_dir)
        # Install the dependency at node_modules/pkg_name.
        # Note that if there is a compatible version in a parent node_modules,
        # then you should not install it again in order to save space
        # (and in some cases, to avoid cycles).
        elif not has_ancestor_with_dep(name, version, node_modules):
            # TODO: If the package.json has a preinstall step, it should be run.
            install_package(name, version, package_dir, npm_directory)
            # Add the package.json for the dependency to the queue.
            pkg_to_install = PackageNeedsDepsInstalled(
                name,
                os.path.join(package_dir, 'package.json'),
                include_dev_dependencies=False)
            queue.appendleft(pkg_to_install)
        else:
            # Unclear whether .bin should still get installed in this case. If so,
            # has_ancestor_with_dep() should be changed to return the path to the ancestor.
            continue
        # If the dependency's package.json has bin entries, then they need to be
        # symlinked to the dependent package's node_modules/.bin directory.
        package_info = json_load(os.path.join(package_dir, 'package.json'))
        # Renamed from `bin` so the builtin bin() is not shadowed; a string
        # entry is normalized to the {package_name: script} dict form.
        bin_entries = package_info.get('bin', None)
        if isinstance(bin_entries, str):
            bin_entries = {package_info['name']: bin_entries}
        if isinstance(bin_entries, dict) and bin_entries:
            mkdirs(bin_dir)
            for script_name, local_path in bin_entries.items():
                symlink(os.path.join(package_dir, local_path),
                        os.path.join(bin_dir, script_name))
def process_package(pkg, copy_local_dependencies, queue, package_manager, npm_directory):
    """Resolve and install a single package's dependencies offline.

    Local dependencies are linked (or copied) from the repo; remote ones are
    installed from the npm cache unless a compatible copy already exists in a
    parent node_modules. Freshly installed deps are queued so their own deps
    get processed, and any package.json "bin" entries are linked into .bin.
    """
    logging.info('OfflineInstaller is installing %s', pkg.name)
    package_json = pkg.package_json
    deps = package_manager.get_deps(package_json,
                                    pkg.include_dev_dependencies,
                                    include_local_dependencies=True)
    if not deps:
        return
    root = os.path.dirname(package_json)
    modules_dir = os.path.join(root, 'node_modules')
    scripts_dir = os.path.join(modules_dir, '.bin')
    mkdirs(modules_dir)
    for dep_name, dep_version in deps.items():
        dep_dir = os.path.join(modules_dir, dep_name)
        if package_manager.is_local_dependency(dep_name):
            local_root = package_manager.get_local_package_root(dep_name)
            if copy_local_dependencies:
                # A packaging tool may want real copies rather than symlinks.
                shutil.copytree(local_root, dep_dir)
            else:
                # Prefer a symlink when it is an option.
                symlink(local_root, dep_dir)
        elif has_ancestor_with_dep(dep_name, dep_version, modules_dir):
            # Already satisfied by a parent node_modules: skip to save space
            # and avoid cycles. (Unclear whether .bin should still get
            # installed here; if so, has_ancestor_with_dep() should return
            # the ancestor's path.)
            continue
        else:
            # TODO: If the package.json has a preinstall step, it should be run.
            install_package(dep_name, dep_version, dep_dir, npm_directory)
            # Queue the new dependency so its own deps get installed.
            queue.appendleft(PackageNeedsDepsInstalled(
                dep_name,
                os.path.join(dep_dir, 'package.json'),
                include_dev_dependencies=False))
        # Link any "bin" entries into the dependent's node_modules/.bin.
        info = json_load(os.path.join(dep_dir, 'package.json'))
        scripts = info.get('bin', None)
        if isinstance(scripts, str):
            command = scripts
            scripts = {}
            scripts[info['name']] = command
        if isinstance(scripts, dict) and scripts:
            mkdirs(scripts_dir)
            for command_name, rel_path in scripts.items():
                symlink(os.path.join(dep_dir, rel_path),
                        os.path.join(scripts_dir, command_name))
def symlink_local_nginx(cluster_names=('staged','dev')):
    """Link each cluster's generated nginx conf into the enabled-sites dir.

    For every cluster in *cluster_names*, <CONF_DIR>/<name>.nginx.conf is
    symlinked into nginx's sites-enabled directory under the cluster's primary
    domain, the shared fastcgi conf is linked, the log directory is ensured,
    and nginx is reloaded.
    """
    for cluster_name in cluster_names:
        cluster = structure.CLUSTERS[cluster_name]
        # The enabled-site file is named after the cluster's first (primary) domain.
        enabled_conf_path = join(features["nginx"].NGINX_SITES_ENABLED, cluster['domains'][0]+".conf")
        conf_path = join(structure.CONF_DIR,"%s.nginx.conf" % cluster_name)
        fs.symlink(conf_path,enabled_conf_path,replace=True)
    fastcgi_path = join(features["nginx"].NGINX_ETC_DIR,'fastcgi.conf')
    # NOTE(review): geo_path is computed but never used, and the commented-out
    # TODO below targets fastcgi_path — presumably it should target geo_path
    # instead. Confirm before enabling.
    geo_path = join(features["nginx"].NGINX_ETC_DIR,'geo.conf')
    fs.symlink(join(structure.CONF_DIR,'nginx.fastcgi.conf'),fastcgi_path,replace=True)
    #TODO fs.symlink(join(structure.CONF_DIR,'nginx.geo.conf'),fastcgi_path,replace=True)
    log_area.require_directory(structure.LOG_DIR)
    features["nginx"].reload()
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    '''Install a package's dependencies, skipping npm when package.json is unchanged.

    A sha1 of package.json is cached at node_modules/package.json.sum; when the
    stored sum matches the current one, the `npm install` step is skipped.
    Local dependencies are symlinked (or copied) into node_modules and their
    executables linked into .bin. Atom packages end with `apm link`.
    '''
    pkg_name = package_config['name']
    kind = 'Node' if package_config['isNodePackage'] else 'Atom'
    logging.info('Installing dependencies for %s package %s...', kind, pkg_name)
    # Link private node dependencies.
    root = package_config['packageRootAbsolutePath']
    modules_dir = os.path.join(root, 'node_modules')
    fs.mkdirs(modules_dir)
    local_deps = package_config['localDependencies']
    for dep_name, dep_cfg in local_deps.items():
        dep_src = dep_cfg['packageRootAbsolutePath']
        dep_dst = os.path.join(modules_dir, dep_name)
        if copy_local_dependencies:
            shutil.copytree(dep_src, dep_dst, True)
        else:
            symlink(dep_src, dep_dst, relative=True)
        link_dependencys_executable(modules_dir, dep_name)
    # Skip `npm install` when the cached sha1 of package.json still matches.
    sum_path = os.path.join(modules_dir, 'package.json.sum')
    manifest_path = os.path.join(root, 'package.json')
    checksum = hashlib.sha1(
        read_file(manifest_path).encode('utf-8')).hexdigest()
    if read_file(sum_path) == checksum:
        logging.info('Dependencies for %s already installed', pkg_name)
    else:
        npm.install(root, local_packages=local_deps,
                    include_dev_dependencies=True)
        write_file(sum_path, checksum)
    logging.info('Done installing dependencies for %s', pkg_name)
    # Atom packages additionally need to be registered with apm.
    if not package_config.get('isNodePackage'):
        logging.info('Running `apm link %s`...', root)
        fs.cross_platform_check_output(['apm', 'link', root])
        logging.info('Done linking %s', pkg_name)
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    """Install one package's dependencies with a package.json checksum cache.

    The sha1 of package.json is stored under node_modules; a matching stored
    sum means nothing changed and the `npm install` step is skipped. Local
    dependencies are symlinked (or copied when copy_local_dependencies is
    set) and their bin scripts exposed. Atom packages end with `apm link`.
    """
    name = package_config["name"]
    node_package = package_config["isNodePackage"]
    logging.info("Installing dependencies for %s package %s...",
                 "Node" if node_package else "Atom", name)
    # Link private node dependencies.
    package_root = package_config["packageRootAbsolutePath"]
    node_modules = os.path.join(package_root, "node_modules")
    fs.mkdirs(node_modules)
    for dep, dep_config in package_config["localDependencies"].items():
        source = dep_config["packageRootAbsolutePath"]
        destination = os.path.join(node_modules, dep)
        if copy_local_dependencies:
            shutil.copytree(source, destination, True)
        else:
            symlink(source, destination, relative=True)
        link_dependencys_executable(node_modules, dep)
    # Compare the stored sha1 of package.json with the current one; skip
    # `npm install` on a match.
    sum_file = os.path.join(node_modules, "package.json.sum")
    digest = hashlib.sha1(
        read_file(os.path.join(package_root, "package.json"))).hexdigest()
    if read_file(sum_file) == digest:
        logging.info("Dependencies for %s already installed", name)
    else:
        npm.install(package_root,
                    local_packages=package_config["localDependencies"],
                    include_dev_dependencies=True)
        write_file(sum_file, digest)
    logging.info("Done installing dependencies for %s", name)
    if not package_config.get("isNodePackage"):
        logging.info("Running `apm link %s`...", package_root)
        fs.cross_platform_check_output(["apm", "link", package_root])
        logging.info("Done linking %s", name)
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    '''Installs a package's dependencies, caching a sha1 of package.json.

    Args:
        package_config: Dict with the package's name, root path, local
            dependencies, and isNodePackage flag.
        npm: Wrapper object used to run `npm install`.
        copy_local_dependencies: Copy local deps instead of symlinking them.
    '''
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)
    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config['localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # Third positional arg is symlinks=True: copy symlinks as symlinks
            # rather than following them. (Stray trailing semicolon removed.)
            shutil.copytree(src_dir, dest_dir, True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)
    # Install other public node dependencies.
    #
    # We store the sha sum of package.json under the node_modules directory. If
    # the sum matches, we skip the call to `npm install`.
    sum_path = os.path.join(node_modules_path, 'package.json.sum')
    package_json_path = os.path.join(src_path, 'package.json')
    # Encode before hashing, matching the sibling implementation of this
    # function: hashlib requires bytes under Python 3.
    package_json_sum = hashlib.sha1(
        read_file(package_json_path).encode('utf-8')).hexdigest()
    valid_sum = read_file(sum_path) == package_json_sum
    if valid_sum:
        logging.info('Dependencies for %s already installed', name)
    else:
        npm.install(src_path, local_packages=package_config['localDependencies'],
                    include_dev_dependencies=True)
        write_file(sum_path, package_json_sum)
    logging.info('Done installing dependencies for %s', name)
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
def handle(self, *modulenames, **options):
    """Management-command entry point: set up local dirs and nginx confs.

    Links the maestro script into /usr/local/bin, ensures the project's
    target/upload/download/log/pid directories exist, and — when developing —
    links the project into ~/Sites, updates the hosts file, generates the
    dev/staged nginx confs if missing, and enables them. Warnings are printed
    rather than raised. (Python 2 syntax: `except Warning, e` / `print`.)
    """
    #TODO make sure memcached couchdb are started
    try:
        # is this needed?
        fs.symlink(structure.SCRIPT_PATH,'/usr/local/bin/maestro',replace=True)
        nginx_installed()
        #TODO create <project>/log dir for nginx
        structure.ensure_target_dirs()
        structure.machine.uploads_area.require_directory(structure.UPLOADS_DIR)
        structure.machine.downloads_area.require_directory(structure.DOWNLOADS_DIR)
        structure.machine.log_area.require_directory(structure.LOG_DIR)
        structure.machine.pid_area.require_directory(structure.PID_DIR)
        #sock_dir = "/var/tmp/django"
        #import fs,os,stat
        #fs.makedirs(sock_dir)
        #os.chmod(sock_dir,0777)
        if settings.DEVELOPING:
            #TODO link in User Sites directory
            from os.path import join,exists,expanduser
            fs.symlink(structure.PROJECT_DIR,join(expanduser("~/Sites"),structure.PROJECT_NAME))
            hosts = updated_hosts(change_file=True)
            # Write the dev nginx conf only if it does not exist yet.
            dev_name = join(structure.CONF_DIR,"dev.nginx.conf")
            if not exists(dev_name):
                servers_contents = nginx_enabled(cluster_name="dev",release_project_dir=False)
                with open(dev_name, 'w') as nginx_conf:
                    nginx_conf.writelines(servers_contents)
            # Same for the staged cluster (dev_name is reused as a scratch var).
            dev_name = join(structure.CONF_DIR,"staged.nginx.conf")
            if not exists(dev_name):
                servers_contents = nginx_enabled(cluster_name="staged",release_project_dir=False)
                with open(dev_name, 'w') as nginx_conf:
                    nginx_conf.writelines(servers_contents)
            symlink_local_nginx()
    except Warning, e:
        print self.style.NOTICE(e.message)
def install_dependencies(package_config, npm):
    '''Installs node/apm dependencies for a package, with Windows fallbacks.

    Args:
        package_config: Dict with the package's name, root path, local
            dependencies, includeDevDependencies/installLibClang flags.
        npm: Wrapper object used to run `npm install`.
    '''
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)
    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    # Hoisted: node_modules path was previously recomputed on every use.
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config['localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if platform_checker.is_windows():
            # Windows lacks dependable symlink support; replace with a copy.
            shutil.rmtree(dest_dir, ignore_errors=True)
            shutil.copytree(src_dir, dest_dir)
        else:
            symlink(src_dir, dest_dir)
        # NOTE(review): sibling variants pass the node_modules directory as the
        # first argument here, not the package root — confirm which signature
        # this file's link_dependencys_executable expects.
        link_dependencys_executable(src_path, local_dependency)
    # Install other public node dependencies.
    npm.install(src_path, local_packages=package_config['localDependencies'],
                include_dev_dependencies=package_config['includeDevDependencies'])
    logging.info('Done installing dependencies for %s', name)
    # Install libclang dependencies, if appropriate.
    if package_config.get('installLibClang', False):
        from fb.libclang import install_libclang
        logging.info('Installing libclang extra dependencies...')
        install_libclang(src_path)
        logging.info('Done installing libclang extra dependencies.')
    # Atom (non-Node) packages must also be registered with apm.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        args = ['apm', 'link', src_path]
        fs.cross_platform_check_call(args)
        logging.info('Done linking %s', name)
def handle_local(self,options):
    """Link the project's `thepian` script into the user bin directory.

    An existing link is left untouched (replace=False).
    """
    from thepian.conf import structure
    source = join(structure.PROJECT_DIR,"thepian")
    target = join(structure.USR_BIN_DIR,"thepian")
    fs.symlink(source, target, replace=False)
def link_dependencies(self): makedirs(os.path.join(structure.TARGET_DIR,"python")) for mod in dependency.MODULE_PATHS: python_target = os.path.join(structure.TARGET_DIR,"python",mod) symlink(dependency.MODULE_PATHS[mod],python_target) print "Linking dependencies", ",".join(dependency.MODULE_PATHS.keys()), "->", os.path.join(structure.TARGET_DIR,"python")