def link_dependencys_executable(node_modules_path, dependency_name):
    """Expose a dependency's "bin" entries under node_modules/.bin.

    The "bin" field of package.json is either a dict or a string. If it's a
    dict, such as `{ "name": "test", "bin" : { "myapp" : "./cli.js" } }`, we
    create a symlink from ./node_modules/test/cli.js to
    ./node_modules/.bin/myapp. If it's a string, like
    `{ "name": "test", "bin" : "./cli.js" }`, the link's name is the package
    name itself, in this case ./node_modules/.bin/test.
    """
    dependency_root = os.path.join(node_modules_path, dependency_name)
    dependency_config = json_load(os.path.join(dependency_root, "package.json"))

    bin_config = dependency_config.get("bin")
    if not bin_config:
        # Nothing to expose for this package.
        return
    elif isinstance(bin_config, dict):
        symlinks_to_create = bin_config
    else:
        symlinks_to_create = {dependency_name: bin_config}

    # Fix: create .bin on every platform, not only Windows -- otherwise the
    # symlink below can fail when the directory does not exist yet.
    # (fs.mkdirs is presumably idempotent like os.makedirs(exist_ok=True) --
    # TODO confirm.)
    dot_bin_path = os.path.join(node_modules_path, ".bin")
    fs.mkdirs(dot_bin_path)

    for dst_name, relative_src_path in symlinks_to_create.items():
        absolute_dst_path = os.path.join(dot_bin_path, dst_name)
        absolute_src_path = os.path.join(dependency_root, relative_src_path)

        if platform_checker.is_windows():
            # Windows lacks reliable symlink support: copy the script instead.
            shutil.copyfile(absolute_src_path, absolute_dst_path)
        else:
            symlink(absolute_src_path, absolute_dst_path, relative=True)
Exemplo n.º 2
0
def link_dependencys_executable(node_modules_path, dependency_name):
    """Create entries in node_modules/.bin for a dependency's executables.

    The package.json "bin" field is either a mapping of executable name to
    script path (e.g. `{ "bin": { "myapp": "./cli.js" } }`), or a single
    script path string, in which case the executable is named after the
    package itself.
    """
    pkg_root = os.path.join(node_modules_path, dependency_name)
    pkg_config = json_load(os.path.join(pkg_root, 'package.json'))

    bin_field = pkg_config.get('bin')
    if not bin_field:
        return

    if isinstance(bin_field, dict):
        executables = bin_field
    else:
        # A bare string maps the package's own name to the script.
        executables = {dependency_name: bin_field}

    bin_dir = os.path.join(node_modules_path, '.bin')
    on_windows = platform_checker.is_windows()
    if on_windows:
        fs.mkdirs(bin_dir)

    for exe_name, script_rel_path in executables.items():
        link_path = os.path.join(bin_dir, exe_name)
        script_path = os.path.join(pkg_root, script_rel_path)
        if on_windows:
            # No symlinks on Windows -- copy the script file instead.
            shutil.copyfile(script_path, link_path)
        else:
            symlink(script_path, link_path, relative=True)
Exemplo n.º 3
0
def install_dependencies(package_config, npm):
    """Install a package's dependencies.

    Symlinks local (in-repo) dependencies into node_modules, installs the
    remaining public dependencies via npm, and runs `apm link` for Atom
    (non-Node) packages.
    """
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type,
                 name)

    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config[
            'localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)

    # Install other public node dependencies.
    npm.install(
        src_path,
        local_packages=package_config['localDependencies'],
        include_dev_dependencies=package_config['includeDevDependencies'])
    logging.info('Done installing dependencies for %s', name)

    # Atom packages additionally need `apm link`. Reuse the is_node_package
    # value read above instead of re-fetching it from the config.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
Exemplo n.º 4
0
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    """Install one package's dependencies.

    Local (in-repo) dependencies are symlinked into node_modules, or copied
    when *copy_local_dependencies* is set. Public dependencies are installed
    via npm, and Atom (non-Node) packages are finished with `apm link`.
    """
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)

    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config['localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # symlinks=True keeps symlinks inside the copied tree as links
            # (was a bare positional `True` followed by a stray semicolon).
            shutil.copytree(src_dir, dest_dir, symlinks=True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)

    # Install other public node dependencies.
    npm.install(src_path, local_packages=package_config['localDependencies'], include_dev_dependencies=True)
    logging.info('Done installing dependencies for %s', name)

    # Atom packages additionally need `apm link`. Reuse the is_node_package
    # value read above instead of re-fetching it from the config.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
Exemplo n.º 5
0
def add_github_uri_to_output_dir(uri, output_dir, package_name):
    """Clone a `github:...#commit` URI and stage it for offline installs.

    The checkout is moved into a hashed subdirectory of *output_dir*
    (under NUCLIDE_SECRET_NPM_SUBDIRECTORY) with its .git directory removed.

    Raises:
        Exception: if *uri* is not a recognized GitHub URI.
    """
    if not is_recognized_github_uri(uri):
        raise Exception('Unsupported uri format: %s' % uri)

    # Checkout the commit in output_dir.
    hash_index = uri.rindex('#')
    git_uri = 'git://github.com/' + uri[len(GITHUB_URI_PREFIX):hash_index]
    commit_id = uri[hash_index + 1:]

    # Clear out the expected checkout directory just in case something is there.
    checkout_dir = os.path.join(output_dir, commit_id)
    shutil.rmtree(checkout_dir, ignore_errors=True)

    logging.warning('Cloning %s for %s.', git_uri, package_name)
    # NOTE(review): a `--depth 1` clone fetches the default-branch HEAD, not
    # commit_id itself -- verify upstream guarantees these coincide.
    args = ['git', 'clone', '--quiet', git_uri, '--depth', '1', commit_id]
    subprocess.check_call(args, cwd=output_dir)

    # Write the contents to a special directory for packages cloned from Git
    # repos. Note that when running `npm install` for a Git URI, the contents
    # will be exactly the same, except npm appears to normalize the root
    # `package.json` file.
    hasher = hashlib.new('sha1')
    # Fix: hashlib requires bytes on Python 3 -- encode the URI first.
    hasher.update(uri.encode('utf-8'))
    encoded_uri = hasher.hexdigest()
    pkg_output_dir = os.path.join(output_dir, NUCLIDE_SECRET_NPM_SUBDIRECTORY,
                                  encoded_uri)
    mkdirs(os.path.dirname(pkg_output_dir))
    shutil.move(checkout_dir, pkg_output_dir)

    # Remove the .git directory from the checkout.
    shutil.rmtree(os.path.join(pkg_output_dir, '.git'))
Exemplo n.º 6
0
def add_github_uri_to_output_dir(uri, output_dir, package_name):
    """Clone a `github:...#commit` URI and stage it for offline installs.

    The checkout lands in a sha1-named subdirectory of *output_dir* (under
    NUCLIDE_SECRET_NPM_SUBDIRECTORY) with its .git directory stripped.

    Raises:
        Exception: if *uri* is not a recognized GitHub URI.
    """
    if not is_recognized_github_uri(uri):
        raise Exception('Unsupported uri format: %s' % uri)

    # Checkout the commit in output_dir.
    hash_index = uri.rindex('#')
    git_uri = 'git://github.com/' + uri[len(GITHUB_URI_PREFIX):hash_index]
    commit_id = uri[hash_index + 1:]

    # Clear out the expected checkout directory just in case something is there.
    checkout_dir = os.path.join(output_dir, commit_id)
    shutil.rmtree(checkout_dir, ignore_errors=True)

    logging.warning('Cloning %s for %s.', git_uri, package_name)
    args = ['git', 'clone', '--quiet', git_uri, '--depth', '1', commit_id]
    subprocess.check_call(args, cwd=output_dir)

    # Write the contents to a special directory for packages cloned from Git
    # repos. Note that when running `npm install` for a Git URI, the contents
    # will be exactly the same, except npm appears to normalize the root
    # `package.json` file.
    hasher = hashlib.new('sha1')
    # Fix: hashlib.update() requires bytes on Python 3, not str.
    hasher.update(uri.encode('utf-8'))
    encoded_uri = hasher.hexdigest()
    pkg_output_dir = os.path.join(output_dir, NUCLIDE_SECRET_NPM_SUBDIRECTORY, encoded_uri)
    mkdirs(os.path.dirname(pkg_output_dir))
    shutil.move(checkout_dir, pkg_output_dir)

    # Remove the .git directory from the checkout.
    shutil.rmtree(os.path.join(pkg_output_dir, '.git'))
def set(key, data):
    """Pickle *data* to the file derived from *key*, creating parent dirs.

    NOTE: the name shadows the builtin ``set``; it is kept because callers
    depend on it.
    """
    filePath = _constructPathFromKey(key)
    parentPath = dirname(filePath)
    if not os.path.exists(parentPath):
        mkdirs(parentPath)
    # Fix: pickle streams are binary -- open with 'wb'. Text mode ('w')
    # breaks pickle.dump on Python 3.
    with open(filePath, 'wb') as fh:
        return pickle.dump(data, fh)
Exemplo n.º 8
0
def process_package(pkg, copy_local_dependencies, queue, package_manager,
                    npm_directory):
    """Install the dependencies of one package for the offline installer.

    Local dependencies are copied or symlinked into node_modules; public
    dependencies are installed unless a compatible version already exists in
    an ancestor node_modules. Each newly installed package is pushed onto
    *queue* so its own dependencies get processed, and every placed
    dependency's "bin" entries are symlinked into node_modules/.bin.
    """
    logging.info('OfflineInstaller is installing %s', pkg.name)
    package_json = pkg.package_json
    all_deps = package_manager.get_deps(package_json,
                                        pkg.include_dev_dependencies,
                                        include_local_dependencies=True)
    if not all_deps:
        return

    package_root = os.path.dirname(package_json)
    node_modules = os.path.join(package_root, 'node_modules')
    bin_dir = os.path.join(node_modules, '.bin')
    mkdirs(node_modules)
    for dep_name, dep_version in all_deps.items():
        dep_dir = os.path.join(node_modules, dep_name)
        if package_manager.is_local_dependency(dep_name):
            local_root = package_manager.get_local_package_root(dep_name)
            if copy_local_dependencies:
                # A packaging tool may want the option to copy rather than symlink dependencies.
                shutil.copytree(local_root, dep_dir)
            else:
                # Prefer local symlink if it is an option.
                symlink(local_root, dep_dir)
        elif not has_ancestor_with_dep(dep_name, dep_version, node_modules):
            # No compatible version in a parent node_modules, so install it
            # here. (Reusing an ancestor's copy saves space and, in some
            # cases, avoids cycles.)
            # TODO: If the package.json has a preinstall step, it should be run.
            install_package(dep_name, dep_version, dep_dir, npm_directory)

            # Queue the freshly installed package so its deps get installed.
            queue.appendleft(PackageNeedsDepsInstalled(
                dep_name,
                os.path.join(dep_dir, 'package.json'),
                include_dev_dependencies=False))
        else:
            # Unclear whether .bin should still get installed in this case. If so,
            # has_ancestor_with_dep() should be changed to return the path to the ancestor.
            continue

        # If the dependency's package.json has bin entries, then they need to
        # be symlinked to the dependent package's node_modules/.bin directory.
        dep_info = json_load(os.path.join(dep_dir, 'package.json'))
        bin_spec = dep_info.get('bin', None)
        if isinstance(bin_spec, str):
            # Normalize the string form to a one-entry mapping.
            bin_spec = {dep_info['name']: bin_spec}
        if isinstance(bin_spec, dict) and bin_spec:
            mkdirs(bin_dir)
            for script_name, local_path in bin_spec.items():
                symlink(os.path.join(dep_dir, local_path),
                        os.path.join(bin_dir, script_name))
Exemplo n.º 9
0
def restore(trashed_file, path_exists, fs):
    """Move a trashed file back to its original location.

    Refuses (raises IOError) to clobber an existing file at the destination;
    otherwise recreates the parent directory, moves the file back, and
    removes its trash-info record.
    """
    destination = trashed_file.path
    if path_exists(destination):
        raise IOError('Refusing to overwrite existing file "%s".'
                      % os.path.basename(destination))
    fs.mkdirs(os.path.dirname(destination))
    fs.move(trashed_file.original_file, destination)
    fs.remove_file(trashed_file.info_file)
Exemplo n.º 10
0
def process_package(pkg, copy_local_dependencies, queue, package_manager, npm_directory):
    """Install the dependencies of one package for the offline installer.

    Local dependencies are copied or symlinked into node_modules; public
    dependencies are installed unless a compatible version already exists in
    an ancestor node_modules. Newly installed packages are pushed onto
    *queue* (breadth-first) so their own dependencies get processed, and
    each placed dependency's "bin" entries are symlinked into
    node_modules/.bin.
    """
    logging.info('OfflineInstaller is installing %s', pkg.name)
    package_json = pkg.package_json
    all_deps = package_manager.get_deps(package_json,
                                        pkg.include_dev_dependencies,
                                        include_local_dependencies=True)
    if not all_deps:
        return

    package_root = os.path.dirname(package_json)
    node_modules = os.path.join(package_root, 'node_modules')
    bin_dir = os.path.join(node_modules, '.bin')
    mkdirs(node_modules)
    for name, version in all_deps.items():
        package_dir = os.path.join(node_modules, name)
        if package_manager.is_local_dependency(name):
            if copy_local_dependencies:
                # A packaging tool may want the option to copy rather than symlink dependencies.
                shutil.copytree(package_manager.get_local_package_root(name), package_dir)
            else:
                # Prefer local symlink if it is an option.
                symlink(package_manager.get_local_package_root(name), package_dir)
        # Install the dependency at node_modules/pkg_name.
        # Note that if there is a compatible version in a parent node_modules,
        # then you should not install it again in order to save space
        # (and in some cases, to avoid cycles).
        elif not has_ancestor_with_dep(name, version, node_modules):
            # TODO: If the package.json has a preinstall step, it should be run.
            install_package(name, version, package_dir, npm_directory)

            # Add the package.json for the dependency to the queue.
            pkg_to_install = PackageNeedsDepsInstalled(name,
                                                       os.path.join(package_dir, 'package.json'),
                                                       include_dev_dependencies=False)
            queue.appendleft(pkg_to_install)
        else:
            # Unclear whether .bin should still get installed in this case. If so,
            # has_ancestor_with_dep() should be changed to return the path to the ancestor.
            continue

        # If the dependency's package.json has bin entries, then they need to be
        # symlinked to the dependent package's node_modules/.bin directory.
        # A string-valued "bin" is normalized to {package_name: script_path}.
        package_info = json_load(os.path.join(package_dir, 'package.json'))
        bin = package_info.get('bin', None)
        if isinstance(bin, str):
            bin_command = bin
            bin = {}
            bin[package_info['name']] = bin_command
        if isinstance(bin, dict) and bin:
            mkdirs(bin_dir)
            for script_name, local_path in bin.items():
                symlink(os.path.join(package_dir, local_path), os.path.join(bin_dir, script_name))
Exemplo n.º 11
0
    def write_dependencies(self, output_dir):
        """Copy every required dependency version into *output_dir*.

        Aggregates the version requirements of all package configs, then
        satisfies each requirement from the local .npm mirror (or a GitHub
        checkout), copying the chosen version into *output_dir* unless it is
        already present.

        Raises:
            Exception: if a package directory is missing from the mirror or
                no cached version satisfies a requested range.
        """
        # Map of package name -> set of requested version specs.
        package_to_version_set = {}
        for config in self._package_manager.get_configs():
            src_path = config['packageRootAbsolutePath']
            package_json = os.path.join(src_path, 'package.json')
            self._process_package_json(
                package_json,
                package_to_version_set,
                include_dev_dependencies=config['includeDevDependencies'])

        # Write deps based on package_to_version_set.
        # Leveraging semver from npm makes this fairly straightforward.
        for package, versions in package_to_version_set.items():
            package_dir = os.path.join(self._dot_npm_directory, package)
            if not os.path.isdir(package_dir):
                raise Exception(
                    'ERROR: Could not find directory for package %s at %s' %
                    (package, package_dir))
            available_versions = os.listdir(package_dir)
            for version in versions:
                # GitHub URIs bypass the mirror and are cloned directly.
                if is_recognized_github_uri(version):
                    add_github_uri_to_output_dir(version, output_dir, package)
                    continue

                semver_range = version if version != 'latest' else '*'
                matching_versions = find_version_in_range(
                    available_versions, semver_range)
                if not matching_versions:
                    # Note that there are other valid version formats, such as local dependencies
                    # and URL formats that we have not added logic for
                    # (https://docs.npmjs.com/files/package.json#git-urls-as-dependencies).
                    # Currently, we can get away with this because these formats are not used by our
                    # transitive dependencies, so we may have to expand what we support in the
                    # future.
                    raise Exception('No package found for %s@%s' %
                                    (package, version))
                else:
                    # By default, we pick the newest version available.
                    desired_version = matching_versions[-1]

                    if version == 'latest':
                        logging.warn(
                            'Warning: choosing "latest" among what is locally available for %s: (%s).',
                            package, desired_version)
                    src_dir = os.path.join(package_dir, desired_version)
                    dest_dir = os.path.join(output_dir, package,
                                            desired_version)
                    # Skip the copy when this version was already written.
                    if not os.path.isdir(dest_dir):
                        mkdirs(os.path.dirname(dest_dir))
                        shutil.copytree(src_dir, dest_dir)
Exemplo n.º 12
0
    def write_dependencies(self, output_dir):
        """Copy every required dependency version into *output_dir*.

        Aggregates the version requirements of all non-release-excluded
        package configs, then satisfies each requirement from the local
        .npm mirror (or a GitHub checkout), copying the chosen version into
        *output_dir* unless it is already present.

        Raises:
            Exception: if a package directory is missing from the mirror or
                no cached version satisfies a requested range.
        """
        # Map of package name -> set of requested version specs.
        requirements = {}
        for config in self._package_manager.get_configs():
            if config.get('excludeFromRelease', False):
                continue

            pkg_root = config['packageRootAbsolutePath']
            self._process_package_json(
                os.path.join(pkg_root, 'package.json'),
                requirements,
                include_dev_dependencies=config['includeDevDependencies'])

        # Satisfy each requirement; semver from npm makes the range
        # matching straightforward.
        for pkg_name, wanted_versions in requirements.items():
            mirror_dir = os.path.join(self._dot_npm_directory, pkg_name)
            if not os.path.isdir(mirror_dir):
                raise Exception('ERROR: Could not find directory for package %s at %s' % (pkg_name, mirror_dir))
            cached_versions = os.listdir(mirror_dir)
            for wanted in wanted_versions:
                # GitHub URIs bypass the mirror and are cloned directly.
                if is_recognized_github_uri(wanted):
                    add_github_uri_to_output_dir(wanted, output_dir, pkg_name)
                    continue

                semver_range = '*' if wanted == 'latest' else wanted
                candidates = find_version_in_range(cached_versions, semver_range)
                if not candidates:
                    # Note that there are other valid version formats, such as local dependencies
                    # and URL formats that we have not added logic for
                    # (https://docs.npmjs.com/files/package.json#git-urls-as-dependencies).
                    # Currently, we can get away with this because these formats are not used by our
                    # transitive dependencies, so we may have to expand what we support in the
                    # future.
                    raise Exception('No package found for %s@%s' % (pkg_name, wanted))

                # Pick the newest matching version available.
                chosen = candidates[-1]
                if wanted == 'latest':
                    logging.warn(
                        'Warning: choosing "latest" among what is locally available for %s: (%s).',
                        pkg_name,
                        chosen)
                src_dir = os.path.join(mirror_dir, chosen)
                dest_dir = os.path.join(output_dir, pkg_name, chosen)
                # Skip the copy when this version was already written.
                if not os.path.isdir(dest_dir):
                    mkdirs(os.path.dirname(dest_dir))
                    shutil.copytree(src_dir, dest_dir)
Exemplo n.º 13
0
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    """Install a package's dependencies, skipping npm when nothing changed.

    Local dependencies are symlinked (or copied when
    *copy_local_dependencies* is set). Public dependencies are installed via
    npm unless the stored sha1 of package.json matches the current contents,
    in which case the previous install is reused. Atom (non-Node) packages
    are then registered with `apm link`.
    """
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type,
                 name)

    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config[
            'localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # symlinks=True keeps symlinks inside the copied tree as links
            # (was a bare positional `True`).
            shutil.copytree(src_dir, dest_dir, symlinks=True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)

    # Install other public node dependencies.
    #
    # We store the sha sum of package.json under the node_modules directory.
    # If the sum matches, we skip the call to `npm install`.
    sum_path = os.path.join(node_modules_path, 'package.json.sum')
    package_json_path = os.path.join(src_path, 'package.json')
    package_json_sum = hashlib.sha1(
        read_file(package_json_path).encode('utf-8')).hexdigest()
    # NOTE(review): assumes read_file() returns a non-matching value (rather
    # than raising) when sum_path does not exist yet -- confirm against its
    # implementation.
    valid_sum = read_file(sum_path) == package_json_sum
    if valid_sum:
        logging.info('Dependencies for %s already installed', name)
    else:
        npm.install(src_path,
                    local_packages=package_config['localDependencies'],
                    include_dev_dependencies=True)
        write_file(sum_path, package_json_sum)
        logging.info('Done installing dependencies for %s', name)

    # Atom packages additionally need `apm link`. Reuse the is_node_package
    # value read above instead of re-fetching it from the config.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    """Install a package's dependencies, skipping npm when nothing changed.

    Local dependencies are symlinked (or copied when
    *copy_local_dependencies* is set). Public dependencies are installed via
    npm unless the stored sha1 of package.json matches the current contents.
    Atom (non-Node) packages are then registered with `apm link`.
    """
    name = package_config["name"]
    is_node_package = package_config["isNodePackage"]
    package_type = "Node" if is_node_package else "Atom"
    logging.info("Installing dependencies for %s package %s...", package_type, name)

    # Link private node dependencies.
    src_path = package_config["packageRootAbsolutePath"]
    node_modules_path = os.path.join(src_path, "node_modules")
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config["localDependencies"].items():
        src_dir = local_dependency_config["packageRootAbsolutePath"]
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # symlinks=True keeps symlinks inside the copied tree as links.
            shutil.copytree(src_dir, dest_dir, symlinks=True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)

    # Install other public node dependencies.
    #
    # We store the sha sum of package.json under the node_modules directory. If
    # the sum matches, we skip the call to `npm install`.
    sum_path = os.path.join(node_modules_path, "package.json.sum")
    package_json_path = os.path.join(src_path, "package.json")
    # Fix: hashlib requires bytes on Python 3; encode the file contents
    # first (matching the encoding-aware variant of this function).
    package_json_sum = hashlib.sha1(
        read_file(package_json_path).encode("utf-8")).hexdigest()
    valid_sum = read_file(sum_path) == package_json_sum
    if valid_sum:
        logging.info("Dependencies for %s already installed", name)
    else:
        npm.install(src_path, local_packages=package_config["localDependencies"], include_dev_dependencies=True)
        write_file(sum_path, package_json_sum)
        logging.info("Done installing dependencies for %s", name)

    # Atom packages additionally need `apm link`. Reuse the is_node_package
    # value read above instead of re-fetching it from the config.
    if not is_node_package:
        logging.info("Running `apm link %s`...", src_path)
        cmd_args = ["apm", "link", src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info("Done linking %s", name)
def install_dependencies(package_config, npm, copy_local_dependencies=False):
    """Install a package's dependencies, skipping npm when nothing changed.

    Local dependencies are symlinked (or copied when
    *copy_local_dependencies* is set). Public dependencies are installed via
    npm unless the stored sha1 of package.json matches the current contents.
    Atom (non-Node) packages are then registered with `apm link`.
    """
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)

    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules_path = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules_path)
    for local_dependency, local_dependency_config in package_config['localDependencies'].items():
        src_dir = local_dependency_config['packageRootAbsolutePath']
        dest_dir = os.path.join(node_modules_path, local_dependency)
        if copy_local_dependencies:
            # symlinks=True keeps symlinks inside the copied tree as links
            # (was a bare positional `True` followed by a stray semicolon).
            shutil.copytree(src_dir, dest_dir, symlinks=True)
        else:
            symlink(src_dir, dest_dir, relative=True)
        link_dependencys_executable(node_modules_path, local_dependency)

    # Install other public node dependencies.
    #
    # We store the sha sum of package.json under the node_modules directory.
    # If the sum matches, we skip the call to `npm install`.
    sum_path = os.path.join(node_modules_path, 'package.json.sum')
    package_json_path = os.path.join(src_path, 'package.json')
    # Fix: hashlib requires bytes on Python 3; encode the file contents
    # first (matching the encoding-aware variant of this function).
    package_json_sum = hashlib.sha1(
        read_file(package_json_path).encode('utf-8')).hexdigest()
    valid_sum = read_file(sum_path) == package_json_sum
    if valid_sum:
        logging.info('Dependencies for %s already installed', name)
    else:
        npm.install(src_path, local_packages=package_config['localDependencies'], include_dev_dependencies=True)
        write_file(sum_path, package_json_sum)
        logging.info('Done installing dependencies for %s', name)

    # Atom packages additionally need `apm link`. Reuse the is_node_package
    # value read above instead of re-fetching it from the config.
    if not is_node_package:
        logging.info('Running `apm link %s`...', src_path)
        cmd_args = ['apm', 'link', src_path]
        fs.cross_platform_check_output(cmd_args)
        logging.info('Done linking %s', name)
Exemplo n.º 16
0
def install_dependencies(package_config, npm):
    """Install dependencies for one package.

    Local dependencies are symlinked (copied on Windows), public ones are
    installed through npm, optional libclang extras are installed, and Atom
    (non-Node) packages are finished with `apm link`.
    """
    name = package_config['name']
    is_node_package = package_config['isNodePackage']
    package_type = 'Node' if is_node_package else 'Atom'
    logging.info('Installing dependencies for %s package %s...', package_type, name)

    # Link private node dependencies.
    src_path = package_config['packageRootAbsolutePath']
    node_modules = os.path.join(src_path, 'node_modules')
    fs.mkdirs(node_modules)
    local_deps = package_config['localDependencies']
    for dep_name, dep_config in local_deps.items():
        dep_src = dep_config['packageRootAbsolutePath']
        dep_dest = os.path.join(node_modules, dep_name)
        if platform_checker.is_windows():
            # Windows cannot symlink reliably: replace any stale copy.
            shutil.rmtree(dep_dest, ignore_errors=True)
            shutil.copytree(dep_src, dep_dest)
        else:
            symlink(dep_src, dep_dest)
        link_dependencys_executable(src_path, dep_name)

    # Install other public node dependencies.
    npm.install(src_path,
                local_packages=local_deps,
                include_dev_dependencies=package_config['includeDevDependencies'])
    logging.info('Done installing dependencies for %s', name)

    # Install libclang dependencies, if appropriate.
    if package_config.get('installLibClang', False):
        from fb.libclang import install_libclang
        logging.info('Installing libclang extra dependencies...')
        install_libclang(src_path)
        logging.info('Done installing libclang extra dependencies.')

    # Atom packages also need to be registered with apm.
    if not package_config.get('isNodePackage'):
        logging.info('Running `apm link %s`...', src_path)
        args = ['apm', 'link', src_path]
        fs.cross_platform_check_call(args)
        logging.info('Done linking %s', name)