def testDownloadPageContent(self):
    """Tests reading the project definitions from the projects.ini file.

    Note: despite its name this test exercises
    ProjectDefinitionReader.Read; the original docstring incorrectly
    referred to a DownloadPageContent function.
    """
    config_file = os.path.join('data', 'projects.ini')

    project_definitions = {}
    # Open with an explicit encoding for consistency with testRead.
    with io.open(config_file, 'r', encoding='utf-8') as file_object:
      project_definition_reader = projects.ProjectDefinitionReader()
      for project_definition in project_definition_reader.Read(file_object):
        project_definitions[project_definition.name] = project_definition

    self.assertGreaterEqual(len(project_definitions), 1)

    project_definition = project_definitions['artifacts']
    self.assertEqual(project_definition.name, 'artifacts')

    self.assertIsNotNone(project_definition.version)
    self.assertEqual(
        project_definition.version.version_string, '>=20150409')

    expected_download_url = (
        'https://github.com/ForensicArtifacts/artifacts/releases')
    self.assertEqual(project_definition.download_url, expected_download_url)
def testRead(self):
    """Tests the Read function."""
    config_file = os.path.join('data', 'projects.ini')

    # Collect all definitions keyed by project name.
    with io.open(config_file, 'r', encoding='utf-8') as file_object:
      reader = projects.ProjectDefinitionReader()
      project_definitions = {
          definition.name: definition
          for definition in reader.Read(file_object)}

    self.assertGreaterEqual(len(project_definitions), 1)

    artifacts_definition = project_definitions['artifacts']
    self.assertEqual(artifacts_definition.name, 'artifacts')

    self.assertIsNotNone(artifacts_definition.version)
    self.assertEqual(
        artifacts_definition.version.version_string, '>=20150409')

    self.assertEqual(
        artifacts_definition.download_url,
        'https://github.com/ForensicArtifacts/artifacts/releases')
def ReadProjectDefinitions(self, path):
    """Reads project definitions.

    Args:
      path (str): path of the project definitions file.
    """
    definitions_by_name = {}
    with io.open(path, 'r', encoding='utf-8') as file_object:
      reader = projects.ProjectDefinitionReader()
      for definition in reader.Read(file_object):
        definitions_by_name[definition.name] = definition

    self.project_definitions = definitions_by_name
def _GetProjectDefinitions(self, projects_file):
    """Retrieves the project definitions from the projects file.

    Args:
      projects_file (str): path to the projects.ini configuration file.

    Returns:
      dict[str, ProjectDefinition]: project definitions per name.
    """
    with io.open(projects_file, 'r', encoding='utf-8') as file_object:
      reader = projects.ProjectDefinitionReader()
      return {
          definition.name: definition
          for definition in reader.Read(file_object)}
def __init__(self, projects_file):
    """Initializes a PyPI manager.

    Args:
      projects_file (str): path to the projects.ini file.
    """
    super(PyPIManager, self).__init__()
    self._download_helper = interface.DownloadHelper('')
    self._package_names = []
    self._pypi_package_names = {}

    if not projects_file:
      return

    with io.open(projects_file, 'r', encoding='utf-8') as file_object:
      definition_reader = projects.ProjectDefinitionReader()
      for definition in definition_reader.Read(file_object):
        self._package_names.append(definition.name)
        # Track the PyPI name separately when it differs from the
        # project name.
        if definition.pypi_name:
          self._pypi_package_names[definition.pypi_name] = definition.name
def Main():
  """The main program function.

  Returns:
    bool: True if successful or False if not.
  """
  build_targets = frozenset([
      u'download', u'dpkg', u'dpkg-source', u'msi', u'osc', u'pkg', u'rpm',
      u'source'])

  argument_parser = argparse.ArgumentParser(description=(
      u'Downloads and builds the latest versions of projects.'))

  argument_parser.add_argument(
      u'build_target', choices=sorted(build_targets), action=u'store',
      metavar=u'BUILD_TARGET', default=None, help=u'The build target.')

  argument_parser.add_argument(
      u'--build-directory', u'--build_directory', action=u'store',
      metavar=u'DIRECTORY', dest=u'build_directory', type=str,
      default=u'build', help=u'The location of the build directory.')

  argument_parser.add_argument(
      u'-c', u'--config', dest=u'config_path', action=u'store',
      metavar=u'CONFIG_PATH', default=None, help=(
          u'path of the directory containing the build configuration '
          u'files e.g. projects.ini.'))

  argument_parser.add_argument(
      u'--preset', dest=u'preset', action=u'store',
      metavar=u'PRESET_NAME', default=None, help=(
          u'name of the preset of project names to build. The default is to '
          u'build all project defined in the projects.ini configuration file. '
          u'The presets are defined in the preset.ini configuration file.'))

  argument_parser.add_argument(
      u'--projects', dest=u'projects', action=u'store',
      metavar=u'PROJECT_NAME(S)', default=None, help=(
          u'comma separated list of specific project names to build. The '
          u'default is to build all project defined in the projects.ini '
          u'configuration file.'))

  options = argument_parser.parse_args()

  if not options.build_target:
    print(u'Build target missing.')
    print(u'')
    argument_parser.print_help()
    print(u'')
    return False

  if options.build_target not in build_targets:
    print(u'Unsupported build target: {0:s}.'.format(options.build_target))
    print(u'')
    argument_parser.print_help()
    print(u'')
    return False

  config_path = options.config_path
  if not config_path:
    # Default to the data directory next to the tools directory.
    config_path = os.path.dirname(__file__)
    config_path = os.path.dirname(config_path)
    config_path = os.path.join(config_path, u'data')

  presets_file = os.path.join(config_path, u'presets.ini')
  if options.preset and not os.path.exists(presets_file):
    print(u'No such config file: {0:s}.'.format(presets_file))
    print(u'')
    return False

  projects_file = os.path.join(config_path, u'projects.ini')
  if not os.path.exists(projects_file):
    print(u'No such config file: {0:s}.'.format(projects_file))
    print(u'')
    return False

  logging.basicConfig(
      level=logging.INFO, format=u'[%(levelname)s] %(message)s')

  project_builder = ProjectBuilder(options.build_target)

  # TODO: package ipython.

  # TODO:
  # (u'protobuf', ProjectBuilder.PROJECT_TYPE_GOOGLE_CODE_WIKI),
  # ./configure
  # make
  # cd python
  # python setup.py build
  # python setup.py install --root $PWD/tmp
  #
  # Build of rpm fails:
  # python setup.py bdist_rpm
  #
  # Solution: use protobuf-python.spec to build

  # TODO: rpm build of psutil is broken, fix upstream or add patching.
  # (u'psutil', ProjectBuilder.PROJECT_TYPE_PYPI),

  project_names = []
  if options.preset:
    with open(presets_file) as file_object:
      preset_definition_reader = presets.PresetDefinitionReader()
      for preset_definition in preset_definition_reader.Read(file_object):
        if preset_definition.name == options.preset:
          project_names = preset_definition.project_names
          break

    if not project_names:
      print(u'Undefined preset: {0:s}'.format(options.preset))
      print(u'')
      return False

  elif options.projects:
    project_names = options.projects.split(u',')

  builds = []
  with open(projects_file) as file_object:
    project_definition_reader = projects.ProjectDefinitionReader()
    for project_definition in project_definition_reader.Read(file_object):
      is_disabled = False
      if (options.build_target in project_definition.disabled or
          u'all' in project_definition.disabled):
        if project_definition.name not in project_names:
          is_disabled = True
        else:
          # If a project is manually specified ignore the disabled status.
          logging.info(u'Ignoring disabled status for: {0:s}'.format(
              project_definition.name))

      if not is_disabled:
        builds.append(project_definition)

  if not os.path.exists(options.build_directory):
    os.mkdir(options.build_directory)

  current_working_directory = os.getcwd()
  os.chdir(options.build_directory)

  failed_builds = []
  undefined_packages = list(project_names)
  for project_definition in builds:
    if project_names and project_definition.name not in project_names:
      continue

    # Guard with a membership test instead of index()/del so that a
    # duplicate project definition cannot raise ValueError.
    if project_definition.name in undefined_packages:
      undefined_packages.remove(project_definition.name)

    logging.info(u'Processing: {0:s}'.format(project_definition.name))

    # TODO: add support for dokan, bzip2
    # TODO: setup sqlite in build directory.
    if not project_builder.Build(project_definition):
      print(u'Failed building: {0:s}'.format(project_definition.name))
      failed_builds.append(project_definition.name)

  os.chdir(current_working_directory)

  if undefined_packages:
    print(u'')
    print(u'Undefined packages:')
    for undefined_package in undefined_packages:
      print(u'\t{0:s}'.format(undefined_package))

  if failed_builds:
    print(u'')
    # Fixed typo in the summary header: was "Failed buiding:".
    print(u'Failed building:')
    for failed_build in failed_builds:
      print(u'\t{0:s}'.format(failed_build))

  return not failed_builds
def Main():
  """The main program function.

  Returns:
    bool: True if successful or False if not.
  """
  argument_parser = argparse.ArgumentParser(description=(
      'Generates dpkg packaging files for a project.'))

  argument_parser.add_argument(
      'project_name', action='store', metavar='NAME', type=str, help=(
          'Project name for which the dpkg packaging files should be '
          'generated.'))

  argument_parser.add_argument(
      '-c', '--config', dest='config_file', action='store',
      metavar='CONFIG_FILE', default=None,
      help='path of the build configuration file.')

  argument_parser.add_argument(
      '--source-directory', '--source_directory', action='store',
      metavar='DIRECTORY', dest='source_directory', type=str,
      default=None, help='The location of the source directory.')

  options = argument_parser.parse_args()

  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')

  if not options.config_file:
    # Default to data/projects.ini next to the tools directory.
    options.config_file = os.path.dirname(__file__)
    options.config_file = os.path.dirname(options.config_file)
    options.config_file = os.path.join(
        options.config_file, 'data', 'projects.ini')

  if not os.path.exists(options.config_file):
    print('No such config file: {0:s}.'.format(options.config_file))
    print('')
    return False

  project_definition_match = None
  # Read with an explicit encoding for consistency with the other tools.
  with io.open(options.config_file, 'r', encoding='utf-8') as file_object:
    project_definition_reader = projects.ProjectDefinitionReader()
    for project_definition in project_definition_reader.Read(file_object):
      if options.project_name == project_definition.name:
        project_definition_match = project_definition
        # Project names are unique; stop at the first match.
        break

  if not project_definition_match:
    print('No such package name: {0:s}.'.format(options.project_name))
    print('')
    return False

  source_path = options.source_directory
  if not source_path:
    # Fall back to globbing for a single directory that starts with the
    # project name.
    globbed_paths = []
    for path in glob.glob('{0:s}*'.format(options.project_name)):
      if not os.path.isdir(path):
        continue
      globbed_paths.append(path)

    if len(globbed_paths) != 1:
      print('Unable to determine source directory.')
      print('')
      return False

    source_path = globbed_paths[0]

  if not os.path.exists(source_path):
    print('No such source directory: {0:s}.'.format(source_path))
    print('')
    return False

  source_path = os.path.abspath(source_path)
  # The source directory is expected to be named <project>-<version>.
  project_version = os.path.basename(source_path)
  if not project_version.startswith('{0:s}-'.format(options.project_name)):
    print((
        'Unable to determine project version based on source '
        'directory: {0:s}.').format(source_path))
    print('')
    return False

  _, _, project_version = project_version.partition('-')

  dpkg_path = os.path.join(source_path, 'dpkg')
  if os.path.exists(dpkg_path):
    print('Destination dpkg directory: {0:s} already exists.'.format(
        dpkg_path))
    print('')
    return False

  tools_path = os.path.dirname(__file__)
  data_path = os.path.join(os.path.dirname(tools_path), 'data')

  build_files_generator = dpkg_files.DPKGBuildFilesGenerator(
      options.project_name, project_version, project_definition_match,
      data_path)

  print('Generating dpkg files for: {0:s} {1:s} in: {2:s}'.format(
      options.project_name, project_version, dpkg_path))
  build_files_generator.GenerateFiles(dpkg_path)

  print('')
  return True
def Main():
  """The main program function.

  Returns:
    bool: True if successful or False if not.
  """
  tracks = ['dev', 'stable', 'testing']

  argument_parser = argparse.ArgumentParser(description=(
      'Installs the latest versions of project dependencies.'))

  argument_parser.add_argument(
      '-c', '--config', dest='config_path', action='store',
      metavar='CONFIG_PATH', default=None, help=(
          'path of the directory containing the build configuration '
          'files e.g. projects.ini.'))

  argument_parser.add_argument(
      '--download-directory', '--download_directory', action='store',
      metavar='DIRECTORY', dest='download_directory', type=str,
      default='build', help='The location of the download directory.')

  argument_parser.add_argument(
      '--download-only', '--download_only', action='store_true',
      dest='download_only', default=False, help=(
          'Only download the dependencies. The default behavior is to '
          'download and update the dependencies.'))

  argument_parser.add_argument(
      '-e', '--exclude', action='store_true', dest='exclude_packages',
      default=False, help=(
          'Excludes the package names instead of including them.'))

  # Fixed missing trailing space after "only" which rendered the help
  # text as "to onlyinstall a dependency".
  argument_parser.add_argument(
      '-f', '--force', action='store_true', dest='force_install',
      default=False, help=(
          'Force installation. This option removes existing versions '
          'of installed dependencies. The default behavior is to only '
          'install a dependency if not or an older version is installed.'))

  argument_parser.add_argument(
      '--machine-type', '--machine_type', action='store', metavar='TYPE',
      dest='machine_type', type=str, default=None, help=(
          'Manually sets the machine type instead of using the value returned '
          'by platform.machine(). Usage of this argument is not recommended '
          'unless want to force the installation of one machine type e.g. '
          '\'x86\' onto another \'amd64\'.'))

  argument_parser.add_argument(
      '--msi-targetdir', '--msi_targetdir', action='store', metavar='TYPE',
      dest='msi_targetdir', type=str, default=None, help=(
          'Manually sets the MSI TARGETDIR property. Usage of this argument '
          'is not recommended unless want to force the installation of the '
          'MSIs into different directory than the system default.'))

  argument_parser.add_argument(
      '--preset', dest='preset', action='store',
      metavar='PRESET_NAME', default=None, help=(
          'name of the preset of project names to update. The default is to '
          'build all project defined in the projects.ini configuration file. '
          'The presets are defined in the preset.ini configuration file.'))

  argument_parser.add_argument(
      '-t', '--track', dest='track', action='store', metavar='TRACK',
      default='stable', choices=sorted(tracks), help=(
          'the l2tbinaries track to download from. The default is stable.'))

  argument_parser.add_argument(
      '-v', '--verbose', dest='verbose', action='store_true', default=False,
      help='have more verbose output.')

  argument_parser.add_argument(
      'project_names', nargs='*', action='store', metavar='NAME', type=str,
      help=(
          'Optional project names which should be updated if an update is '
          'available. The corresponding package names are derived from '
          'the projects.ini configuration file. If no value is provided '
          'all available packages are updated.'))

  options = argument_parser.parse_args()

  config_path = options.config_path
  if not config_path:
    # Default to the data directory next to the tools directory.
    config_path = os.path.dirname(__file__)
    config_path = os.path.dirname(config_path)
    config_path = os.path.join(config_path, 'data')

  presets_file = os.path.join(config_path, 'presets.ini')
  if options.preset and not os.path.exists(presets_file):
    print('No such config file: {0:s}.'.format(presets_file))
    print('')
    return False

  projects_file = os.path.join(config_path, 'projects.ini')
  if not os.path.exists(projects_file):
    print('No such config file: {0:s}.'.format(projects_file))
    print('')
    return False

  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')

  project_names = []
  if options.preset:
    with io.open(presets_file, 'r', encoding='utf-8') as file_object:
      preset_definition_reader = presets.PresetDefinitionReader()
      for preset_definition in preset_definition_reader.Read(file_object):
        if preset_definition.name == options.preset:
          project_names = preset_definition.project_names
          break

    if not project_names:
      print('Undefined preset: {0:s}'.format(options.preset))
      print('')
      return False

  elif options.project_names:
    project_names = options.project_names

  dependency_updater = DependencyUpdater(
      download_directory=options.download_directory,
      download_only=options.download_only,
      download_track=options.track,
      exclude_packages=options.exclude_packages,
      force_install=options.force_install,
      msi_targetdir=options.msi_targetdir,
      preferred_machine_type=options.machine_type,
      verbose_output=options.verbose)

  project_definitions = {}
  with io.open(projects_file, 'r', encoding='utf-8') as file_object:
    project_definition_reader = projects.ProjectDefinitionReader()
    for project_definition in project_definition_reader.Read(file_object):
      project_definitions[project_definition.name] = project_definition

  package_names = []
  for project_name in project_names:
    project_definition = project_definitions.get(project_name, None)
    if not project_definition:
      logging.error(
          'Missing definition for project: {0:s}'.format(project_name))
      continue

    # On Windows the MSI package name can differ from the project name.
    package_name = project_name
    if (dependency_updater.operating_system == 'Windows' and
        project_definition.msi_name):
      package_name = project_definition.msi_name

    package_names.append(package_name)

  return dependency_updater.UpdatePackages(package_names)
def Main():
  """The main program function.

  Parses the command line arguments, determines which projects to build
  from a preset or an explicit list, then builds each selected project
  inside the build directory.

  Returns:
    bool: True if successful or False if not.
  """
  build_targets = frozenset([
      'download', 'dpkg', 'dpkg-source', 'msi', 'osc', 'pkg', 'rpm',
      'source', 'srpm'])

  argument_parser = argparse.ArgumentParser(description=(
      'Downloads and builds the latest versions of projects.'))

  argument_parser.add_argument(
      'build_target', choices=sorted(build_targets), action='store',
      metavar='BUILD_TARGET', default=None, help='The build target.')

  argument_parser.add_argument(
      '--build-directory', '--build_directory', action='store',
      metavar='DIRECTORY', dest='build_directory', type=str,
      default='build', help='The location of the build directory.')

  argument_parser.add_argument(
      '-c', '--config', dest='config_path', action='store',
      metavar='CONFIG_PATH', default=None, help=(
          'path of the directory containing the build configuration '
          'files e.g. projects.ini.'))

  argument_parser.add_argument(
      '--preset', dest='preset', action='store',
      metavar='PRESET_NAME', default=None, help=(
          'name of the preset of project names to build. The default is to '
          'build all project defined in the projects.ini configuration file. '
          'The presets are defined in the preset.ini configuration file.'))

  argument_parser.add_argument(
      '--projects', dest='projects', action='store',
      metavar='PROJECT_NAME(S)', default=None, help=(
          'comma separated list of specific project names to build. The '
          'default is to build all project defined in the projects.ini '
          'configuration file.'))

  options = argument_parser.parse_args()

  if not options.build_target:
    print('Build target missing.')
    print('')
    argument_parser.print_help()
    print('')
    return False

  if options.build_target not in build_targets:
    print('Unsupported build target: {0:s}.'.format(options.build_target))
    print('')
    argument_parser.print_help()
    print('')
    return False

  config_path = options.config_path
  if not config_path:
    # Default to the data directory next to the tools directory.
    config_path = os.path.dirname(__file__)
    config_path = os.path.dirname(config_path)
    config_path = os.path.join(config_path, 'data')

  if not options.preset and not options.projects:
    print('Please define a preset or projects to build.')
    print('')
    return False

  presets_file = os.path.join(config_path, 'presets.ini')
  if options.preset and not os.path.exists(presets_file):
    print('No such config file: {0:s}.'.format(presets_file))
    print('')
    return False

  projects_file = os.path.join(config_path, 'projects.ini')
  if not os.path.exists(projects_file):
    print('No such config file: {0:s}.'.format(projects_file))
    print('')
    return False

  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')

  project_builder = ProjectBuilder(options.build_target)

  # Resolve the preset into project names, or split the explicit
  # comma-separated list.
  project_names = []
  if options.preset:
    with io.open(presets_file, 'r', encoding='utf-8') as file_object:
      preset_definition_reader = presets.PresetDefinitionReader()
      for preset_definition in preset_definition_reader.Read(file_object):
        if preset_definition.name == options.preset:
          project_names = preset_definition.project_names
          break

    if not project_names:
      print('Undefined preset: {0:s}'.format(options.preset))
      print('')
      return False

  elif options.projects:
    project_names = options.projects.split(',')

  # Select the definitions to build; a disabled project is skipped only
  # when selected via a preset — explicitly named projects are built
  # regardless of their disabled status.
  builds = []
  disabled_packages = []
  with io.open(projects_file, 'r', encoding='utf-8') as file_object:
    project_definition_reader = projects.ProjectDefinitionReader()
    for project_definition in project_definition_reader.Read(file_object):
      if project_definition.name not in project_names:
        continue

      is_disabled = False
      if (options.build_target in project_definition.disabled or
          'all' in project_definition.disabled):
        if options.preset:
          is_disabled = True
        else:
          # If a project is manually specified ignore the disabled status.
          logging.info('Ignoring disabled status for: {0:s}'.format(
              project_definition.name))

      if is_disabled:
        disabled_packages.append(project_definition.name)
      else:
        builds.append(project_definition)

  if not os.path.exists(options.build_directory):
    os.mkdir(options.build_directory)

  # Build inside the build directory; the original working directory is
  # restored after the build loop.
  current_working_directory = os.getcwd()
  os.chdir(options.build_directory)

  # Names that remain in undefined_packages at the end were requested but
  # never matched by a definition (and were not disabled).
  undefined_packages = list(project_names)
  for disabled_package in disabled_packages:
    undefined_packages.remove(disabled_package)

  failed_builds = []
  for project_definition in builds:
    if project_names and project_definition.name not in project_names:
      continue

    if undefined_packages:
      project_index = undefined_packages.index(project_definition.name)
      del undefined_packages[project_index]

    logging.info('Processing: {0:s}'.format(project_definition.name))

    # TODO: add support for dokan, bzip2
    # TODO: setup sqlite in build directory.
    if not project_builder.Build(project_definition):
      print('Failed building: {0:s}'.format(project_definition.name))
      failed_builds.append(project_definition.name)

  os.chdir(current_working_directory)

  if undefined_packages:
    print('')
    print('Undefined packages:')
    for undefined_package in undefined_packages:
      print('\t{0:s}'.format(undefined_package))

  if failed_builds:
    print('')
    print('Failed building:')
    for failed_build in failed_builds:
      print('\t{0:s}'.format(failed_build))

  return not failed_builds