def get_requirements(filename=None):
    """
    Processes dependencies from a requirements file or a simple pip
    dependency file.

    :param filename: a filename containing python packages in a format
                     expected by pip (one per line).
    :return: a list of dependencies (python packages).
    :rtype: ``list``

    .. versionadded:: 0.1
    """
    assert filename is not None
    collected = []
    for raw_line in readconfig(filename, conffile=False,
                               strip_comments=False):
        # Comments and blank lines carry no dependency information.
        if re.match(r'(\s*#)|(\s*$)', raw_line):
            continue
        # "-f <url>" lines point at package indexes, not packages.
        if re.match(r'\s*-f\s+', raw_line):
            continue
        if re.match(r'\s*-e\s+', raw_line):
            # Editable requirement: keep only the "#egg=" project name.
            collected.append(
                re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', raw_line).strip())
        else:
            collected.append(raw_line.strip())
    return collected
def get_selected_packages(remote_root, samples_dir, list_path):
    """
    Downloads the sample packages named in a selection file into a
    directory structure, for later use in a test package repository.

    :param remote_root: base url of the remote repository the samples
                        are downloaded from.
    :param samples_dir: directory that will be used to store the examples
                        for the repository.
    :param list_path: file with one sample per line in the format
                      "<distribution> <component> <pool-path>".

    .. versionadded:: 0.1
    """
    samples = readconfig(list_path, None, False, False)
    for sample in samples:
        dist, comp, pool = sample.split()
        dest = os.path.join(samples_dir, dist, comp)
        if not os.path.isdir(dest):
            os.makedirs(dest)
        # Bind the name outside the try so the except block can always
        # log it (basename cannot raise URLError anyway).
        sample_name = os.path.basename(pool)
        try:
            if not os.path.isfile(os.path.join(dest, sample_name)):
                logger.info("Downloading -> %s" % sample_name)
                urllib.urlretrieve(os.path.join(remote_root, pool),
                                   os.path.join(dest, sample_name))
        except urllib2.URLError as e:
            # Only HTTPError (a URLError subclass) carries .code; fall
            # back to .reason so the logging itself cannot crash.
            logger.warning('Could not get %s, error code #%s' %
                           (sample_name, getattr(e, 'code', e.reason)))
def _fetch_control_file(remote_file, local_path):
    """
    Downloads a single control file, logging (not raising) network errors.

    :param remote_file: url of the remote Packages.gz file.
    :param local_path: destination path for the downloaded copy.
    """
    try:
        urllib.urlretrieve(remote_file, local_path)
    except urllib2.URLError as e:
        # Only HTTPError carries .code; fall back to .reason so the
        # logging itself cannot crash on a plain URLError.
        logger.error('Could not get %s, error code #%s' %
                     (remote_file, getattr(e, 'code', e.reason)))


def create_cache(repository_root, cache_dir_path):
    """
    Creates the cache and all other necessary directories to organize the
    control files pulled from the repository.

    :param repository_root: url of the repository from which the control
                            files will be pulled.
    :param cache_dir_path: path where the cache will be created.

    .. versionadded:: 0.1
    """
    if not os.path.isdir(cache_dir_path):
        os.makedirs(cache_dir_path)
    branches = (branch.split() for branch in readconfig(
        os.path.join(repository_root, 'distributions')))
    for name, release_path in branches:
        release_path = os.path.join(repository_root, release_path)
        try:
            md5list = deb822.Release(
                urllib.urlopen(release_path)).get('MD5sum')
        except urllib2.URLError as e:
            logger.warning(
                'Could not read release file in %s, error code #%s' %
                (release_path, getattr(e, 'code', e.reason)))
            continue
        for control_file_data in md5list:
            # Only binary package indexes are cached, e.g.
            # "main/binary-i386/Packages.gz".
            if not re.match(r'[\w]*-?[\w]*/[\w]*-[\w]*/Packages.gz$',
                            control_file_data['name']):
                continue
            component, architecture, _ = control_file_data[
                'name'].split('/')
            remote_file = os.path.join(repository_root, 'dists', name,
                                       control_file_data['name'])
            # Local cache name: "<dist>_<component>_<arch>.gz".
            local_name = '_'.join(
                [name, component, architecture.replace('binary-', '')])
            f = os.path.join(cache_dir_path, local_name + '.gz')
            if not os.path.isfile(f):
                _fetch_control_file(remote_file, f)
            elif md5Checksum(f) != control_file_data['md5sum']:
                # Stale copy: drop it and download a fresh one.
                os.remove(f)
                _fetch_control_file(remote_file, f)
def sync_cache(repository_root, cache_dir_path):
    """
    Synchronizes the existing control files in the cache, comparing the
    ones in the repository with the local copies. If there are differences
    in the MD5sum field then the local copies are deleted and copied again
    from the repository. It is assumed that the cache directory was
    created previously.

    :param repository_root: url of the repository from which the Packages
                            files will be updated.
    :param cache_dir_path: path to the desired cache directory.
    :return: the basenames of the control files that were refreshed.
    :rtype: ``list``

    .. versionadded:: 0.1
    """
    branches = (branch.split() for branch in readconfig(
        os.path.join(repository_root, 'distributions')))
    changes = []
    for branch, _ in branches:
        remote_branch_path = os.path.join(repository_root, 'dists', branch)
        release_path = os.path.join(remote_branch_path, 'Release')
        try:
            md5list = deb822.Release(
                urllib.urlopen(release_path)).get('MD5sum')
        except urllib2.URLError as e:
            # Only HTTPError carries .code; fall back to .reason so the
            # logging itself cannot crash on a plain URLError.
            logger.warning(
                'Could not read release file in %s, error code #%s' %
                (remote_branch_path, getattr(e, 'code', e.reason)))
            continue
        for package_control_file in md5list:
            # Only binary package indexes are cached, e.g.
            # "main/binary-i386/Packages.gz".
            if not re.match(r'[\w]*-?[\w]*/[\w]*-[\w]*/Packages.gz$',
                            package_control_file['name']):
                continue
            component, architecture, _ = package_control_file[
                'name'].split('/')
            remote_package_path = os.path.join(
                remote_branch_path, package_control_file['name'])
            local_name = '_'.join([
                branch, component, architecture.replace('binary-', '')
            ])
            f = os.path.join(cache_dir_path, local_name + '.gz')
            if package_control_file['md5sum'] != md5Checksum(f):
                if os.path.exists(f):
                    os.remove(f)
                try:
                    urllib.urlretrieve(remote_package_path, f)
                    changes.append(os.path.basename(f))
                except urllib2.URLError as e:
                    logger.error('Could not get %s, error code #%s' %
                                 (remote_package_path,
                                  getattr(e, 'code', e.reason)))
            else:
                logger.info('There are no changes in %s' % f)
    # `changes` was collected but never exposed before; return it so
    # callers can see what was refreshed (callers ignoring the result
    # are unaffected).
    return changes
def get_classifiers(filename=None):
    '''
    Reads python classifiers from a file.

    :param filename: a filename containing python classifiers
                     (one classifier per line).
    :return: a list with each classifier.
    :rtype: ``list``

    .. versionadded:: 0.1
    '''
    assert filename
    classifiers = readconfig(filename, conffile=False)
    return classifiers
def update_cache(repository_root, cache_dir_path):
    '''
    Updates the control files existent in the cache, comparing the ones in
    the repository with its local copies. If there are differences in the
    MD5sum field then the local copies are deleted and copied again from
    the repository. It is assumed that the cache directory was created
    previously.

    :param repository_root: url of the repository from which the Packages
                            files will be updated.
    :param cache_dir_path: path to the desired cache directory.

    .. versionadded:: 0.1
    '''
    local_branches = (branch.split() for branch in readconfig(
        os.path.join(repository_root, "distributions")))
    for branch, _ in local_branches:
        remote_branch_path = os.path.join(repository_root, "dists", branch)
        local_branch_path = os.path.join(cache_dir_path, branch)
        release_path = os.path.join(remote_branch_path, "Release")
        try:
            md5list = deb822.Release(
                urllib.urlopen(release_path)).get('MD5sum')
        except urllib2.URLError as e:
            # Only HTTPError carries .code; fall back to .reason so the
            # logging itself cannot crash on a plain URLError.
            logger.warning(
                'Could not read release file in %s, error code #%s' %
                (remote_branch_path, getattr(e, 'code', e.reason)))
            continue
        for package_control_file in md5list:
            if not re.match(r"[\w]*-?[\w]*/[\w]*-[\w]*/Packages.gz$",
                            package_control_file['name']):
                continue
            # TODO: find a less error-prone way to extract the
            # architecture from the control file path.
            _, architecture, _ = package_control_file['name'].split("/")
            architecture = architecture.split("-")[1]
            remote_package_path = os.path.join(
                remote_branch_path, package_control_file['name'])
            local_package_path = os.path.join(
                local_branch_path, package_control_file['name'])
            if package_control_file['md5sum'] != md5Checksum(
                    local_package_path):
                os.remove(local_package_path)
                try:
                    urllib.urlretrieve(remote_package_path,
                                       local_package_path)
                except urllib2.URLError as e:
                    # Log and skip, matching the error handling of the
                    # other cache functions in this module, instead of
                    # letting one failed download abort the whole update.
                    logger.error('Could not get %s, error code #%s' %
                                 (remote_package_path,
                                  getattr(e, 'code', e.reason)))
                else:
                    update_package_list(local_package_path, branch,
                                        architecture)
            else:
                logger.info(
                    'There are no changes in %s' % local_package_path)
def init_sample_packages(repository_root, samples_dir):
    """
    Creates a directory structure to store packages for its later use in a
    test package repository.

    :param repository_root: url of the repository used.
    :param samples_dir: directory that will be used to store the examples
                        for the repository.

    .. versionadded:: 0.1
    """
    if not os.path.isdir(samples_dir):
        os.makedirs(samples_dir)
    # TODO: there may be a simpler way to obtain the release names.
    dist_releases = (branch.split() for branch in readconfig(
        os.path.join(repository_root, "distributions")))
    for release, _ in dist_releases:
        release_path = os.path.join(repository_root, "dists", release,
                                    "Release")
        try:
            # Small risk that the Release file has no MD5sum field, in
            # which case md5list is None.
            md5list = deb822.Release(
                urllib.urlopen(release_path)).get('MD5sum')
        except urllib2.URLError as e:
            # Only HTTPError carries .code; fall back to .reason so the
            # logging itself cannot crash on a plain URLError.
            logger.warning(
                'Could not read release file in %s, error code #%s' %
                (release_path, getattr(e, 'code', e.reason)))
            continue
        # Debug `print md5list` removed: it spammed stdout on every
        # release; the logger is the module's reporting channel.
        for l in md5list:
            # Only binary package indexes, e.g.
            # "main/binary-i386/Packages.gz".
            if not re.match(r"[\w]*-?[\w]*/[\w]*-[\w]*/Packages.gz$",
                            l['name']):
                continue
            list_dir = os.path.join(samples_dir, release,
                                    os.path.dirname(l['name']))
            if not os.path.isdir(list_dir):
                os.makedirs(list_dir)
            list_path = os.path.join(list_dir, "list")
            if not os.path.isfile(list_path):
                control_f_path = os.path.join(repository_root, "dists",
                                              release, l['name'])
                select_sample_packages(control_f_path, list_path,
                                       samples_dir, False)
def get_dependency_links(filename=None):
    '''
    Processes dependency links from a requirements file or a simple pip
    dependency file.

    :param filename: a filename containing python packages in a format
                     expected by pip (one per line).
    :return: a list of dependency links.
    :rtype: ``list``

    .. versionadded:: 0.1
    '''
    assert filename is not None
    # Lines starting with "-e" or "-f" (after optional whitespace) carry
    # dependency links; the flag itself is stripped from each match.
    flag = re.compile(r'\s*-[ef]\s+')
    return [flag.sub('', entry)
            for entry in readconfig(filename, conffile=False)
            if flag.match(entry)]
# Keywords advertised in the packaging metadata.
keywords = ('Social Network', 'Continuous Integration', 'Source Code Management')
# Paths to the data files that drive the packaging configuration.
f_readme = get_path([DOCDIR, 'rst', 'readme.rst'])
f_python_classifiers = get_path([CONFDIR, 'data', 'python-classifiers.list'])
f_exclude_sources = get_path([CONFDIR, 'data', 'exclude-sources.list'])
f_exclude_packages = get_path([CONFDIR, 'data', 'exclude-packages.list'])
f_exclude_patterns = get_path([CONFDIR, 'data', 'exclude-patterns.list'])
f_include_data_patterns = get_path([CONFDIR, 'data', 'include-data-patterns.list'])
f_python_dependencies = get_path([CONFDIR, 'data', 'python-dependencies.list'])
f_debian_run_dependencies = get_path([CONFDIR, 'data', 'debian-run-dependencies.list'])
f_debian_build_dependencies = get_path([CONFDIR, 'data', 'debian-build-dependencies.list'])
# Values consumed by the packaging machinery, each read from the
# corresponding list file above.
exclude_sources = readconfig(filename=f_exclude_sources, conffile=False)
exclude_packages = readconfig(filename=f_exclude_packages, conffile=False)
exclude_patterns = readconfig(filename=f_exclude_patterns, conffile=False)
include_data_patterns = readconfig(filename=f_include_data_patterns, conffile=False)
long_description = cat_file(filename=f_readme)
classifiers = get_classifiers(filename=f_python_classifiers)
install_requires = get_requirements(filename=f_python_dependencies)
dependency_links = get_dependency_links(filename=f_python_dependencies)
# strip_comments=False keeps the raw pip requirement lines verbatim.
python_dependencies = readconfig(filename=f_python_dependencies, conffile=False, strip_comments=False)
debian_run_dependencies = readconfig(filename=f_debian_run_dependencies, conffile=False)
# NOTE(review): the statement below appears truncated in this view (the
# call is never closed) -- verify the remainder against the full file.
debian_build_dependencies = readconfig(filename=f_debian_build_dependencies,
# One-shot maintenance script: writes a "<package> <distribution>
# <filename>" line for every package whose Details.Size is below 2000
# (units not shown here -- TODO confirm against the model definition).
import os
import urllib

# Django settings must be configured before any tribus.web model import.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tribus.config.web")

from debian import deb822
from tribus.web.cloud.models import Package, Details
from tribus.common.utils import readconfig

# Hard-coded, developer-specific locations: this script is not meant for
# general deployment.
canaima = 'http://paquetes.canaima.softwarelibre.gob.ve'
selected = '/home/fran/workspace/tribus/selected_samples'

# NOTE(review): `lista` is read but never used below -- dead code?
lista = readconfig(os.path.join(selected, 'lista.txt'), None, False, False)

with open("/home/tribus/lista.txt", "w") as f:
    for d in Details.objects.filter(Size__lt = 2000):
        # One Package row per Details row is assumed here (Details is
        # looked up with .get) -- verify the schema guarantees this.
        p = Package.objects.get(Details = d)
        f.write("%s %s %s \n" % (p.Package, d.Distribution, d.Filename))
# Paths to the data files that drive the packaging / work-environment
# configuration.
f_python_dependencies = get_path([CONFDIR, 'data', 'python-dependencies.list'])
f_debian_build_dependencies = get_path(
    [CONFDIR, 'data', 'debian-build-dependencies.list'])
f_debian_maint_dependencies = get_path(
    [CONFDIR, 'data', 'debian-maint-dependencies.list'])
f_debian_run_dependencies = get_path(
    [CONFDIR, 'data', 'debian-run-dependencies.list'])
f_exclude_sources = get_path([CONFDIR, 'data', 'exclude-sources.list'])
f_exclude_packages = get_path([CONFDIR, 'data', 'exclude-packages.list'])
f_exclude_patterns = get_path([CONFDIR, 'data', 'exclude-patterns.list'])
f_data_patterns = get_path([CONFDIR, 'data', 'include-data-patterns.list'])
# Preseed files used to bootstrap the development environment (LDAP and
# database setup).
f_workenv_preseed = get_path([CONFDIR, 'data', 'preseed-slapd-debconf.conf'])
f_sql_preseed = get_path([CONFDIR, 'data', 'preseed-db.sql'])
f_users_ldif = get_path([CONFDIR, 'data', 'preseed-ldap-users.ldif'])
# Values consumed by the packaging machinery, read from the list files.
exclude_sources = readconfig(filename=f_exclude_sources, conffile=False)
exclude_packages = readconfig(filename=f_exclude_packages, conffile=False)
exclude_patterns = readconfig(filename=f_exclude_patterns, conffile=False)
include_data_patterns = readconfig(filename=f_data_patterns, conffile=False)
# NOTE(review): f_readme and f_python_classifiers are not defined in this
# chunk -- they must come from earlier in the file.
long_description = cat_file(filename=f_readme)
classifiers = get_classifiers(filename=f_python_classifiers)
install_requires = get_requirements(filename=f_python_dependencies)
dependency_links = get_dependency_links(filename=f_python_dependencies)
debian_build_dependencies = readconfig(filename=f_debian_build_dependencies,
                                       conffile=False)
debian_maint_dependencies = readconfig(filename=f_debian_maint_dependencies,
                                       conffile=False)
debian_run_dependencies = readconfig(filename=f_debian_run_dependencies,
                                     conffile=False)