Example #1
def __init__(self, temperature_config):
    super().__init__(temperature_config)
    self.property_bus = "i2c"
    devicepath = PosixPath("/sys/bus/i2c/devices").joinpath(
        temperature_config.device).joinpath("iio:device0")
    # joinpath already returns a PosixPath; no extra wrapping needed
    self.temperature_path_raw = devicepath.joinpath("in_temp_raw")
    self.temperature_path_scale = devicepath.joinpath("in_temp_scale")
    self.pressure_path_raw = devicepath.joinpath("in_pressure_raw")
    self.pressure_path_scale = devicepath.joinpath("in_pressure_scale")
    # Make sure each sysfs attribute exists and is a regular file
    # (is_file() is False for missing paths, so one check covers both)
    for path in (self.temperature_path_raw, self.temperature_path_scale,
                 self.pressure_path_raw, self.pressure_path_scale):
        if not path.is_file():
            raise DeviceError(path)
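The raw/scale pair follows the Linux IIO sysfs convention, where the processed reading is raw * scale (millidegrees Celsius for temperature channels). A minimal read helper under that assumption; the method name and the unit conversion are illustrative, not part of the original class:

def read_temperature(self) -> float:
    # IIO convention: processed value = raw * scale, which for
    # temperature channels is millidegrees Celsius (assumed)
    raw = float(self.temperature_path_raw.read_text().strip())
    scale = float(self.temperature_path_scale.read_text().strip())
    return raw * scale / 1000.0  # millidegrees -> degrees Celsius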
Example #2
def parse_migration(directory: PosixPath, migration_id: int) -> Migration:
    up_file: PosixPath = directory.joinpath(f'{migration_id}_up.sql')
    down_file: PosixPath = directory.joinpath(f'{migration_id}_down.sql')

    with open(up_file) as up_fp, open(down_file) as down_fp:
        migration = Migration(migration_id=migration_id,
                              up=up_fp.read(),
                              down=down_fp.read())
        return migration
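parse_migration assumes a Migration container for the id plus the up/down SQL; a minimal sketch of such a type as a dataclass (hypothetical, not taken from the original codebase):

from dataclasses import dataclass

@dataclass
class Migration:
    migration_id: int
    up: str
    down: str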
Example #3
def test_make_indices(pt_run_dir: pathlib.PosixPath):
    # Doesn't currently test:
    #    content of the outputs
    #    what happens if they already exist
    from paratemp.energy_histo import make_indices
    with cd(pt_run_dir):
        make_indices('PT-out0.log')
    assert pt_run_dir.joinpath('replica_temp.xvg').exists()
    assert pt_run_dir.joinpath('replica_index.xvg').exists()
    assert pt_run_dir.joinpath('demux.pl.log').exists()
Example #4
def gen_packages(db, dist_dir: str, branch_name: str, component_name: str):
    repopath = branch_name + '/' + component_name
    basedir = PosixPath(dist_dir).joinpath(branch_name).joinpath(
        component_name)
    d = basedir.joinpath('binary-all')
    d.mkdir(0o755, parents=True, exist_ok=True)
    arch_packages = {
        'all': open(str(d.joinpath('Packages')), 'w', encoding='utf-8')
    }

    cur = db.cursor()
    cur.execute(
        """
        SELECT p.package, p.version, min(p.architecture) architecture,
          min(p.filename) filename, min(p.size) size, min(p.sha256) sha256,
          min(p.section) section, min(p.installed_size) installed_size,
          min(p.maintainer) maintainer, min(p.description) description,
          array_agg(array[pd.relationship, pd.value]) dep
        FROM pv_packages p INNER JOIN pv_repos r ON p.repo=r.name
        LEFT JOIN pv_package_dependencies pd ON pd.package=p.package
        AND pd.version=p.version AND pd.repo=p.repo
        WHERE r.path=%s AND p.debtime IS NOT NULL
        GROUP BY p.package, p.version, p.repo""", (repopath, ))
    for row in cur:
        architecture = row['architecture']
        if architecture not in arch_packages:
            d = basedir.joinpath('binary-' + architecture)
            d.mkdir(0o755, parents=True, exist_ok=True)
            arch_packages[architecture] = open(str(d.joinpath('Packages')),
                                               'w',
                                               encoding='utf-8')
        f = arch_packages[architecture]
        control = {
            'Package': row['package'],
            'Version': row['version'],
            'Architecture': architecture,
            'Installed-Size': str(row['installed_size']),
            'Maintainer': row['maintainer'],
            'Filename': row['filename'],
            'Size': str(row['size']),
            'SHA256': row['sha256'],
            'Description': row['description']
        }
        if row['section']:
            control['Section'] = row['section']
        for k, v in row['dep']:
            if k:
                control[k] = v
        print(deb822.SortPackages(deb822.Packages(control)), file=f)
    for f in arch_packages.values():
        file_path = f.name
        f.close()
        subprocess.check_call(('xz', '-k', '-0', '-f', file_path))
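gen_packages iterates cursor rows by column name, so it assumes a DB-API connection whose cursors return mapping-style rows; the %s placeholders suggest psycopg2. A hedged invocation sketch with illustrative arguments:

import psycopg2
import psycopg2.extras

# hypothetical DSN and on-disk dists layout
db = psycopg2.connect("dbname=packages",
                      cursor_factory=psycopg2.extras.DictCursor)
gen_packages(db, 'dists', 'stable', 'main')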
Example #5
def parse_migration(directory: PosixPath, migration_id: int,
                    params: Dict[str, str]) -> Migration:
    up_file: PosixPath = directory.joinpath(f'{migration_id}_up.sql')
    down_file: PosixPath = directory.joinpath(f'{migration_id}_down.sql')

    with open(str(up_file)) as up_fp, open(str(down_file)) as down_fp:
        up = up_fp.read().format(**params)
        down = down_fp.read().format(**params)
        migration = Migration(migration_id=migration_id,
                              up_digest=digest(up),
                              down_digest=digest(down),
                              up=up,
                              down=down)
        return migration
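The digest helper used here (and in example #13) is not shown; a plausible minimal version, assuming a hex SHA-256 over the SQL text:

import hashlib

def digest(sql: str) -> str:
    # Hex SHA-256 of the migration text (assumed implementation)
    return hashlib.sha256(sql.encode('utf-8')).hexdigest()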
Example #6
def unzip(url: str, dest: PosixPath, chunk_size: int = 1024 * 1024,
          remove_zip: bool = False):
    """
    Downloads and unzips a zip file.

    parameters:
        url: str, URL of the zip file
        dest: PosixPath, destination folder
        chunk_size: int, default 1 MB
        remove_zip: bool, default False, unlinks the zip file after extraction

    side effects:
        shows a tqdm progress bar and typer echo messages
    """
    # NB: verify=False disables TLS certificate verification
    stream = requests.get(url, stream=True, verify=False, allow_redirects=True)
    filename = stream.url.split(sep="/")[-1]
    length = int(stream.headers.get("content-length", -1))

    if length < 1:
        raise Exception("content length is less than 1 byte")

    if not dest.exists():
        raise Exception(f"destination folder does not exist: {dest}")

    if dest.is_file():
        dest = dest.parent

    dest = dest.resolve()

    typer.echo("Downloading zip file...")

    with tqdm.wrapattr(
            open(dest.joinpath(filename), "wb"), "write",
            unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
            desc=filename, total=length) as f:
        for chunk in stream.iter_content(chunk_size=chunk_size):
            if chunk:
                f.write(chunk)
                f.flush()
                
    typer.echo("Extracting zip file...")
    
    with zipfile.ZipFile(dest.joinpath(filename)) as zippo:
        for member in tqdm(zippo.infolist(), desc="Extracting zip file..."):
            zippo.extract(member, dest)
            
    if remove_zip:
        dest.joinpath(filename).unlink()
        typer.secho(f"{filename} is removed.", bold=True, fg="red")
    else:
        typer.secho(f"{filename} is unzipped in {dest}.", bold=True, fg="green")
Example #7
def gen_contents(db, branch_name: str, component_name: str, dist_dir: str):
    repopath = branch_name + '/' + component_name
    basedir = PosixPath(dist_dir).joinpath(branch_name).joinpath(
        component_name)
    basedir.mkdir(0o755, parents=True, exist_ok=True)
    cur = db.cursor()
    cur.execute(
        "SELECT architecture FROM pv_repos "
        "WHERE architecture != 'all' AND path=%s", (repopath, ))
    allarch = [r[0] for r in cur]
    for arch in allarch:
        cur.execute(
            """
            SELECT df.path || '/' || df.name AS f, string_agg(DISTINCT (
              coalesce(dp.section || '/', '') || dp.package), ',') AS p
            FROM pv_packages dp
            INNER JOIN pv_package_files df USING (package, version, repo)
            INNER JOIN pv_repos pr ON pr.name=dp.repo
            WHERE pr.path=%s AND df.ftype='reg'
            AND pr.architecture IN (%s, 'all') AND dp.debtime IS NOT NULL
            GROUP BY df.path, df.name""", (repopath, arch))
        filename = str(basedir.joinpath('Contents-%s.gz' % arch))
        with gzip.open(filename, 'wb', 9) as f:
            for path, package in cur:
                f.write(
                    (path.ljust(55) + ' ' + package + '\n').encode('utf-8'))
Example #8
def __init__(self, lightsensor_config):
    super().__init__(lightsensor_config)
    self.property_bus = "i2c"
    devicepath = PosixPath("/sys/bus/i2c/devices").joinpath(
        lightsensor_config.device).joinpath("iio:device0")
    # joinpath already returns a PosixPath; no extra wrapping needed
    self.lightsensor_path_raw = devicepath.joinpath("in_illuminance_raw")
    self.lightsensor_path_scale = devicepath.joinpath("in_illuminance_scale")
    # Make sure both sysfs attributes exist and are regular files
    for path in (self.lightsensor_path_raw, self.lightsensor_path_scale):
        if not path.is_file():
            raise DeviceError(path)
Example #9
def __init__(self, projectPath,
             targetFolder=Path.home().joinpath(
                 'bin', 'net.splitcells.os.state.interface.commands.managed')):
    projectPosixPath = PosixPath(projectPath)
    self.projectName = projectPosixPath.name
    self.binFolder = projectPosixPath.joinpath('bin')
    self.targetFolder = targetFolder
Example #10
    def pngTable(self,
                 df: DataFrame,
                 colwidth_factor: float = 0.20,
                 fontsize: int = 12,
                 formatFloats: bool = True,
                 save: bool = False,
                 in_folder: PosixPath = None):
        '''
        Displays or saves a table as png.
        Uses pandas.plotting.table on a matplotlib figure.

        parameters:
            df: dataframe or pivot table
            colwidth_factor: float, default 0.20, defines the width of columns
            fontsize: int, default 12
            formatFloats: bool, default True, formats floats with two decimal places
            save: bool, default False, saves the table as a png file
            in_folder: posixpath, default None, folder to save the png file

        returns:
            png file in the given folder (Downloads folder by default)
        '''
        if not isinstance(in_folder, PosixPath) or not in_folder.exists():
            in_folder = core.folder.Downloads

        # file name and path
        path = in_folder.joinpath(f"table-{core.now_prefix()}.png")

        # format floats - two digits
        if formatFloats:
            df = df.applymap(lambda x: '{:,.2f}'.format(x)
                             if isinstance(x, float) else x)

        # get pandas.plotting.table
        table = pd.plotting.table

        fig, ax = plt.subplots(figsize=(1.9 * df.shape[1],
                                        0.3 * df.shape[0]))  # set size frame
        ax.xaxis.set_visible(False)  # hide the x axis
        ax.yaxis.set_visible(False)  # hide the y axis
        ax.set_frame_on(False)  # no visible frame, uncomment if size is ok
        tabla = table(ax,
                      df,
                      loc='upper left',
                      colWidths=[colwidth_factor] *
                      len(df.columns))  # where df is your data frame
        tabla.auto_set_font_size(False)  # Activate set fontsize manually
        tabla.set_fontsize(fontsize)  # if ++fontsize is necessary ++colWidths
        tabla.scale(1.2, 1.2)  # change size table
        if save:
            plt.savefig(fname=path, bbox_inches="tight", pad_inches=1)  # save
            # https://stackoverflow.com/questions/56328353/matplotlib-savefig-cuts-off-pyplot-table
            plt.close()
            print(f"saved in Downloads folder as {path.stem}.png")
        else:
            plt.show()  # show the result
            plt.close()
Example #11
def __init__(self, temperature_config):
    super().__init__(temperature_config)
    self.property_bus = "w1"
    devicepath = PosixPath("/sys/bus/w1/devices").joinpath(
        temperature_config.device)
    self.temperature_path = devicepath.joinpath("w1_slave")
    # Make sure the sysfs attribute exists and is a regular file
    if not self.temperature_path.is_file():
        raise DeviceError(self.temperature_path)
Example #12
def _process_audio_files(worker_id: int,
                         tasks: pd.DataFrame,
                         p_out: PosixPath,
                         p_raw: PosixPath,
                         n_samples: int = 59049,
                         sample_rate: int = 22050,
                         topk: int = 50,
                         file_pattern: str = 'clip-{}-seg-{}-of-{}') -> None:
    n_tasks = tasks.shape[0]
    t_start = time.time()
    n_parts = max(1, n_tasks // 10)  # report progress roughly every 10% of tasks
    idx = 0
    LOG.info(f"[Worker {worker_id:02d}]: Received {n_tasks} tasks.")

    for i, t in tasks.iterrows():
        # find output dir
        split = t.split
        out_dir = p_out.joinpath(split)

        # process audio file
        try:
            segments = _segment_audio(_load_audio(p_raw.joinpath(t.mp3_path), sample_rate=sample_rate),
                                      n_samples=n_samples,
                                      center=False)
            loaded = True
        except (RuntimeError, EOFError):
            LOG.warning(f"[Worker {worker_id:02d}]: Failed to load audio: {t.mp3_path}. Ignored.")
            loaded = False

        # save label and segments to npy files
        if loaded:
            labels = t[t.index.tolist()[:topk]].values.astype(bool)
            n_segments = len(segments)
            for j, seg in enumerate(segments):
                np.savez_compressed(out_dir.joinpath(file_pattern.format(t.clip_id, j+1, n_segments)).as_posix(), data=seg, labels=labels)

        # report progress
        idx += 1
        if idx == n_tasks:
            LOG.info(f"[Worker {worker_id:02d}]: Job finished. Quit. (time usage: {(time.time() - t_start) / 60:.02f} min)")
        elif idx % n_parts == 0:
            LOG.info(f"[Worker {worker_id:02d}]: {idx//n_parts*10}% tasks done. (time usage: {(time.time() - t_start) / 60:.02f} min)")
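_load_audio and _segment_audio are helpers that are not shown. A rough numpy-only sketch of the segmentation step, assuming non-centered, non-overlapping fixed-length windows with the trailing remainder dropped:

import numpy as np

def _segment_audio(audio: np.ndarray, n_samples: int = 59049,
                   center: bool = False) -> list:
    # center is accepted for signature compatibility; only the
    # non-centered case is sketched here (assumed behavior)
    n_segments = len(audio) // n_samples
    return [audio[i * n_samples:(i + 1) * n_samples]
            for i in range(n_segments)]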
Example #13
def parse_migration(directory: PosixPath, migration_id: int) -> Migration:
    up_file: PosixPath = directory.joinpath(f"{migration_id}_up.sql")
    down_file: PosixPath = directory.joinpath(f"{migration_id}_down.sql")

    with open(up_file) as up_fp, open(down_file) as down_fp:

        up = up_fp.read()
        up_digest = digest(up)
        down = down_fp.read()
        down_digest = digest(down)

        migration = Migration(
            migration_id=migration_id,
            up_digest=up_digest,
            up=up,
            down_digest=down_digest,
            down=down,
        )
        return migration
Example #14
def _save_model(model: Pipeline, model_path: PosixPath):
    """
    Saves the trained model, suffixing the file name with the current date.

    :param model: fitted Pipeline to persist
    :param model_path: folder in which to place the .joblib file
    :return: None
    """
    path = model_path.joinpath(f'model_{date.today().isoformat()}.joblib')
    dump(model, path)
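Loading the artifact back mirrors the save, assuming the joblib dump/load pair that the dump call implies:

from datetime import date
from pathlib import PosixPath
from joblib import load

# hypothetical folder matching the naming scheme above
model = load(PosixPath('models').joinpath(
    f'model_{date.today().isoformat()}.joblib'))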
Example #15
def test_find_energies(pt_run_dir: pathlib.PosixPath):
    # Doesn't currently test:
    #    content of the outputs
    #    what happens if they already exist
    from paratemp.energy_histo import find_energies
    with cd(pt_run_dir):
        l_xvgs = find_energies()
    for xvg in l_xvgs:
        assert pt_run_dir.joinpath(xvg).exists()
        assert re.match(r'energy[01]\.xvg', xvg)
    assert len(l_xvgs) == 2
Example #16
def zip_last_n_files(directory: PosixPath = LOG_DIR,
                     zip_file: str = ZIP_FILE,
                     n: int = 3):
    log_files = glob(directory.joinpath("*.log").as_posix())
    sorted_log_files = sorted(log_files, key=path.getmtime)
    with ZipFile(zip_file, "w") as myzip:
        for i in sorted_log_files[-n:]:
            file_mtime = datetime.fromtimestamp(path.getmtime(i)).date()
            old_filename = Path(i).name.split(".")[0]
            new_filename = old_filename + f"_{str(file_mtime)}.log"
            myzip.write(i, new_filename)
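A hedged invocation; the LOG_DIR and ZIP_FILE defaults are module constants not shown here, so explicit arguments are passed:

from pathlib import PosixPath

# archive the three most recently modified logs (illustrative paths)
zip_last_n_files(directory=PosixPath('/var/log/myapp'),
                 zip_file='logs.zip',
                 n=3)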
Example #17
    def _run_pants(self, pants_repo: pathlib.PosixPath, pants_target: str,
                   extension: str) -> pathlib.PosixPath:
        """Runs pants with widget UI display."""

        # Version check for pants v1 vs v2 flags/behavior.
        is_pants_v1 = pants_repo.joinpath('pants.ini').exists()
        if is_pants_v1:
            goal_name = 'binary'
            tmp_root = None
        else:
            goal_name = 'package'
            # N.B. pants v2 doesn't support `--pants-distdir` outside of the build root.
            tmp_root = pants_repo.joinpath('dist')
            # N.B. The dist dir must exist for temporary_dir.
            tmp_root.mkdir(exist_ok=True)

        with temporary_dir(root_dir=tmp_root, cleanup=False) as tmp_dir:
            tmp_path = pathlib.PosixPath(tmp_dir)
            title = f'[Build] ./pants {goal_name} {pants_target}'
            cmd = f'cd {pants_repo} && ./pants --pants-distdir="{tmp_path}" {goal_name} {pants_target}'
            return self._stream_binary_build_with_output(cmd,
                                                         title,
                                                         tmp_path,
                                                         extension=extension)
Example #18
def pt_blank_dir(tmp_path: pathlib.PosixPath, path_test_data):
    dir_from = path_test_data / 'spc-and-methanol'
    tmp_path = tmp_path.joinpath('spc-and-methanol')
    # str needed for Python 3.5
    shutil.copytree(str(dir_from), str(tmp_path))
    return tmp_path
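tmp_path is pytest's built-in temporary-directory fixture, and path_test_data is evidently a project-defined one; functions like pt_blank_dir and pt_run_dir are presumably registered as fixtures themselves, so tests such as examples #3 and #15 can request them by name. A sketch of the assumed registration:

import pathlib
import shutil

import pytest

@pytest.fixture
def pt_blank_dir(tmp_path: pathlib.PosixPath, path_test_data):
    # Copy pristine test data into this test's temporary directory
    dir_from = path_test_data / 'spc-and-methanol'
    dest = tmp_path.joinpath('spc-and-methanol')
    shutil.copytree(str(dir_from), str(dest))
    return dest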
Example #19
from collections import OrderedDict
from pathlib import PosixPath
from threading import Thread, Semaphore, Event
from queue import Queue
from time import sleep
import shlex
from PIL import Image, ImageDraw
import sys
from shutil import copyfile

RED = '\033[31m'
GREEN = '\033[32m'
BLUE = '\033[34m'
YELLOW = '\033[33m'
OFF = '\033[0m'

DIST_DIR = PosixPath('dist/Numix-HIDPI')
CURSOR_DIST = DIST_DIR.joinpath('cursors/')

BUILD_DIR = PosixPath('build/')
CURSOR_OUTPUT = BUILD_DIR.joinpath('cursor/')
ICON_OUTPUT = BUILD_DIR.joinpath('icons/')

CMD_TMPL = 'inkscape %s -o %s --export-dpi %s'

DPI = OrderedDict([(90, 24), (120, 30), (160, 40), (180, 45), (200, 50),
                   (220, 55), (240, 60), (320, 80)])


class WatchedProcess(Thread):
    """
		A light wrapper around a Popen object
Example #20
def _get_path_by_language(self, p: PosixPath) -> PosixPath:
    # Only Chinese and English are supported
    name = "zh_CN" if self.language == settings.LANGUAGE_CODE else "en"
    return p.joinpath(name)
Example #21
"""
ledge.config
~~~~~~~~~~~~

This module implements the interface for getting and setting the "ledge" cli application's configuration.
The configuration is stored in the ~/.ledge/config.yaml file
"""

import yaml
from pathlib import PosixPath

ledge_dir = PosixPath('~/.ledge').expanduser()
if not ledge_dir.exists():
    ledge_dir.mkdir()

config_file = ledge_dir.joinpath('config.yaml')
if not config_file.exists():
    config_file.touch()


def get(key):
    """Get the config value by providing the key"""
    with config_file.open('r') as f:
        conf = yaml.safe_load(f) or {}
        return conf.get(key)


def set(key, value):
    """Set a config by providing a key-value pair"""
    with config_file.open('r') as f:
Example #22
def pt_run_dir(tmp_path: pathlib.PosixPath):
    dir_from = pathlib.Path('tests/test-data/spc-and-methanol-run')
    tmp_path = tmp_path.joinpath('spc-and-methanol-run')
    # str needed for Python 3.5
    shutil.copytree(str(dir_from), str(tmp_path))
    return tmp_path
Example #24
def _validate_pants_repo(self, pants_repo: pathlib.PosixPath) -> bool:
    """Validates that a given or stored path is a valid pants repo."""
    return bool(pants_repo and pants_repo.is_dir()
                and pants_repo.joinpath('pants').is_file())
Example #25
def scan_dir(db, base_dir: str, branch: str, component: str, branch_idx: int):
    pool_path = PosixPath(base_dir).joinpath('pool')
    search_path = pool_path.joinpath(branch).joinpath(component)
    compname = '%s-%s' % (branch, component)
    comppath = '%s/%s' % (branch, component)
    cur = db.cursor()
    cur.execute("""SELECT p.package, p.version, p.repo, p.architecture,
          p.filename, p.size, p.mtime, p.sha256
        FROM pv_packages p
        INNER JOIN pv_repos r ON p.repo=r.name WHERE r.path=%s
        UNION ALL
        SELECT p.package, p.version, p.repo, p.architecture,
          p.filename, p.size, p.mtime, p.sha256
        FROM pv_package_duplicate p
        INNER JOIN pv_repos r ON p.repo=r.name WHERE r.path=%s""",
        (comppath, comppath))
    dup_pkgs = set()
    ignore_files = set()
    modified_repo = set()
    del_list = []
    # For each package/version/architecture we already know in the DB:
    for package, version, repopath, architecture, filename, size, mtime, sha256 in cur:
        fullpath = PosixPath(base_dir).joinpath(filename)
        if fullpath.is_file():
            # If a package with the same name exists:
            stat = fullpath.stat()
            sfullpath = str(fullpath)
            if size == stat.st_size and (mtime == int(stat.st_mtime) or
                # Ignore if the file isn't changed
                internal_pkgscan.sha256_file(sfullpath) == sha256):
                ignore_files.add(sfullpath)
            else:
                # Consider the new file to be a duplicate and replace the old one
                dup_pkgs.add(filename)
                del_list.append((filename, package, version, repopath))
        else:
            # If the package has been deleted
            del_list.append((filename, package, version, repopath))
            logger_scan.info('CLEAN  %s', filename)
            module_ipc.publish_change(
                compname, package, architecture, 'delete', version, '')
    # For each package/version/arch/repo to be deleted:
    for row in del_list:
        cur.execute("DELETE FROM pv_packages WHERE filename=%s", (row[0],))
        modified_repo.add(row[-1])  # repopath
    # Check if there are any new files added. Recursively scan the pool dir and take notes of
    # what we haven't seen yet.
    check_list = []
    for fullpath in search_path.rglob('*.deb'):
        if not fullpath.is_file():
            continue
        stat = fullpath.stat()
        sfullpath = str(fullpath)
        if sfullpath in ignore_files:
            continue
        check_list.append((sfullpath, str(fullpath.relative_to(base_dir)),
                           stat.st_size, int(stat.st_mtime)))
    del ignore_files
    with multiprocessing.dummy.Pool(max(1, (os.cpu_count() or 1) - 1)) as mpool:
        for pkginfo, depinfo, sodeps, files in mpool.imap_unordered(scan_deb, check_list, 5):
            realname = pkginfo['architecture']
            validdeb = ('debtime' in pkginfo)
            if realname == 'all':
                realname = 'noarch'
            if component != 'main':
                realname = component + '-' + realname
            repo = '%s/%s' % (realname, branch)
            cur.execute("INSERT INTO pv_repos VALUES (%s,%s,%s,%s,%s,%s,%s,now()) "
                "ON CONFLICT DO NOTHING",
                (repo, realname, comppath, branch_idx,
                branch, component, pkginfo['architecture']))
            modified_repo.add(repo)
            pkginfo['repo'] = repo
            dbkey = (pkginfo['package'], pkginfo['version'], repo)
            if pkginfo['filename'] in dup_pkgs:
                if validdeb:
                    logger_scan.info('UPDATE %s', pkginfo['filename'])
                    module_ipc.publish_change(
                        compname, pkginfo['package'], pkginfo['architecture'],
                        'overwrite', pkginfo['version'], pkginfo['version']
                    )
            else:
                cur.execute("SELECT version, filename FROM pv_packages "
                    "WHERE package=%s AND repo=%s", (pkginfo['package'], repo))
                results = cur.fetchall()
                if results:
                    oldver = max(results, key=lambda x: dpkg_vercomp_key(x[0]))
                    vercomp = internal_dpkg_version.dpkg_version_compare(
                        oldver[0], pkginfo['version'])
                    if vercomp == -1:
                        if validdeb:
                            logger_scan.info('NEWER  %s %s %s >> %s',
                                pkginfo['architecture'], pkginfo['package'],
                                pkginfo['version'], oldver[0])
                            module_ipc.publish_change(
                                compname, pkginfo['package'],
                                pkginfo['architecture'], 'upgrade',
                                oldver[0], pkginfo['version']
                            )
                    elif vercomp:
                        logger_scan.warning('OLD    %s %s %s',
                            pkginfo['architecture'], pkginfo['package'],
                            pkginfo['version'])
                    else:
                        cur.execute("DELETE FROM pv_package_sodep "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_files "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_dependencies "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_duplicate "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("INSERT INTO pv_package_duplicate "
                            "SELECT * FROM pv_packages WHERE filename=%s",
                            (oldver[1],))
                        cur.execute("DELETE FROM pv_packages "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        logger_scan.error('DUP    %s == %s',
                            oldver[1], pkginfo['filename'])
                elif validdeb:
                    logger_scan.info('NEW    %s %s %s', pkginfo['architecture'],
                        pkginfo['package'], pkginfo['version'])
                    module_ipc.publish_change(
                        compname, pkginfo['package'], pkginfo['architecture'],
                        'new', '', pkginfo['version']
                    )
            keys, qms, vals = internal_db.make_insert(pkginfo)
            cur.execute("INSERT INTO pv_packages (%s) VALUES (%s)" %
                (keys, qms), vals)
            for row in depinfo.items():
                cur.execute("INSERT INTO pv_package_dependencies "
                    "VALUES (%s,%s,%s,%s,%s) "
                    "ON CONFLICT ON CONSTRAINT pv_package_dependencies_pkey "
                    "DO UPDATE SET value = %s",
                    dbkey + row + (row[1],))
            for row in sodeps:
                cur.execute("INSERT INTO pv_package_sodep VALUES "
                    "(%s,%s,%s,%s,%s,%s)", dbkey + row)
            for row in files:
                cur.execute("INSERT INTO pv_package_files VALUES "
                    "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", dbkey + row)
    for repo in modified_repo:
        cur.execute("UPDATE pv_repos SET mtime=now() WHERE name=%s", (repo,))
Example #26
def gen_release(db, branch_name: str, component_name_list: list, dist_dir: str,
                conf: PVConf):
    branch_dir = PosixPath(dist_dir).joinpath(branch_name)
    branch_dir.mkdir(0o755, parents=True, exist_ok=True)

    cur = db.cursor()
    meta_data_list = dict.fromkeys(component_name_list)
    for component_name in component_name_list:
        cur.execute("SELECT architecture FROM pv_repos WHERE path=%s",
                    (branch_name + '/' + component_name, ))
        meta_data_list[component_name] = [r[0] for r in cur] or ['all']
    cur.close()
    # Now we have this structure:
    # meta_data_list['main'] = ['amd64', 'arm64', ...]

    r_basic_info = {
        'Origin': conf['origin'],
        'Label': conf['label'],
        'Suite': branch_name,
        'Codename': conf['codename'],
        'Description': conf['desc'],
    }
    r_template = deb822.Release(r_basic_info)
    now = datetime.now(tz=timezone.utc)
    r_template['Date'] = now.strftime(date_format)
    if 'ttl' in conf:
        ttl = int(conf['ttl'])
        r_template['Valid-Until'] = (now +
                                     timedelta(days=ttl)).strftime(date_format)

    r = r_template.copy()

    r['Architectures'] = ' '.join(
        sorted(set.union(
            *map(set, meta_data_list.values())))) if meta_data_list else 'all'
    r['Components'] = ' '.join(sorted(component_name_list))
    hash_list = []
    for c in meta_data_list:
        for a in meta_data_list[c]:
            has_contents = False
            for filename in (
                    'binary-%s/Packages' % a,
                    'binary-%s/Packages.xz' % a,
                    'Contents-%s' % a,
                    'Contents-%s.gz' % a,
            ):
                path = branch_dir.joinpath(c).joinpath(filename)
                try:
                    size = path.stat().st_size
                except FileNotFoundError:
                    continue
                fullpath = str(PurePath(c).joinpath(filename))
                hash_list.append({
                    'sha256': sha256_file(str(path)),
                    'size': size,
                    'name': fullpath
                })
                if filename.startswith('Contents'):
                    if filename.endswith('.gz') and not has_contents:
                        with gzip.open(str(path), 'rb') as f:
                            size, sha256 = size_sha256_fp(f)
                        hash_list.append({
                            'sha256': sha256,
                            'size': size,
                            'name': os.path.splitext(fullpath)[0]
                        })
                    else:
                        has_contents = True

    null_name = 'placeholder'
    null_path = branch_dir.joinpath(null_name)
    if len(hash_list) == 0:
        open(null_path, 'wb').close()  # touch an empty file
        hash_list.append({
            'sha256': sha256_file(str(null_path)),
            'size': 0,
            'name': null_name
        })
    else:
        if os.path.exists(str(null_path)):
            os.remove(str(null_path))

    hash_list.sort(key=lambda x: x['name'])
    r['SHA256'] = hash_list
    release_fn = branch_dir.joinpath('Release')
    with open(str(release_fn), 'w', encoding='UTF-8') as f:
        f.write(str(r))
    subprocess.check_call([
        GPG_MAIN, '--batch', '--yes', '--clearsign', '-o',
        str(branch_dir.joinpath('InRelease')),
        str(release_fn)
    ])
    release_fn.unlink()