Example no. 1
class BMP280Sensor(AbstractTemperatureSensor):
    """
     Class to read the sensor BMP280 on the I2C bus

    To find the sensor id, make sure the i2c bus is enabled, the device is connected, and 
    mp3115a2 module is loaded. Additionally, you have to tell the i2c bus to read the chip
    with the 
    
    "echo bmp280 0x76" > /sys/bus/i2c/devices/i2c-1/new_device

    ...command. The i2c-1 bus number may change if the system has more than one i2c bus loadded.

     Then look in /sys/bus/i2c/devices directory for the 1-0076 directory.

     The 0x76 above and the 1-0076 represents the i2c bus id. The bus id can be determined
     with the i2cdetect command is needed. Some bmp280 sensors have ids of 0x77.

     The bme280 should also be supported but the humidity value will not be read

     """
    def __init__(self, temperature_config):
        super().__init__(temperature_config)
        self.property_bus = "i2c"
        devicepath = PosixPath("/sys/bus/i2c/devices").joinpath(
            temperature_config.device).joinpath("iio:device0")
        self.temperature_path = PosixPath(devicepath.joinpath("in_temp_input"))
        self.pressure_path = PosixPath(
            devicepath.joinpath("in_pressure_input"))
        # Make sure they exist
        if (not self.temperature_path.exists()
                or not self.temperature_path.is_file()):
            raise DeviceError(self.temperature_path)
        if (not self.pressure_path.exists()
                or not self.pressure_path.is_file()):
            raise DeviceError(self.pressure_path)

    @property
    def temperature(self):
        with self.temperature_path.open() as f:
            data = f.readline().strip()
        # in_temp_input is reported in millidegrees Celsius
        result = int(data) / 1000
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def pressure(self):
        with self.pressure_path.open() as f:
            data = f.readline().strip()
        # in_pressure_input is reported in kilopascals; x10 converts to hPa
        result = float(data) * 10
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def bus(self):
        return self.property_bus
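
A minimal usage sketch, assuming the base class stores the config as
property_sensor_config and that the config exposes the device and rounding
attributes the properties above read (the real temperature_config type is
defined elsewhere in the module):

    from types import SimpleNamespace

    # 'device' must match the sysfs directory created above (1-0076);
    # rounding=-1 would disable rounding
    config = SimpleNamespace(device="1-0076", rounding=1)

    sensor = BMP280Sensor(config)  # raises DeviceError if sysfs files are missing
    print(sensor.temperature, "degC /", sensor.pressure, "hPa on bus", sensor.bus)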
Example no. 2
class BH1750Sensor(AbstractLightSensor):
    """
    Class to read the sensor BH1750 on the I2C bus

    To find the sensor id, make sure the i2c bus is enabled, the device is connected, and 
    BH1750 module is loaded. Additionally, you have to tell the i2c bus to read the chip
    with the 
    
    "echo bh1750 0x23" > /sys/bus/i2c/devices/i2c-1/new_device

    ...command. The i2c-1 bus number may change if the system has more than one i2c bus loadded.

     Then look in /sys/bus/i2c/devices directory for the 1-0023 directory.

     The 0x23 above and the 1-0023 represents the i2c bus id. The bus id can be determined
     with the i2cdetect command is needed.

    """
    def __init__(self, lightsensor_config):
        super().__init__(lightsensor_config)
        self.property_bus = "i2c"
        devicepath = PosixPath("/sys/bus/i2c/devices").joinpath(
            lightsensor_config.device).joinpath("iio:device0")
        self.lightsensor_path_raw = PosixPath(
            devicepath.joinpath("in_illuminance_raw"))
        self.lightsensor_path_scale = PosixPath(
            devicepath.joinpath("in_illuminance_scale"))
        # Make sure they exist
        if (not self.lightsensor_path_raw.exists()
                or not self.lightsensor_path_raw.is_file()):
            raise DeviceError(self.lightsensor_path_raw)
        if (not self.lightsensor_path_scale.exists()
                or not self.lightsensor_path_scale.is_file()):
            raise DeviceError(self.lightsensor_path_scale)

    def dispose(self):
        pass

    @property
    def lightlevel(self):
        with self.lightsensor_path_raw.open() as f:
            data_raw = f.readline()
        with self.lightsensor_path_scale.open() as f:
            data_scale = f.readline()
        # illuminance in lux = raw reading * scale
        result = int(data_raw) * float(data_scale)
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def bus(self):
        return self.property_bus
Example no. 3
    def process_list_files_request(self, request):
        """ Process LIST_FILES request.

        The request is expected to contain the directory for which the
        server must list files.

        The possible responses are:

        * ACCEPTED with FILES_LISTED and the list of files, if the listing was successful
        * REFUSED with NOT_A_DIRECTORY if the directory to list doesn't exist

        Other responses include ERROR with BAD_REQUEST if the request is
        improperly formatted, or ERROR with UNKNOWN ERROR if any other
        error occurred while reading the directory.
        """

        # extract list directory from request, raise bad request error
        # if something goes wrong
        try:
            assert isinstance(request, tuple)
            assert len(request) == 2

            directory = PosixPath(request[1])

        except Exception:
            response = make_bad_request_error()
            self.socket.send_pyobj(response)

            return

        # normalize the directory (later it can be combined with the
        # root directory)
        directory = normalize_directory(directory)

        # combine the list directory with the root directory
        directory = self.root_directory / directory

        # if the directory doesn't refer to an actual directory, send
        # the NOT_A_DIRECTORY refused response
        if not directory.exists() or directory.is_file():
            response = make_not_a_directory_response()
            self.socket.send_pyobj(response)

            return

        # build the list of files of the given directory, with files
        # properties
        files_list = {}

        for _file in directory.iterdir():
            name = _file.name
            is_directory = _file.is_dir()
            size = _file.stat().st_size if not is_directory else 0
            last_accessed = _file.stat().st_atime

            files_list[name] = (is_directory, size, last_accessed)

        # send list file accepted response with list of files
        response = make_files_listed_response(files_list)
        self.socket.send_pyobj(response)
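
The files_list payload built above maps each name to a (is_directory, size,
last_accessed) tuple. A sketch of how a client might render such a listing
once it has unpacked the FILES_LISTED response (the unpacking itself depends
on the envelope produced by make_files_listed_response):

    from datetime import datetime

    def print_listing(files_list):
        for name, (is_directory, size, last_accessed) in files_list.items():
            kind = 'dir ' if is_directory else 'file'
            accessed = datetime.fromtimestamp(last_accessed).isoformat()
            print(f"{kind} {size:>10} {accessed} {name}")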
Example no. 4
def generate(db, base_dir: str, conf_common: PVConf,
             conf_branches: BranchesConf, force: bool):
    dist_dir = base_dir + '/dists.new'
    pool_dir = base_dir + '/pool'
    dist_dir_real = base_dir + '/dists'
    dist_dir_old = base_dir + '/dists.old'
    shutil.rmtree(dist_dir, ignore_errors=True)
    for key in conf_branches.keys():
        i = PosixPath(pool_dir).joinpath(key)
        if not i.is_dir():
            continue
        branch_name = i.name
        realbranchdir = os.path.join(dist_dir_real, branch_name)
        inrel = PosixPath(realbranchdir).joinpath('InRelease')
        expire_renewal_period = timedelta(days=conf_branches[branch_name].get(
            "renew_in", 1)).total_seconds()
        if not force and inrel.is_file():
            # See if we can skip this branch altogether
            inrel_mtime = inrel.stat().st_mtime
            inrel_sec_to_expire = get_valid_until_from_release(
                inrel) - datetime.now().timestamp()
            cur = db.cursor()
            cur.execute(
                "SELECT coalesce(extract(epoch FROM max(mtime)), 0) "
                "FROM pv_repos WHERE branch=%s", (branch_name, ))
            db_mtime = cur.fetchone()[0]
            cur.close()
            # Skip if
            # -   P-vector does not recognize this branch (usually means branch is empty)
            # OR  On-disk release mtime is newer than last time db was updated
            # AND On-disk release won't expire in 1 day
            if not db_mtime or (inrel_mtime > db_mtime and inrel_sec_to_expire > expire_renewal_period):
                shutil.copytree(realbranchdir,
                                os.path.join(dist_dir, branch_name))
                logger_rel.info('Skip generating Packages and Contents for %s',
                                branch_name)
                continue
        component_name_list = []
        for j in PosixPath(pool_dir).joinpath(branch_name).iterdir():
            if not j.is_dir():
                continue
            component_name = j.name
            component_name_list.append(component_name)
            logger_rel.info('Generating Packages for %s-%s', branch_name,
                            component_name)
            gen_packages(db, dist_dir, branch_name, component_name)
            logger_rel.info('Generating Contents for %s-%s', branch_name,
                            component_name)
            gen_contents(db, branch_name, component_name, dist_dir)

        conf = conf_common.copy()
        conf.update(conf_branches[branch_name])
        logger_rel.info('Generating Release for %s', branch_name)
        gen_release(db, branch_name, component_name_list, dist_dir, conf)
    if PosixPath(dist_dir_real).exists():
        os.rename(dist_dir_real, dist_dir_old)
    os.rename(dist_dir, dist_dir_real)
    shutil.rmtree(dist_dir_old, ignore_errors=True)
Example no. 5
    def __init__(self,
                 root_directory,
                 token,
                 file_size_limit=FILE_SIZE_LIMIT,
                 min_chunk_size=MINIMUM_CHUNK_SIZE,
                 max_chunk_size=MAXIMUM_CHUNK_SIZE):
        """ Construct a :py:class:`Server` instance.

        The server exposes the given root directory to clients. A
        relative root directory is resolved against the current working
        directory.

        :param root_directory: directory exposed to clients; must already exist.
        :param token: authentication token, stored UTF-8 encoded.
        :param file_size_limit: maximum accepted file size, in bytes.
        :param min_chunk_size: smallest allowed transfer chunk size, in bytes.
        :param max_chunk_size: largest allowed transfer chunk size, in bytes.
        """

        root_directory = PosixPath(root_directory)

        if not root_directory.is_absolute():
            root_directory = PosixPath(os.getcwd(), root_directory)

        if not root_directory.exists() or root_directory.is_file():
            raise NotADirectoryError(
                "The root directory must be an existing directory")

        self.root_directory = root_directory

        self.token = bytes(token, 'utf-8')

        if file_size_limit <= 0:
            raise ValueError("The file size limit must be greater than 0")
        self.file_size_limit = file_size_limit

        if min_chunk_size <= 0:
            raise ValueError("The minimum chunk size must be greater than 0")

        self.min_chunk_size = min_chunk_size
        self.max_chunk_size = max_chunk_size

        self.state = ServerState.IDLE

        # server UPLOAD and DOWNLOAD state related attributes
        self.chunk_size = 0
        self.temporary_file = None  # file open in 'wb' mode OR file open in 'rb' mode
        self.file_description = None
        self.remaining_bytes = 0
        self.file_destination = None  # upload only attribute

        # socket-related attributes
        self.router = None
        self.dealer = None
        self.socket = None

        self.is_running = False
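
A minimal construction sketch; the 'shared' directory name is illustrative
and must already exist, or NotADirectoryError is raised:

    server = Server('shared', token='s3cret')

    # a relative root is resolved against the current working directory
    assert server.root_directory.is_absolute()
    assert server.state == ServerState.IDLE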
Example no. 6
def _get_root_folder(root_folder: PosixPath) -> Optional[PosixPath]:
    folders: List[PosixPath] = [
        root_folder,
        Path(str(settings.BASE_DIR) + str(root_folder)),
    ]
    for folder in folders:
        if folder.is_dir():
            return folder
        if folder.is_file():
            logger.error(f"{folder} is a file.")

    return None
Example no. 7
def write_data(filepath: PosixPath, people_data: dict,
               planets: Dict[str, str]):
    settings.DATA_PATH.mkdir(parents=True, exist_ok=True)
    for data in people_data:
        table: DictsView = etl.fromdicts(data['results']).convert(
            'homeworld', planets).addfield(
                'date',
                lambda row: parse(row['edited']).strftime('%Y-%m-%d')).cutout(
                    *CUTOUT_FIELDS)
        if filepath.is_file():
            table.appendcsv(filepath)
        else:
            table.tocsv(filepath)
Example no. 8
def unzip(url:str, dest:PosixPath, chunk_size:int=1024*1024, remove_zip: bool=False):
    """ 
    Downloads and unzips a zip file
    
    parameters:
        url: str, uri to zip file
        dest: PosixPath, destination folder
        chunk_size: int, default 1 MB
        remove_zip: bool, default False, unlinks zip file after unzip operation
        
    returns:
        tqdm progress bar and typer echo messages
    """
    stream = requests.get(url, stream=True, verify=False, allow_redirects=True)
    filename = stream.url.split(sep="/")[-1]
    length = int(stream.headers.get("content-length", -1))
    
    if length < 1:
        raise Exception("content length is less than 1 byte")
    
    if not dest.exists():
        raise Exception(f"destination folder does not exist: {dest}")
    
    if dest.is_file():
        dest = dest.parent
        
    dest = dest.resolve()

    typer.echo("Downloading zip file...")

    with tqdm.wrapattr(
            open(dest.joinpath(filename), "wb"), "write",
            unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
            desc=filename, total=length) as f:
        for chunk in stream.iter_content(chunk_size=chunk_size):
            if chunk:
                f.write(chunk)
                f.flush()
                
    typer.echo("Extracting zip file...")
    
    with zipfile.ZipFile(dest.joinpath(filename)) as zippo:
        for member in tqdm(zippo.infolist(), desc="Extracting zip file..."):
            zippo.extract(member, dest)
            
    if remove_zip:
        dest.joinpath(filename).unlink()
        typer.secho(f"{filename} is removed.", bold=True, fg="red")
    else:
        typer.secho(f"{filename} is unzipped in {dest}.", bold=True, fg="green")
Example no. 9
	def __gen_filename(self, key, version_id=None, filename=None):
		if filename is not None:
			filename = PosixPath(filename).resolve()
			if filename.is_file():
				return filename.as_posix()
			elif filename.is_dir():
				basepath = filename
			else:
				return None
		else:
			basepath = PosixPath.cwd()
		leaf = key
		if version_id is not None:
			leaf = '%s-%s' % (leaf, version_id)
		return basepath.joinpath(leaf).as_posix()
Example no. 10
    def generate(self, path: pathlib.PosixPath, build_config: BuildConfig):
        if not isinstance(path, pathlib.PosixPath):
            raise ValueError(f'received type {type(path).__name__}')

        if not path.is_file():
            raise FileNotFoundError(f'could not load {path}')

        with open(path, encoding="utf-8") as fp:
            content = json.loads(fp.read())

        __custom = CustomPayload(content)

        return partial_class(
            (__custom.name,
             f'created by {__custom.instructor} for {__custom.name}',
             __custom.packages), AbstractKeyword, build_config)
Example no. 11
    def __add_to_file(self, path_to_file: pathlib.PosixPath,
                      template_dict: dict, template_name: str):
        """Add to a file from a template stored in the templates directory.

        Args:
            path_to_file (pathlib.PosixPath):\
                path to the file that needs to be updated.
            template_dict (dict):\
                used to customise parts of the template. The variable names \
                matching a key in the dict will be replaced with the \
                respective value.
            template_name (str):\
                template_name of the template file in the templates directory.

        Raises:
            TypeError: if the path input is not to a file.
            FileNotFoundError: if the path input does not exist.
            FileNotFoundError: if the project directory does not exist.
            FileNotFoundError: if the template file does not exist.
        """
        if not path_to_file.exists():
            raise FileNotFoundError(f'{path_to_file} does not exist.')
        elif not path_to_file.is_file():
            raise TypeError('Please input path to a file.')
        elif self.proj_dir is None or not self.proj_dir.exists():
            raise FileNotFoundError('You need to create a project directory.')

        if template_name is None:
            template_name = path_to_file.name + '.template'

        path_temp = Path.cwd() / 'templates' / template_name

        if not path_temp.exists():
            raise FileNotFoundError(f'No {template_name} file template was'
                                    ' found in the current directory.')

        with path_temp.open('r') as template_file:
            template_str = template_file.read()
        template = Template(template_str)

        write_to_file = template.render(template_dict)

        with path_to_file.open('w') as main:
            main.write(write_to_file)
Example no. 12
def onion_current_version():
    '''Retrieve the installed version of Onion.'''

    path_binary_file = PosixPath(binaries_dir, binary_file)
    regex = (r'^oniond (?P<version>[0-9]*\.[0-9]*(?:\.[0-9]*)?)'
             r' \(build (?P<build>[0-9]*)\)'
             r' released on (?P<date>[0-9]{4}-[0-9]{2}-[0-9]{2})$')

    if path_binary_file.exists() and path_binary_file.is_file():
        output = check_output(['oniond', 'version']).decode()
        onion_current_installation = match(regex, output)

        if onion_current_installation:
            onion_current_version = onion_current_installation['version']
            onion_current_build = int(onion_current_installation['build'])
            onion_current_date = onion_current_installation['date']

            return (onion_current_version, onion_current_build,
                    onion_current_date)

    return False
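
A quick sketch of what the pattern expects, against a made-up version line:

    from re import match

    REGEX = (r'^oniond (?P<version>[0-9]*\.[0-9]*(?:\.[0-9]*)?)'
             r' \(build (?P<build>[0-9]*)\)'
             r' released on (?P<date>[0-9]{4}-[0-9]{2}-[0-9]{2})$')

    m = match(REGEX, 'oniond 2.0.1 (build 142) released on 2021-06-01')
    assert (m['version'], int(m['build']), m['date']) == \
        ('2.0.1', 142, '2021-06-01')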
Example no. 13
def setup_unarchive():
    '''Unarchive the setup file.'''

    path_setup_file = PosixPath(SETUP_FILE)

    if path_setup_file.exists() and path_setup_file.is_file():
        try:
            with open(path_setup_file, 'rb') as file:
                data = file.read()
                current_hash = sha1(data).hexdigest()
        except IOError:
            current_hash = ''

        if SETUP_HASH == current_hash:
            unpack_archive(SETUP_FILE, SETUP_EXTRACT_DIR, SETUP_COMPRESSION)
        else:
            print('The setup file is corrupted.')
            exit(3)
    else:
        print('The setup file cannot be found.')
        exit(3)
Example no. 14
class DS18B20Sensor(AbstractTemperatureSensor):
    """
    Class to read the sensor DS18B20 on the one wire bus

    To find the sensor id, make sure the one wire bus is enabled, the device is connected, and 
    w1_therm module is loaded. Then look in /sys/bus/w1/devices for a file startig with 28-*. 
    Record that number and that is the device. The device will be different for each sensor
    on the w1 bus.

    """
    def __init__(self, temperature_config):
        super().__init__(temperature_config)
        self.property_bus = "w1"
        devicepath = PosixPath("/sys/bus/w1/devices").joinpath(
            temperature_config.device)
        self.temperature_path = PosixPath(devicepath.joinpath("w1_slave"))
        # Make sure they exist
        if (not self.temperature_path.exists()
                or not self.temperature_path.is_file()):
            raise DeviceError(self.temperature_path)

    @property
    def temperature(self):
        with self.temperature_path.open() as f:
            f.readline()         # first line carries the CRC status
            line = f.readline()  # second line ends with t=<millidegrees C>

        data = line.split('=')
        result = int(data[1]) / 1000
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def pressure(self):
        return 0

    @property
    def bus(self):
        return self.property_bus
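
For reference, a w1_slave read typically produces two lines like the ones in
the comment below. A more defensive parser would check the CRC status on the
first line before trusting the t= value; a sketch, not part of the original
class:

    # 4b 01 4b 46 7f ff 0c 10 d8 : crc=d8 YES
    # 4b 01 4b 46 7f ff 0c 10 d8 t=20687

    def read_w1_temperature(path):
        with path.open() as f:
            status, data = f.readline(), f.readline()
        if not status.strip().endswith('YES'):
            raise DeviceError(path)  # CRC check failed
        return int(data.rsplit('t=', 1)[1]) / 1000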
Example no. 15
def find(warn=True):
    """Search for a pavilion.yaml configuration file. Use the one pointed
to by the PAV_CONFIG_FILE environment variable. Otherwise, use the first
one found in the default config search paths:

- The ~/.pavilion directory
- The Pavilion source directory (don't put your config here).
"""

    if PAV_CONFIG_FILE is not None:
        pav_cfg_file = PosixPath(PAV_CONFIG_FILE)
        # pylint has a bug that pops up occasionally with pathlib.
        if pav_cfg_file.is_file():  # pylint: disable=no-member
            try:
                cfg = PavilionConfigLoader().load(pav_cfg_file.open())  # pylint: disable=no-member
                cfg.pav_cfg_file = pav_cfg_file
                return cfg
            except Exception as err:
                raise RuntimeError("Error in Pavilion config at {}: {}".format(
                    pav_cfg_file, err))

    for config_dir in PAV_CONFIG_SEARCH_DIRS:
        path = config_dir / 'pavilion.yaml'
        if path.is_file():  # pylint: disable=no-member
            try:
                # Parse and load the configuration.
                cfg = PavilionConfigLoader().load(path.open())  # pylint: disable=no-member
                cfg.pav_cfg_file = path
                return cfg
            except Exception as err:
                raise RuntimeError("Error in Pavilion config at {}: {}".format(
                    path, err))

    if warn:
        LOGGER.warning("Could not find a pavilion config file. Using an "
                       "empty/default config.")
    return PavilionConfigLoader().load_empty()
Example no. 16
    def get_item_dict(self,
                      item: PosixPath,
                      relative: Optional[PosixPath] = None) -> Dict[str, Any]:
        is_file: bool = item.is_file()
        stat = item.stat()
        _dict: Dict[str, Any] = {
            "name": item.name,
            "full_path": str(item),
            "type": "file" if is_file else "folder",
            "size": stat.st_size,
            "date": datetime.fromtimestamp(stat.st_ctime).date(),
        }
        if is_file:
            _dict["suffix"] = item.suffix
            _dict["used"] = str(item) in self.db_files
        else:
            _dict["files"] = []

        if relative:
            _dict["relative_path"] = str(
                item.relative_to(Path(relative.parent)))

        self.found_files.add(str(item))

        return _dict
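
For a regular file the returned dict looks roughly like this (values are
illustrative):

    {
        "name": "report.csv",
        "full_path": "/data/uploads/report.csv",
        "type": "file",
        "size": 1024,
        "date": datetime.date(2021, 3, 14),
        "suffix": ".csv",
        "used": False,
        "relative_path": "uploads/report.csv",  # only when relative is given
    }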
Example no. 17
def create_h5_file(basins: List,
                   cfg: Dict,
                   h5_file: PosixPath,
                   scaler_file: PosixPath,
                   additional_features: List[Dict] = []):

    if h5_file.is_file():
        raise FileExistsError(f"File already exists at {h5_file}")

    if cfg.get("camels_attributes", []):
        attributes_sanity_check(data_dir=cfg["data_dir"],
                                dataset=cfg["dataset"],
                                basins=basins,
                                attribute_list=cfg.get("camels_attributes",
                                                       []))

    n_dyn_inputs = len(cfg["dynamic_inputs"])
    n_targets = len(cfg["target_variable"])
    # we only store user-defined additional static features provided in the additional_features table
    n_stat = len(cfg["static_inputs"])

    with h5py.File(h5_file, 'w') as out_f:
        dyn_input_data = out_f.create_dataset(
            'dynamic_inputs',
            shape=(0, cfg["seq_length"], n_dyn_inputs),
            maxshape=(None, cfg["seq_length"], n_dyn_inputs),
            chunks=True,
            dtype=np.float32,
            compression='gzip')
        if n_stat > 0:
            stat_input_data = out_f.create_dataset('static_inputs',
                                                   shape=(0, n_stat),
                                                   maxshape=(None, n_stat),
                                                   chunks=True,
                                                   dtype=np.float32,
                                                   compression='gzip')
        target_data = out_f.create_dataset('target_data',
                                           shape=(0, cfg["seq_length"],
                                                  n_targets),
                                           maxshape=(None, cfg["seq_length"],
                                                     n_targets),
                                           chunks=True,
                                           dtype=np.float32,
                                           compression='gzip')
        q_stds = out_f.create_dataset('q_stds',
                                      shape=(0, 1),
                                      maxshape=(None, 1),
                                      dtype=np.float32,
                                      compression='gzip',
                                      chunks=True)
        sample_2_basin = out_f.create_dataset('sample_2_basin',
                                              shape=(0, ),
                                              maxshape=(None, ),
                                              dtype="S11",
                                              compression='gzip',
                                              chunks=True)

        scalers = {
            'dyn_mean': np.zeros(n_dyn_inputs),
            'dyn_std': np.zeros(n_dyn_inputs),
            'target_mean': np.zeros(n_targets),
            'target_std': np.zeros(n_targets)
        }
        total_samples = 0

        basins_without_train_data = []

        for basin in tqdm(basins, file=sys.stdout):

            if additional_features:
                add_features = [d[basin] for d in additional_features]
            else:
                add_features = []

            try:
                dataset = get_basin_dataset(basin=basin,
                                            cfg=cfg,
                                            mode="train",
                                            additional_features=add_features)
            except NoTrainDataError:
                # skip basins without discharge observations in the train period
                basins_without_train_data.append(basin)
                continue

            num_samples = len(dataset)
            total_samples = dyn_input_data.shape[0] + num_samples

            basin_scaler = dataset.get_scaler()

            scalers["dyn_mean"] += num_samples * basin_scaler["dyn_mean"]
            scalers["dyn_std"] += num_samples * basin_scaler["dyn_std"]
            scalers["target_mean"] += num_samples * basin_scaler["target_mean"]
            scalers["target_std"] += num_samples * basin_scaler["target_std"]

            # store input and output samples
            dyn_input_data.resize(
                (total_samples, cfg["seq_length"], n_dyn_inputs))
            dyn_input_data[-num_samples:, :, :] = dataset.x_d.numpy()

            target_data.resize((total_samples, cfg["seq_length"], n_targets))
            target_data[-num_samples:, :, :] = dataset.y.numpy()

            if n_stat > 0:
                x_stat = dataset.x_s.numpy()
                stat_input_data.resize((total_samples, n_stat))
                # the non-CAMELS stat features are stored at the end of the combined features
                stat_input_data[-num_samples:, :] = x_stat[:, -n_stat:]

            # additionally store std of discharge of this basin for each sample
            q_stds.resize((total_samples, 1))
            q_std_array = np.array([dataset.q_std] * num_samples,
                                   dtype=np.float32).reshape(-1, 1)
            q_stds[-num_samples:, :] = q_std_array

            sample_2_basin.resize((total_samples, ))
            str_arr = np.array([basin.encode("ascii", "ignore")] * num_samples)
            sample_2_basin[-num_samples:] = str_arr

            out_f.flush()

    if basins_without_train_data:
        print(
            "### The following basins were skipped, since they don't have discharge observations in the train period"
        )
        print(basins_without_train_data)

    for key in scalers:
        scalers[key] /= total_samples

    if n_stat > 0:
        with h5py.File(h5_file, 'r') as f:
            scalers["stat_mean"] = f["static_inputs"][:].mean(axis=0)
            scalers["stat_std"] = f["static_inputs"][:].std(axis=0)

    if cfg.get("camels_attributes", []):
        attr_means, attr_stds = get_camels_scaler(
            data_dir=cfg["data_dir"],
            basins=basins,
            attributes=cfg["camels_attributes"])
        scalers["camels_attr_mean"] = attr_means
        scalers["camels_attr_std"] = attr_stds

    # sanity check that no std for any feature is 0, which results in NaN values during training
    problems_in_feature_std = []
    for k, v in scalers.items():
        # skip attributes, which were already tested above
        if k.endswith('_std') and ('attr' not in k):
            if any(v == 0) or any(np.isnan(v)):
                problems_in_feature_std.append(
                    (k, list(np.argwhere(np.isnan(v) | (v == 0)).flatten())))
    if problems_in_feature_std:
        print(
            "### ERROR: Zero or NaN std encountered in the following features."
        )
        for k, pos in problems_in_feature_std:
            print(f"In scaler for {k} at position {pos}")
        raise RuntimeError("Zero or NaN std encountered in feature scalers")

    with scaler_file.open("wb") as fp:
        pickle.dump(scalers, fp)

    # already normalize all data, so we don't have to do this while training
    with h5py.File(h5_file, 'r+') as f:
        print(
            f"Applying normalization in chunks of {CHUNK_SIZE} using global statistics"
        )
        # iterate in chunks so this can run on low-memory systems

        n_batches = f["dynamic_inputs"].shape[0] // CHUNK_SIZE
        if f["dynamic_inputs"].shape[0] % CHUNK_SIZE > 0:
            n_batches += 1

        for i in tqdm(range(n_batches), file=sys.stdout):

            start_idx = i * CHUNK_SIZE
            end_idx = (i + 1) * CHUNK_SIZE
            if end_idx > f["dynamic_inputs"].shape[0]:
                slice_obj = slice(start_idx, None)
            else:
                slice_obj = slice(start_idx, end_idx)

            data = f["dynamic_inputs"]
            data[slice_obj] = (data[slice_obj] -
                               scalers["dyn_mean"]) / scalers["dyn_std"]

            data = f["target_data"]
            if cfg.get("zero_center_target", True):
                data[slice_obj] = (data[slice_obj] - scalers["target_mean"]
                                   ) / scalers["target_std"]
            else:
                data[slice_obj] = data[slice_obj] / scalers["target_std"]

            if n_stat > 0:
                data = f["static_inputs"]
                data[slice_obj] = (data[slice_obj] -
                                   scalers["stat_mean"]) / scalers["stat_std"]

            f.flush()
Example no. 18
        onion_about()
        exit(0)
    elif options[1] == 'help':
        onion_help()
        exit(0)
    else:
        print(f'Unknown command: {options[1]}')
        exit(1)

# Root privileges are required to continue
if geteuid() != 0:
    print('Onion does not have enough privileges to start.')
    exit(2)

# Checking if another instance of Onion is running
if PID_FILE.exists() and PID_FILE.is_file():
    print('Onion is already running.')
    exit(4)


# +-----------------------------------------------------------------+
# | MAIN PROGRAM                                                    |
# +-----------------------------------------------------------------+
# | 2. Onion HA part                                                |
# | Analyzes, logs and runs your scenarios                          |
# +-----------------------------------------------------------------+
# Reading the configuration file
config_ext = read_configuration()

if not config_ext:
    print('The configuration file cannot be opened.\n',
Example no. 19
File: cli.py Project: m-deck/henry
def main():
    logger.info('Starting henry')
    HELP_PATH = os.path.join(os.path.dirname(henry.__file__),
                             '.support_files/help.rtf')
    with open(HELP_PATH, 'r', encoding='unicode_escape') as myfile:
        descStr = myfile.read()

    # load custom config settings if defined in ~/.henry/henry.json
    settings_file = PosixPath(os.path.join(METADATA_PATH,
                                           'settings.json')).expanduser()
    timeout = 120
    config_path = PosixPath.cwd().joinpath('config.yml')
    if settings_file.is_file():
        with open(settings_file, 'r') as f:
            settings = json.load(f)
            timeout = settings.get('api_conn_timeout', timeout)
            if type(timeout) is list:
                timeout = tuple(timeout)
            config_path = settings.get('config_path', config_path)
        logger.info(
            f'Loaded config settings from ~/.henry/settings.json, {settings}')
    else:
        logger.info('No custom config file found. Using defaults.')

    parser = argparse.ArgumentParser(
        description=descStr,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        prog='henry',
        usage='henry command subcommand '
        '[subcommand options] [global '
        'options]',
        allow_abbrev=False,
        add_help=False)

    subparsers = parser.add_subparsers(dest='command', help=argparse.SUPPRESS)
    parser.add_argument("-h", "--help", action="help", help=argparse.SUPPRESS)

    # subparsers.required = True # works, but might do without for now.

    pulse = subparsers.add_parser('pulse', help='pulse help')

    analyze_parser = subparsers.add_parser('analyze',
                                           help='analyze help',
                                           usage='henry analyze')
    analyze_parser.set_defaults(which=None)
    analyze_subparsers = analyze_parser.add_subparsers()
    analyze_projects = analyze_subparsers.add_parser('projects')
    analyze_models = analyze_subparsers.add_parser('models')
    analyze_explores = analyze_subparsers.add_parser('explores')

    # project subcommand
    analyze_projects.set_defaults(which='projects')
    analyze_projects.add_argument('-p',
                                  '--project',
                                  type=str,
                                  default=None,
                                  help='Filter on a project')
    analyze_projects.add_argument('--order_by',
                                  nargs=2,
                                  metavar=('ORDER_FIELD', 'ASC/DESC'),
                                  dest='sortkey',
                                  help='Sort results by a field')
    analyze_projects.add_argument('--limit',
                                  type=int,
                                  default=None,
                                  nargs=1,
                                  help='Limit results. No limit by default')

    # models subcommand
    analyze_models.set_defaults(which='models')
    models_group = analyze_models.add_mutually_exclusive_group()

    models_group.add_argument('-p',
                              '--project',
                              type=str,
                              default=None,
                              help='Filter on project')
    models_group.add_argument('-model',
                              '--model',
                              type=str,
                              default=None,
                              help='Filter on model')
    analyze_models.add_argument('--timeframe',
                                type=int,
                                default=90,
                                help='Timeframe (between 0 and 90)')
    analyze_models.add_argument('--min_queries',
                                type=int,
                                default=0,
                                help='Query threshold')
    analyze_models.add_argument('--order_by',
                                nargs=2,
                                metavar=('ORDER_FIELD', 'ASC/DESC'),
                                dest='sortkey',
                                help='Sort results by a field')
    analyze_models.add_argument('--limit',
                                type=int,
                                default=None,
                                nargs=1,
                                help='Limit results. No limit by default')

    # explores subcommand
    analyze_explores.set_defaults(which='explores')
    analyze_explores.add_argument('-model',
                                  '--model',
                                  type=str,
                                  default=None,
                                  required='--explore' in sys.argv,
                                  help='Filter on model')
    analyze_explores.add_argument('-e',
                                  '--explore',
                                  default=None,
                                  help='Filter on explore')
    analyze_explores.add_argument('--timeframe',
                                  type=int,
                                  default=90,
                                  help='Timeframe (between 0 and 90)')
    analyze_explores.add_argument('--min_queries',
                                  type=int,
                                  default=0,
                                  help='Query threshold')
    analyze_explores.add_argument('--order_by',
                                  nargs=2,
                                  metavar=('ORDER_FIELD', 'ASC/DESC'),
                                  dest='sortkey',
                                  help='Sort results by a field')
    analyze_explores.add_argument('--limit',
                                  type=int,
                                  default=None,
                                  nargs=1,
                                  help='Limit results. No limit by default')

    # VACUUM Subcommand
    vacuum_parser = subparsers.add_parser('vacuum',
                                          help='vacuum help',
                                          usage='henry vacuum')
    vacuum_parser.set_defaults(which=None)
    vacuum_subparsers = vacuum_parser.add_subparsers()
    vacuum_models = vacuum_subparsers.add_parser('models')
    vacuum_explores = vacuum_subparsers.add_parser('explores')
    vacuum_models.set_defaults(which='models')
    vm_group = vacuum_models.add_mutually_exclusive_group()
    vm_group.add_argument('-p',
                          '--project',
                          type=str,
                          default=None,
                          help='Filter on Project')
    vm_group.add_argument('-m',
                          '--model',
                          type=str,
                          default=None,
                          help='Filter on model')

    vacuum_models.add_argument('--timeframe',
                               type=int,
                               default=90,
                               help='Usage period to examine (in the range of '
                               '0-90 days). Default: 90 days.')

    vacuum_models.add_argument('--min_queries',
                               type=int,
                               default=0,
                               help='Vacuum threshold. Explores with less '
                               'queries in the given usage period will '
                               'be vacuumed. Default: 0 queries.')

    vacuum_explores.set_defaults(which='explores')
    vacuum_explores.add_argument('-m',
                                 '--model',
                                 type=str,
                                 default=None,
                                 required='--explore' in sys.argv,
                                 help='Filter on model')

    vacuum_explores.add_argument('-e',
                                 '--explore',
                                 type=str,
                                 default=None,
                                 help='Filter on explore')

    vacuum_explores.add_argument('--timeframe',
                                 type=int,
                                 default=90,
                                 help='Timeframe (between 0 and 90)')

    vacuum_explores.add_argument('--min_queries',
                                 type=int,
                                 default=0,
                                 help='Query threshold')

    for subparser in [
            analyze_projects, analyze_models, analyze_explores, vacuum_models,
            vacuum_explores, pulse
    ]:
        subparser.add_argument('--output',
                               type=str,
                               default=None,
                               help='Path to file for saving the output')
        subparser.add_argument('-q',
                               '--quiet',
                               action='store_true',
                               help='Silence output')
        subparser.add_argument('--plain',
                               default=None,
                               action='store_true',
                               help='Show results in a table format '
                               'without the gridlines')
        subparser.add_argument_group("Authentication")
        subparser.add_argument(
            '--host',
            type=str,
            default='looker',
            required=any(
                k in sys.argv
                for k in ['--client_id', '--client_secret', '--alias']),
            help=argparse.SUPPRESS)
        subparser.add_argument('--port',
                               type=int,
                               default=19999,
                               help=argparse.SUPPRESS)
        subparser.add_argument('--client_id',
                               type=str,
                               required=any(
                                   k in sys.argv
                                   for k in ['--client_secret', '--alias']),
                               help=argparse.SUPPRESS)
        subparser.add_argument(
            '--client_secret',
            type=str,
            required=any(k in sys.argv for k in ['--client_id', '--alias']),
            help=argparse.SUPPRESS)
        subparser.add_argument('--persist',
                               action='store_true',
                               help=argparse.SUPPRESS)
        subparser.add_argument('--alias', type=str, help=argparse.SUPPRESS)
        subparser.add_argument('--path',
                               type=str,
                               default='',
                               help=argparse.SUPPRESS)

    args = vars(parser.parse_args())
    _args = {}
    for key, value in args.items():
        if key == 'client_secret':
            _args[key] = '[FILTERED]'
        else:
            _args[key] = value
    logger.info('Parsing args, %s', _args)

    if not args['command']:
        print('usage:', parser.usage)
        print('\nNo command specified. Try `henry --help` for help.')
        sys.exit(1)
    auth_params = ('host', 'port', 'client_id', 'client_secret', 'persist',
                   'alias', 'path')
    auth_args = {k: args[k] for k in auth_params}

    # authenticate
    if args['command'] != 'pulse':
        cmd = args['command'] + ' ' + args['which']
    else:
        cmd = args['command']
    session_info = f'Henry v{pkg.__version__}: cmd={cmd}' \
                   f', sid=#{uuid.uuid1()}'
    looker = authenticate(timeout, session_info, config_path, **auth_args)

    # map subcommand to function
    if args['command'] in ('analyze', 'vacuum'):
        if args['which'] is None:
            parser.error("No command")
        else:
            with Spinner():
                if args['command'] == 'analyze':
                    analyze = Analyze(looker)
                    result = analyze.analyze(**args)
                else:
                    vacuum = Vacuum(looker)
                    result = vacuum.vacuum(**args)
        # silence output if the --quiet flag is used
        if not args['quiet']:
            print(result)
    elif args['command'] == 'pulse':
        pulse = Pulse(looker)
        result = pulse.run_all()
    else:
        print('No command passed')

    # save to file if --output flag is used
    if args['output']:
        logger.info('Saving results to file: %s', args['output'])
        if os.path.isdir(args['output']):
            error = IsADirectoryError(errno.EISDIR, os.strerror(errno.EISDIR),
                                      args['output'])
            logger.error(error)
            raise error
        elif not args['output'].endswith('.txt'):
            error = ValueError('Output file must be a .txt file')
            logger.exception(error)
            raise error
        elif os.path.isfile(args['output']):
            error = FileExistsError(errno.EEXIST, os.strerror(errno.EEXIST),
                                    args['output'])
            logger.error(error)
            raise error
        else:
            try:
                with open(args['output'], 'w+') as f:
                    f.write(result + '\n')
                logger.info('Results successfully saved.')
            except Exception as e:
                logger.error(e)
                raise (e)
Example no. 20
    def upload_file(self,
                    source,
                    destination,
                    name=None,
                    chunk_size=512,
                    process_chunk=None,
                    timeout=None):
        """ Upload a file to the remote directory.

        This method uploads a single file to a given directory in the
        remote directory.

        The **source** parameter refers to the local file to be
        transferred to the remote directory and must be a
        :term:`path-like object`. If it's a relative path, it's treated
        as relative to the current working directory. If the source
        file can't be found or is not a file, the SourceNotFound
        exception is raised.

        The **destination** parameter refers to the remote directory to
        which the file must be transferred. It must be a
        :term:`path-like object` of an **existing** directory and it
        must be an absolute path, or the ValueError exception is
        raised. If the destination directory can't be found or is not
        a directory, the DestinationNotFound exception is raised.

        The name parameter can be used to rename the source file while
        uploading it (the content is guaranteed to be the same). It must
        be a string of a :term:`valid file name` and must not conflict
        with an existing file (or directory) in the destination
        directory. By default, it reads the name from the source to
        leave it unchanged. If the name isn't valid, a
        :py:exc:`InvalidFileName` is raised and if the file is
        conflicting, a :py:exc:`FileExistsError` exception is raised.

        Additionally, you can adjust the chunk size value, which defines
        the size of the fragments in which the file is sent to the
        server, and/or pass a callback that processes each fragment
        **before** it's sent to the server. Usually, the chunk size is
        between 512 and 8192.

        The callback is called with several parameters in a specific
        order: the chunk data, the remaining bytes, the file
        size and the file name. The chunk data is a bytes string of the
        actual data about to be sent to the server. The remaining bytes
        is an integer indicating the number of bytes left to be sent
        (and this includes the current chunk of data). The file size is
        a fixed integer telling how large the file is, and the file name
        is the file name currently being processed.

        For instance, it can be used to display a progress indicator.
        Here is an example. ::

            def display_progress(chunk_data, remaining_bytes, file_size, file_name):
                chunk_size = 512
                progress = (file_size - (remaining_bytes - len(chunk_data))) / file_size * 100

                sys.stdout.write("\r{0:0.2f}% | {1}".format(progress, file_name))
                sys.stdout.flush()

                if remaining_bytes <= chunk_size:
                    sys.stdout.write('\n')

                return True

        If the operation takes longer than the given timeout, a
        :py:exc:`TimeoutError` exception is raised.

        :param source:        Foobar.
        :param destination:   Foobar.
        :param name:          Foobar.
        :param chunk_size:    Foobar.
        :param process_chunk: Foobar.
        :param timeout:       Foobar.
        :raises ValueError:          If the destination directory isn't an absolute path.
        :raises SourceNotFound:      If the source file doesn't exist or isn't a file.
        :raises DestinationNotFound: If the destination directory doesn't exist or isn't a directory.
        :raises FileExistsError:     If the source file conflicts with an existing file or directory.
        :raises InvalidFileName:     If the source file doesn't have a valid name.
        :raises TimeoutError:        If it takes more than the timeout value to receive a response.
        """

        # ensure we work with posix paths
        source = PosixPath(source)
        destination = PurePosixPath(destination)

        # normalize the source to work with an absolute path
        if not source.is_absolute():
            source = PosixPath(os.getcwd(), source)

        # compute the name from the source if not specified (file name
        # unchanged)
        if not name:
            name = source.name

        # raise SourceNotFound exception if the source file doesn't
        # exist or is not a file
        if not source.exists() or not source.is_file():
            raise SourceNotFound("Source file could not be found")

        # raise ValueError exception if destination directory is not an
        # absolute path
        if not destination.is_absolute():
            raise ValueError("Destination must be an absolute path")

        # check if the destination directory exists and raises
        # DestinationNotFound exception if it doesn't exist or is not
        # a directory (a root is always a valid destination)
        if str(destination) != destination.root:

            try:
                files = self.list_files(destination.parent, timeout)
            except NotADirectoryError:  # TODO: exception is likely to change
                raise DestinationNotFound(
                    "Destination directory could not be found")
            except TimeoutError:
                raise
            except Exception:
                raise UnexpectedError

            # files maps each name to (is_directory, size, last_accessed);
            # the destination must exist and be a directory
            if destination.name not in files or not files[
                    destination.name][0]:
                raise DestinationNotFound(
                    "Destination directory could not be found")

        # check if the file name doesn't conflict with an existing file
        # (or directory) in the destination directory
        try:
            files = self.list_files(destination, timeout)
        except TimeoutError:
            raise
        except Exception:
            raise UnexpectedError

        if name in files:
            raise FileExistsError

        # initiate and do the upload process
        try:
            self._upload_file(source, destination, name, chunk_size,
                              process_chunk, timeout)
        except ValueError:
            # if chunk size is incorrect
            raise ValueError
        except InvalidFileName:
            # if file name is invalid
            raise InvalidFileName
        except Exception:
            raise NotImplementedError
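
A usage sketch tying it together; client stands for an instance of the class
this method belongs to, and display_progress is the callback defined in the
docstring above:

    client.upload_file('reports/summary.pdf',  # local, relative to cwd
                       '/backups/2021',        # remote, must be absolute
                       chunk_size=4096,
                       process_chunk=display_progress,
                       timeout=30)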
Example no. 21
def get_asset(path):
    asset_path = PosixPath(config.runtime.asset_path).resolve() / 'docs' / 'assets' / path
    if not asset_path.exists() or not asset_path.is_file():
        raise Exception('Unable to load asset %s at %s' % (path, asset_path))
    with open(asset_path) as f:
        return f.read()
Example no. 22
def create_h5_files(camels_root: PosixPath,
                    out_file: PosixPath,
                    basins: List,
                    dates: List,
                    with_basin_str: bool = True,
                    seq_length: int = 1):
    """[summary]
    
    Parameters
    ----------
    camels_root : PosixPath
        Path to the main directory of the CAMELS data set
    out_file : PosixPath
        Path of the location, where the hdf5 file should be stored
    basins : Dict
        List containing the 8-digit USGS gauge id
    dates : List
        List of start and end date of the discharge period to use, when combining the data.
    with_basin_str : bool, optional
        If True, stores for each sample the corresponding USGS gauged id, by default True
    seq_length : int, optional
        Length of the requested input sequences., by default 270
    
    Raises
    ------
    FileExistsError
        If file at this location already exists.
    """
    if out_file.is_file():
        raise FileExistsError(f"File already exists at {out_file}")

    with h5py.File(out_file, 'w') as out_f:
        input_data = out_f.create_dataset('input_data',
                                          shape=(0, seq_length, 6),
                                          maxshape=(None, seq_length, 6),
                                          chunks=True,
                                          dtype=np.float32,
                                          compression='gzip')
        target_data = out_f.create_dataset('target_data',
                                           shape=(0, 1),
                                           maxshape=(None, 1),
                                           chunks=True,
                                           dtype=np.float32,
                                           compression='gzip')

        q_stds = out_f.create_dataset('q_stds',
                                      shape=(0, 1),
                                      maxshape=(None, 1),
                                      dtype=np.float32,
                                      compression='gzip',
                                      chunks=True)

        if with_basin_str:
            sample_2_basin = out_f.create_dataset('sample_2_basin',
                                                  shape=(0, ),
                                                  maxshape=(None, ),
                                                  dtype="S10",
                                                  compression='gzip',
                                                  chunks=True)

        for basin in tqdm(basins, file=sys.stdout):
            dataset = CamelsTXT(camels_root=camels_root,
                                basin=basin,
                                is_train=True,
                                seq_length=seq_length,
                                dates=dates)

            num_samples = len(dataset)
            total_samples = input_data.shape[0] + num_samples

            # store input and output samples
            input_data.resize((total_samples, seq_length, 6))
            target_data.resize((total_samples, 1))
            input_data[-num_samples:, :, :] = dataset.x
            target_data[-num_samples:, :] = dataset.y

            # additionally store std of discharge of this basin for each sample
            q_stds.resize((total_samples, 1))
            q_std_array = np.array([dataset.q_std] * num_samples,
                                   dtype=np.float32).reshape(-1, 1)
            q_stds[-num_samples:, :] = q_std_array

            if with_basin_str:
                sample_2_basin.resize((total_samples, ))
                str_arr = np.array([basin.encode("ascii", "ignore")] *
                                   num_samples)
                sample_2_basin[-num_samples:] = str_arr

            out_f.flush()
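
A usage sketch; the paths and basin ids are illustrative, and the exact type
expected for dates depends on CamelsTXT:

    from datetime import datetime
    from pathlib import PosixPath

    create_h5_files(camels_root=PosixPath('/data/CAMELS'),
                    out_file=PosixPath('/data/train.h5'),
                    basins=['01013500', '01022500'],
                    dates=[datetime(1999, 10, 1), datetime(2008, 9, 30)],
                    seq_length=270)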
Example no. 23
class MP3115Sensor(AbstractTemperatureSensor):
    """
    Class to read the sensor MP3115 on the I2C bus

    To find the sensor id, make sure the i2c bus is enabled, the device is connected, and 
    mp3115a2 module is loaded. Additionally, you have to tell the i2c bus to read the chip
    with the 
    
    "echo mp3115a2 0x60" > /sys/bus/i2c/devices/i2c-1/new_device

    ...command. The i2c-1 bus number may change if the system has more than one i2c bus loadded.

     Then look in /sys/bus/i2c/devices directory for the 1-0060 directory.

     The 0x60 above and the 1-0060 represents the i2c bus id. The bus id can be determined
     with the i2cdetect command is needed.

    """
    def __init__(self, temperature_config):
        super().__init__(temperature_config)
        self.property_bus = "i2c"
        devicepath = PosixPath("/sys/bus/i2c/devices").joinpath(
            temperature_config.device).joinpath("iio:device0")
        self.temperature_path_raw = PosixPath(
            devicepath.joinpath("in_temp_raw"))
        self.temperature_path_scale = PosixPath(
            devicepath.joinpath("in_temp_scale"))
        self.pressure_path_raw = PosixPath(
            devicepath.joinpath("in_pressure_raw"))
        self.pressure_path_scale = PosixPath(
            devicepath.joinpath("in_pressure_scale"))
        # Make sure they exist
        if (not self.temperature_path_raw.exists()
                or not self.temperature_path_raw.is_file()):
            raise DeviceError(self.temperature_path_raw)
        if (not self.temperature_path_scale.exists()
                or not self.temperature_path_scale.is_file()):
            raise DeviceError(self.temperature_path_scale)
        if (not self.pressure_path_raw.exists()
                or not self.pressure_path_raw.is_file()):
            raise DeviceError(self.pressure_path_raw)
        if (not self.pressure_path_scale.exists()
                or not self.pressure_path_scale.is_file()):
            raise DeviceError(self.pressure_path_scale)

    @property
    def temperature(self):
        with self.temperature_path_raw.open() as f:
            data_raw = f.readline()
        with self.temperature_path_scale.open() as f:
            data_scale = f.readline()
        result = int(data_raw) * float(data_scale)
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def pressure(self):
        with self.pressure_path_raw.open() as f:
            data_raw = f.readline()
        with self.pressure_path_scale.open() as f:
            data_scale = f.readline()
        result = int(data_raw) * float(data_scale) * 10
        if (self.property_sensor_config.rounding != -1):
            result = round(result, self.property_sensor_config.rounding)
        return result

    @property
    def bus(self):
        return self.property_bus
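
A minimal usage sketch. The config object here is a stand-in built with SimpleNamespace; the real project defines its own temperature_config type, so the attribute names below (device, rounding) are assumptions read off the class above:

from types import SimpleNamespace

# '1-0060' is the sysfs directory created by the echo command in the
# docstring; rounding=1 rounds readings to one decimal place.
config = SimpleNamespace(device='1-0060', rounding=1)

sensor = MP3115Sensor(config)
print(sensor.temperature)  # raw reading times the IIO scale factor
print(sensor.pressure)     # raw times scale (kPa in IIO), times 10 for hPa
print(sensor.bus)          # 'i2c'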
Example no. 24
    def process_create_file_request(self, request):
        """ Process CREATE_FILE request.

        The request is expected to contain the name of the file and the
        destination directory.

        The possible responses are:

        * ACCEPTED with FILE_CREATED, if creating the file was successful
        * REFUSED with INVALID_FILE_NAME if the file name is not valid
        * REFUSED with NOT_A_DIRECTORY if the destination directory doesn't exist
        * REFUSED with FILE_ALREADY_EXISTS if a file (or directory) with that name already exists

        Other responses include ERROR with BAD_REQUEST if the request is
        improperly formatted, or ERROR with UNKNOWN ERROR if any other
        error occurred during the creation of the file.
        """

        # extract name and directory from the request, send bad request
        # error if something goes wrong
        try:
            assert isinstance(request, tuple)
            assert len(request) == 3

            _, name, directory = request
            directory = PosixPath(directory)

        except Exception:
            response = make_bad_request_error()
            self.socket.send_pyobj(response)

            return

        # return INVALID_FILE_NAME refused response if the file name is
        # not valid
        if not is_file_name_valid(name):
            response = make_invalid_file_name_response()
            self.socket.send_pyobj(response)

            return

        # normalize the directory (later it can be combined with the
        # root directory)
        directory = normalize_directory(directory)

        # combine the destination directory with the root directory
        directory = self.root_directory / directory

        # return NOT_A_DIRECTORY refused response if the destination
        # isn't an actual directory
        if not directory.exists() or directory.is_file():
            response = make_not_a_directory_response()
            self.socket.send_pyobj(response)

            return

        # combine the destination directory with the name to get the
        # full path of the file to create
        file_path = directory / name

        # return FILE_ALREADY_EXISTS refused response if a file (or
        # directory) with that name already exists
        if file_path.exists():
            response = make_file_already_exists_response()
            self.socket.send_pyobj(response)

            return

        # attempt to create the file and return FILE_CREATED unless an
        # error occurred
        try:
            file_path.touch()
        except Exception as error:
            response = make_unknown_error_response(str(error))
        else:
            response = make_file_created_response()

        self.socket.send_pyobj(response)
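
For context, a minimal sketch of the client side of this exchange. Only the (code, name, directory) tuple layout is taken from the handler above; the CREATE_FILE constant, the endpoint, and the REQ socket are assumptions consistent with the send_pyobj/recv_pyobj calls:

import zmq

CREATE_FILE = 'CREATE_FILE'  # hypothetical request code

context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect('tcp://localhost:5555')  # hypothetical endpoint

socket.send_pyobj((CREATE_FILE, 'notes.txt', 'documents'))
response = socket.recv_pyobj()  # ACCEPTED/REFUSED/ERROR response object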
Example no. 25
    def process_make_directory_request(self, request):
        """ Process MAKE_DIRECTORY request.

        The request is expected to contain the destination directory,
        and the name of the directory to create.

        The possible responses are:

        * ACCEPTED with DIRECTORY_CREATED
        * REFUSED with INVALID_FILE_NAME
        * REFUSED with NOT_A_DIRECTORY
        * REFUSED with FILE_ALREADY_EXISTS

        Other responses include ERROR with BAD_REQUEST or UNKNOWN ERROR.
        """

        # extract directory and name from the request, send bad request
        # error if something goes wrong
        try:
            assert isinstance(request, tuple)
            assert len(request) == 3

            _, name, directory = request
            directory = PosixPath(directory)

        except Exception:
            response = make_bad_request_error()
            self.socket.send_pyobj(response)

            return

        # return INVALID_FILE_NAME refused response if the directory
        # name is not valid
        if not is_file_name_valid(name):
            response = make_invalid_file_name_response()
            self.socket.send_pyobj(response)

            return

        # normalize the directory (later it can be combined with the
        # root directory)
        directory = normalize_directory(directory)

        # combine the destination directory with the root directory
        directory = self.root_directory / directory

        # return NOT_A_DIRECTORY refused response if the destination
        # isn't an actual directory
        if not directory.exists() or directory.is_file():
            response = make_not_a_directory_response()
            self.socket.send_pyobj(response)

            return

        # combine the destination directory with the name to get the
        # full path of the directory to create
        directory_path = directory / name

        # return FILE_ALREADY_EXISTS refused response if a directory
        # (or a file) with that name already exists
        if directory_path.exists():
            response = make_file_already_exists_response()
            self.socket.send_pyobj(response)

            return

        # attempt to create the directory and return DIRECTORY_CREATED
        # unless an error occurred
        try:
            directory_path.mkdir()
        except Exception as error:
            response = make_unknown_error_response(str(error))
        else:
            response = make_directory_created_response()

        self.socket.send_pyobj(response)
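
Both handlers lean on is_file_name_valid and normalize_directory, which are defined elsewhere in the project. A sketch of plausible implementations, offered only to make the flow readable (not the project's actual code):

from pathlib import PurePosixPath

def is_file_name_valid(name):
    # Reject empty names, names containing a path separator, and the
    # special dot entries.
    return bool(name) and '/' not in name and name not in ('.', '..')

def normalize_directory(directory):
    # Drop the anchor and any '..' escapes so the result can be safely
    # appended to the server's root directory; an empty result collapses
    # to '.'.
    parts = [p for p in PurePosixPath(directory).parts
             if p not in ('/', '..')]
    return PurePosixPath(*parts)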
Example no. 26
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This gist uses tools to add 100 German cloze image cards."""
from contextlib import closing
from pathlib import PosixPath
import os
from typing import List

import anki

import tools.anki
from tools.process import DoubleAdjectivePic, SingleAdjectivePic

ANKI_DB = PosixPath('/home/grzesiek/Documents/Anki/grzesiek/collection.anki2')
assert (ANKI_DB.is_file())
IMAGE_DIR = PosixPath('images/').absolute()
assert (IMAGE_DIR.is_dir())
SHARED_TAGS = {'200-wichtigsten-deutschen-adjektive', 'Adjektiv'}
DOUBLE_CLOZE_TEMPLATE = """
<div style="display:flex;justify-content:center;">
  <div style="text-align:center;">
    <img src="{left_pic}" style="max-height:200px"/>
    <div>{{{{c1::{left_word}}}}}</div>
  </div>
  <div style="text-align:center;">
    <img src="{right_pic}" style="max-height:200px"/>
    <div>{right_cloze}</div>
  </div>
</div>
"""
Example no. 27
    def process_upload_file_request(self, request):
        """ Process UPLOAD_FILE request.

        The request is expected to contain the name of the uploaded
        file, its destination directory, its size and the chunk size.

        The possible responses are:

        * ACCEPTED with TRANSFER_ACCEPTED
        * REFUSED with INCORRECT_FILE_SIZE
        * REFUSED with INCORRECT_CHUNK_SIZE
        * REFUSED with INVALID_FILE_NAME
        * REFUSED with NOT_A_DIRECTORY
        * REFUSED with FILE_ALREADY_EXISTS

        Other responses include ERROR with BAD_REQUEST or UNKNOWN ERROR.
        """

        # extract information from the request and trigger a bad request
        # error if something goes wrong
        try:
            assert isinstance(request, tuple)
            assert len(request) == 5

            _, name, directory, file_size, chunk_size = request
            directory = PosixPath(directory)

        except Exception:
            response = make_bad_request_error()
            self.socket.send_pyobj(response)

            return

        # validate the file size and the chunk size; send a refused
        # response and fail early if either is out of range
        if file_size <= 0 or file_size >= self.file_size_limit:
            response = make_incorrect_file_size_response()
            self.socket.send_pyobj(response)

            return

        if chunk_size < self.min_chunk_size or chunk_size > self.max_chunk_size:
            response = make_incorrect_chunk_size_response()
            self.socket.send_pyobj(response)

            return

        # return INVALID_FILE_NAME refused response if the upload file
        # name isn't valid
        if not is_file_name_valid(name):
            response = make_invalid_file_name_response()
            self.socket.send_pyobj(response)

            return

        # normalize the directory (later it can be combined with the
        # root directory)
        directory = normalize_directory(directory)

        # combine the destination directory with the root directory
        directory = self.root_directory / directory

        if not directory.exists() or directory.is_file():
            response = make_not_a_directory_response()
            self.socket.send_pyobj(response)

            return

        # combine the destination directory with the name to get the
        # full path of the upload file
        file_path = directory / name

        # check that the file doesn't already exist
        if file_path.exists():
            response = make_file_already_exists_response()
            self.socket.send_pyobj(response)

            return

        # upload file request is accepted, initiate the upload
        # process
        try:
            self.initiate_upload_file(file_path, file_size, chunk_size)
        except Exception as error:
            response = make_unknown_error_response(str(error))
        else:
            # todo: adjust the actual chunk size
            response = make_transfer_accepted_response()

        self.socket.send_pyobj(response)
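
A hedged sketch of the matching client request, reusing the socket from the CREATE_FILE sketch above. Only the 5-tuple layout is taken from the handler; the UPLOAD_FILE constant and the chunk streaming that follows acceptance are assumptions:

from pathlib import Path

UPLOAD_FILE = 'UPLOAD_FILE'   # hypothetical request code
CHUNK_SIZE = 64 * 1024        # must satisfy the server's min/max bounds

local = Path('report.pdf')    # placeholder local file
request = (UPLOAD_FILE, local.name, 'documents',
           local.stat().st_size, CHUNK_SIZE)
socket.send_pyobj(request)
response = socket.recv_pyobj()
# On TRANSFER_ACCEPTED the client would then stream the file in
# CHUNK_SIZE pieces; that part of the protocol isn't shown here.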
Example no. 28
def scan_dir(db, base_dir: str, branch: str, component: str, branch_idx: int):
    pool_path = PosixPath(base_dir).joinpath('pool')
    search_path = pool_path.joinpath(branch).joinpath(component)
    compname = '%s-%s' % (branch, component)
    comppath = '%s/%s' % (branch, component)
    cur = db.cursor()
    cur.execute("""SELECT p.package, p.version, p.repo, p.architecture,
          p.filename, p.size, p.mtime, p.sha256
        FROM pv_packages p
        INNER JOIN pv_repos r ON p.repo=r.name WHERE r.path=%s
        UNION ALL
        SELECT p.package, p.version, p.repo, p.architecture,
          p.filename, p.size, p.mtime, p.sha256
        FROM pv_package_duplicate p
        INNER JOIN pv_repos r ON p.repo=r.name WHERE r.path=%s""",
        (comppath, comppath))
    dup_pkgs = set()
    ignore_files = set()
    modified_repo = set()
    del_list = []
    # For each package/version/architecture we already know in the DB:
    for package, version, repopath, architecture, filename, size, mtime, sha256 in cur:
        fullpath = PosixPath(base_dir).joinpath(filename)
        if fullpath.is_file():
            # If a package with the same name exists:
            stat = fullpath.stat()
            sfullpath = str(fullpath)
            # Skip the file if it is unchanged: same size, and either the
            # same mtime or the same sha256 checksum.
            if size == stat.st_size and (mtime == int(stat.st_mtime) or
                    internal_pkgscan.sha256_file(sfullpath) == sha256):
                ignore_files.add(sfullpath)
            else:
                # Consider the new file to be a duplicate and replace the old one
                dup_pkgs.add(filename)
                del_list.append((filename, package, version, repopath))
        else:
            # If the package has been deleted
            del_list.append((filename, package, version, repopath))
            logger_scan.info('CLEAN  %s', filename)
            module_ipc.publish_change(
                compname, package, architecture, 'delete', version, '')
    # For each package/version/arch/repo to be deleted:
    for row in del_list:
        cur.execute("DELETE FROM pv_packages WHERE filename=%s", (row[0],))
        modified_repo.add(row[-1])  # row[-1] is the repo path
    # Check if there are any new files added. Recursively scan the pool dir and take notes of
    # what we haven't seen yet.
    check_list = []
    for fullpath in search_path.rglob('*.deb'):
        if not fullpath.is_file():
            continue
        stat = fullpath.stat()
        sfullpath = str(fullpath)
        if sfullpath in ignore_files:
            continue
        check_list.append((sfullpath, str(fullpath.relative_to(base_dir)),
                           stat.st_size, int(stat.st_mtime)))
    del ignore_files
    with multiprocessing.dummy.Pool(max(1, os.cpu_count() - 1)) as mpool:
        for pkginfo, depinfo, sodeps, files in mpool.imap_unordered(scan_deb, check_list, 5):
            realname = pkginfo['architecture']
            validdeb = ('debtime' in pkginfo)
            if realname == 'all':
                realname = 'noarch'
            if component != 'main':
                realname = component + '-' + realname
            repo = '%s/%s' % (realname, branch)
            cur.execute("INSERT INTO pv_repos VALUES (%s,%s,%s,%s,%s,%s,%s,now()) "
                "ON CONFLICT DO NOTHING",
                (repo, realname, comppath, branch_idx,
                branch, component, pkginfo['architecture']))
            modified_repo.add(repo)
            pkginfo['repo'] = repo
            dbkey = (pkginfo['package'], pkginfo['version'], repo)
            if pkginfo['filename'] in dup_pkgs:
                if validdeb:
                    logger_scan.info('UPDATE %s', pkginfo['filename'])
                    module_ipc.publish_change(
                        compname, pkginfo['package'], pkginfo['architecture'],
                        'overwrite', pkginfo['version'], pkginfo['version']
                    )
            else:
                cur.execute("SELECT version, filename FROM pv_packages "
                    "WHERE package=%s AND repo=%s", (pkginfo['package'], repo))
                results = cur.fetchall()
                if results:
                    oldver = max(results, key=lambda x: dpkg_vercomp_key(x[0]))
                    vercomp = internal_dpkg_version.dpkg_version_compare(
                        oldver[0], pkginfo['version'])
                    if vercomp == -1:
                        if validdeb:
                            logger_scan.info('NEWER  %s %s %s >> %s',
                                pkginfo['architecture'], pkginfo['package'],
                                pkginfo['version'], oldver[0])
                            module_ipc.publish_change(
                                compname, pkginfo['package'],
                                pkginfo['architecture'], 'upgrade',
                                oldver[0], pkginfo['version']
                            )
                    elif vercomp:
                        logger_scan.warning('OLD    %s %s %s',
                            pkginfo['architecture'], pkginfo['package'],
                            pkginfo['version'])
                    else:
                        cur.execute("DELETE FROM pv_package_sodep "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_files "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_dependencies "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("DELETE FROM pv_package_duplicate "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        cur.execute("INSERT INTO pv_package_duplicate "
                            "SELECT * FROM pv_packages WHERE filename=%s",
                            (oldver[1],))
                        cur.execute("DELETE FROM pv_packages "
                            "WHERE package=%s AND version=%s AND repo=%s", dbkey)
                        logger_scan.error('DUP    %s == %s',
                            oldver[1], pkginfo['filename'])
                elif validdeb:
                    logger_scan.info('NEW    %s %s %s', pkginfo['architecture'],
                        pkginfo['package'], pkginfo['version'])
                    module_ipc.publish_change(
                        compname, pkginfo['package'], pkginfo['architecture'],
                        'new', '', pkginfo['version']
                    )
            keys, qms, vals = internal_db.make_insert(pkginfo)
            cur.execute("INSERT INTO pv_packages (%s) VALUES (%s)" %
                (keys, qms), vals)
            for row in depinfo.items():
                cur.execute("INSERT INTO pv_package_dependencies "
                    "VALUES (%s,%s,%s,%s,%s) "
                    "ON CONFLICT ON CONSTRAINT pv_package_dependencies_pkey "
                    "DO UPDATE SET value = %s",
                    dbkey + row + (row[1],))
            for row in sodeps:
                cur.execute("INSERT INTO pv_package_sodep VALUES "
                    "(%s,%s,%s,%s,%s,%s)", dbkey + row)
            for row in files:
                cur.execute("INSERT INTO pv_package_files VALUES "
                    "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", dbkey + row)
    for repo in modified_repo:
        cur.execute("UPDATE pv_repos SET mtime=now() WHERE name=%s", (repo,))