Example #1
    def timestamp(self) -> int:
        timestamp = self.__DEFAULT_TIMESTAMP

        if exists(self._checkout_info_file):
            checkout_info = read_yaml(self._checkout_info_file)
            timestamp = checkout_info.get(ProjectCheckout.__KEY_TIMESTAMP, self.__DEFAULT_TIMESTAMP)

        return timestamp
Example #2
    def timestamp(self) -> int:
        timestamp = VersionCompile.__DEFAULT_TIMESTAMP

        if exists(self._compile_info_file):
            compile_info = read_yaml(self._compile_info_file)
            timestamp = compile_info.get(VersionCompile.__KEY_TIMESTAMP,
                                          VersionCompile.__DEFAULT_TIMESTAMP)

        return timestamp
Example #3
def get_white_list(datasets_file_path: str,
                   dataset_key: Optional[str]) -> List[str]:
    if dataset_key is None:
        return []

    datasets = read_yaml(datasets_file_path)
    if dataset_key not in datasets:
        raise ValueError("Invalid dataset: '{}'".format(dataset_key))
    return datasets[dataset_key]
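A minimal usage sketch; the file name and its contents here are hypothetical, not taken from the project:

# datasets.yml is assumed to map dataset ids to lists of entries, e.g.
#   icse16: ["project-a", "project-b"]
white_list = get_white_list("datasets.yml", "icse16")  # -> ["project-a", "project-b"]
no_filter = get_white_list("datasets.yml", None)       # -> []
# Any other key raises ValueError("Invalid dataset: '<key>'").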
Example #4
def __load_data(run_file_path: str):
    data = {
        "result": None,
        "runtime": None,
        "message": "",
        "md5": None
    }
    data.update(read_yaml(run_file_path) if exists(run_file_path) else {})
    return data
Example #5
    def timestamp(self):
        timestamp = self.__DEFAULT_TIMESTAMP

        if exists(self._misuse_compile_file):
            compile_info = read_yaml(self._misuse_compile_file)
            timestamp = compile_info.get(self.__TIMESTAMP_KEY,
                                         self.__DEFAULT_TIMESTAMP)

        return timestamp
Example #6
    def __init__(self, run_mode: DetectorMode, detector: Detector,
                 version: ProjectVersion, findings_base_path: str,
                 findings_filter: FindingsFilter):
        self.run_mode = run_mode
        self.detector = detector
        self.version = version
        self._findings_base_path = findings_base_path
        self._findings_file_path = join(self._get_findings_path(),
                                        self.FINDINGS_FILE)
        self.__FINDINGS = None
        self.__POTENTIAL_HITS = None

        data = {"result": None, "runtime": 0, "message": "", "md5": None}
        data.update(read_yaml(self._run_file_path)
                    if exists(self._run_file_path) else {})
        self.result = Result[data["result"]] if data["result"] else None
        self.runtime = data["runtime"]
        self.message = data["message"]
        self._detector_md5 = data["md5"]

        self.findings_filter = findings_filter
Example #7
                        action='store_true')

    __add_check_subprocess(available_datasets, subparsers)
    __add_info_subprocess(available_datasets, subparsers)
    __add_checkout_subprocess(available_datasets, subparsers)
    __add_compile_subprocess(available_datasets, subparsers)
    __add_run_subprocess(available_detectors, available_datasets, subparsers)
    __add_publish_subprocess(available_detectors, available_datasets,
                             subparsers)
    __add_stats_subprocess(available_scripts, available_datasets, subparsers)

    return parser


try:
    __default_config = read_yaml("./default.config")
except FileNotFoundError:
    __default_config = None


def __get_default(parameter: str, default):
    if __default_config is not None and parameter in __default_config:
        return __default_config[parameter]
    return default


def __add_check_subprocess(available_datasets: List[str], subparsers) -> None:
    check_parser = subparsers.add_parser(
        'check',
        formatter_class=SortingHelpFormatter,
        help="Check MUBench runtime requirements or dataset consistency.",
Example #8
                    self.optimizer.step()
                    self.optimizer.zero_grad()

                if idx % self.opts['print_freq'] == 0:
                    print(f'[Dist] Step: {idx} MMD: {mmd.item()}')
                    if self.opts['use_task_loss']:
                        print(f'[Task] Reward: {acc}, Baseline: {baseline}')
                    # debug information
                    print(
                        f'[Feat] Step: {idx} {dec_act[0, 2, 15:].tolist()} {x[0, 2, 15:].tolist()}'
                    )
                    # To debug, this index is the loc_x, loc_y, yaw of the
                    # digit in MNIST

            if self.opts['use_task_loss']:
                self.optimizer.step()
                self.optimizer.zero_grad()

            # LR scheduler step
            self.lr_sched.step()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--exp', required=True, type=str)
    opts = parser.parse_args()
    opts = io.read_yaml(opts.exp)

    trainer = Trainer(opts)
    trainer.train()
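Judging by the keys read in the loop above (print_freq, use_task_loss), io.read_yaml(opts.exp) presumably returns a plain options dict; a hypothetical sketch of its shape, with invented values:

# Hypothetical opts dict as loaded from the experiment YAML; only the keys
# visible in this snippet are listed, and the values are made up.
opts = {
    'print_freq': 100,      # log MMD / reward / feature debug output every N steps
    'use_task_loss': True,  # enables reward/baseline logging and the per-epoch optimizer step
}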
Example #9
    def _yaml(self) -> Dict[str, Any]:
        if self._YAML is None:
            self._YAML = read_yaml(self.version_file)
        return self._YAML
Example #10
def get_available_datasets(datasets_file_path: str) -> Dict[str, List[str]]:
    return read_yaml(datasets_file_path)
Example #11
def __get_lowercase_datasets(datasets_file_path: str) -> Dict[str, List[str]]:
    datasets = read_yaml(datasets_file_path)
    return {k.lower(): [e.lower() for e in v] for k, v in datasets.items()}
Example #12
def get_available_dataset_ids(datasets_file_path: str) -> List[str]:
    datasets = read_yaml(datasets_file_path)
    return list(datasets.keys())
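
All of the examples above call a read_yaml helper whose implementation is not shown here. A minimal sketch with the same call shape, assuming it simply wraps PyYAML's safe_load, could look like this:

import yaml

def read_yaml(file_path: str):
    # Parse a single YAML document; safe_load avoids constructing arbitrary Python objects.
    with open(file_path, encoding="utf-8") as stream:
        return yaml.safe_load(stream)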