Ejemplo n.º 1
0
def forward_proxy(rundir):
    """Forward proxy to location visible from the batch system.

    Copies the user's current VOMS proxy file into `rundir` under the
    name ".user_proxy" so batch jobs can pick it up.  Exits the program
    if no valid proxy is present.

    Arguments:
    - `rundir`: directory for storing the forwarded proxy
    """

    if not mps_tools.check_proxy():
        print("Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
        sys.exit(1)

    # check_output() returns bytes on Python 3 -> decode so the path is a
    # str and mixes cleanly with the str arguments of os.path.join below
    local_proxy = subprocess.check_output(
        ["voms-proxy-info", "--path"]).decode().strip()
    shutil.copyfile(local_proxy, os.path.join(rundir,".user_proxy"))
Ejemplo n.º 2
0
def forward_proxy(rundir):
    """Forward proxy to location visible from the batch system.

    Aborts the program when no valid proxy is available; otherwise the
    proxy file is copied into `rundir` as ".user_proxy".

    Arguments:
    - `rundir`: directory for storing the forwarded proxy
    """

    if not mps_tools.check_proxy():
        print("Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
        sys.exit(1)

    # locate the user's proxy file and copy it next to the job files
    proxy_path = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
    destination = os.path.join(rundir,".user_proxy")
    shutil.copyfile(proxy_path, destination)
Ejemplo n.º 3
0
    def __init__(self, argv):
        """Constructor taking the command line arguments.

        Arguments:
        - `argv`: list of command line arguments
        """

        self._first_dataset_ini = True
        self._parser = self._define_parser()
        self._args = self._parser.parse_args(argv)

        # a valid grid proxy is needed for the DAS queries done later on
        if not mps_tools.check_proxy():
            print_msg(
                "Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
            sys.exit(1)

        # matches "/<primary dataset>/<processed dataset>/<data tier>"
        self._dataset_regex = re.compile(r"^/([^/]+)/([^/]+)/([^/]+)$")
        self._validate_input()

        if self._args.test_mode:
            import Configuration.PyReleaseValidation.relval_steps as rvs
            import Configuration.PyReleaseValidation.relval_production as rvp
            self._args.datasets = [rvs.steps[rvp.workflows[1000][1][0]]["INPUT"].dataSet]
            self._validate_input() # ensure that this change is valid

        # expand each dataset pattern via DAS and keep only those datasets
        # that also match the user-supplied filter expression
        self._datasets = sorted([dataset
                                 for pattern in self._args.datasets
                                 for dataset in get_datasets(pattern)
                                 if re.search(self._args.dataset_filter, dataset)])
        if len(self._datasets) == 0:
            print_msg("Found no dataset matching the pattern(s):")
            for d in self._args.datasets: print_msg("\t"+d)
            sys.exit(1)

        # "/A/B/C" -> "A_B_C"; merge_strings() collapses the per-dataset
        # names into one label used as the output sub-directory
        self._formatted_dataset = merge_strings(
            [re.sub(self._dataset_regex, r"\1_\2_\3", dataset)
             for dataset in self._datasets])
        self._output_dir = os.path.join(self._args.output_dir,
                                        self._formatted_dataset)
        self._output_dir = os.path.abspath(self._output_dir)
        self._cache = _DasCache(self._output_dir)
        self._prepare_iov_datastructures()
        self._prepare_run_datastructures()

        import errno  # local import: keeps this fix self-contained
        try:
            os.makedirs(self._output_dir)
        except OSError as e:
            # Compare by errno instead of the exact (17, "File exists")
            # args tuple: the message text is platform/locale dependent,
            # so a tuple comparison can wrongly re-raise on "directory
            # already exists".
            if e.errno == errno.EEXIST:
                if self._args.force:
                    pass        # do nothing, just clear the existing output
                elif self._args.use_cache:
                    self._cache.load() # load cache before clearing the output
                else:
                    print_msg("Directory '{}' already exists from previous runs"
                              " of the script. Use '--use-cache' if you want to"
                              " use the cached DAS-query results Or use "
                              "'--force' to remove it."
                              .format(self._output_dir))
                    sys.exit(1)
                files = glob.glob(os.path.join(self._output_dir, "*"))
                for f in files: os.remove(f)
            else:
                raise
Ejemplo n.º 4
0
    def __init__(self, argv):
        """Constructor taking the command line arguments.

        Arguments:
        - `argv`: list of command line arguments
        """

        self._first_dataset_ini = True
        self._parser = self._define_parser()
        self._args = self._parser.parse_args(argv)

        # a valid grid proxy is needed for the DAS queries done later on
        if not mps_tools.check_proxy():
            print_msg(
                "Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
            sys.exit(1)

        # matches "/<primary dataset>/<processed dataset>/<data tier>"
        self._dataset_regex = re.compile(r"^/([^/]+)/([^/]+)/([^/]+)$")
        self._validate_input()

        if self._args.test_mode:
            import Configuration.PyReleaseValidation.relval_steps as rvs
            import Configuration.PyReleaseValidation.relval_production as rvp
            self._args.datasets = [rvs.steps[rvp.workflows[1000][1][0]]["INPUT"].dataSet]
            self._validate_input() # ensure that this change is valid

        # expand each dataset pattern into the full dataset names via DAS
        self._datasets = sorted([dataset
                                 for pattern in self._args.datasets
                                 for dataset in get_datasets(pattern)])
        if len(self._datasets) == 0:
            print_msg("Found no dataset matching the pattern(s):")
            for d in self._args.datasets: print_msg("\t"+d)
            sys.exit(1)

        # "/A/B/C" -> "A_B_C"; merge_strings() collapses the per-dataset
        # names into one label used as the output sub-directory
        self._formatted_dataset = merge_strings(
            [re.sub(self._dataset_regex, r"\1_\2_\3", dataset)
             for dataset in self._datasets])
        self._output_dir = os.path.join(self._args.output_dir,
                                        self._formatted_dataset)
        self._output_dir = os.path.abspath(self._output_dir)
        self._cache = _DasCache(self._output_dir)
        self._prepare_iov_datastructures()
        self._prepare_run_datastructures()

        import errno  # local import: keeps this fix self-contained
        try:
            os.makedirs(self._output_dir)
        except OSError as e:
            # Compare by errno instead of the exact (17, "File exists")
            # args tuple: the message text is platform/locale dependent,
            # so a tuple comparison can wrongly re-raise on "directory
            # already exists".
            if e.errno == errno.EEXIST:
                if self._args.force:
                    pass        # do nothing, just clear the existing output
                elif self._args.use_cache:
                    self._cache.load() # load cache before clearing the output
                else:
                    print_msg("Directory '{}' already exists from previous runs"
                              " of the script. Use '--use-cache' if you want to"
                              " use the cached DAS-query results Or use "
                              "'--force' to remove it."
                              .format(self._output_dir))
                    sys.exit(1)
                files = glob.glob(os.path.join(self._output_dir, "*"))
                for f in files: os.remove(f)
            else:
                raise