Exemple #1
0
def check_text_files(obtained_fn, expected_fn, fix_callback=lambda x: x, encoding=None):
    """
    Compare the contents of two text files, raising ``AssertionError`` when
    they differ.

    On a mismatch the unified diff is embedded in the assertion message and,
    for reasonably small diffs, an HTML diff file is written next to the
    obtained file.

    :param Path obtained_fn: path of the file produced by the current test run.

    :param Path expected_fn: path of the reference file from a previous run.

    :param str encoding: encoding used to read both files.

    :param callable fix_callback:
        Receives the obtained file's lines (list of str) and returns a
        possibly modified list of lines; the result is what gets compared
        against the contents of expected_fn.
    """
    __tracebackhide__ = True

    obtained_fn = Path(obtained_fn)
    expected_fn = Path(expected_fn)
    obtained_lines = fix_callback(obtained_fn.read_text(encoding=encoding).splitlines())
    expected_lines = expected_fn.read_text(encoding=encoding).splitlines()

    if obtained_lines == expected_lines:
        return

    diff_lines = list(difflib.unified_diff(expected_lines, obtained_lines, lineterm=""))
    if len(diff_lines) > 500:
        # difflib has exponential scaling and for thousands of lines it starts
        # to take minutes to render the HTML diff, so only report the sizes.
        raise AssertionError(
            "\n".join(
                [
                    "Files are different, but diff is too big ({} lines)".format(len(diff_lines)),
                    "- obtained: {}".format(obtained_fn),
                    "- expected: {}".format(expected_fn),
                ]
            )
        )

    html_fn = obtained_fn.with_suffix(".diff.html")
    try:
        html_diff = difflib.HtmlDiff().make_file(
            fromlines=expected_lines,
            fromdesc=expected_fn,
            tolines=obtained_lines,
            todesc=obtained_fn,
        )
    except Exception as e:
        # Fall back to a placeholder that still shows up in the message below.
        html_fn = "(failed to generate html diff: %s)" % e
    else:
        html_fn.write_text(html_diff, encoding="UTF-8")

    message = ["FILES DIFFER:", str(expected_fn), str(obtained_fn)]
    message.append("HTML DIFF: %s" % html_fn)
    message.extend(diff_lines)
    raise AssertionError("\n".join(message))
def check_text_files(obtained_fn, expected_fn, fix_callback=lambda x: x, encoding=None):
    """
    Compare two files contents. If the files differ, show the diff and write a nice HTML
    diff file into the data directory.

    :param Path obtained_fn: path to obtained file during current testing.

    :param Path expected_fn: path to the expected file, obtained from previous testing.

    :param str encoding: encoding used to open the files.

    :param callable fix_callback:
        A callback to "fix" the contents of the obtained (first) file.
        This callback receives a list of strings (lines) and must also return a list of lines,
        changed as needed.
        The resulting lines will be used to compare with the contents of expected_fn.
    """
    __tracebackhide__ = True

    obtained_fn = Path(obtained_fn)
    expected_fn = Path(expected_fn)
    obtained_lines = fix_callback(obtained_fn.read_text(encoding=encoding).splitlines())
    expected_lines = expected_fn.read_text(encoding=encoding).splitlines()

    if obtained_lines != expected_lines:
        # lineterm="" is required because the lines come from splitlines() and
        # carry no trailing newline; without it unified_diff appends "\n" to
        # the "---"/"+++"/"@@" header lines, which produces spurious blank
        # lines once everything is "\n".join()-ed into the assertion message.
        diff_lines = list(
            difflib.unified_diff(expected_lines, obtained_lines, lineterm="")
        )
        if len(diff_lines) <= 500:
            html_fn = obtained_fn.with_suffix(".diff.html")
            try:
                differ = difflib.HtmlDiff()
                html_diff = differ.make_file(
                    fromlines=expected_lines,
                    fromdesc=expected_fn,
                    tolines=obtained_lines,
                    todesc=obtained_fn,
                )
            except Exception as e:
                # Keep going: the textual diff is still raised below.
                html_fn = "(failed to generate html diff: %s)" % e
            else:
                html_fn.write_text(html_diff, encoding="UTF-8")

            diff = ["FILES DIFFER:", str(expected_fn), str(obtained_fn)]
            diff += ["HTML DIFF: %s" % html_fn]
            diff += diff_lines
            raise AssertionError("\n".join(diff))
        else:
            # difflib has exponential scaling and for thousands of lines it starts to take minutes to render
            # the HTML diff.
            msg = [
                "Files are different, but diff is too big (%s lines)" % (len(diff_lines),),
                "- obtained: %s" % (obtained_fn,),
                "- expected: %s" % (expected_fn,),
            ]
            raise AssertionError("\n".join(msg))
Exemple #3
0
    def test_create_stack_with_changeset(self, patched_create_change_set,
                                         patched_update_term):
        """Test create_stack, force changeset, termination protection."""
        stack_name = 'fake_stack'
        template_path = Path('./tests/cfngin/fixtures/cfn_template.yaml')
        template = Template(body=template_path.read_text())
        parameters = []
        tags = []

        changeset_id = 'CHANGESETID'

        # create_change_set is patched out; pretend it produced this changeset
        # (empty change list, fixed id).
        patched_create_change_set.return_value = ([], changeset_id)

        # Expect exactly one execute_change_set API call for that changeset.
        self.stubber.add_response('execute_change_set', {},
                                  {'ChangeSetName': changeset_id})

        with self.stubber:
            self.provider.create_stack(stack_name,
                                       template,
                                       parameters,
                                       tags,
                                       force_change_set=True,
                                       termination_protection=True)
        self.stubber.assert_no_pending_responses()

        # The changeset must be created with action 'CREATE' and the
        # provider's service role, and termination protection must be enabled
        # for the new stack.
        patched_create_change_set.assert_called_once_with(
            self.provider.cloudformation,
            stack_name,
            template,
            parameters,
            tags,
            'CREATE',
            service_role=self.provider.service_role)
        patched_update_term.assert_called_once_with(stack_name, True)
Exemple #4
0
    def handle(cls, name, args):
        # type: (str, Dict[str, Any]) -> None
        """Perform the actual test.

        Relies on .cfnlintrc file to be located beside the Runway config file.

        """
        rc_file = Path('./.cfnlintrc')
        if not rc_file.is_file():
            LOGGER.error('File must exist to use this test: %s', rc_file)
            sys.exit(1)

        # prevent duplicate log messages by not passing to the root logger
        logging.getLogger('cfnlint').propagate = False

        cli_args = ['cfn-lint'] + args.get('cli_args', [])
        try:
            with argv(*cli_args):
                runpy.run_module('cfnlint', run_name='__main__')
        except SystemExit as err:  # this call will always result in SystemExit
            if err.code == 0:
                # zero exit codes are fine; swallow the SystemExit
                return
            rc_config = yaml.safe_load(rc_file.read_text()) or {}
            if not rc_config.get('templates'):
                LOGGER.warning('cfnlintrc is missing a "templates" '
                               'section which is required by cfn-lint')
            raise
Exemple #5
0
def convert_reorientparams_save_to_mat_script():
    """Instantiate the convert-to-mat template with the reorient parameters
    and register it with the SPM12 standalone runtime."""
    from pathlib2 import Path
    import shutil
    shutil.copy('/computation/convert_to_mat_file_template.m',
                '/computation/convert_to_mat_file.m')
    script = Path('/computation/convert_to_mat_file.m')
    contents = script.read_text()
    # Each placeholder in the template maps to the template_dict entry of the
    # same name prefixed with "reorient_params_"; replacement order matches
    # the original one-by-one substitutions.
    placeholders = (
        'x_mm', 'y_mm', 'z_mm',
        'pitch', 'roll', 'yaw',
        'x_scaling', 'y_scaling', 'z_scaling',
        'x_affine', 'y_affine', 'z_affine',
    )
    for placeholder in placeholders:
        contents = contents.replace(
            placeholder, str(template_dict['reorient_params_' + placeholder]))
    script.write_text(contents)
    # Run reorient.m script using spm12 standalone and Matlab MCR
    with stdchannel_redirected(sys.stderr, os.devnull):
        spm.SPMCommand.set_mlab_paths(
            matlab_cmd=
            '/opt/spm12/run_spm12.sh /opt/mcr/v95 script /computation/convert_to_mat_file.m',
            use_mcr=True)
Exemple #6
0
    def _build_wp(self):
        """Download, unpack and configure a WordPress site; roll back on error."""
        try:
            site_dir = '{}{}/'.format(self.APACHE['path'], self._name)

            print(t.bold_yellow('> getting wordpress'))
            download(self.WP_PATH, self._name)
            print(t.bold_yellow('> uncompressing'))
            ZipFile(self._name, 'r').extractall()
            remove(self._name)
            print(t.bold_yellow('> setting up site'))
            move('wordpress', site_dir)
            chown(site_dir, 'www-data', 'www-data')

            rename('{}wp-config-sample.php'.format(site_dir),
                   '{}wp-config.php'.format(site_dir))

            # Fill the sample config's placeholders with this site's values.
            config = Path('{}wp-config.php'.format(site_dir))
            contents = config.read_text()
            for placeholder, value in (
                    ('database_name_here', self._name),
                    ('username_here', self._name),
                    ('password_here', self._psw),
                    ('put your unique phrase here', self._random_str(50))):
                contents = contents.replace(placeholder, value)
            config.write_text(contents)
        except Exception as e:
            self._rollback()
            print(
                t.bold_red(
                    'Error trying to build wordpress context, rolling back database changes...'
                ), e)
            exit(3)
Exemple #7
0
def test_copy_or_create(conf_dir):
    """copy_or_create writes the fallback content when the source is missing
    and copies the source when it exists."""
    source = Path(conf_dir, "temp.x.in")
    destination = Path(conf_dir, "temp.x.out")

    # Source doesn't exist yet: destination must get the fallback content.
    msi_update.copy_or_create(source, destination, u"!!!")
    assert destination.exists()
    assert destination.read_text() == "!!!"

    # Source exists now: its content must win over the fallback.
    source.write_text(u"+++")
    msi_update.copy_or_create(source, destination, u"!!!")
    assert destination.exists()
    assert destination.read_text() == "+++"
Exemple #8
0
 def generate_ssh_key_instance(self, path, storage):
     """Read a private key file and wrap it in a validated SshKey instance."""
     key_file = Path(path)
     ssh_key = SshKey(private_key=key_file.read_text(),
                      label=key_file.name)
     self.validate_ssh_key(ssh_key, storage)
     return ssh_key
Exemple #9
0
class SNMPtrapd(object):
    """Manage a local ``snmptrapd`` daemon for tests.

    The daemon's log, stdout and stderr are written to files inside the test
    case's working directory; received traps can be read back via get_logs().
    """

    def __init__(self, port):
        # Process handle; None whenever the daemon is not running.
        self.snmptrapd_proc = None
        self.port = port

        # All output files live in the test case's working directory.
        self.snmptrapd_log = Path(tc_parameters.WORKING_DIR, "snmptrapd_log")
        self.snmptrapd_stdout_path = Path(tc_parameters.WORKING_DIR,
                                          "snmptrapd_stdout")
        self.snmptrapd_stderr_path = Path(tc_parameters.WORKING_DIR,
                                          "snmptrapd_stderr")

    def wait_for_snmptrapd_log_creation(self):
        # Poll predicate: the daemon has created its log file.
        return self.snmptrapd_log.exists()

    def wait_for_snmptrapd_startup(self):
        # Poll predicate: snmptrapd writes a "NET-SNMP version" banner to the
        # log once it is up.
        return "NET-SNMP version" in self.snmptrapd_log.read_text()

    def start(self):
        """Start snmptrapd and block until it is up; no-op if already running."""
        if self.snmptrapd_proc is not None:
            return

        # -f keeps the daemon in the foreground so the executor can manage it;
        # -LF sends log priorities 6-6 to the given file (see snmptrapd(8)).
        self.snmptrapd_proc = ProcessExecutor().start(
            [
                "snmptrapd",
                "-f",
                "--disableAuthorization=yes",
                "-C",
                "-On",
                "--doNotLogTraps=no",
                self.port,
                "-LF",
                "6-6",
                os.path.relpath(str(self.snmptrapd_log)),
            ],
            self.snmptrapd_stdout_path,
            self.snmptrapd_stderr_path,
        )
        # Wait for the log file to appear first, then for the startup banner.
        wait_until_true(self.wait_for_snmptrapd_log_creation)
        wait_until_true(self.wait_for_snmptrapd_startup)
        return self.snmptrapd_proc.is_running()

    def stop(self):
        """Terminate the daemon, escalating to kill after a 4s grace period."""
        if self.snmptrapd_proc is None:
            return

        self.snmptrapd_proc.terminate()
        try:
            self.snmptrapd_proc.wait(4)
        except TimeoutExpired:
            self.snmptrapd_proc.kill()

        self.snmptrapd_proc = None

    def get_port(self):
        return self.port

    def get_logs(self):
        """Return all log lines the daemon has written so far."""
        file_reader = DestinationReader(FileIO)
        return file_reader.read_all_logs(self.snmptrapd_log)
 def generate_ssh_key_instance(self, path, storage):
     """Generate ssh key from file."""
     # The key file's content becomes the private key; its file name doubles
     # as the key's label.
     private_key_path = Path(path)
     instance = SshKey(
         private_key=private_key_path.read_text(),
         label=private_key_path.name
     )
     # NOTE(review): presumably raises when the key is invalid for the given
     # storage backend -- confirm against validate_ssh_key's implementation.
     self.validate_ssh_key(instance, storage)
     return instance
Exemple #11
0
    def update_model_desc(self, new_model_desc_file=None):
        """Change the task's model_desc.

        :param new_model_desc_file: path to a file whose text content becomes
            the task's new model description.
        :raises IOError: if ``new_model_desc_file`` does not point to a file.
        :return: the backend response of the edit operation.
        """
        execution = self._get_task_property('execution')
        p = Path(new_model_desc_file)
        if not p.is_file():
            # Message used to say "mode_desc" -- typo fixed.
            raise IOError('model_desc file %s cannot be found' % new_model_desc_file)
        new_model_desc = p.read_text()
        # Reuse the existing description key when one is present, otherwise
        # fall back to the default 'design' key.
        model_desc_key = list(execution.model_desc.keys())[0] if execution.model_desc else 'design'
        execution.model_desc[model_desc_key] = new_model_desc

        res = self._edit(execution=execution)
        return res.response
Exemple #12
0
def remove_federation_block(remote):
    """Strip the federation section from the remote SPIRE server config."""
    server_conf_path = "/opt/spire/conf/server/server.conf"
    copy_file_from_remote(remote, server_conf_path)

    conf_file = Path(server_conf_path)
    content = conf_file.read_text()

    # The federation block starts at the blank line preceding the
    # "federation" keyword and ends right after its closing braces.
    start = content.find("\n\n    federation")
    terminator = "}\n        }\n    }\n\n"
    end = content.find(terminator, start) + len(terminator)

    content = content.replace(content[start:end], "")
    conf_file.write_text(content)
    send_file_to_remote(remote, server_conf_path)
Exemple #13
0
def convert_and_run_reorient_script(input_file):
    """Instantiate the reorient template with *input_file* and register the
    resulting script with the SPM12 standalone runtime.

    :param str input_file: value substituted for the ``input_file``
        placeholder in the template.
    """
    from pathlib2 import Path
    import shutil
    from nipype.interfaces import spm
    shutil.copy('/computation/reorient_template.m', '/computation/reorient.m')
    path = Path('/computation/reorient.m')
    text = path.read_text()
    text = text.replace('input_file', input_file)
    path.write_text(text)
    # Run convert_to_mat_file.m script using spm12 standalone and Matlab MCR
    # NOTE(review): the file written above is reorient.m, but the command
    # below runs reorient_job.m -- confirm this mismatch is intentional.
    with stdchannel_redirected(sys.stderr, os.devnull):
        spm.SPMCommand.set_mlab_paths(matlab_cmd='/opt/spm12/run_spm12.sh /opt/mcr/v95 script /computation/reorient_job.m',
                                  use_mcr=True)
Exemple #14
0
def _get_modules(context,
                 filter_path=None,
                 filter_module=None,
                 filter_changes=True):
    """Return the list of module directories to operate on, sorted by group.

    :param context: invoke/fabric context used to run local shell commands.
    :param filter_path: if given, keep only modules that contain this path.
    :param filter_module: if given, bypass discovery and use this module only.
    :param filter_changes: when True, keep only modules with uncommitted git
        changes (forced off when FAB_SKIP_DELTA is set in the environment).
    """
    if filter_module is None:
        working_modules = []
        filter_changes = filter_changes if FAB_SKIP_DELTA not in os.environ else False
        # Split the current working directory into path components.
        working_dirs = _run_local(
            context, "pwd",
            hide='out').stdout.encode("utf8").strip().split('/')
        if working_dirs[-1] == "asystem":
            # At the repository root: consider every */* directory, optionally
            # restricted to those reporting git changes.
            for filtered_module in filter(
                    lambda module_tmp: isdir(module_tmp) and
                (not filter_changes or _run_local(context,
                                                  "git status --porcelain {}".
                                                  format(module_tmp),
                                                  DIR_ROOT,
                                                  hide='out').stdout),
                    glob.glob('*/*')):
                working_modules.append(filtered_module)
        else:
            # Inside the repository: derive the module from the cwd when deep
            # enough, otherwise list the nested modules one level down.
            root_dir_index = working_dirs.index("asystem")
            if (root_dir_index + 2) < len(working_dirs):
                working_modules.append(working_dirs[root_dir_index + 1] + "/" +
                                       working_dirs[root_dir_index + 2])
            else:
                for nested_modules in filter(
                        lambda module_tmp: isdir(module_tmp), glob.glob('*')):
                    working_modules.append("{}/{}".format(
                        working_dirs[root_dir_index + 1], nested_modules))
        # Optionally keep only modules containing filter_path.
        working_modules[:] = [
            module for module in working_modules if filter_path is None
            or glob.glob("{}/{}/{}*".format(DIR_ROOT, module, filter_path))
        ]
        # Modules may declare a group in a ".group" file; ungrouped modules
        # sort last via the "ZZZZZ" sentinel.
        grouped_modules = {}
        for module in working_modules:
            group_path = Path(join(DIR_ROOT, module, ".group"))
            group = group_path.read_text().strip() if group_path.exists(
            ) else "ZZZZZ"
            if group not in grouped_modules:
                grouped_modules[group] = [module]
            else:
                grouped_modules[group].append(module)
        sorted_modules = []
        for group in sorted(grouped_modules):
            sorted_modules.extend(grouped_modules[group])
        return sorted_modules
    else:
        # Explicit module: honour filter_path, otherwise return it as-is.
        return [filter_module] if filter_path is None or os.path.exists(
            join(DIR_ROOT, filter_module, filter_path)) else []
Exemple #15
0
class SerializedEntity:  # pragma: no cover -- TODO requires functional tests.
    """Persist a single versioned entity to a file via jsonpickle.

    Can be used as a context manager: the entity is loaded on enter and
    saved back on exit.
    """

    def __init__(self,
                 filename,
                 current_version,
                 entity_name='data',
                 unlink=False,
                 readable=False):
        # filename: path of the serialized file.
        # current_version: highest data version this code understands.
        # entity_name: key under which the entity is stored in the file.
        # unlink: when True, delete the file after a successful load.
        # readable: when True, save indented JSON for human inspection.
        self.filename = Path(filename)
        self.version = current_version
        self.entity_name = entity_name
        self.unlink = unlink
        self.readable = readable
        self.entity = None

    def load(self):
        """Load and return the stored entity, or None if the file is absent.

        Raises RuntimeError when the stored data version is newer than the
        currently supported one.
        """
        if not self.filename.exists():
            return None
        data = self.filename.read_text()
        savedata = jsonpickle.decode(data, keys=True)
        if savedata['version'] > self.version:
            raise RuntimeError(
                "Stored data version {0} is newer than currently supported {1}"
                .format(savedata['version'], self.version))
        self.entity = savedata[self.entity_name]
        if self.unlink:
            os.unlink(str(self.filename))
        return self.entity

    def reset(self, new_value=None):
        """Replace the in-memory entity without touching the file."""
        self.entity = new_value

    def save(self, entity=None):
        """Serialize *entity* (default: the cached one) to the file.

        A no-op when there is nothing to save.
        """
        if entity is None:
            entity = self.entity
        if entity is None:
            return
        savedata = {'version': self.version, self.entity_name: entity}
        if self.readable:
            data = jsonpickle.encode(savedata, indent=2, keys=True)
        else:
            data = jsonpickle.encode(savedata, keys=True)
        self.filename.write_bytes(data.encode('utf-8', 'replace'))

    def __enter__(self):
        self.load()
        return self

    def __exit__(self, *args, **kwargs):
        self.save()
Exemple #16
0
def DebuggerSwitch(prod_mode: bool):
    """Set the ``prod_mode`` flag in every cog module under ./cogs.

    Rewrites ``prod_mode = True`` / ``prod_mode = False`` assignments in each
    ``*.py`` file so they match *prod_mode*.

    :param prod_mode: desired value of the flag in all cog files.
    """
    target = "prod_mode = " + str(prod_mode)
    for filename in os.listdir("./cogs"):
        if not filename.endswith(".py"):
            continue
        path = Path(os.path.join("./cogs", filename))
        text = path.read_text()
        # Plain substring replacement covers both variants; the previous
        # re.findall calls were unnecessary for literal patterns and the
        # elif meant a file containing both variants was only half updated.
        updated = text.replace("prod_mode = True", target).replace(
            "prod_mode = False", target)
        # Only touch the file when something actually changed.
        if updated != text:
            path.write_text(updated)
Exemple #17
0
    def decrypt(self, input_file):
        """Run slogverify on *input_file* and return the decrypted lines."""
        workdir = tc_parameters.WORKING_DIR
        stdout_path = Path(workdir, "slogverify_stdout")
        stderr_path = Path(workdir, "slogverify_stderr")
        encrypted = Path(workdir, input_file)
        decrypted = Path(workdir, "decrypted.txt")

        command = [
            self.slogverify,
            self.decryption_key,
            encrypted,
            self.cmac,
            decrypted,
        ]
        CommandExecutor().run(command, stdout_path, stderr_path)

        # Drop the trailing newline before splitting into lines.
        return decrypted.read_text().rstrip("\n").split("\n")
Exemple #18
0
def add_federation_block(trust_domain, bundle_endpoint, remote):
    """Insert a federation section into the remote SPIRE server config."""
    federation_path = "/mnt/c-spiffe/integration_test/resources/federation.conf"
    federation_config_content = Path(federation_path).read_text()
    # Regenerate the block when the resource file targets another trust domain.
    if federation_config_content.find(trust_domain) == -1:
        federation_config_content = update_federation_block(trust_domain, bundle_endpoint)

    server_conf_path = "/opt/spire/conf/server/server.conf"
    copy_file_from_remote(remote, server_conf_path)
    server_conf = Path(server_conf_path)
    content = server_conf.read_text()

    # Append the federation block just before the end of the server { ... }
    # section.
    start = content.find("server {")
    end = content.find("}", start) - 1
    head = content[start:end]
    content = content.replace(head, head + "\n\n" + federation_config_content + "\n")
    server_conf.write_text(content)
    send_file_to_remote(remote, server_conf_path)
Exemple #19
0
    def read_config(cls, filename=None):  # pragma: no cover -- TODO
        """Read configuration from a JSON file.

        The default location is XDG_DATA_PATH/taskwarrior/config.json.
        If the file does not exist, default configuration values are returned.
        For possible configuration fields see the documentation of the Config
        class.
        """
        filename = Path(filename
                        or xdg.save_data_path('taskwarrior') / 'config.json')
        data = {}
        if filename.exists():
            data = json.loads(filename.read_text())
        # Expand "~" so the task file can be given relative to the home dir.
        taskfile = data.get('taskfile')
        if taskfile:
            taskfile = os.path.expanduser(taskfile)
        return cls(
            taskfile=taskfile,
            separator=data.get('separator'),
            stop_alias=data.get('stop_alias'),
            resume_alias=data.get('resume_alias'),
        )
Exemple #20
0
def main(with_noreg):
    """Test automatic scanning / registering of reentry entry points.

    :param with_noreg: also check the 'test-noreg' entry point, which is only
        present after an explicit ``reentry scan``.
    """
    entry_point_map = manager.get_entry_map(groups='reentry_test', ep_names=['test-plugin', 'test-noreg', 'builtin'])
    data_file = Path(get_datafile())

    assert entry_point_map, 'The \'reentry_test\' entry point group was not found\nMap:\n{}\n\nData File: {}\n\nContents:\n{}'.format(
        manager.get_entry_map(), str(data_file), data_file.read_text())

    try:
        test_entry_point = entry_point_map['reentry_test']['test-plugin']
        builtin_entry_point = entry_point_map['reentry_test']['builtin']
        if with_noreg:
            # note: `reentry scan` for this work
            noreg_entry_point = entry_point_map['reentry_test']['test-noreg']
    except Exception as err:
        # Dump diagnostic state (current map plus a fresh, uncommitted scan)
        # before re-raising the original failure.
        print('datafile: {}'.format(str(data_file)))
        print('\nCurrent relevant entry point map:\n\n')
        print(manager.format_map(entry_point_map))
        print('\n')
        scan_map = manager.scan(groups=['reentry_test'], commit=False)
        print('\nFull entry point map after scan:\n\n')
        print(manager.format_map(scan_map))
        raise err

    plugin_class = test_entry_point.load()
    builtin_class = builtin_entry_point.load()

    # Every test plugin class exposes test_string == 'TEST'.
    assert plugin_class.test_string == 'TEST', 'The test string was incorrect'
    assert builtin_class.test_string == 'TEST', 'The test string was incorrect'
    if with_noreg:
        noreg_class = noreg_entry_point.load()
        assert noreg_class.test_string == 'TEST', 'The test string was incorrect'

    # iter_entry_points must agree with get_entry_map.
    plugin_list = [ep.load() for ep in manager.iter_entry_points('reentry_test')]
    assert plugin_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
    assert builtin_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
    if with_noreg:
        assert noreg_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
Exemple #21
0
 def content_accessed(self):
     """Return the accessed file's content, reading and caching it on demand."""
     if self._cache_accessed:
         return self._cache_accessed
     self.access()
     self._cache_accessed = Path(self.path_accessed).read_text()
     return self._cache_accessed
Exemple #22
0
 def content_summarized(self):
     """Deserialize the summarized post stored at ``self.path_summarized``."""
     from place.post import PostBase
     raw_json = Path(self.path_summarized).read_text()
     return PostBase(raw_json)
Exemple #23
0
 def loads(file: p, *args, **kwargs):
     """Parse *file* as YAML, honouring the enclosing ``safe`` flag.

     ``yaml.safe_load`` accepts only the stream argument; passing ``Loader``
     to it raises ``TypeError``, so the safe branch must not forward it.
     """
     if safe:  # If safe mode is enabled
         return safe_load(file.read_text())
     else:  # Or it isn't
         return load(file.read_text(), Loader=Loader)
Exemple #24
0
import sendText
from pathlib2 import Path
import time
import os

# Write the messaging configuration: recipient number and destination GPS.
with open('config.txt', 'w') as config_file:
    config_file.write("+17202357772\n")
    config_file.write("destgps:(50,50)\n")

initialise = True

# Poll for an incoming message file every 10 seconds; forward and delete it.
while True:
    message_file = Path("/home/linaro/Documents/TextMessage")
    if message_file.is_file():
        content = message_file.read_text()
        os.remove(str(message_file))

        print("sending: ", content)
        sendText.text(str(content))

    time.sleep(10)
Exemple #25
0
class Reader:
    """References Reader

    Pleasant interface for .csv reading.
    """

    def __init__(self, file_path: str):
        """Constructor

        :param file_path: path to the csv file
        :raises FileNotFoundError: when the file does not exist
        """
        self._file_path = file_path
        self._file = Path(file_path)

        if not self._file.exists():
            raise FileNotFoundError

        # Filled in by _load_stats(); -1 means "not yet computed".
        self._columns = self._rows = -1
        self._load_stats()

    def _load_stats(self) -> None:
        """Load stats of the .csv file for future usage
        """
        self._rows = 0
        # Column count is taken from the first line only.
        # NOTE(review): a trailing newline yields one extra (empty) row in the
        # count -- confirm this is intended.
        for line in self._file.read_text(Csv.encoding).split('\n'):
            if self._columns == -1:
                self._columns = len(line.split(Csv.separator))
            self._rows += 1

    def read_content(
            self,
            skip_header: Optional[bool] = False,
            encoding: Optional[str] = Csv.encoding
    ) -> Iterator[List[str]]:
        """Read .csv content

        :param skip_header: when True, the first non-skipped line is dropped
        :param encoding: text encoding used to read the file
        :return: an iterator with each line as a list of its columns
        """
        is_header_skipped = False

        for line in self._file.read_text(encoding).split('\n'):
            # skipping header if needed
            if skip_header and not is_header_skipped:
                is_header_skipped = True
                continue

            # removing empty lines
            if not line:
                continue

            # from a row, removing trailing \n in all fields
            # NOTE(review): assumes Parsing.parse_regex yields (field, sep)
            # pairs with a final empty match that [:-1] drops -- confirm
            # against the Parsing definition.
            values = [field for field, _ in re.findall(
                Parsing.parse_regex,
                line
            )][:-1]

            # if the data is surrounded by '"', remove them
            yield list(map(
                lambda field: field[1:-1]
                if field.startswith(Csv.delimiter)
                and field.endswith(Csv.delimiter)
                else field,
                values
            ))

    @property
    def columns(self) -> int:
        """Getter for `_columns`
        :return: the number of columns in the file
        """
        return self._columns

    @property
    def rows(self) -> int:
        """Getter for `_rows`
        :return: the number of rows in the file
        """
        return self._rows
    else:
        K_Force = 4 / (beta * (dE_wind + 0.4 * dE_wind)**2)
    #print(i,K_Force)
    K_s.append(K_Force)
    ## creation of a file that will be used in the creation of a Potential Mean Force (PMF) profile
    # Handle errors while calling os.remove()

    with open("pmf_data.txt", "a+") as test_ks:
        test_ks.write("{:.4f} {:.4f} \n".format(K_Force * 0.000446253514585234,
                                                i))

    shutil.copy(template, "{}/CV{:.3f}US".format(new_direc_cv, i))

    path = Path("{}/CV{:.3f}US".format(new_direc_cv, i))
    text = path.read_text()
    text = text.replace("topoledit", "./{}".format(topol_file))
    text = text.replace("coordfiledit", "CV{:.3f}.pdb".format(i))
    text = text.replace("nametoedit", "CV{:.3f}".format(i))
    text = text.replace("cvvaltoedit", "{}".format(i))
    text = text.replace("constantktoedit",
                        "{:.4f}".format(K_Force * 0.000446253514585234))
    path.write_text(text)
    #write job to execution
    with open("CV{:.3f}/job".format(i), "w") as job:
        job.write('''#!/bin/bash
#SBATCH --time=160:00:00
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=8
#SBATCH --cpus-per-task=2
#SBATCH --mem=32GB
class VersionUpdater(object):
    """
    Version number synchronisation interface.

    Updates the version information in

    * setup.json
    * aiida_vasp/__init__.py

    to the current version number.

    The current version number is either parsed from the output of ``git describe --tags --match v*.*.*``, or if the command fails for
    any reason, from setup.json. The current version number is decided on init, syncronization can be executed by calling ``.sync()``.
    """

    # Dots must be escaped: the previous r'\d+.\d+.\d+...' pattern let "."
    # match any character, accepting strings like "1x2y3".
    version_pat = re.compile(r'\d+\.\d+\.\d+(-(alpha|beta|rc)(\.\d+){0,3}){0,1}')
    init_version_pat = re.compile(r'(__version__ = )([\'"])(.*?)([\'"])',
                                  re.DOTALL | re.MULTILINE)
    # Replacement template: keeps the assignment and quote style, swaps the
    # version string.
    replace_tmpl = r'\1\g<2>{}\4'

    def __init__(self):
        """Initialize with documents that should be kept up to date and actual version."""
        self.top_level_init = Path(subpath('reentry', '__init__.py'))
        self.setup_json = Path(subpath('setup.json'))
        self.version = self.get_version()

    def write_to_init(self):
        """Write the updated version number to the top-level __init__.py."""
        init_content = self.top_level_init.read_text()
        # The flags are already compiled into init_version_pat; they must not
        # be passed positionally to re.sub, whose 4th parameter is ``count``
        # (re.DOTALL | re.MULTILINE would silently become count=24).
        self.top_level_init.write_text(
            re.sub(self.init_version_pat, self.new_version_str, init_content))

    def write_to_setup(self):
        """Write the updated version number to setup.json."""
        with open(str(self.setup_json), 'r') as setup_fo:
            # preserve order
            setup = json.load(setup_fo,
                              object_pairs_hook=collections.OrderedDict)

        setup['version'] = str(self.version)
        with open(str(self.setup_json), 'w') as setup_fo:
            json.dump(setup, setup_fo, indent=4, separators=(',', ': '))

    @property
    def new_version_str(self):
        """Replacement string for init_version_pat carrying the new version."""
        return self.replace_tmpl.format(str(self.version))

    @property
    def setup_version(self):
        """Grab the parsed version from setup.json."""
        with open(str(self.setup_json), 'r') as setup_fo:
            setup = json.load(setup_fo)

        try:
            version_string = setup['version']
        except KeyError:
            raise AttributeError('No version found in setup.json')

        return version.parse(version_string)

    @property
    def init_version(self):
        """Grab the parsed version from the init file."""
        match = re.search(self.init_version_pat,
                          self.top_level_init.read_text())
        if not match:
            raise AttributeError(
                'No __version__ found in top-level __init__.py')
        return version.parse(match.groups()[2])

    @property
    def tag_version(self):
        """Get the current version number from ``git describe``, fall back to setup.json."""
        try:
            describe_byte_string = subprocess.check_output(
                ['git', 'describe', '--tags', '--match', 'v*.*.*'])
            match = re.search(self.version_pat,
                              describe_byte_string.decode(encoding='UTF-8'))
            version_string = match.string[match.pos:match.end()]
            return version.parse(version_string)
        except subprocess.CalledProcessError:
            return self.setup_version

    def get_version(self):
        """Return the highest of the setup.json, __init__.py and git tag versions."""
        return max(self.setup_version, self.init_version, self.tag_version)

    def sync(self):
        """Propagate the decided version to any file that is behind it."""
        if self.version > self.init_version:
            self.write_to_init()
        if self.version > self.setup_version:
            self.write_to_setup()
Exemple #28
0
import sys
try:
	from pathlib2 import Path
except ImportError:
	from pathlib import Path
from clckwrkbdgr import xdg
from clckwrkbdgr import commands
import clckwrkbdgr.jobsequence.context
# Job-sequence step: verify that the user's local XXkb X-resource lines are
# also present in the system-wide /etc config, and exit non-zero when not.
trace = context = clckwrkbdgr.jobsequence.context.init(
		verbose_var='DOTFILES_SETUP_VERBOSE',
		skip_platforms='Windows',
		)

# The system file lives under /etc; without sudo we could not fix it anyway.
if not commands.has_sudo_rights():
	trace.info('Have not sudo rights, skipping.')
	context.done()

etc_config_file = Path('/etc/X11/app-defaults/XXkb')
if not etc_config_file.exists():
	context.die('{0} is not found, cannot add XXkb settings.'.format(etc_config_file)) # TODO can we create this file if it is absent?

# Collect the user's XXkb.* resource lines and diff them against the
# system-wide app-defaults file.
Xresources = xdg.XDG_CONFIG_HOME/'Xresources'
local_XXkb = set([line for line in Xresources.read_text().splitlines() if line.startswith('XXkb.')])
missing = local_XXkb - set(etc_config_file.read_text().splitlines())
if not missing:
	context.done()

# Report every local line that the system config lacks, then fail the step.
trace.error('These XXkb config lines are not present in {0}:'.format(etc_config_file))
for line in missing:
	print(line)
sys.exit(1) # TODO way to fix automatically with sudo.
Exemple #29
0
class LocalConfig(object):
    """Manage the local ``lk`` YAML config file and its parent directory."""

    def __init__(self):
        # Directory that holds the lk config (e.g. ~/.config/lk).
        self.lk_config_dir_string = ConfigUtil().lk_config_dir
        self.lk_config_dir_path = Path(self.lk_config_dir_string)

        # Full path of the YAML config file inside that directory.
        self.lk_config_file_path_string = ConfigUtil().user_lk_config_file_path
        self.lk_config_file_path = Path(self.lk_config_file_path_string)

    @property
    def not_found(self):
        """True when the config file does not exist yet."""
        if not self.lk_config_file_path.exists():
            return True

        else:
            return False

    @property
    def config_dict(self):
        """Read and parse the config file, returning it as a dict."""
        config_string = self.lk_config_file_path.read_text()
        # FIX: yaml.load without an explicit Loader is deprecated (and a
        # TypeError on PyYAML >= 6); the config holds plain scalars, so
        # safe_load is both sufficient and safe.
        config_dict = yaml.safe_load(config_string)

        return config_dict

    def add_remote_repo(self, remote_repo):
        """Create/overwrite the config so it points at *remote_repo*."""
        self.create_config(remote_repo=remote_repo)

    def default_commands_repo(self):
        """Return the configured default commands repo, creating the config
        with defaults when the file is missing."""
        try:
            return self.config_dict['default_commands_repo']

        except IOError:

            self.create_config()
            return self.config_dict['default_commands_repo']

            # raise LocalConfigNotFound

    def create_config(self, remote_repo=None):
        """(Re)create the config file, asking before overwriting an
        existing one.

        :param remote_repo: repo to record; falls back to
            ``app_config.DEFAULT_COMMANDS_REPO`` when None.
        """
        if not self.lk_config_dir_path.exists():
            self.lk_config_dir_path.mkdir(parents=True, exist_ok=True)

        if self.lk_config_file_path.exists():

            print('# Config file exists at: {config_file_path}'.format(
                config_file_path=self.lk_config_file_path_string))

            # BUG FIX: click.Abort is an exception class; the original code
            # merely instantiated it (``click.Abort()``) without raising, so
            # declining the prompt did NOT abort and the file was
            # overwritten anyway.
            if not app_confirm('Do you want to override?'):
                raise click.Abort()

        if remote_repo is None:
            remote_repo_value = app_config.DEFAULT_COMMANDS_REPO
        else:
            remote_repo_value = remote_repo

        config_odict = odict([('default_commands_repo', remote_repo_value)])

        setup_yaml()

        config_yaml_string = yaml.dump(config_odict, default_flow_style=False)

        # BUG FIX: on Python 3 yaml.dump returns str, which has no .decode();
        # only decode when we actually received bytes (Python 2).
        if isinstance(config_yaml_string, bytes):
            config_yaml_string = config_yaml_string.decode('utf-8')
        self.lk_config_file_path.write_text(config_yaml_string)
Exemple #30
0
def cleandata(file_name):
    """Replace every comma in *file_name* with a space, rewriting the file
    in place.

    :param file_name: path (str or path-like) to a text file on disk.
    """
    # FIX: the original imported pathlib2 unconditionally, crashing where
    # only the stdlib pathlib exists; use the same backport-with-fallback
    # convention seen elsewhere in this codebase.
    try:
        from pathlib2 import Path
    except ImportError:
        from pathlib import Path
    path = Path(file_name)
    path.write_text(path.read_text().replace(',', ' '))
Exemple #31
0
def _release(context):
    """Build, test, tag, package and deploy every module, then push.

    NOTE(review): heavy side effects throughout (git, docker, ssh/scp),
    gated by the FAB_SKIP_TESTS / FAB_SKIP_GIT environment toggles.
    """
    modules = _get_modules(context, "docker-compose.yml")
    # Pre-release verification: pull, build and run both test suites for
    # every module unless FAB_SKIP_TESTS is set in the environment.
    for module in modules:
        if FAB_SKIP_TESTS not in os.environ:
            _pull(context, filter_module=module)
            _build(context, filter_module=module)
            _unittest(context, filter_module=module)
            _systest(context, filter_module=module)
    # Bump to the release version, then commit and tag it.
    _get_versions_next_release()
    if FAB_SKIP_GIT not in os.environ:
        print("Tagging repository ...")
        _run_local(
            context,
            "git add -A && git commit -m 'Update asystem-{}' && git tag -a {} -m 'Release asystem-{}'"
            .format(_get_versions()[0],
                    _get_versions()[0],
                    _get_versions()[0]),
            env={"HOME": os.environ["HOME"]})
    # Release each module to each of its target hosts.
    for module in modules:
        for host in _get_hosts(context, module):
            _clean(context, filter_module=module)
            _pull(context,
                  filter_module=module,
                  filter_host=host,
                  is_release=True)
            _build(context, filter_module=module, is_release=True)
            _package(context, filter_module=module)
            _print_header(module, "release")
            # A module participates in releases only when its ".group"
            # file holds a non-negative integer.
            group_path = Path(join(DIR_ROOT, module, ".group"))
            if group_path.exists() and group_path.read_text().strip(
            ).isnumeric() and int(group_path.read_text().strip()) >= 0:
                # Stage the release artifacts under target/release.
                _run_local(context, "mkdir -p target/release", module)
                _run_local(context,
                           "cp -rvfp docker-compose.yml target/release",
                           module,
                           hide='err',
                           warn=True)
                # Save the docker image as a tarball when the module ships
                # its own Dockerfile.
                if isfile(join(DIR_ROOT, module, "Dockerfile")):
                    file_image = "{}-{}.tar.gz".format(_name(module),
                                                       _get_versions()[0])
                    print("docker -> target/release/{}".format(file_image))
                    _run_local(
                        context, "docker image save -o {} {}:{}".format(
                            file_image, _name(module),
                            _get_versions()[0]), join(module,
                                                      "target/release"))
                # Copy packaged resources, or create an empty config dir.
                if glob.glob(
                        join(DIR_ROOT, module,
                             "target/package/main/resources/*")):
                    _run_local(
                        context,
                        "cp -rvfp target/package/main/resources/* target/release",
                        module)
                else:
                    _run_local(context, "mkdir -p target/release/config",
                               module)
                # Derive the host list from the top-level directory names;
                # underscores split multi-host directory names into lines.
                hosts = set(
                    filter(
                        len,
                        _run_local(
                            context,
                            "find {} -type d ! -name '.*' -mindepth 1 -maxdepth 1"
                            .format(DIR_ROOT),
                            hide='out').stdout.replace(
                                DIR_ROOT + "/", "").replace("_",
                                                            "\n").split("\n")))
                Path(
                    join(DIR_ROOT, module,
                         "target/release/hosts")).write_text("\n".join(hosts) +
                                                             "\n")
                # Ship the run script (or a stub when none is packaged).
                if glob.glob(join(DIR_ROOT, module, "target/package/run*")):
                    _run_local(context,
                               "cp -rvfp target/package/run* target/release",
                               module)
                else:
                    _run_local(context, "touch target/release/run.sh", module)
                _print_header("{}/{}".format(host, _name(module)), "release")
                ssh_pass = _ssh_pass(context, host)
                install = "{}/{}/{}".format(DIR_INSTALL, module,
                                            _get_versions()[0])
                print("Copying release to {} ... ".format(host))
                # Recreate the remote install dir, then copy artifacts over.
                _run_local(
                    context,
                    "{}ssh -q root@{} 'rm -rf {} && mkdir -p {}'".format(
                        ssh_pass, host, install, install))
                _run_local(
                    context,
                    "{}scp -qpr $(find target/release -maxdepth 1 -type f) root@{}:{}"
                    .format(ssh_pass, host, install), module)
                _run_local(
                    context,
                    "{}scp -qpr target/release/config root@{}:{}".format(
                        ssh_pass, host, install), module)
                print("Installing release to {} ... ".format(host))
                _run_local(
                    context,
                    "{}ssh -q root@{} 'chmod +x {}/run.sh && {}/run.sh'".
                    format(ssh_pass, host, install, install))
                # Host housekeeping: prune docker and keep only the two
                # most recent installed versions under the install root.
                _run_local(
                    context,
                    "{}ssh -q root@{} 'docker system prune --volumes -f'".
                    format(ssh_pass, host),
                    hide='err',
                    warn=True)
                _run_local(
                    context,
                    "{}ssh -q root@{} 'find $(dirname {}) -maxdepth 1 -mindepth 1 2>/dev/null | sort | "
                    "head -n $(($(find $(dirname {}) -maxdepth 1 -mindepth 1 2>/dev/null | wc -l) - 2)) | xargs rm -rf'"
                    .format(ssh_pass, host, install, install),
                    hide='err',
                    warn=True)
                _print_footer("{}/{}".format(host, _name(module)), "release")
            else:
                print("Module ignored")
            _print_footer(module, "release")
    # Bump to the next snapshot version and push everything upstream.
    _get_versions_next_snapshot()
    if FAB_SKIP_GIT not in os.environ:
        print("Pushing repository ...")
        # NOTE(review): a single {} placeholder but three .format arguments;
        # str.format ignores the extras, so only the commit message carries
        # the version here.
        _run_local(
            context,
            "git add -A && git commit -m 'Update asystem-{}' && git push --all && git push origin --tags"
            .format(_get_versions()[0],
                    _get_versions()[0],
                    _get_versions()[0]),
            env={"HOME": os.environ["HOME"]})
Exemple #32
0
class SNMPtrapd(object):
    """Wrap a local ``snmptrapd`` process for tests: start/stop it and
    read back the traps it logged."""

    # Marker prepended to every logged trap so get_traps() can find them.
    TRAP_LOG_PREFIX = 'LIGHT_TEST_SNMP_TRAP_RECEIVED:'

    def __init__(self, port):
        # Handle of the running snmptrapd process; None while stopped.
        self.snmptrapd_proc = None
        self.port = port

        # Log/stdout/stderr files all live in the test's working dir.
        self.snmptrapd_log = Path(tc_parameters.WORKING_DIR, "snmptrapd_log")
        self.snmptrapd_stdout_path = Path(tc_parameters.WORKING_DIR,
                                          "snmptrapd_stdout")
        self.snmptrapd_stderr_path = Path(tc_parameters.WORKING_DIR,
                                          "snmptrapd_stderr")

    def wait_for_snmptrapd_log_creation(self):
        # Poll helper for wait_until_true(): log file has appeared on disk.
        return self.snmptrapd_log.exists()

    def wait_for_snmptrapd_startup(self):
        # Poll helper for wait_until_true(): snmptrapd wrote its startup
        # banner, so it is accepting traps.
        return "NET-SNMP version" in self.snmptrapd_log.read_text()

    def start(self):
        """Launch snmptrapd and block until it is up.

        Returns True when the process is running; returns None without
        doing anything if it was already started (idempotent).
        """
        if self.snmptrapd_proc is not None:
            return

        # -f: foreground; -Lf: log to our file; -F: per-trap format string
        # prefixed with TRAP_LOG_PREFIX so get_traps() can grep it back.
        self.snmptrapd_proc = ProcessExecutor().start(
            [
                "snmptrapd",
                "-f",
                "--disableAuthorization=yes",
                "-C",
                "-m ALL",
                "-A",
                "-Ddump",
                "-On",
                "--doNotLogTraps=no",
                "--authCommunity=log public",
                self.port,
                "-d",
                "-Lf",
                os.path.relpath(str(self.snmptrapd_log)),
                "-F",
                "{}%v\n".format(self.TRAP_LOG_PREFIX),
            ],
            self.snmptrapd_stdout_path,
            self.snmptrapd_stderr_path,
        )
        wait_until_true(self.wait_for_snmptrapd_log_creation)
        wait_until_true(self.wait_for_snmptrapd_startup)
        return self.snmptrapd_proc.is_running()

    def stop(self):
        """Terminate snmptrapd, escalating to kill after a 4s grace period."""
        if self.snmptrapd_proc is None:
            return

        self.snmptrapd_proc.terminate()
        try:
            self.snmptrapd_proc.wait(4)
        except TimeoutExpired:
            self.snmptrapd_proc.kill()

        self.snmptrapd_proc = None

    def get_port(self):
        # Port/listen spec as given to __init__.
        return self.port

    def get_traps(self):
        """Return the sorted list of trap varbind values logged so far."""
        file_reader = DestinationReader(FileIO)
        logs = file_reader.read_all_logs(self.snmptrapd_log)
        trap_list = []
        for log_line in logs:
            # Only lines carrying our prefix are traps; the payload after
            # the prefix is tab-separated.
            res = re.match('({})(.*)'.format(self.TRAP_LOG_PREFIX), log_line)
            if (res):
                trap_list += res.group(2).rstrip().split("\t")
        return sorted(trap_list)

    def get_raw_traps(self):
        """Return every log line verbatim, prefixed and unprefixed alike."""
        file_reader = DestinationReader(FileIO)
        return file_reader.read_all_logs(self.snmptrapd_log)