def set_overrun_secs(self, overrun_secs):
     """Append an '--overrun-secs' option to the arcyd configfile.

     May only be called once per instance.

     """
     assert not self._has_set_overrun_secs
     path = os.path.join(self._root_dir, 'configfile')
     old_text = phlsys_fs.read_text_file(path)
     new_text = old_text + '\n--overrun-secs\n{}'.format(overrun_secs)
     phlsys_fs.write_text_file(path, new_text)
     self._has_set_overrun_secs = True
 def set_overrun_secs(self, overrun_secs):
     """Add the '--overrun-secs' argument to the arcyd configfile.

     Asserts if called more than once on the same instance.

     """
     assert not self._has_set_overrun_secs
     configfile = os.path.join(self._root_dir, 'configfile')
     addition = '\n--overrun-secs\n{}'.format(overrun_secs)
     phlsys_fs.write_text_file(
         configfile, phlsys_fs.read_text_file(configfile) + addition)
     self._has_set_overrun_secs = True
 def enable_count_cycles_script(self):
     """Register 'count_cycles.sh' as arcyd's external report command.

     May only be called once per instance.

     """
     assert not self._has_enabled_count_cycles
     configfile = os.path.join(self._root_dir, 'configfile')
     old_config = phlsys_fs.read_text_file(configfile)
     new_config = old_config + '\n--external-report-command\ncount_cycles.sh'
     phlsys_fs.write_text_file(configfile, new_config)
     self._has_enabled_count_cycles = True
def process(args):
    """Stop the currently running arcyd instance, waiting until it exits.

    :args: argparse namespace; if 'args.force' is set then terminate by
           signal, otherwise request a clean shutdown via the killfile
    :returns: None
    :raises Exception: if arcyd is not running

    """
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is None or not phlsys_pid.is_running(pid):
            raise Exception("Arcyd is not running")

        if args.force:
            phlsys_pid.request_terminate(pid)
        else:
            # an empty killfile asks the daemon to shut down cleanly; the
            # daemon removes the file to acknowledge the request
            killfile = 'var/command/killfile'
            phlsys_fs.write_text_file(killfile, '')

            if os.path.isfile(killfile):
                time.sleep(1)
                while os.path.isfile(killfile):
                    # FIX: was a Python-2-only print statement; use the call
                    # form, consistent with stop_arcyd_pid()
                    print('waiting for arcyd to remove killfile ..')
                    time.sleep(1)

        # wait for Arcyd to not be running
        if phlsys_pid.is_running(pid):
            time.sleep(1)
            while phlsys_pid.is_running(pid):
                print('waiting for arcyd to exit')
                time.sleep(1)
 def enable_count_cycles_script(self):
     """Append the external-report-command option to the configfile.

     Points arcyd at the 'count_cycles.sh' helper; callable only once.

     """
     assert not self._has_enabled_count_cycles
     config_file = os.path.join(self._root_dir, 'configfile')
     new_lines = ['', '--external-report-command', 'count_cycles.sh']
     phlsys_fs.write_text_file(
         config_file,
         phlsys_fs.read_text_file(config_file) + '\n'.join(new_lines))
     self._has_enabled_count_cycles = True
def ensure_repo_ignoring(repo_path):
    """Make sure the attributes override file contains our attributes.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        return  # already set up, nothing to do

    attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)

    # Any pre-existing file must be a subset of the content we are about to
    # write; otherwise refuse to touch it rather than attempt a merge.
    if os.path.exists(attributes_path):
        for line in phlsys_fs.read_text_file(attributes_path).splitlines():
            line = line.strip()
            if line and line not in _REPO_ATTRIBUTES_TUPLE:
                raise Exception(
                    "cannot merge attributes in existing file: {}".format(
                        attributes_path))

    # the file holds only known attributes, so overwriting it with our
    # superset of attributes is a correct merge
    phlsys_fs.write_text_file(attributes_path, _REPO_ATTRIBUTES_CONTENT)
Exemple #7
0
def reload_arcyd():
    """Ask the running arcyd instance to reload its configuration.

    :returns: None
    :raises Exception: if arcyd is not currently running

    """
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        is_running = pid is not None and phlsys_pid.is_running(pid)
        if not is_running:
            raise Exception("Arcyd is not running")

        # an empty 'reload' command file signals the daemon to reload
        phlsys_fs.write_text_file('var/command/reload', '')
    def set_pid(self, pid):
        """Record *pid* as the pid of the current arcyd instance.

        :pid: the integer pid of the current arcyd instance
        :returns: None

        """
        phlsys_fs.write_text_file(
            self._make_abspath(self._layout.pid), str(pid))
    def set_pid(self, pid):
        """Write *pid* to the pid-file of the current arcyd instance.

        :pid: the integer pid of the current arcyd instance
        :returns: None

        """
        path = self._make_abspath(self._layout.pid)
        phlsys_fs.write_text_file(path, '{}'.format(pid))
    def __init__(
            self,
            root_dir,
            barc_cmd_path,
            arcyon_cmd_path,
            phab_uri,
            alice,
            bob):
        """Set up a central bare repo, a web server and two worker clones.

        :root_dir: directory to create the fixture under
        :barc_cmd_path: path to the 'barc' command for the workers
        :arcyon_cmd_path: path to the 'arcyon' command for the workers
        :phab_uri: uri of the phabricator instance to use
        :alice: (user, email, certificate) tuple for the first worker
        :bob: (user, email, certificate) tuple for the second worker

        """
        self._root_dir = root_dir
        self.central_path = os.path.join(self._root_dir, 'central')
        os.makedirs(self.central_path)
        self._central_repo = phlsys_git.Repo(self.central_path)
        self._central_repo("init", "--bare")
        self.web_port = pick_free_port()
        # enable the stock post-update hook by renaming the sample
        # (presumably so the repo can be served over http - TODO confirm)
        shutil.move(
            os.path.join(self.central_path, 'hooks/post-update.sample'),
            os.path.join(self.central_path, 'hooks/post-update'))

        # while this file exists, the pre-receive hook holds back refs
        self._command_hold_path = os.path.join(
            self.central_path, 'command/hold_dev_arcyd_refs')

        # install the holding pre-receive hook and make it executable
        pre_receive_path = os.path.join(self.central_path, 'hooks/pre-receive')
        phlsys_fs.write_text_file(
            pre_receive_path,
            _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)
        mode = os.stat(pre_receive_path).st_mode
        os.chmod(pre_receive_path, mode | stat.S_IEXEC)

        self._web = SimpleWebServer(
            self.central_path,
            self.web_port)

        self._workers = []
        for account in (alice, bob):
            account_user = account[0]
            account_email = account[1]
            account_cert = account[2]
            worker_path = os.path.join(self._root_dir, account_user)
            os.makedirs(worker_path)
            self._workers.append(
                _Worker(
                    phlsys_git.Repo(worker_path),
                    worker_path,
                    barc_cmd_path,
                    account_user,
                    account_email,
                    account_cert,
                    arcyon_cmd_path,
                    phab_uri))
            self._workers[-1].setup(self._central_repo.working_dir)

            # the first worker seeds the central repo; later workers just
            # sync onto its 'master'
            if len(self._workers) == 1:
                self._workers[0].push_initial_commit()
            else:
                self._workers[-1].repo('checkout', 'master')
 def test_D_exercise_read_log(self):
     """Check that info_log() and debug_log() surface the file contents."""
     with setup_arcyd() as arcyd:
         root = arcyd._root_dir
         # plant known content in both log files
         phlsys_fs.write_text_file(
             '{}/var/log/debug'.format(root), 'debug log entry')
         phlsys_fs.write_text_file(
             '{}/var/log/info'.format(root), 'info log entry')
         info_text = arcyd.info_log()
         debug_text = arcyd.debug_log()
         # [ D] info_log returns correct info log
         self.assertIn('info log entry', info_text)
         # [ D] debug_log returns correct debug log
         self.assertIn('debug log entry', debug_text)
 def test_D_exercise_read_log(self):
     """Check info_log() and debug_log() return planted log contents."""
     with setup_arcyd() as arcyd:
         debug_log_path = '{}/var/log/debug'.format(arcyd._root_dir)
         phlsys_fs.write_text_file(debug_log_path, 'debug log entry')
         info_log_path = '{}/var/log/info'.format(arcyd._root_dir)
         phlsys_fs.write_text_file(info_log_path, 'info log entry')
         info_log = arcyd.info_log()
         debug_log = arcyd.debug_log()
         # [ D] info_log returns correct info log
         self.assertIn('info log entry', info_log)
         # [ D] debug_log returns correct debug log
         self.assertIn('debug log entry', debug_log)
    def test_B_rotation(self):
        """Check the handler's rotator gzips the log and removes the old."""
        with phlsys_fs.chtmpdir_context():
            handler = _MAKE_HANDLER('testfile')
            message = 'Hello World, this is a test for the rotator.'
            phlsys_fs.write_text_file('testfile', message)

            handler.rotator('testfile', 'dest.gz')

            with gzip.open('dest.gz', 'rb') as rotated:
                # [ B] current log can be rotated to compressed one
                self.assertEqual(message, rotated.read())
            # [ B] current log is deleted after rotation
            self.assertFalse(os.path.exists('testfile'))
 def test_C_exercise_wait_cycles(self):
     """Exercise enabling the cycle-counter script and reading its count."""
     with setup_arcyd() as arcyd:
         # [ C] count_cycles is disabled by default
         self.assertFalse(arcyd._has_enabled_count_cycles)
         arcyd.enable_count_cycles_script()
         # [ C] count_cycles is enabled by enable_count_cycles_script
         self.assertTrue(arcyd._has_enabled_count_cycles)
         counter_path = os.path.join(arcyd._root_dir, 'cycle_counter')
         phlsys_fs.write_text_file(counter_path, '2')
         with phlsys_fs.chdir_context(arcyd._root_dir):
             os.system("./count_cycles.sh")
         # [ C] correct number of cycles counted - 3 (2 + 1)
         self.assertEqual(3, arcyd.count_cycles())
 def test_C_exercise_wait_cycles(self):
     """Check the count_cycles script is off by default and counts cycles."""
     with setup_arcyd() as arcyd:
         # [ C] count_cycles is disabled by default
         self.assertFalse(arcyd._has_enabled_count_cycles)
         arcyd.enable_count_cycles_script()
         # [ C] count_cycles is enabled by enable_count_cycles_script
         self.assertTrue(arcyd._has_enabled_count_cycles)
         phlsys_fs.write_text_file(
             os.path.join(arcyd._root_dir, 'cycle_counter'), '2')
         with phlsys_fs.chdir_context(arcyd._root_dir):
             os.system("./count_cycles.sh")
         # [ C] correct number of cycles counted - 3 (2 + 1)
         self.assertEqual(3, arcyd.count_cycles())
    def test_B_rotation(self):
        """Check the log handler's rotator compresses and removes the log."""
        with phlsys_fs.chtmpdir_context():
            debug_handler = _MAKE_HANDLER(
                'testfile')
            content = 'Hello World, this is a test for the rotator.'

            phlsys_fs.write_text_file('testfile', content)
            debug_handler.rotator('testfile', 'dest.gz')

            with gzip.open('dest.gz', 'rb') as dest:
                # [ B] current log can be rotated to compressed one
                self.assertEqual(content, dest.read())
            # [ B] current log is deleted after rotation
            self.assertFalse(os.path.exists('testfile'))
    def __init__(self, root_dir, arcyd_command):
        """Wrap an arcyd command, installing the cycle-counter helper.

        :root_dir: directory the wrapped command will run in
        :arcyd_command: the arcyd command to wrap

        """
        self._root_dir = root_dir
        self._command = _CommandWithWorkingDirectory(arcyd_command, root_dir)

        # write the external report counter script and make it executable so
        # arcyd can invoke it as its '--external-report-command'
        count_cycles_script_path = os.path.join(self._root_dir,
                                                'count_cycles.sh')
        phlsys_fs.write_text_file(count_cycles_script_path,
                                  _EXTERNAL_REPORT_COUNTER)
        mode = os.stat(count_cycles_script_path).st_mode
        os.chmod(count_cycles_script_path, mode | stat.S_IEXEC)

        # one-shot guards asserted by the corresponding setter methods
        self._has_enabled_count_cycles = False
        self._has_started_daemon = False
        self._has_set_overrun_secs = False
    def test_B_webserver(self):
        """Serve a file via SimpleWebServer and fetch it back over http."""
        port = phlsys_web.pick_free_port()
        dirpath = tempfile.mkdtemp()
        filepath = os.path.join(dirpath, 'index.html')
        phlsys_fs.write_text_file(filepath, 'Hello World')
        # [ B] SimpleWebServer starts without any error
        server = phlsys_web.SimpleWebServer(dirpath, port)
        # Give subprocess enough time to start the server
        time.sleep(1)
        # NOTE(review): urllib2 is Python-2-only; confirm the project still
        # targets Python 2 or port this to urllib.request
        response = urllib2.urlopen("http://localhost:{}".format(port))
        # [ B] SimpleWebServer serves correct content
        self.assertEqual('Hello World', response.read())

        server.close()
Exemple #19
0
    def test_B_webserver(self):
        """Check SimpleWebServer starts and serves the directory's files."""
        port = phlsys_web.pick_free_port()
        dirpath = tempfile.mkdtemp()
        filepath = os.path.join(dirpath, 'index.html')
        phlsys_fs.write_text_file(filepath, 'Hello World')
        # [ B] SimpleWebServer starts without any error
        server = phlsys_web.SimpleWebServer(dirpath, port)
        # Give subprocess enough time to start the server
        time.sleep(1)
        # NOTE(review): urllib2 is Python-2-only; verify against the
        # project's supported Python versions
        response = urllib2.urlopen("http://localhost:{}".format(port))
        # [ B] SimpleWebServer serves correct content
        self.assertEqual('Hello World', response.read())

        server.close()
    def __init__(self, root_dir, arcyd_command):
        """Wrap an arcyd command and set up the cycle-counter script.

        :root_dir: directory the wrapped command will run in
        :arcyd_command: the arcyd command to wrap

        """
        self._root_dir = root_dir
        self._command = _CommandWithWorkingDirectory(arcyd_command, root_dir)

        # install the external report counter script, executable so arcyd
        # can run it via '--external-report-command'
        count_cycles_script_path = os.path.join(
            self._root_dir, 'count_cycles.sh')
        phlsys_fs.write_text_file(
            count_cycles_script_path,
            _EXTERNAL_REPORT_COUNTER)
        mode = os.stat(count_cycles_script_path).st_mode
        os.chmod(count_cycles_script_path, mode | stat.S_IEXEC)

        # one-shot guards asserted by the corresponding setter methods
        self._has_enabled_count_cycles = False
        self._has_started_daemon = False
        self._has_set_overrun_secs = False
    def test_C_WorkaroundSpuriousEolDiff(self):
        """Check ensure_repo_ignoring() unblocks checkouts broken by eol.

        A committed CRLF file plus a later '* eol=lf' attribute makes the
        file appear locally modified, so a checkout that would touch it
        fails; ignoring the attributes must let the checkout succeed.

        """
        badeol_filename = 'badeol_file'
        linked_workers = phlgitu_fixture.CentralisedWithTwoWorkers()
        with contextlib.closing(linked_workers):

            w0 = linked_workers.w0
            w1 = linked_workers.w1

            badeol_path = os.path.join(w0.repo.working_dir, badeol_filename)

            # commit a bad eol file
            w0.commit_new_file(
                message='add file, bad line endings',
                relative_path=badeol_filename,
                contents='windows line ending, whoops!\r\n')

            # enable eol conversion
            w0.commit_new_file(
                message='add .gitattributes, set eol to unix',
                relative_path='.gitattributes',
                contents='* eol=lf\n')

            # checkout onto a new branch to fix the line ending
            w0.repo("checkout", "-b", "fix_badeol")
            phlsys_fs.write_text_file(badeol_path, "good ending\n")

            w0.repo('commit', '-am', 'fix {}'.format(badeol_filename))

            # push both branches back to share with 'w1'
            w0.repo('push', 'origin', 'master:master', 'fix_badeol')

            w1.repo('fetch', 'origin')
            w1.repo('checkout', 'origin/master')

            def checkout_fix_badeol():
                w1.repo('checkout', 'origin/fix_badeol')

            # An error will be raised here, as the badeol file will appear to
            # have been modified.
            self.assertRaises(
                phlsys_subprocess.CalledProcessError,
                checkout_fix_badeol)

            # work around the problem, by ignoring the badeol setting
            phlgitx_ignoreattributes.ensure_repo_ignoring(w1.repo.working_dir)

            # try to checkout back to the branch with the fix
            checkout_fix_badeol()
    def test_B_WorkaroundSpuriousDiff(self):
        """Check ensure_repo_ignoring() unblocks checkouts broken by ident.

        A committed file with an already-expanded '$Id$' keyword plus a
        later '* ident' attribute makes the file appear locally modified,
        so a checkout that would touch it fails; ignoring the ident
        setting must let the checkout succeed.

        """
        ident_filename = 'ident_file'
        linked_workers = phlgitu_fixture.CentralisedWithTwoWorkers()
        with contextlib.closing(linked_workers):

            w0 = linked_workers.w0
            w1 = linked_workers.w1

            ident_path = os.path.join(w0.repo.working_dir, ident_filename)

            # commit a pre-expanded file
            w0.commit_new_file(
                message='add ident, erroneously expanded already',
                relative_path=ident_filename,
                contents='$Id: already expanded, whoops! $')

            # enable ident expansion
            w0.commit_new_file(
                message='add .gitattributes, enable ident',
                relative_path='.gitattributes',
                contents='* ident\n')

            # checkout onto a new branch to fix the ident
            w0.repo("checkout", "-b", "fix_ident")
            phlsys_fs.write_text_file(ident_path, "$Id$")

            w0.repo('commit', '-am', 'fix {}'.format(ident_filename))

            # push both branches back to share with 'w1'
            w0.repo('push', 'origin', 'master:master', 'fix_ident')

            w1.repo('fetch', 'origin')
            w1.repo('checkout', 'origin/master')

            def checkout_fix_ident():
                w1.repo('checkout', 'origin/fix_ident')

            # An error will be raised here, as the ident file will appear to
            # have been modified.
            self.assertRaises(
                phlsys_subprocess.CalledProcessError,
                checkout_fix_ident)

            # work around the problem, by ignoring the ident setting
            phlgitx_ignoreident.ensure_repo_ignoring(w1.repo.working_dir)

            # try to checkout back to the branch with the fix
            checkout_fix_ident()
def ensure_repo_ignoring(repo_path):
    """Create the attributes override file if it isn't already present.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)
    if os.path.exists(attributes_path):
        # we won't try to do any sort of merging, just escalate
        raise Exception(
            "cannot ensure ignore ident in existing file: {}".format(
                attributes_path))

    # create the file with required content
    phlsys_fs.write_text_file(attributes_path, _REPO_ATTRIBUTES_CONTENT)
    def _create_config(self, rel_path, content, message):
        """Create and commit a new config file.

        :rel_path: the string relative path to the config file
        :content: the string contents of the new config file
        :message: the string commit message for the file
        :returns: None

        """
        path = self._make_abspath(rel_path)

        if os.path.exists(path):
            raise Exception("config already exists")

        phlsys_fs.write_text_file(path, content)
        self._repo.call('add', rel_path)
        phlgit_commit.index(self._repo, message)
def stop_arcyd_pid(pid):
    """Ask the arcyd instance at *pid* to stop, blocking until it exits.

    :pid: integer pid of the running arcyd instance
    :returns: None

    """
    # an empty killfile requests a clean shutdown; arcyd removes the file
    # to acknowledge the request
    kill_path = 'var/command/killfile'
    phlsys_fs.write_text_file(kill_path, '')

    # give arcyd a moment, then poll until the killfile is gone
    if os.path.isfile(kill_path):
        time.sleep(1)
    while os.path.isfile(kill_path):
        print('waiting for arcyd to remove killfile ..')
        time.sleep(1)

    # wait for Arcyd to not be running
    if phlsys_pid.is_running(pid):
        time.sleep(1)
    while phlsys_pid.is_running(pid):
        print('waiting for arcyd to exit')
        time.sleep(1)
    def test_A_command_with_working_directory(self):
        """Check CommandWithWorkingDirectory runs and restores the cwd."""
        working_dir = tempfile.mkdtemp()
        with phlsys_fs.chtmpdir_context():
            tmp_dir = os.getcwd()
            # install the helper script and make it executable
            pycat_script_path = os.path.join(tmp_dir, 'pycat.sh')
            phlsys_fs.write_text_file(pycat_script_path, _PYCAT_COMMAND)
            mode = os.stat(pycat_script_path).st_mode
            os.chmod(pycat_script_path, mode | stat.S_IEXEC)

            self.assertEqual(os.getcwd(), tmp_dir)
            command = phlsys_workingdircommand.CommandWithWorkingDirectory(
                pycat_script_path, working_dir)
            result = command('Alice')
            # [ A] command is executed correctly
            self.assertEqual('Hello Alice!\n', result)
            # [ A] working directory is restored after command execution
            self.assertEqual(os.getcwd(), tmp_dir)
def ensure_repo_ignoring(repo_path):
    """Make sure the .gitattributes override is set up.

    Note that this function will perform clean checkout of all files
    in the working copy from the index so any non-staged changes will
    be lost.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    repo = phlsys_git.Repo(repo_path)
    repo_attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)

    # Files in our working copy might have been 'smudged' by some
    # filters. After repo-wide attributes override is written those
    # smudged files might be considered as 'modified' because
    # appropriate clean filter is no longer applied.
    #
    # To fix that side effect we need to rebuild the working copy
    # after the attributes are modified.

    # check that any existing file is compatible with the new contents we will
    # write, i.e. it is a subset of the new content
    if os.path.exists(repo_attributes_path):
        contents = phlsys_fs.read_text_file(repo_attributes_path)
        lines = contents.splitlines()
        for l in lines:
            stripped = l.strip()
            if stripped and stripped not in _REPO_ATTRIBUTES_TUPLE:
                # we won't try to do any sort of merging, just escalate
                raise Exception(
                    "cannot merge attributes in existing file: {}".format(
                        repo_attributes_path))

    # the file is exactly one of the existing attributes, we can merge
    # correctly by overwriting it with our superset of attributes
    phlsys_fs.write_text_file(
        repo_attributes_path,
        _REPO_ATTRIBUTES_CONTENT)

    # overwrite working copy with files from index
    repo("checkout-index", "-afqu")
    def test_A_command_with_working_directory(self):
        """Check the wrapped command runs in its dir and cwd is restored."""
        working_dir = tempfile.mkdtemp()
        with phlsys_fs.chtmpdir_context():
            tmp_dir = os.getcwd()
            # install the helper script and make it executable
            pycat_script_path = os.path.join(tmp_dir, 'pycat.sh')
            phlsys_fs.write_text_file(pycat_script_path, _PYCAT_COMMAND)
            mode = os.stat(pycat_script_path).st_mode
            os.chmod(pycat_script_path, mode | stat.S_IEXEC)

            self.assertEqual(os.getcwd(), tmp_dir)
            command = phlsys_workingdircommand.CommandWithWorkingDirectory(
                pycat_script_path, working_dir)
            result = command('Alice')
            # [ A] command is executed correctly
            self.assertEqual('Hello Alice!\n', result)
            # [ A] working directory is restored after command execution
            self.assertEqual(os.getcwd(), tmp_dir)
def ensure_repo_ignoring(repo_path):
    """Make sure the .gitattributes override is set up.

    Note that this function will perform clean checkout of all files
    in the working copy from the index so any non-staged changes will
    be lost.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    repo = phlsys_git.Repo(repo_path)
    repo_attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)

    # Files in our working copy might have been 'smudged' by some
    # filters. After repo-wide attributes override is written those
    # smudged files might be considered as 'modified' because
    # appropriate clean filter is no longer applied.
    #
    # To fix that side effect we need to rebuild the working copy
    # after the attributes are modified.

    # check that any existing file is compatible with the new contents we will
    # write, i.e. it is a subset of the new content
    if os.path.exists(repo_attributes_path):
        contents = phlsys_fs.read_text_file(repo_attributes_path)
        lines = contents.splitlines()
        for l in lines:
            stripped = l.strip()
            if stripped and stripped not in _REPO_ATTRIBUTES_TUPLE:
                # we won't try to do any sort of merging, just escalate
                raise Exception(
                    "cannot merge attributes in existing file: {}".format(
                        repo_attributes_path))

    # the file is exactly one of the existing attributes, we can merge
    # correctly by overwriting it with our superset of attributes
    phlsys_fs.write_text_file(repo_attributes_path, _REPO_ATTRIBUTES_CONTENT)

    # overwrite working copy with files from index
    repo("checkout-index", "-afqu")
    def _create_config(self, rel_path, content, message):
        """Create and commit a new config file.

        :rel_path: the string relative path to the config file
        :content: the string contents of the new config file
        :message: the string commit message for the file
        :returns: None

        """
        # refuse to mix this commit with anything already staged
        if phlgit_diffindex.is_index_dirty(self._repo):
            raise Error("git index has staged changes")

        config_path = self._make_abspath(rel_path)
        if os.path.exists(config_path):
            raise Error("config already exists")

        phlsys_fs.write_text_file(config_path, content)
        self._repo('add', rel_path)
        phlgit_commit.index(self._repo, message)
Exemple #31
0
    def _create_config(self, rel_path, content, message):
        """Create and commit a new config file.

        :rel_path: the string relative path to the config file
        :content: the string contents of the new config file
        :message: the string commit message for the file
        :returns: None

        """
        # refuse to proceed if the commit would pick up unrelated staged work
        if phlgit_diffindex.is_index_dirty(self._repo):
            raise Error("git index has staged changes")

        path = self._make_abspath(rel_path)

        if os.path.exists(path):
            raise Error("config already exists")

        phlsys_fs.write_text_file(path, content)
        self._repo('add', rel_path)
        phlgit_commit.index(self._repo, message)
Exemple #32
0
    def __init__(self, root_dir, barc_cmd_path, arcyon_cmd_path, phab_uri,
                 alice, bob):
        """Set up a central bare repo, a web server and two worker clones.

        :root_dir: directory to create the fixture under
        :barc_cmd_path: path to the 'barc' command for the workers
        :arcyon_cmd_path: path to the 'arcyon' command for the workers
        :phab_uri: uri of the phabricator instance to use
        :alice: (user, email, certificate) tuple for the first worker
        :bob: (user, email, certificate) tuple for the second worker

        """
        self._root_dir = root_dir
        self.central_path = os.path.join(self._root_dir, 'central')
        os.makedirs(self.central_path)
        self._central_repo = phlsys_git.Repo(self.central_path)
        self._central_repo("init", "--bare")
        self.web_port = phlsys_web.pick_free_port()
        # enable the stock post-update hook by renaming the sample
        # (presumably so the repo can be served over http - TODO confirm)
        shutil.move(
            os.path.join(self.central_path, 'hooks/post-update.sample'),
            os.path.join(self.central_path, 'hooks/post-update'))

        # while this file exists, the pre-receive hook holds back refs
        self._command_hold_path = os.path.join(self.central_path,
                                               'command/hold_dev_arcyd_refs')

        # install the holding pre-receive hook and make it executable
        pre_receive_path = os.path.join(self.central_path, 'hooks/pre-receive')
        phlsys_fs.write_text_file(pre_receive_path,
                                  _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)
        mode = os.stat(pre_receive_path).st_mode
        os.chmod(pre_receive_path, mode | stat.S_IEXEC)

        self._web = phlsys_web.SimpleWebServer(self.central_path,
                                               self.web_port)

        self._workers = []
        for account in (alice, bob):
            account_user = account[0]
            account_email = account[1]
            account_cert = account[2]
            worker_path = os.path.join(self._root_dir, account_user)
            os.makedirs(worker_path)
            self._workers.append(
                atet_worker.Worker(phlsys_git.Repo(worker_path), worker_path,
                                   barc_cmd_path, account_user, account_email,
                                   account_cert, arcyon_cmd_path, phab_uri))
            self._workers[-1].setup(self._central_repo.working_dir)

            # the first worker seeds the central repo; later workers just
            # sync onto its 'master'
            if len(self._workers) == 1:
                self._workers[0].push_initial_commit()
            else:
                self._workers[-1].repo('checkout', 'master')
def ensure_repo_ignoring(repo_path):
    """Ensure the repo's attributes override contains our attributes.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    attrs_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)

    # check that any existing file is compatible with the new contents we
    # will write, i.e. it is a subset of the new content
    if os.path.exists(attrs_path):
        content = phlsys_fs.read_text_file(attrs_path)
        stripped_lines = (line.strip() for line in content.splitlines())
        if any(
                line and line not in _REPO_ATTRIBUTES_TUPLE
                for line in stripped_lines):
            # we won't try to do any sort of merging, just escalate
            raise Exception(
                "cannot merge attributes in existing file: {}".format(
                    attrs_path))

    # the file is exactly one of the existing attributes, we can merge
    # correctly by overwriting it with our superset of attributes
    phlsys_fs.write_text_file(attrs_path, _REPO_ATTRIBUTES_CONTENT)
def ensure_repo_ignoring(repo_path):
    """Write or refresh the attributes override for *repo_path*.

    :repo_path: repository to set up
    :returns: None

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)
    if os.path.exists(attributes_path):
        existing = phlsys_fs.read_text_file(attributes_path)
        if existing not in _REPO_ATTRIBUTES_TUPLE:
            # we won't try to do any sort of merging, just escalate
            raise Exception(
                "cannot ensure ignore attributes in existing file: {}".format(
                    attributes_path))
    # the file is absent or exactly one of the known attribute sets, so it
    # is safe to (over)write it with our superset of attributes
    phlsys_fs.write_text_file(attributes_path, _REPO_ATTRIBUTES_CONTENT)
    def test_D_UpdateInfoAttributes(self):
        """Check ensure_repo_ignoring() merges any subset of known lines.

        Write each combination of all-but-one of the known attribute lines
        (plus a blank line) into '.git/info/attributes' and check that
        ensure_repo_ignoring() does not raise.

        """

        all_attributes = list(phlgitx_ignoreattributes._REPO_ATTRIBUTES_TUPLE)
        all_attributes.append("")

        all_lines = itertools.combinations(
            all_attributes,
            len(all_attributes) - 1)

        for lines in all_lines:

            content = "\n".join(lines)
            print(content)
            print("---")
            with phlgitu_fixture.lone_worker_context() as worker:

                working_dir = worker.repo.working_dir
                attributes_path = os.path.join(
                    working_dir, '.git/info/attributes')
                phlsys_fs.write_text_file(attributes_path, content)

                # should not throw
                phlgitx_ignoreattributes.ensure_repo_ignoring(
                    worker.repo.working_dir)
 def git_fetch_counter(self, *args, **kwargs):
     """Wrap a repo call, counting 'fetch' invocations in the git dir.

     Increments the '.git/fetch_counter' file on every 'fetch', then
     delegates to the wrapped call.

     """
     # FIX: removed a leftover debug write of self.working_dir to the
     # hard-coded path '/tmp/1'; it leaked state outside the fixture and
     # raced between concurrent test runs
     fetch_counter_path = os.path.join(
         self.working_dir, ".git", "fetch_counter")
     if args and args[0] == "fetch":
         if not os.path.exists(fetch_counter_path):
             phlsys_fs.write_text_file(fetch_counter_path, "1")
         else:
             old_count = phlsys_fs.read_text_file(fetch_counter_path)
             new_count = str(int(old_count) + 1)
             phlsys_fs.write_text_file(fetch_counter_path, new_count)
     return old_call(self, *args, **kwargs)
 def git_fetch_counter(self, *args, **kwargs):
     """Wrap a repo call, counting 'fetch' invocations in the git dir.

     Increments the '.git/fetch_counter' file on every 'fetch', then
     delegates to the wrapped call.

     """
     # FIX: removed a leftover debug write of self.working_dir to the
     # hard-coded path '/tmp/1'; it leaked state outside the fixture and
     # raced between concurrent test runs
     fetch_counter_path = os.path.join(self.working_dir, '.git',
                                       'fetch_counter')
     if args and args[0] == 'fetch':
         if not os.path.exists(fetch_counter_path):
             phlsys_fs.write_text_file(fetch_counter_path, '1')
         else:
             old_count = phlsys_fs.read_text_file(fetch_counter_path)
             new_count = str(int(old_count) + 1)
             phlsys_fs.write_text_file(fetch_counter_path, new_count)
     return old_call(self, *args, **kwargs)
Exemple #38
0
 def hold_dev_arcyd_refs(self):
     """Write the hold-file that makes the pre-receive hook hold refs."""
     phlsys_fs.write_text_file(
         self._command_hold_path, _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)
def initialise_here():
    """Return a new default Accessor after initialising the current directory.

    :returns: a new Accessor, mounted at the current directory

    """
    layout = Layout()

    # the arcyd instance dir is itself a git repo, so its config is versioned
    phlsys_subprocess.run('git', 'init')
    repo = phlsys_git.Repo('.')

    # create filesystem hierarchy
    # NOTE(review): assumes phlsys_fs.write_text_file creates any missing
    # parent directories - confirm
    phlsys_fs.write_text_file(layout.arcydroot, 'this dir is an arcydroot')
    phlsys_fs.write_text_file('README', _README)
    phlsys_fs.write_text_file('var/README', _VAR_README)
    phlsys_fs.write_text_file('var/repo/README', _VAR_REPO_README)
    phlsys_fs.write_text_file('var/log/README', _VAR_LOG_README)
    phlsys_fs.write_text_file('var/status/README', _VAR_STATUS_README)
    phlsys_fs.write_text_file('var/command/README', _VAR_COMMAND_README)
    phlsys_fs.write_text_file('var/run/README', _VAR_RUN_README)

    repo.call('add', '.')
    # 'var' holds runtime state only; keep it out of version control
    phlsys_fs.write_text_file('.gitignore', 'var\n')
    repo.call('add', '.')
    phlgit_commit.index(repo, 'Initialised new Arcyd instance')

    return Accessor(Layout(), '.')
Exemple #40
0
def initialise_here():
    """Return a new default Accessor after initialising the current directory.

    :returns: a new Accessor, mounted at the current directory

    """
    layout = Layout()

    # the arcyd instance dir is itself a git repo, so its config is versioned
    phlsys_subprocess.run('git', 'init')
    repo = phlsys_git.Repo('.')

    # create filesystem hierarchy
    # NOTE(review): assumes phlsys_fs.write_text_file creates any missing
    # parent directories - confirm
    phlsys_fs.write_text_file(layout.arcydroot, 'this dir is an arcydroot')
    phlsys_fs.write_text_file('README', _README)
    phlsys_fs.write_text_file('config/README', _CONFIG_README)
    phlsys_fs.write_text_file(
        'config/phabricator/README', _CONFIG_PHABRICATOR_README)
    phlsys_fs.write_text_file(
        'config/repository/README', _CONFIG_REPOSITORY_README)
    phlsys_fs.write_text_file('var/README', _VAR_README)
    phlsys_fs.write_text_file('var/repo/README', _VAR_REPO_README)
    phlsys_fs.write_text_file('var/log/README', _VAR_LOG_README)
    phlsys_fs.write_text_file('var/status/README', _VAR_STATUS_README)
    phlsys_fs.write_text_file('var/command/README', _VAR_COMMAND_README)
    phlsys_fs.write_text_file('var/run/README', _VAR_RUN_README)

    repo('add', '.')
    # 'var' holds runtime state only; keep it out of version control
    phlsys_fs.write_text_file('.gitignore', 'var\n')
    repo('add', '.')
    phlgit_commit.index(repo, 'Initialised new Arcyd instance')

    return Accessor(Layout(), '.')
 def hold_dev_arcyd_refs(self):
     """Write the hold-file checked by the pre-receive hook (see
     _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)."""
     phlsys_fs.write_text_file(
         self._command_hold_path,
         _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)