Esempio n. 1
0
 def test_ensure_bag_path_exists(self):
     """Saving an existing directory via ensure_bag_path_exists keeps both paths on disk."""
     logger.info(self.getTestHeader('ensure bag path exists, save existing'))
     try:
         relocated = bdb.ensure_bag_path_exists(self.test_bag_dir)
         # Both the (re-created) bag dir and the saved copy must exist.
         self.assertTrue(ospe(self.test_bag_dir), "Bag directory does not exist")
         self.assertTrue(ospe(relocated), "Saved bag path does not exist")
     except Exception as exc:
         self.fail(get_typed_exception(exc))
Esempio n. 2
0
 def test_extract_bag_archive_zip_with_relocate_existing(self):
     """Extracting the same zip twice (temp=False) relocates the first extraction."""
     logger.info(self.getTestHeader('extract bag zip format, relocate existing'))
     try:
         archive = ospj(self.test_archive_dir, 'test-bag.zip')
         # First extraction creates the bag in place.
         extracted = bdb.extract_bag(archive, temp=False)
         self.assertTrue(ospe(extracted))
         self.assertTrue(bdb.is_bag(extracted))
         # Second extraction must move the pre-existing directory aside.
         extracted = bdb.extract_bag(archive, temp=False)
         self.assertTrue(ospe(extracted))
         self.assertTrue(bdb.is_bag(extracted))
         bdb.cleanup_bag(os.path.dirname(extracted))
         # The relocation must have been logged.
         self.assertExpectedMessages(["moving existing directory"],
                                     self.stream.getvalue())
     except Exception as exc:
         self.fail(get_typed_exception(exc))
Esempio n. 3
0
 def test_revert_bag(self):
     """Reverting a bag removes all bag metadata files and restores the payload layout.

     Fixes the original test, which asserted 'manifest-sha1.txt' twice and
     never covered the remaining manifests consistently; the checks are now
     driven by a single de-duplicated list of expected-absent files.
     """
     logger.info(self.getTestHeader('revert bag'))
     try:
         bdb.revert_bag(self.test_bag_dir)
         # Every bagit/manifest/tagmanifest file must be gone after revert.
         for bag_file in ('bag-info.txt',
                          'bagit.txt',
                          'manifest-md5.txt',
                          'manifest-sha1.txt',
                          'manifest-sha256.txt',
                          'manifest-sha512.txt',
                          'tagmanifest-md5.txt',
                          'tagmanifest-sha1.txt',
                          'tagmanifest-sha256.txt',
                          'tagmanifest-sha512.txt'):
             self.assertFalse(ospif(ospj(self.test_bag_dir, bag_file)))
         # The original payload files must be back at the top level...
         self.assertTrue(ospif(ospj(self.test_bag_dir, 'README.txt')))
         self.assertTrue(
             ospif(ospj(self.test_bag_dir, ospj('test1', 'test1.txt'))))
         self.assertTrue(
             ospif(ospj(self.test_bag_dir, ospj('test2', 'test2.txt'))))
         # ...and the bag's "data" directory must no longer exist.
         self.assertFalse(ospe(ospj(self.test_bag_dir, 'data')))
     except Exception as e:
         self.fail(bdbag.get_named_exception(e))
Esempio n. 4
0
 def test_ensure_bag_path_exists_delete_existing(self):
     """ensure_bag_path_exists(save=False) recreates the bag dir without saving a copy."""
     logger.info(self.getTestHeader('ensure bag path exists, delete existing'))
     try:
         # save=False: any prior directory is discarded rather than relocated.
         bdb.ensure_bag_path_exists(self.test_bag_dir, save=False)
         self.assertTrue(ospe(self.test_bag_dir), "Bag directory does not exist")
     except Exception as exc:
         self.fail(get_typed_exception(exc))
Esempio n. 5
0
 def test_cleanup_bag(self):
     """cleanup_bag removes the bag directory entirely."""
     logger.info(self.getTestHeader('cleanup bag'))
     try:
         bdb.cleanup_bag(self.test_bag_dir)
         self.assertFalse(ospe(self.test_bag_dir), "Failed to cleanup bag directory")
     except Exception as exc:
         self.fail(get_typed_exception(exc))
Esempio n. 6
0
 def test_extract_bag_archive_tar(self):
     """A tar archive extracts (temp=True) into a valid bag that can be cleaned up."""
     logger.info(self.getTestHeader('extract bag tar format'))
     try:
         extracted = bdb.extract_bag(
             ospj(self.test_archive_dir, 'test-bag.tar'), temp=True)
         self.assertTrue(ospe(extracted))
         self.assertTrue(bdb.is_bag(extracted))
         # Remove the temporary extraction root.
         bdb.cleanup_bag(os.path.dirname(extracted))
     except Exception as exc:
         self.fail(get_typed_exception(exc))
Esempio n. 7
0
 def test_extract_bag_archive_tar(self):
     """A tar archive extracts (temp=True) into a valid bag that can be cleaned up."""
     logger.info(self.getTestHeader('extract bag tar format'))
     try:
         extracted = bdb.extract_bag(
             ospj(self.test_archive_dir, 'test-bag.tar'), temp=True)
         self.assertTrue(ospe(extracted))
         self.assertTrue(bdb.is_bag(extracted))
         # Remove the temporary extraction root.
         bdb.cleanup_bag(os.path.dirname(extracted))
     except Exception as exc:
         self.fail(bdbag.get_named_exception(exc))
def merge_settings_file(fpath, D, notes):
    """Merge an INI-style settings file at *fpath* into the nested dict *D*.

    Each section becomes (or updates) a sub-dict of D keyed by section name,
    mapping option names to their raw string values.  The file's basename is
    recorded in *notes* (basename -> full path).  A missing file is silently
    ignored.

    :param fpath: path to the UTF-8 encoded INI file
    :param D: dict of dicts to merge parsed sections into (mutated in place)
    :param notes: dict recording which settings files were read (mutated)
    """
    if not ospe(fpath):
        return
    notes[os.path.split(fpath)[1]] = fpath
    config = ConfigParser.RawConfigParser()
    # Context-manage the handle: the previous readfp(codecs.open(...)) form
    # leaked the open file, and readfp() itself was removed from configparser
    # in Python 3.12 (read_file() is its replacement since 3.2).
    with codecs.open(fpath, 'r', 'utf-8') as cfg_file:
        config.read_file(cfg_file)
    for s in config.sections():
        D[s] = D.get(s, {})
        for o in config.options(s):
            D[s][o] = config.get(s, o)
def setup(app):
    """Sphinx extension entry point.

    Reads optional JSON side files (``buildsettings.json``, ``gitloginfo.json``)
    from the Sphinx config directory into the module-level working dict ``wd``
    (defined elsewhere in this module), then registers event handlers only when
    git file metadata is actually present.

    :param app: the Sphinx application object
    :returns: extension metadata dict (parallel-safe flags and version)
    """
    app.require_sphinx('1.8')  # For "config-inited" event

    buildsettings_jsonfile = ospj(app.confdir, 'buildsettings.json')
    if ospe(buildsettings_jsonfile):
        # just collect knowledge
        wd['buildsettings_jsonfile'] = buildsettings_jsonfile
        with io.open(buildsettings_jsonfile, 'r', encoding='utf-8') as f1:
            wd['buildsettings'] = json.load(f1)
        log.info("[%s] app.confdir/buildsettings.json :: found" % (__name__,))
    else:
        log.info("[%s] app.confdir/buildsettings.json :: not found" % (__name__,))

    gitloginfo_jsonfile = ospj(app.confdir, 'gitloginfo.json')
    if ospe(gitloginfo_jsonfile):
        # just collect knowledge
        wd['gitloginfo_jsonfile'] = gitloginfo_jsonfile
        with io.open(gitloginfo_jsonfile, 'r', encoding='utf-8') as f1:
            wd['gitloginfo'] = json.load(f1)
        log.info("[%s] app.confdir/gitloginfo.json :: found" % (__name__,))

        # Per-file git metadata; empty dict when the key is absent.
        wd['filedata'] = wd['gitloginfo'].get('filedata', {})
        # Project path relative to the repo root: strip the repo abspath
        # prefix from the project abspath, then trim surrounding slashes.
        wd['project_offset'] = (wd['gitloginfo']['abspath_to_project']
                                [len(wd['gitloginfo']['abspath_to_repo']):]
                                .strip('/'))
        # Documentation subdir; buildsettings may override the default.
        wd['t3docdir'] = wd.get('buildsettings', {}).get('t3docdir',
                                                         'Documentation')
    else:
        log.info("[%s] app.confdir/gitloginfo.json :: not found" % (__name__,))
    if wd.get('filedata'):
        # only connect if there is something to do
        app.connect('html-page-context', _html_page_context)
        app.connect('config-inited', _config_inited)
        log.info("[%s] filedata found" % (__name__,))
    else:
        log.info("[%s] filedata not found" % (__name__,))

    return {
        'parallel_read_safe': True,
        'parallel_write_safe': True,
        'version': __version__,
    }
Esempio n. 10
0
def makehtml():
    """Run the configured HTML build command, capturing stdout/stderr to files.

    Ensures the expected output directories exist, writes the exact command
    (one argument per continuation line) to cmd_fpath for later inspection,
    then executes the command with output redirected to stdout_fpath and
    stderr_fpath.  Returns the subprocess exit code.
    """
    for directory in dirs_to_create:
        if not ospe(directory):
            os.makedirs(directory)
    args = shlex.split(makehtml_cmd)
    # Persist the command so it can be displayed later.
    with open(cmd_fpath, 'w') as cmd_file:
        cmd_file.write(' \\\n'.join(args) + '\n')
    with open(stdout_fpath, 'wb') as out_file:
        with open(stderr_fpath, 'wb') as err_file:
            return call(args,
                        stdout=out_file,
                        stderr=err_file,
                        shell=False,
                        cwd=cwd)
Esempio n. 11
0
def prelines_of_file(fpath):
    """Return the contents of *fpath* as a list of HTML-escaped lines.

    The file body is framed by START/END marker lines and preceded by the
    file's basename; if the file does not exist, a single note line is
    returned instead.
    """
    atext = "Showing file: '%s'" % fpath
    if not ospe(fpath):
        return [_quote_html(atext + ' - but no, it does not exist.')]
    result = [
        _quote_html(atext + ' - and yes, it does exist.'),
        '',
        _quote_html('=========================================='),
        _quote_html(fpath.split('/')[-1]),
        _quote_html('=============== File START ==============='),
    ]
    # Decode as UTF-8, substituting undecodable bytes rather than failing.
    with codecs.open(fpath, 'r', 'utf-8', 'replace') as src:
        result.extend(_quote_html(line.rstrip()) for line in src)
    result.append(_quote_html('=============== File E N D ==============='))
    return result
Esempio n. 12
0
    def do_GET(self):
        """Handle a GET request by dispatching on the URL path.

        Known paths trigger a rebuild ('/makehtml', '/makehtml_no_cache'),
        show a captured file ('/show_cmd', '/show_stdout', '/show_stderr',
        '/show_warnings'), or stop the server ('/shutdown'); any other path
        answers 501 with debug info about the request.  The response body is
        the status message lines plus an optional <pre> block of file content.
        """
        # Shared request counter lives in the module-level GD dict.
        GD['request_counter'] += 1
        msglines = []
        prelines = []
        msglines.append('request: %s' % GD['request_counter'])
        parsed_path = urlparse.urlparse(self.path)
        msglines.append('%s' % parsed_path.path)
        http_response_code = 200
        if parsed_path.path == '/makehtml_no_cache':
            # Wipe previous results and caches, then rebuild from scratch.
            exitcode = call(
                shlex.split('rm -rf /RESULT/Result /RESULT/Cache ' + logdir))
            if exitcode:
                # Cleanup itself failed: stop the server and exit hard.
                http_response_code = 503
                server.shutdown()
                sys.exit(1)
            atime = time.time()
            exitcode = makehtml()
            btime = time.time()
            # Build duration with millisecond precision.
            duration = '%3.3f' % round(btime - atime, 3)
            if exitcode:
                # Service Unavailable
                http_response_code = 503
                msglines.append(
                    '%s, %s, exitcode: %s, took: %s seconds' %
                    (http_response_code, 'failed', exitcode, duration))
            else:
                msglines.append(
                    '%s, %s, exitcode: %s, took: %s seconds' %
                    (http_response_code, 'succeeded', exitcode, duration))
            prelines = prelines_of_file(warnings_fpath)

        elif parsed_path.path == '/makehtml':
            # Incremental rebuild: truncate capture files, keep caches.
            for afile in files_to_empty_at_start:
                if ospe(afile):
                    with open(afile, 'wb') as f2:
                        pass
            atime = time.time()
            exitcode = makehtml()
            btime = time.time()
            duration = '%3.3f' % round(btime - atime, 3)
            if exitcode:
                http_response_code = 503
                msglines.append(
                    '%s, %s, exitcode: %s, took: %s seconds' %
                    (http_response_code, 'failed', exitcode, duration))
            else:
                msglines.append(
                    '%s, %s, exitcode: %s, took: %s seconds' %
                    (http_response_code, 'succeeded', exitcode, duration))
            prelines = prelines_of_file(warnings_fpath)

        elif parsed_path.path == '/show_cmd':
            prelines = prelines_of_file(cmd_fpath)

        elif parsed_path.path == '/show_stdout':
            prelines = prelines_of_file(stdout_fpath)

        elif parsed_path.path == '/show_stderr':
            prelines = prelines_of_file(stderr_fpath)

        elif parsed_path.path == '/show_warnings':
            prelines = prelines_of_file(warnings_fpath)

        elif parsed_path.path == '/shutdown':
            server.shutdown()

        else:
            # Unknown path: 501 plus a dump of request/server details.
            http_response_code = 501
            msglines.append(
                '%s, %s' %
                (http_response_code, 'unknown action, showing debug info'))
            prelines = [
                'CLIENT VALUES:',
                'client_address=%s (%s)' %
                (self.client_address, self.address_string()),
                'command=%s' % self.command,
                'path=%s' % self.path,
                'real path=%s' % parsed_path.path,
                'query=%s' % parsed_path.query,
                'request_version=%s' % self.request_version,
                '',
                'SERVER VALUES:',
                'server_version=%s' % self.server_version,
                'sys_version=%s' % self.sys_version,
                'protocol_version=%s' % self.protocol_version,
                'thread=%s' % threading.currentThread().getName(),
                '',
                'HEADERS RECEIVED:',
            ]
            for name, value in sorted(self.headers.items()):
                prelines.append('%s=%s' % (name, value.rstrip()))
            prelines.append('')
            prelines = [_quote_html(aline.rstrip()) for aline in prelines]

        self.send_response(http_response_code)
        self.send_header('content-type', 'text/html')
        self.end_headers()

        # Navigation links back to every endpoint, built against the
        # Host header the client used.
        host = self.headers['host']
        links = []
        links.append('<a href="http://%s/makehtml">/makehtml</a>' % host)
        links.append(
            '<a href="http://%s/makehtml_no_cache">makehtml_no_cache</a>' %
            host)
        links.append('<a href="http://%s/show_warnings">/show_warnings</a>' %
                     host)
        links.append('<a href="http://%s/show_stderr">/show_stderr</a>' % host)
        links.append('<a href="http://%s/show_stdout">/show_stdout</a>' % host)
        links.append('<a href="http://%s/show_cmd">/show_cmd</a>' % host)
        links.append('<a href="http://%s/debug_info">/debug_info</a>' % host)
        links.append('<a href="http://%s/shutdown">/shutdown</a>' % host)
        msglines.append('<br>| ' + ' | '.join(links) + ' |<br>')

        message2 = ''
        if prelines:
            # Wrap file content in <pre> and terminate each line with CRLF.
            prelines.insert(0, '<pre>')
            prelines.append('</pre>')
            prelines = ['%s\r\n' % aline for aline in prelines]

        message = '<br>\r\n'.join(msglines)
        message2 = ''.join(prelines)
        # NOTE(review): writing str (not bytes) — this handler presumably
        # targets Python 2 (see urlparse usage above); confirm before porting.
        self.wfile.write(message + message2)
        return
# Fill in epub/htmlhelp settings only where the user has not already set them.
if 'epub_author' not in G: epub_author = t3author
if 'epub_copyright' not in G: epub_copyright = copyright
if 'epub_publisher' not in G: epub_publisher = t3author
if 'epub_title' not in G: epub_title = project

if 'htmlhelp_basename' not in G: htmlhelp_basename = t3shortname
#
#
#
#
# (7) Settings from Settings.json
#
# Now: Everything can be overridden. Use at your own risk!
#
if ospe(settingsjsonabspath):
    with codecs.open(settingsjsonabspath, 'r', 'utf-8') as f1:
        D = json.load(f1)
    # ATTENTION:
    # everything you have in the "general": {k:v} section of Settings.json
    # is treated as if you had written 'k = v' here in conf.py
    globals().update(D.get('general', {}))

    # extensions are ADDED, never removed
    for ext in D.get('extensions', []):
        if ext not in extensions:
            extensions.append(ext)

    # extlinks entries are merged (added or updated)
    extlinks.update(D.get('extlinks', {}))
Esempio n. 14
0
def rebuild():
    """Run the dockrun_t3rd documentation build and capture its output.

    Looks for an optional jobfile (versioned first, then unversioned) to
    derive the build action and jobfile option, removes stale capture files,
    runs the build commands through zsh, and — when the generated-docs
    directory exists — writes stdout, stderr and the exit code to files.
    Returns the CompletedProcess of the build shell.

    Fixes: json.load() was called with encoding='utf-8', which raises
    TypeError on Python 3.9+ (the kwarg was removed; binary input is decoded
    automatically).  The disabled debug blocks below referenced w01path
    before it was defined and printed the wrong process's stdout.
    """
    if debug:
        print('rebuilding...')

    jobfile_data = {}
    jobfile_option = ''
    for f1path in [
            'Documentation/jobfile.json',
            'Documentation/jobfile-NOT_VERSIONED.json'
    ]:
        if os.path.exists(f1path):
            jobfile_option = ' -c jobfile /PROJECT/' + f1path
            with open(f1path, 'rb') as f1:
                # json.load() decodes UTF-8/16/32 binary input itself.
                jobfile_data = json.load(f1)
            break
    action = jobfile_data.get('dockrun_t3rd', {}).get('action',
                                                      'makehtml-no-cache')
    jobfile_option = jobfile_data.get('dockrun_t3rd',
                                      {}).get('jobfile_option', jobfile_option)
    final_commands = shell_commands + f"dockrun_t3rd {action} {jobfile_option}\n"

    # Remove stale capture files so leftovers never mask a failed run.
    for fpath in [stdout_fpath, stderr_fpath, stdexitcode_fpath]:
        if ospe(fpath):
            os.remove(fpath)
    # cp means: completedProcess
    cp = run(['/bin/zsh'],
             cwd='.',
             stdout=PIPE,
             stderr=PIPE,
             input=final_commands,
             encoding='utf-8',
             errors='replace')
    if ospe('Documentation-GENERATED-temp'):
        if cp.stdout:
            with open(stdout_fpath, 'w', encoding='utf-8') as f2:
                print(cp.stdout, file=f2)
        if cp.stderr:
            with open(stderr_fpath, 'w', encoding='utf-8') as f2:
                print(cp.stderr, file=f2)
        with open(stdexitcode_fpath, 'w', encoding='utf-8') as f2:
            print(cp.returncode, file=f2)

    # --- disabled (if 0) warnings-filtering scaffolding, kept for reference ---
    w00path = 'Documentation-GENERATED-temp/Result/project/0.0.0/_buildinfo/warnings.txt'
    # Defined up front so the second disabled block cannot hit a NameError.
    w01path = w00path[:-4] + '-01.txt'
    if 0 and ospe(w00path):
        cp00 = runargs([
            '/bin/grep', '-E', '-e',
            "'WARNING.+class.+reference target not found'", w00path
        ])
        print(cp00)
        sys.exit()
        if cp00.stdout:
            with open(w01path, 'w', encoding='utf-8') as f2:
                # was cp.stdout: that is the build shell, not the grep above
                print(cp00.stdout, file=f2)
    if 0 and ospe(w01path):
        w02path = w00path[:-4] + '-02.txt'
        cp = runzsh('grep -E -e toctree ' + w01path)
        print(cp)
        if cp.stdout:
            with open(w02path, 'w', encoding='utf-8') as f2:
                print(cp.stdout, file=f2)

    return cp