Example #1
 def pathoc(
     self,
     specs,
     timeout=None,
     connect_to=None,
     ssl=None,
     ws_read_limit=None,
     use_http2=False,
 ):
     """
         Returns a (messages, text log) tuple.
     """
     if ssl is None:
         ssl = self.ssl
     logfp = StringIO()
     c = pathoc.Pathoc(
         ("localhost", self.d.port),
         ssl=ssl,
         ws_read_limit=ws_read_limit,
         timeout=timeout,
         fp=logfp,
         use_http2=use_http2,
     )
     with c.connect(connect_to):
         ret = []
         for i in specs:
             resp = c.request(i)
             if resp:
                 ret.append(resp)
         for frm in c.wait():
             ret.append(frm)
         c.stop()
         return ret, logfp.getvalue()
Example #2
 def test_no_historical(self):
     out = StringIO()
     with replace_registry():
         management.call_command(self.command_name, auto=True,
                                 stdout=out)
     self.assertIn(populate_history.Command.NO_REGISTERED_MODELS,
                   out.getvalue())
Example #3
def test_push(empty_db, newstatus):
    db = empty_db
    input = remove_email(inv_pre) if db.member_class else inv_pre
    db.import_invitations(input, allfields=True, format='json')
    input = copy.deepcopy(inv_down)
    input['data'] = input['data'][2:]
    input = StringIO(json.dumps(input))
    output = StringIO()
    # ignore registering
    msg = dict(format='member', version=(1, 0), status='', uuid='uid06')
    for status in ('registering', 'new'):
        msg['status'] = status
        assert not InvitationDatabase.process_update(db, msg, input, output)
        assert not output.getvalue()
    # registered leads to sync
    msg['status'] = newstatus
    assert InvitationDatabase.process_update(db, msg, input, output)
    output = json.loads(output.getvalue())
    assert output
    up = copy.deepcopy(inv_up)
    up['data'] = up['data'][2:6]
    output['data'] = sort_by_uuid(output['data'])
    assert output == up
    # not again
    assert not db.process_update(msg, input, output)
Example #4
File: util.py Project: gyenney/Tools
class _MiniPPrinter(object):
    def __init__(self):
        self._out = StringIO()
        self.indentation = 0

    def text(self, text):
        self._out.write(text)

    def breakable(self, sep=" "):
        self._out.write(sep)

    def begin_group(self, _, text):
        self.text(text)

    def end_group(self, _, text):
        self.text(text)

    def pretty(self, obj):
        if hasattr(obj, "_repr_pretty_"):
            obj._repr_pretty_(self, False)
        else:
            self.text(repr(obj))

    def getvalue(self):
        return self._out.getvalue()
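
A minimal usage sketch (not part of the original util.py): _MiniPPrinter implements just enough of the IPython pretty-printer protocol (text, breakable, begin_group, end_group) to drive an object's _repr_pretty_ hook. The _Point class below is a hypothetical example.

from io import StringIO  # required by _MiniPPrinter above

class _Point(object):
    def _repr_pretty_(self, p, cycle):
        p.begin_group(1, "Point(")
        p.text("x=1")
        p.breakable()
        p.text("y=2")
        p.end_group(1, ")")

printer = _MiniPPrinter()
printer.pretty(_Point())
assert printer.getvalue() == "Point(x=1 y=2)"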
Example #5
    def test_item_not_in_defaults(self):

        _stdout = StringIO()

        class RootController(object):
            @expose()
            def index(self):
                return 'Hello, World!'

        app = TestApp(
            make_app(
                RootController(),
                hooks=lambda: [
                    RequestViewerHook(
                        config={'items': ['date']}, writer=_stdout
                    )
                ]
            )
        )
        response = app.get('/')

        out = _stdout.getvalue()

        assert response.status_int == 200
        assert response.body == b_('Hello, World!')
        assert 'date' in out
        assert 'method' not in out
        assert 'status' not in out
        assert 'method' not in out
        assert 'params' not in out
        assert 'hooks' not in out
        assert '200 OK' not in out
        assert "['RequestViewerHook']" not in out
        assert '/' not in out
Example #6
    def test_debug_print_option_totals_color(self):

        prob = Problem()
        prob.model = FanInGrouped()
        prob.model.linear_solver = LinearBlockGS()
        prob.model.sub.linear_solver = LinearBlockGS()

        prob.model.add_design_var('iv.x1', parallel_deriv_color='par_dv')
        prob.model.add_design_var('iv.x2', parallel_deriv_color='par_dv')
        prob.model.add_design_var('iv.x3')
        prob.model.add_objective('c3.y')

        prob.driver.options['debug_print'] = ['totals']

        prob.setup(check=False, mode='fwd')
        prob.set_solver_print(level=0)
        prob.run_driver()

        indep_list = ['iv.x1', 'iv.x2', 'iv.x3']
        unknown_list = ['c3.y']

        stdout = sys.stdout
        strout = StringIO()
        sys.stdout = strout
        try:
            _ = prob.compute_totals(unknown_list, indep_list, return_format='flat_dict',
                                    debug_print=not prob.comm.rank)
        finally:
            sys.stdout = stdout

        output = strout.getvalue()

        if not prob.comm.rank:
            self.assertTrue('Solving color: par_dv (iv.x1, iv.x2)' in output)
            self.assertTrue('Solving variable: iv.x3' in output)
Example #7
    def test_bad_response_from_app(self):
        """When exceptions are raised the hook deals with them properly"""

        _stdout = StringIO()

        class RootController(object):
            @expose()
            def index(self):
                return 'Hello, World!'

        app = TestApp(
            make_app(
                RootController(), hooks=lambda: [
                    RequestViewerHook(writer=_stdout)
                ]
            )
        )
        response = app.get('/404', expect_errors=True)

        out = _stdout.getvalue()

        assert response.status_int == 404
        assert 'path' in out
        assert 'method' in out
        assert 'status' in out
        assert 'method' in out
        assert 'params' in out
        assert 'hooks' in out
        assert '404 Not Found' in out
        assert "['RequestViewerHook']" in out
        assert '/' in out
Example #8
    def test_single_blacklist_item(self):

        _stdout = StringIO()

        class RootController(object):
            @expose()
            def index(self):
                return 'Hello, World!'

        app = TestApp(
            make_app(
                RootController(),
                hooks=lambda: [
                    RequestViewerHook(
                        config={'blacklist': ['/']}, writer=_stdout
                    )
                ]
            )
        )
        response = app.get('/')

        out = _stdout.getvalue()

        assert response.status_int == 200
        assert response.body == b_('Hello, World!')
        assert out == ''
Example #9
 def assemble_csv(self, queryset):
     headings = [
         'comment',
         'currently_own',
         'expect_to_buy',
         'email',
         'is_helpful',
         'page',
         'referrer',
         'submitted_on',
         'language'
     ]
     csvfile = StringIO()
     writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
     writer.writerow([field for field in headings])
     for feedback in queryset:
         feedback.submitted_on = "{}".format(feedback.submitted_on.date())
         feedback.comment = feedback.comment.encode('utf-8')
         if feedback.referrer is not None:
             feedback.referrer = feedback.referrer.encode('utf-8')
         writer.writerow(
             ["{}".format(getattr(feedback, heading))
              for heading in headings]
         )
     return csvfile.getvalue()
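
Hedged note: under Python 3, io.StringIO holds text and csv.writer accepts str directly, so the .encode('utf-8') calls above are a Python 2 holdover. A stripped-down sketch of the same pattern, using hypothetical dict rows instead of model instances:

import csv
from io import StringIO

def rows_to_csv(rows, headings):
    # rows: iterable of dicts keyed by the heading names (assumption)
    csvfile = StringIO()
    writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
    writer.writerow(headings)
    for row in rows:
        writer.writerow(["{}".format(row.get(h, '')) for h in headings])
    return csvfile.getvalue()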
Example #10
    def test_basic_single_default_hook(self):

        _stdout = StringIO()

        class RootController(object):
            @expose()
            def index(self):
                return 'Hello, World!'

        app = TestApp(
            make_app(
                RootController(), hooks=lambda: [
                    RequestViewerHook(writer=_stdout)
                ]
            )
        )
        response = app.get('/')

        out = _stdout.getvalue()

        assert response.status_int == 200
        assert response.body == b_('Hello, World!')
        assert 'path' in out
        assert 'method' in out
        assert 'status' in out
        assert 'method' in out
        assert 'params' in out
        assert 'hooks' in out
        assert '200 OK' in out
        assert "['RequestViewerHook']" in out
        assert '/' in out
Example #11
    def test_write_json(self):
        n = Namespace(doc='top')
        n.add_option(
            'aaa',
            '2011-05-04T15:10:00',
            'the a',
            short_form='a',
            from_string_converter=datetime_from_ISO_string
        )

        c = ConfigurationManager(
            [n],
            use_admin_controls=True,
            use_auto_help=False,
            argv_source=[]
        )

        out = StringIO()
        c.write_conf(for_json, opener=stringIO_context_wrapper(out))
        received = out.getvalue()
        out.close()
        jrec = json.loads(received)

        expect_to_find = {
            "short_form": "a",
            "default": "2011-05-04T15:10:00",
            "doc": "the a",
            "value": "2011-05-04T15:10:00",
            "from_string_converter":
            "configman.datetime_util.datetime_from_ISO_string",
            "name": "aaa"
        }
        for key, value in expect_to_find.items():
            self.assertEqual(jrec['aaa'][key], value)
Example #12
 def __str__(self):
     from six.moves import cStringIO as StringIO
     temp_file = StringIO()
     for order in self:
         temp_file.write("%s\n" % str(order))
     #temp_file.write("%s\n" % str(self.head_order))
     return temp_file.getvalue()
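
An equivalent formulation, offered only as a sketch: when the pieces are simply concatenated, str.join avoids the intermediate StringIO buffer.

def __str__(self):
    # join one line per order, preserving the original "%s\n" formatting
    return "".join("%s\n" % str(order) for order in self)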
Example #13
    def get_input(self, filepath, buffer):
        """Parse received input options. If buffer is not 'f', it reads input
        data from the input buffer; otherwise it opens the file specified in
        filepath.

        Args:
            filepath (str): path to the file to read from to retrieve data
            buffer (str): if == 't' reads data from input buffer

        Returns:
            string read from filepath/buffer
        """

        if buffer != 'f':
            filepath = StringIO(sys.stdin.read())
        elif filepath is None:
            msg = "No Input! Please specify --source_filename or --buffer t"
            raise IOError(msg)
        else:
            if filepath.lower().startswith('http'):
                # Create a request for the given URL.
                request = urllib2.Request(filepath)
                data = get_data_from_url(request)
                self.last_byte = len(data)

            else:
                filepath = open(filepath, 'r').read()
                self.last_byte = len(filepath)
                filepath = StringIO(filepath)

        source = pd.read_csv(filepath)
        return source
Example #14
File: debug.py Project: adamchainz/pecan
        def __call__(self, environ, start_response):
            try:
                return self.app(environ, start_response)
            except Exception as exc:
                # get a formatted exception
                out = StringIO()
                print_exc(file=out)
                LOG.exception(exc)

                # get formatted WSGI environment
                formatted_environ = pformat(environ)

                # render our template
                result = debug_template.render(
                    traceback=out.getvalue(),
                    environment=formatted_environ
                )

                # construct and return our response
                response = Response()
                if isinstance(exc, HTTPException):
                    response.status_int = exc.status
                else:
                    response.status_int = 500
                response.unicode_body = result
                return response(environ, start_response)
Example #15
def make_table(items):
    if isinstance(items, dict):
        # dict.items() returns a view in Python 3, so sort via sorted()
        items = sorted(items.items())
    rows = []
    i = 0
    for name, value in items:
        i += 1
        out = StringIO()
        try:
            pprint.pprint(value, out)
        except Exception as e:
            print('Error: %s' % e, file=out)
        value = html_quote(out.getvalue())
        if len(value) > 100:
            # @@: This can actually break the HTML :(
            # should I truncate before quoting?
            orig_value = value
            value = value[:100]
            value += '<a class="switch_source" style="background-color: #999" href="#" onclick="return expandLong(this)">...</a>'
            value += '<span style="display: none">%s</span>' % orig_value[100:]
        value = formatter.make_wrappable(value)
        if i % 2:
            attr = ' class="even"'
        else:
            attr = ' class="odd"'
        rows.append('<tr%s style="vertical-align: top;"><td>'
                    '<b>%s</b></td><td style="overflow: auto">%s</td></tr>'
                    % (attr, html_quote(name),
                       preserve_whitespace(value, quote=False)))
    return '<table>%s</table>' % (
        '\n'.join(rows))
Example #16
 def test_stdlib_methods_support(self, method):
     """
     PrintLogger implements methods of stdlib loggers.
     """
     sio = StringIO()
     getattr(PrintLogger(sio), method)('hello')
     assert 'hello' in sio.getvalue()
Example #17
 def test_find_replace_enable(self):
     db_file = os.path.join(self.testdir, 'hash.db')
     broker = ContainerBroker(db_file)
     broker.account = 'a'
     broker.container = 'c'
     broker.initialize()
     ts = utils.Timestamp.now()
     broker.merge_items([
         {'name': 'obj%02d' % i, 'created_at': ts.internal, 'size': 0,
          'content_type': 'application/octet-stream', 'etag': 'not-really',
          'deleted': 0, 'storage_policy_index': 0,
          'ctype_timestamp': ts.internal, 'meta_timestamp': ts.internal}
         for i in range(100)])
     out = StringIO()
     err = StringIO()
     with mock.patch('sys.stdout', out), mock.patch('sys.stderr', err):
         with mock_timestamp_now() as now:
             main([broker.db_file, 'find_and_replace', '10', '--enable'])
     expected = [
         'No shard ranges found to delete.',
         'Injected 10 shard ranges.',
         'Run container-replicator to replicate them to other nodes.',
         "Container moved to state 'sharding' with epoch %s." %
         now.internal,
         'Run container-sharder on all nodes to shard the container.']
     self.assertEqual(expected, out.getvalue().splitlines())
     self.assertEqual(['Loaded db broker for a/c.'],
                      err.getvalue().splitlines())
     self._assert_enabled(broker, now)
     self.assertEqual(
         [(data['lower'], data['upper']) for data in self.shard_data],
         [(sr.lower_str, sr.upper_str) for sr in broker.get_shard_ranges()])
Example #18
def get_migration_status(**options):
    # type: (**Any) -> str
    verbosity = options.get('verbosity', 1)

    for app_config in apps.get_app_configs():
        if module_has_submodule(app_config.module, "management"):
            import_module('.management', app_config.name)

    app_labels = [options['app_label']] if options.get('app_label') else None
    db = options.get('database', DEFAULT_DB_ALIAS)
    out = StringIO()
    call_command(
        'showmigrations',
        '--list',
        app_labels=app_labels,
        database=db,
        no_color=options.get('no_color', False),
        settings=options.get('settings', os.environ['DJANGO_SETTINGS_MODULE']),
        stdout=out,
        traceback=options.get('traceback', True),
        verbosity=verbosity,
    )
    connections.close_all()
    out.seek(0)
    output = out.read()
    return re.sub(r'\x1b\[(1|0)m', '', output)
Example #19
    def test_writesOnlyMessageWithLF(self):
        sio = StringIO()
        PlainFileLogObserver(sio)(
            {"system": "some system", "message": ("hello",)}
        )

        assert "hello\n" == sio.getvalue()
Example #20
 def catch_output(self, func):
     stdout = sys.stdout
     outbuf = StringIO()
     sys.stdout = outbuf
     func()
     sys.stdout = stdout
     return outbuf.getvalue()
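
For comparison, a hedged variant built on the standard library's contextlib.redirect_stdout; unlike the helper above, it restores sys.stdout even when func() raises.

import contextlib
from io import StringIO

def catch_output_safe(func):
    # capture anything func() prints to stdout and return it as a string
    outbuf = StringIO()
    with contextlib.redirect_stdout(outbuf):
        func()
    return outbuf.getvalue()

# e.g. catch_output_safe(lambda: print("hello")) == "hello\n"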
Example #21
def csv_users(request, course, lesson):
    """Exports the stats for all students on the given lesson"""
    course = get_object_or_404(Course, slug=course)
    lesson = get_object_or_404(Lesson, slug=lesson, course=course)
    
    buffer = StringIO()
    writer = csv.writer(buffer)
    
    # Headers
    writer.writerow(
        ["Username", "Attempts", "Correct", "Revealed"]
    )
    
    users = course.users.all()
    
    for u in users:
        writer.writerow([
            u.username,
            utils.attempts(user=u, task__section__lesson=lesson),
            utils.correct(user=u, task__section__lesson=lesson),
            utils.revealed(user=u, task__section__lesson=lesson),
        ])
    
    
    return HttpResponse(buffer.getvalue(), "text/csv")
Example #22
def run_driver(prob):
    """
    Call `run_driver` on problem and capture output.

    Parameters
    ----------
    prob : Problem
        an instance of Problem

    Returns
    -------
    boolean
        Failure flag; True if failed to converge, False if successful.
    string
        output from calling `run_driver` on the Problem, captured from stdout
    """
    stdout = sys.stdout
    strout = StringIO()

    sys.stdout = strout
    try:
        failed = prob.run_driver()
    finally:
        sys.stdout = stdout

    return failed, strout.getvalue()
Example #23
    def test_destination_directory_levels_deep(self):
        from pecan.scaffolds import copy_dir
        f = StringIO()
        copy_dir(
            (
                'pecan', os.path.join('tests', 'scaffold_fixtures', 'simple')
            ),
            os.path.join(self.scaffold_destination, 'some', 'app'),
            {},
            out_=f
        )

        assert os.path.isfile(os.path.join(
            self.scaffold_destination, 'some', 'app', 'foo')
        )
        assert os.path.isfile(os.path.join(
            self.scaffold_destination, 'some', 'app', 'bar', 'spam.txt')
        )
        with open(os.path.join(
            self.scaffold_destination, 'some', 'app', 'foo'
        ), 'r') as f:
            assert f.read().strip() == 'YAR'
        with open(os.path.join(
            self.scaffold_destination, 'some', 'app', 'bar', 'spam.txt'
        ), 'r') as f:
            assert f.read().strip() == 'Pecan'
Example #24
 def test_auto_cleanup_verbose(self):
     p = Poll.objects.create(
         question="Will this be deleted?", pub_date=datetime.now()
     )
     self.assertEqual(Poll.history.all().count(), 1)
     p.save()
     p.question = "Maybe this one won't...?"
     p.save()
     self.assertEqual(Poll.history.all().count(), 3)
     out = StringIO()
     management.call_command(
         self.command_name,
         "tests.poll",
         auto=True,
         verbosity=2,
         stdout=out,
         stderr=StringIO(),
     )
     self.assertEqual(
         out.getvalue(),
         "<class 'simple_history.tests.models.Poll'> has 3 historical entries\n"
         "Removed 1 historical records for "
         "<class 'simple_history.tests.models.Poll'>\n",
     )
     self.assertEqual(Poll.history.all().count(), 2)
Example #25
    def test_logging_setup_with_config_obj(self):
        class RootController(object):
            @expose()
            def index(self):
                import logging
                logging.getLogger('pecantesting').info('HELLO WORLD')
                return "HELLO WORLD"

        f = StringIO()

        from pecan.configuration import conf_from_dict
        app = TestApp(make_app(RootController(), logging=conf_from_dict({
            'loggers': {
                'pecantesting': {
                    'level': 'INFO', 'handlers': ['memory']
                }
            },
            'handlers': {
                'memory': {
                    'level': 'INFO',
                    'class': 'logging.StreamHandler',
                    'stream': f
                }
            }
        })))

        app.get('/')
        assert f.getvalue() == 'HELLO WORLD\n'
Example #26
    def test_auto_dry_run(self):
        p = Poll.objects.create(
            question="Will this be deleted?", pub_date=datetime.now()
        )
        p.save()

        # not related to dry_run test, just for increasing coverage :)
        # create instance with single-entry history older than "minutes"
        # so it is skipped
        p = Poll.objects.create(
            question="Will this be deleted?", pub_date=datetime.now()
        )
        h = p.history.first()
        h.history_date -= timedelta(hours=1)
        h.save()

        self.assertEqual(Poll.history.all().count(), 3)
        out = StringIO()
        management.call_command(
            self.command_name,
            auto=True,
            minutes=50,
            dry=True,
            stdout=out,
            stderr=StringIO(),
        )
        self.assertEqual(
            out.getvalue(),
            "Removed 1 historical records for "
            "<class 'simple_history.tests.models.Poll'>\n",
        )
        self.assertEqual(Poll.history.all().count(), 3)
Example #27
    def test_logging_setup_with_config_obj(self):
        class RootController(object):
            @expose()
            def index(self):
                import logging

                logging.getLogger("pecantesting").info("HELLO WORLD")
                return "HELLO WORLD"

        f = StringIO()

        from pecan.configuration import conf_from_dict

        app = TestApp(
            make_app(
                RootController(),
                logging=conf_from_dict(
                    {
                        "loggers": {"pecantesting": {"level": "INFO", "handlers": ["memory"]}},
                        "handlers": {"memory": {"level": "INFO", "class": "logging.StreamHandler", "stream": f}},
                    }
                ),
            )
        )

        app.get("/")
        assert f.getvalue() == "HELLO WORLD\n"
Example #28
        def test_write_ini_with_custom_converters(self):

            def dict_encoder(dict_):
                return ','.join('%s:%s' % (k, v) for (k, v) in dict_.items())

            def dict_decoder(string):
                return dict(x.split(':') for x in string.split(','))

            n = Namespace(doc='top')
            n.add_option(
                'a',
                default={'one': 'One'},
                doc='the doc string',
                to_string_converter=dict_encoder,
                from_string_converter=dict_decoder,
            )
            c = ConfigurationManager(
                [n],
                use_admin_controls=True,
                use_auto_help=False,
                argv_source=[]
            )
            expected = "# the doc string\n#a=one:One\n"
            out = StringIO()
            c.write_conf(for_configobj, opener=stringIO_context_wrapper(out))
            received = out.getvalue()
            out.close()

            self.assertEqual(expected.strip(), received.strip())
Example #29
File: upload.py Project: Croolis/zulip
def resize_avatar(image_data):
    AVATAR_SIZE = 100
    im = Image.open(StringIO(image_data))
    im = ImageOps.fit(im, (AVATAR_SIZE, AVATAR_SIZE), Image.ANTIALIAS)
    out = StringIO()
    im.save(out, format='png')
    return out.getvalue()
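
A hedged Python 3 adaptation: Pillow expects a binary stream, so io.BytesIO replaces StringIO when image_data is bytes, and Image.LANCZOS stands in for the deprecated Image.ANTIALIAS constant.

from io import BytesIO
from PIL import Image, ImageOps

def resize_avatar_py3(image_data, size=100):
    # open from bytes, crop/scale to a square avatar, and re-encode as PNG
    im = Image.open(BytesIO(image_data))
    im = ImageOps.fit(im, (size, size), Image.LANCZOS)
    out = BytesIO()
    im.save(out, format='png')
    return out.getvalue()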
Example #30
def run_model(prob, ignore_exception=False):
    """
    Call `run_model` on problem and capture output.

    Parameters
    ----------
    prob : Problem
        an instance of Problem
    ignore_exception : bool
        Set to True to ignore an exception of any kind.

    Returns
    -------
    string
        output from calling `run_model` on the Problem, captured from stdout
    """
    stdout = sys.stdout
    strout = StringIO()

    sys.stdout = strout
    try:
        prob.run_model()
    except Exception:
        if not ignore_exception:
            exc = sys.exc_info()
            reraise(*exc)
    finally:
        sys.stdout = stdout

    return strout.getvalue()
Example #31
def exercise_miller_export_as_shelx_hklf():
    s = """\
   1   2  -1   23.34    4.56
   2  -3   9   12.45    6.12
99999999999999999.9999999.99
-999-999-999-9999.99-9999.99
   3   4   5999999.99999999.
   3   4   5-99999.9-999999.
"""
    ma = hklf.reader(file_object=StringIO(s)).as_miller_arrays()[0]
    sio = StringIO()
    ma.export_as_shelx_hklf(file_object=sio)
    ma2 = hklf.reader(
        file_object=StringIO(sio.getvalue())).as_miller_arrays()[0]
    assert approx_equal(ma.indices(), ma2.indices())
    assert approx_equal(ma.data(), ma2.data())
    assert approx_equal(ma.sigmas(), ma2.sigmas())
    #
    ma = ma.select(flex.size_t([0]))

    def check(d, s, f):
        if (s is not None): s = flex.double([s])
        ma2 = ma.array(data=flex.double([d]), sigmas=s)
        sio = StringIO()
        ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True)
        assert not show_diff(
            sio.getvalue(), """\
   1   2  -1%s
   0   0   0    0.00    0.00
""" % f)
        try:
            ma2.export_as_shelx_hklf(sio)
        except RuntimeError:
            pass
        else:
            raise Exception_expected

    check(-12345678, 1, "-999999.    0.08")
    check(-12345678, None, "-999999.    0.00")
    check(2, -12345678, "    0.16-999999.")
    check(123456789, 30, "9999999.    2.43")
    check(123456789, None, "9999999.    0.00")
    check(40, 123456789, "    3.249999999.")
    check(-23456789, 123456789, "-999999.5263153.")
    check(123456789, -23456789, "5263153.-999999.")
    #
    ma = hklf.reader(file_object=StringIO(s)).as_miller_arrays()[0]
    ma = ma.select(flex.size_t([0, 1]))
    ma2 = ma.array(data=flex.double([123456789, -23456789]))
    sio = StringIO()
    ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True)
    assert not show_diff(
        sio.getvalue(), """\
   1   2  -15263153.    0.00
   2  -3   9-999999.    0.00
   0   0   0    0.00    0.00
""")
    ma2 = ma.array(data=flex.double([-23456789, 823456789]))
    sio = StringIO()
    ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True)
    assert not show_diff(
        sio.getvalue(), """\
   1   2  -1-284858.    0.00
   2  -3   99999999.    0.00
   0   0   0    0.00    0.00
""")
Example #32
class FormatterTest(TestCase):
    def setUp(self):
        super(FormatterTest, self).setUp()
        self.buff = StringIO()
        self.f = Formatter(self.buff)

    def assertOutput(self, v):
        self.assertEqual(self.buff.getvalue(), v)

    def test_write_non_strings(self):
        class MyObject(object):
            def __repr__(self):
                return 'repr'

            def __str__(self):
                return 'str'

        self.f.write("1:")
        self.f.write(MyObject())
        self.f.write("2:")
        self.f.writeln(MyObject())
        self.assertOutput("1:str2:str\n")

    def test_write_writeln(self):
        self.f.writeln("hello")
        self.assertOutput("hello\n")
        self.f.write("a")
        self.f.write("bcd")
        self.assertOutput("hello\nabcd")

    def test_empty_writeln(self):
        self.f.writeln('a')
        self.f.writeln()
        self.f.writeln()
        self.f.writeln('b')
        self.assertOutput('a\n\n\nb\n')

    def test_multiline_write(self):
        self.f.writeln('a\nb')
        with self.f.indented(3, string='.'):
            self.f.writeln('c\n d\ne')
        self.f.writeln('f')
        self.assertOutput("a\nb\n...c\n... d\n...e\nf\n")

    def test_indentation(self):
        self.f.writeln("begin")
        with self.f.indented(3):
            self.f.writeln("a")
            self.f.writeln("b")
        self.f.writeln("end")
        self.assertOutput("begin\n   a\n   b\nend\n")

    def test_indentation_different_char_through_ctor(self):
        self._test__indentation_different_char(True)

    def test_indentation_different_char_not_through_ctor(self):
        self._test__indentation_different_char(False)

    def _test__indentation_different_char(self, through_constructor):
        if through_constructor:
            self.f = Formatter(self.buff, indentation_string='*')
            indenter = self.f.indented
        else:
            indenter = lambda indentation: self.f.indented(indentation,
                                                           string='*')
        self.f.writeln("a")
        with indenter(2):
            self.f.writeln('b')
        self.f.writeln('c')
        self.assertOutput('a\n**b\nc\n')
Example #33
    def print_orderbook(newbook, oldbook):
        if format == 'html':
            tempfile = StringIO()
            tempfile.write('<table>')
            tempfile.write('<tr>')
            tempfile.write("<td><b>Bids</b></td>")
            tempfile.write("<td><b>Asks</b></td>")
            tempfile.write("</tr>")
            bids_new = []
            asks_new = []

            bids_old = []
            asks_old = []
            if newbook.bids is not None and len(newbook.bids) > 0:
                for key, value in newbook.bids.price_tree.items(reverse=True):
                    for order in value:
                        bids_new += [str(order)]
            if newbook.asks is not None and len(newbook.asks) > 0:
                for key, value in list(newbook.asks.price_tree.items()):
                    for order in value:
                        asks_new += [str(order)]
            if oldbook.bids is not None and len(oldbook.bids) > 0:
                for key, value in oldbook.bids.price_tree.items(reverse=True):
                    for order in value:
                        bids_old += [str(order)]
            if oldbook.asks is not None and len(oldbook.asks) > 0:
                for key, value in list(oldbook.asks.price_tree.items()):
                    for order in value:
                        asks_old += [str(order)]

            bids_diff = list(d.compare(bids_old, bids_new))
            asks_diff = list(d.compare(asks_old, asks_new))
            bids = []
            for i in bids_diff:
                if i[0:2] == '? ':
                    continue
                elif i[0:2] == '+ ':
                    bids += ["<b>" + i[2:] + "</b>"]
                elif i[0:2] == '- ':
                    bids += ["<strike>" + i[2:] + "</strike>"]
                else:
                    bids += [i]

            asks = []
            for i in asks_diff:
                if i[0:2] == '? ':
                    continue
                elif i[0:2] == '+ ':
                    asks += ["<b>" + i[2:] + "</b>"]
                elif i[0:2] == '- ':
                    asks += ["<strike>" + i[2:] + "</strike>"]
                else:
                    asks += [i]
                        
            for i in zip_longest(bids, asks):
                tempfile.write('<tr><td>' + \
                               (i[0] if i[0] is not None else '') + \
                               '</td><td>' + \
                               (i[1] if i[1] is not None else '') + \
                               '</td></tr>\n')
            tempfile.write('</table><p>')
            tempfile.write("\n<b>Trades</b><br>\n")
            if newbook.tape is not None and len(newbook.tape) > 0:
                num = 0
                for entry in newbook.tape:
                    if num < 10:  # show at most 10 tape entries
                        tempfile.write(str(entry['quantity']) + " @ " + str(entry['price']) + " (" + str(entry['timestamp']) + ") " + str(entry['party1'][0]) + "/" + str(entry['party2'][0]) + "<br>\n")
                        num += 1
                    else:
                        break
            tempfile.write("\n")
            print(tempfile.getvalue())
        else:
            print(newbook)
Example #34
def exercise_non_crystallographic_conserving_bonds_and_angles():
    sites_cart, geo = geometry_restraints.manager \
      .construct_non_crystallographic_conserving_bonds_and_angles(
        sites_cart=flex.vec3_double([
          (10.949, 12.815, 15.189),
          (10.405, 13.954, 15.917),
          (10.779, 15.262, 15.227),
          ( 9.916, 16.090, 14.936)]),
        edge_list_bonds=[(0, 1), (1, 2), (2, 3)],
        edge_list_angles=[(0, 2), (1, 3)])
    assert approx_equal(sites_cart, [(6.033, 5.000, 5.253),
                                     (5.489, 6.139, 5.981),
                                     (5.863, 7.447, 5.291),
                                     (5.000, 8.275, 5.000)])
    assert approx_equal(geo.energies_sites(sites_cart=sites_cart).target, 0)
    sites_cart_noise = flex.vec3_double([  # Just to make all residuals unique,
        (6.043, 5.030, 5.233),  # so that the sorted bond list below
        (5.469, 6.119, 5.941),  # has the same order on all platforms.
        (5.893, 7.487, 5.281),
        (5.040, 8.225, 5.020)
    ])
    sio = StringIO()
    geo.show_sorted(sites_cart=sites_cart_noise, f=sio)
    expected_first_part = """\
Bond restraints: 5
Sorted by residual:
bond 2
     3
  ideal  model  delta    sigma   weight residual
  1.231  1.158  0.073 1.00e-01 1.00e+02 5.35e-01
bond 1
     2
  ideal  model  delta    sigma   weight residual
  1.525  1.577 -0.052 1.00e-01 1.00e+02 2.66e-01
bond 1
     3
  ideal  model  delta    sigma   weight residual
  2.401  2.338  0.063 1.41e-01 5.00e+01 1.96e-01
bond 0
     1
  ideal  model  delta    sigma   weight residual
  1.457  1.420  0.037 1.00e-01 1.00e+02 1.37e-01
bond 0
     2
  ideal  model  delta    sigma   weight residual
  2.453  2.462 -0.009 1.41e-01 5.00e+01 3.92e-03

"""
    assert not show_diff(
        sio.getvalue(), expected_first_part + """\
Nonbonded interactions: 0

""")
    #
    sites_cart, geo = geometry_restraints.manager \
      .construct_non_crystallographic_conserving_bonds_and_angles(
        sites_cart=flex.vec3_double([
          (10.949, 12.815, 15.189),
          (10.405, 13.954, 15.917),
          (10.779, 15.262, 15.227),
          ( 9.916, 16.090, 14.936),
          (10.749, 12.615, 15.389)]),
        edge_list_bonds=[(0, 1), (1, 2), (2, 3)],
        edge_list_angles=[(0, 2), (1, 3)])
    sites_cart_noise.append(sites_cart[-1])
    sio = StringIO()
    geo.show_sorted(sites_cart=sites_cart_noise, f=sio)
    assert not show_diff(
        sio.getvalue(), expected_first_part + """\
Nonbonded interactions: 2
Sorted by model distance:
nonbonded 0
          4
   model   vdw
   0.306 1.200
nonbonded 1
          4
   model   vdw
   1.274 1.200

""")
Example #35
def exercise_with_zeolite(verbose):
    if (not libtbx.env.has_module("iotbx")):
        print("Skipping exercise_with_zeolite(): iotbx not available")
        return
    from iotbx.kriber import strudat
    atlas_file = libtbx.env.find_in_repositories(
        relative_path="phenix_regression/misc/strudat_zeolite_atlas",
        test=os.path.isfile)
    if (atlas_file is None):
        print("Skipping exercise_with_zeolite(): input file not available")
        return
    strudat_contents = strudat.read_all_entries(open(atlas_file))
    strudat_entry = strudat_contents.get("YUG")
    si_structure = strudat_entry.as_xray_structure()
    if (verbose):
        out = sys.stdout
    else:
        out = StringIO()
    drls = distance_and_repulsion_least_squares(
        si_structure=si_structure,
        distance_cutoff=3.5,
        nonbonded_repulsion_function_type="prolsq",
        n_macro_cycles=2,
        out=out)
    #
    nbp = drls.geometry_restraints_manager.pair_proxies().nonbonded_proxies
    assert nbp.n_total() > 50
    # expected is 60, but the exact number depends on the minimizer
    #
    site_labels = drls.minimized_structure.scatterers().extract_labels()
    sites_cart = drls.start_structure.sites_cart()
    pair_proxies = drls.geometry_restraints_manager.pair_proxies()
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          site_labels=site_labels,
                                          f=out)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert len(out.getvalue().splitlines()) == 48 * 4 + 2
    assert out.getvalue().splitlines()[-1].find("remaining") < 0
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          site_labels=site_labels,
                                          f=out,
                                          prefix="0^",
                                          max_items=28)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(out.getvalue().replace("e-00", "e-0"),
                         """\
0^Bond restraints: 48
0^Sorted by residual:
0^bond O3
0^     O4
0^  ideal  model  delta    sigma   weight residual
0^  2.629  2.120  0.509 1.56e+00 4.10e-01 1.06e-01
...
0^bond SI1
0^     SI1
0^  ideal  model  delta    sigma   weight residual sym.op.
0^  3.071  3.216 -0.145 2.08e+00 2.31e-01 4.83e-03 -x+1/2,-y+1/2,-z+1
0^... (remaining 20 not shown)
""",
                         selections=[range(6), range(-5, 0)])
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="delta",
                                          sites_cart=sites_cart,
                                          site_labels=site_labels,
                                          f=out,
                                          prefix="0^",
                                          max_items=28)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(out.getvalue().replace("e-00", "e-0"),
                         """\
0^Bond restraints: 48
0^Sorted by delta:
0^bond O3
0^     O4
0^  ideal  model  delta    sigma   weight residual
0^  2.629  2.120  0.509 1.56e+00 4.10e-01 1.06e-01
...
0^... (remaining 20 not shown)
""",
                         selections=[range(6), [-1]])
    site_labels_long = ["abc" + label + "def" for label in site_labels]
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          site_labels=site_labels_long,
                                          f=out,
                                          prefix="^0",
                                          max_items=28)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(out.getvalue().replace("e-00", "e-0"),
                         """\
^0Bond restraints: 48
^0Sorted by residual:
^0bond abcO3def
^0     abcO4def
^0  ideal  model  delta    sigma   weight residual
^0  2.629  2.120  0.509 1.56e+00 4.10e-01 1.06e-01
...
^0bond abcSI1def
^0     abcSI1def
^0  ideal  model  delta    sigma   weight residual sym.op.
^0  3.071  3.216 -0.145 2.08e+00 2.31e-01 4.83e-03 -x+1/2,-y+1/2,-z+1
^0... (remaining 20 not shown)
""",
                         selections=[range(6), range(-5, 0)])
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          f=out,
                                          prefix=".=",
                                          max_items=28)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(out.getvalue().replace("e-00", "e-0"),
                         """\
.=Bond restraints: 48
.=Sorted by residual:
.=bond 4
.=     5
.=  ideal  model  delta    sigma   weight residual
.=  2.629  2.120  0.509 1.56e+00 4.10e-01 1.06e-01
...
.=bond 0
.=     0
.=  ideal  model  delta    sigma   weight residual sym.op.
.=  3.071  3.216 -0.145 2.08e+00 2.31e-01 4.83e-03 -x+1/2,-y+1/2,-z+1
.=... (remaining 20 not shown)
""",
                         selections=[range(6), range(-5, 0)])
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          f=out,
                                          prefix="-+",
                                          max_items=1)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(
        out.getvalue().replace("e-00", "e-0"), """\
-+Bond restraints: 48
-+Sorted by residual:
-+bond 4
-+     5
-+  ideal  model  delta    sigma   weight residual
-+  2.629  2.120  0.509 1.56e+00 4.10e-01 1.06e-01
-+... (remaining 47 not shown)
""")
    out = StringIO()
    pair_proxies.bond_proxies.show_sorted(by_value="residual",
                                          sites_cart=sites_cart,
                                          f=out,
                                          prefix="=+",
                                          max_items=0)
    if (verbose):
        sys.stdout.write(out.getvalue())
    assert not show_diff(
        out.getvalue(), """\
=+Bond restraints: 48
=+Sorted by residual:
=+... (remaining 48 not shown)
""")
    #
    sites_cart = si_structure.sites_cart()
    site_labels = [sc.label for sc in si_structure.scatterers()]
    asu_mappings = si_structure.asu_mappings(buffer_thickness=3.5)
    for min_cubicle_edge in [0, 5]:
        pair_generator = crystal.neighbors_fast_pair_generator(
            asu_mappings=asu_mappings,
            distance_cutoff=asu_mappings.buffer_thickness(),
            minimal=False,
            min_cubicle_edge=min_cubicle_edge)
        sorted_asu_proxies = geometry_restraints.nonbonded_sorted_asu_proxies(
            asu_mappings=asu_mappings)
        while (not pair_generator.at_end()):
            p = geometry_restraints.nonbonded_asu_proxy(
                pair=next(pair_generator), vdw_distance=3)
            sorted_asu_proxies.process(p)
        out = StringIO()
        sorted_asu_proxies.show_sorted(by_value="delta",
                                       sites_cart=sites_cart,
                                       site_labels=site_labels,
                                       f=out,
                                       prefix="d%")
        if (verbose):
            sys.stdout.write(out.getvalue())
        assert not show_diff(
            out.getvalue(),
            """\
d%Nonbonded interactions: 7
d%Sorted by model distance:
...
d%nonbonded SI2
d%          SI2
d%   model   vdw sym.op.
d%   3.092 3.000 -x+1,y,-z
...
d%nonbonded SI1
d%          SI1
d%   model   vdw sym.op.
d%   3.216 3.000 -x+1/2,-y+1/2,-z+1
""",
            selections=[range(2), range(10, 14),
                        range(26, 30)])
        out = StringIO()
        sorted_asu_proxies.show_sorted(by_value="delta",
                                       sites_cart=sites_cart,
                                       f=out,
                                       prefix="*j",
                                       max_items=5)
        if (verbose):
            sys.stdout.write(out.getvalue())
        assert not show_diff(out.getvalue(),
                             """\
*jNonbonded interactions: 7
*jSorted by model distance:
...
*jnonbonded 0
*j          1
*j   model   vdw
*j   3.107 3.000
*jnonbonded 0
*j          0
*j   model   vdw sym.op.
*j   3.130 3.000 -x+1,y,-z+1
*j... (remaining 2 not shown)
""",
                             selections=[range(2), range(-9, 0)])
        out = StringIO()
        sorted_asu_proxies.show_sorted(by_value="delta",
                                       sites_cart=sites_cart,
                                       f=out,
                                       prefix="@r",
                                       max_items=0)
        if (verbose):
            sys.stdout.write(out.getvalue())
        assert not show_diff(out.getvalue(), """\
@rNonbonded interactions: 7
""")
Example #36
 def show_sub_header(self, title):
     self._out_orig.write(self.out.getvalue())
     self.out = StringIO()
     self._current_sub_header = title
     assert title not in self._sub_header_to_out
     self._sub_header_to_out[title] = self.out
Example #37
 def show_header(self, text):
     self._out_orig.write(self.out.getvalue())
     self.out = StringIO()
     super(xtriage_output, self).show_header(text)
Example #38
 def __init__(self, out):
     super(xtriage_output, self).__init__(out)
     self.gui_output = True
     self._out_orig = self.out
     self.out = StringIO()
     self._sub_header_to_out = {}
Example #39
def generate_ssl_cert(target_file=None, overwrite=False, random=False, return_content=False, serial_number=None):
    # Note: Do NOT import "OpenSSL" at the root scope
    # (Our test Lambdas are importing this file but don't have the module installed)
    from OpenSSL import crypto

    def all_exist(*files):
        return all([os.path.exists(f) for f in files])

    def store_cert_key_files(base_filename):
        key_file_name = '%s.key' % base_filename
        cert_file_name = '%s.crt' % base_filename
        # extract key and cert from target_file and store into separate files
        content = load_file(target_file)
        key_start = '-----BEGIN PRIVATE KEY-----'
        key_end = '-----END PRIVATE KEY-----'
        cert_start = '-----BEGIN CERTIFICATE-----'
        cert_end = '-----END CERTIFICATE-----'
        key_content = content[content.index(key_start): content.index(key_end) + len(key_end)]
        cert_content = content[content.index(cert_start): content.rindex(cert_end) + len(cert_end)]
        save_file(key_file_name, key_content)
        save_file(cert_file_name, cert_content)
        return cert_file_name, key_file_name

    if target_file and not overwrite and os.path.exists(target_file):
        key_file_name = ''
        cert_file_name = ''
        try:
            cert_file_name, key_file_name = store_cert_key_files(target_file)
        except Exception as e:
            # fall back to temporary files if we cannot store/overwrite the files above
            LOG.info('Error storing key/cert SSL files (falling back to random tmp file names): %s' % e)
            target_file_tmp = new_tmp_file()
            cert_file_name, key_file_name = store_cert_key_files(target_file_tmp)
        if all_exist(cert_file_name, key_file_name):
            return target_file, cert_file_name, key_file_name
    if random and target_file:
        if '.' in target_file:
            target_file = target_file.replace('.', '.%s.' % short_uid(), 1)
        else:
            target_file = '%s.%s' % (target_file, short_uid())

    # create a key pair
    k = crypto.PKey()
    k.generate_key(crypto.TYPE_RSA, 2048)

    # create a self-signed cert
    cert = crypto.X509()
    subj = cert.get_subject()
    subj.C = 'AU'
    subj.ST = 'Some-State'
    subj.L = 'Some-Locality'
    subj.O = 'LocalStack Org'  # noqa
    subj.OU = 'Testing'
    subj.CN = 'localhost'
    # Note: new requirements for recent OSX versions: https://support.apple.com/en-us/HT210176
    # More details: https://www.iol.unh.edu/blog/2019/10/10/macos-catalina-and-chrome-trust
    serial_number = serial_number or 1001
    cert.set_version(2)
    cert.set_serial_number(serial_number)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(2 * 365 * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())
    cert.set_pubkey(k)
    alt_names = b'DNS:localhost,DNS:test.localhost.atlassian.io,IP:127.0.0.1'
    cert.add_extensions([
        crypto.X509Extension(b'subjectAltName', False, alt_names),
        crypto.X509Extension(b'basicConstraints', True, b'CA:false'),
        crypto.X509Extension(b'keyUsage', True, b'nonRepudiation,digitalSignature,keyEncipherment'),
        crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth')
    ])
    cert.sign(k, 'SHA256')

    cert_file = StringIO()
    key_file = StringIO()
    cert_file.write(to_str(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)))
    key_file.write(to_str(crypto.dump_privatekey(crypto.FILETYPE_PEM, k)))
    cert_file_content = cert_file.getvalue().strip()
    key_file_content = key_file.getvalue().strip()
    file_content = '%s\n%s' % (key_file_content, cert_file_content)
    if target_file:
        key_file_name = '%s.key' % target_file
        cert_file_name = '%s.crt' % target_file
        # check existence to avoid permission denied issues:
        # https://github.com/localstack/localstack/issues/1607
        if not all_exist(target_file, key_file_name, cert_file_name):
            for i in range(2):
                try:
                    save_file(target_file, file_content)
                    save_file(key_file_name, key_file_content)
                    save_file(cert_file_name, cert_file_content)
                    break
                except Exception as e:
                    if i > 0:
                        raise
                    LOG.info('Unable to store certificate file under %s, using tmp file instead: %s' % (target_file, e))
                    # Fix for https://github.com/localstack/localstack/issues/1743
                    target_file = '%s.pem' % new_tmp_file()
                    key_file_name = '%s.key' % target_file
                    cert_file_name = '%s.crt' % target_file
            TMP_FILES.append(target_file)
            TMP_FILES.append(key_file_name)
            TMP_FILES.append(cert_file_name)
        if not return_content:
            return target_file, cert_file_name, key_file_name
    return file_content
Example #40
def exercise(args=None):
    from six.moves import cStringIO as StringIO
    if (args is None): args = sys.argv[1:]
    verbose = "--verbose" in args
    #
    if ("--simple" in args):
        fb = fully_buffered_simple
    else:
        fb = fully_buffered
    #
    for command in ["echo hello world", ("echo", "hello", "world")]:
        for result in [
                fb(command=command).raise_if_errors(),
                fb(command=command, join_stdout_stderr=True),
                go(command=command)
        ]:
            if verbose: print(result.stdout_lines)
            assert result.stdout_lines == ["hello world"]
    #
    if (os.path.isfile("/bin/ls")):
        for command in ["/bin/ls /bin", ("/bin/ls", "/bin")]:
            result = fb(command=command).raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert "ls" in result.stdout_lines
    if (os.path.isfile("/usr/bin/wc")):
        for command in ["/usr/bin/wc -l", ("/usr/bin/wc", "-l")]:
            result = fb(command=command).raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert [s.strip() for s in result.stdout_lines] == ["0"]
            result = fb(command=command, stdin_lines=["hello"]) \
              .raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert [s.strip() for s in result.stdout_lines] == ["1"]
            result = fb(command=command, stdin_lines=["hello", "world"]) \
              .raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert [s.strip() for s in result.stdout_lines] == ["2"]
            result = fb(command=command, stdin_lines="hello\nworld\nbye\n") \
              .raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert [s.strip() for s in result.stdout_lines] == ["3"]
    #
    if (os.name == "nt"):
        result = fb(command="dir").raise_if_errors()
        if verbose: print(result.stdout_lines)
        assert len(result.stdout_lines) > 0
        windir = os.environ.get("windir", None)
        if (windir is not None and windir.find(" ") < 0):
            result = fb(command="dir " + windir).raise_if_errors()
            if verbose: print(result.stdout_lines)
            assert len(result.stdout_lines) > 0
    #
    pyexe = sys.executable
    assert pyexe.count('"') == 0
    pyexe = '"' + pyexe + '"'
    if (os.name == "nt"):
        pyexe = "call " + pyexe
    #
    if ("PYTHONPATH" in os.environ):
        if (not hasattr(os, "unsetenv")):
            os.environ["PYTHONPATH"] = ""
        else:
            del os.environ["PYTHONPATH"]
    if (os.name == "nt"):
        result = fb(command="set").raise_if_errors()
    elif (os.path.isfile("/usr/bin/printenv")):
        result = fb(command="/usr/bin/printenv").raise_if_errors()
    else:
        result = None
    if (result is not None):
        if verbose: print(result.stdout_lines)
        for line in result.stdout_lines:
            assert not line.startswith("PYTHONPATH") or line == "PYTHONPATH="
    #
    for stdout_splitlines in [True, False]:
        result = fb(command="%s -V" % pyexe,
                    stdout_splitlines=stdout_splitlines)
        # python -V outputs to stdout or stderr depending on version
        # https://bugs.python.org/issue18338
        if (len(result.stderr_lines) > 0):
            if verbose: print(result.stderr_lines)
            assert result.stderr_lines[0].startswith("Python " +
                                                     sys.version.split()[0])
            if (stdout_splitlines):
                assert result.stdout_buffer is None
                assert result.stdout_lines == []
            else:
                assert result.stdout_buffer == ""
                assert result.stdout_lines is None
        else:
            if verbose: print(result.stdout_lines)
            if (stdout_splitlines):
                assert result.stdout_buffer is None
                assert result.stdout_lines[0].startswith(
                    "Python " + sys.version.split()[0])
            else:
                assert result.stdout_buffer.startswith("Python " +
                                                       sys.version.split()[0])
                assert result.stdout_lines is None
    result = go(command="%s -V" % pyexe)
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines[0].startswith("Python " +
                                             sys.version.split()[0])
    result = fb(command='%s -c "print(3+4)"' % pyexe).raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == ["7"]
    command = pyexe \
      + ' -c "import sys; print(len(sys.stdin.read().splitlines()))"'
    result = fb(command=command).raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == ["0"]
    result = fb(command=command, stdin_lines=["hello"]) \
      .raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == ["1"]
    result = fb(command=command, stdin_lines=["hello", "world"]) \
      .raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == ["2"]
    result = fb(command=command, stdin_lines="hello\nworld\nbye\n") \
      .raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == ["3"]
    if ("--quick" in args):
        n_lines_o = 10000
    else:
        n_lines_o = 1000000
    if (fb is fully_buffered_simple):
        n_lines_e = 500  # Windows blocks if this value is greater than 701
    else:
        n_lines_e = 10000
    result = fb(
      command=command, stdin_lines=[str(i) for i in range(n_lines_o)]) \
      .raise_if_errors()
    if verbose: print(result.stdout_lines)
    assert result.stdout_lines == [str(n_lines_o)]
    command = pyexe \
      + ' -c "import sys; sys.stderr.write(sys.stdin.read())"'
    result = fb(command=command, stdin_lines="Hello\nWorld\nBye\n") \
      .raise_if_output()
    s = StringIO()
    result.show_stderr(out=s, prefix="%(")
    if verbose: sys.stdout.write(s.getvalue())
    assert s.getvalue() == """\
%(Hello
%(World
%(Bye
"""
    cat_command = command = pyexe \
      + ' -c "import sys; sys.stdout.write(sys.stdin.read())"'
    result = fb(command=command, stdin_lines="hello\nworld\nbye\n") \
      .raise_if_errors()
    s = StringIO()
    result.show_stdout(out=s, prefix=">:")
    if verbose: sys.stdout.write(s.getvalue())
    assert s.getvalue() == """\
>:hello
>:world
>:bye
"""
    result = fb(
      command=command, stdin_lines=[str(i) for i in range(n_lines_o)]) \
      .raise_if_errors()
    if verbose: print(result.stdout_lines[:5], result.stdout_lines[-5:])
    assert len(result.stdout_lines) == n_lines_o
    assert result.stdout_lines[:5] == ["0", "1", "2", "3", "4"]
    assert result.stdout_lines[-5:] == [
        str(s) for s in range(n_lines_o - 5, n_lines_o)
    ]
    command = pyexe \
      + ' -c "import sys; sys.stderr.write(sys.stdin.read())"'
    result = fb(command=command,
                stdin_lines=[str(i) for i in range(n_lines_e, 0, -1)])
    assert len(result.stdout_lines) == 0
    if verbose: print(result.stderr_lines[:5], result.stderr_lines[-5:])
    assert len(result.stderr_lines) == n_lines_e
    assert result.stderr_lines[:5] == [
        str(s) for s in range(n_lines_e, n_lines_e - 5, -1)
    ]
    assert result.stderr_lines[-5:] == ["5", "4", "3", "2", "1"]
    command = pyexe + "; ".join((''' -c "\
import sys, os
lines = sys.stdin.read()
sys.stdout.write(lines)
sys.stdout.flush()
lines = lines.splitlines()[:%d]
lines.reverse()
nl = chr(%d)
sys.stderr.write(nl.join(lines)+nl)
sys.stderr.flush()"''' % (n_lines_e, ord("\n"))).splitlines())
    result = fb(command=command,
                stdin_lines=[str(i) for i in range(n_lines_o)])
    if verbose: print(result.stdout_lines[:5], result.stdout_lines[-5:])
    if verbose: print(result.stderr_lines[:5], result.stderr_lines[-5:])
    assert len(result.stdout_lines) == n_lines_o
    assert result.stdout_lines[:5] == ["0", "1", "2", "3", "4"]
    assert result.stdout_lines[-5:] == [
        str(s) for s in range(n_lines_o - 5, n_lines_o)
    ]
    assert len(result.stderr_lines) == n_lines_e
    assert result.stderr_lines[:5] == [
        str(s) for s in range(n_lines_e - 1, n_lines_e - 6, -1)
    ]
    assert result.stderr_lines[-5:] == ["4", "3", "2", "1", "0"]
    result = go(command=command,
                stdin_lines=[str(i) for i in range(n_lines_o)])
    if verbose: print(result.stdout_lines[:5], result.stdout_lines[-5:])
    assert len(result.stdout_lines) == n_lines_o + n_lines_e
    assert result.stdout_lines[:5] == ["0", "1", "2", "3", "4"]
    assert result.stdout_lines[-5:] == ["4", "3", "2", "1", "0"]
    #
    try:
        fb(command="C68649356116218352").raise_if_errors()
    except RuntimeError as e:
        if verbose: print(e)
        # Just check for RuntimeError; there are now additional
        # specific error messages.
        pass
        # assert str(e).startswith("child process stderr output:\n")
    else:
        raise Exception_expected
    #
    for stdout_splitlines in [True, False]:
        for n, b in [(10, 20), (11, 23), (12, 26), (13, 29)]:
            try:
                fb(command=cat_command,
                   stdin_lines=[str(i) for i in range(n)],
                   stdout_splitlines=stdout_splitlines).raise_if_output()
            except RuntimeError as e:
                if verbose: print(e)
                assert str(e).startswith("unexpected child process output:\n")
                if (stdout_splitlines):
                    if (n != 13):
                        assert str(e).endswith(str(n - 1))
                    else:
                        assert str(e).endswith("  remaining 3 lines omitted.")
                else:
                    assert str(e).endswith("  length of output: %d bytes" % b)
            else:
                raise Exception_expected
    #
    fb(command=cat_command).raise_if_errors_or_output()
    #
    result = fb(command=["nslookup", "localhost"])
    if verbose:
        print(result.stdout_lines)
        print(result.stderr_lines)
    #
    while ("--forever" in args):
        pass
    #
    print("OK")
Example #41
0
class Daemon:
    IFACE = "127.0.0.1"

    def __init__(self, ssl=None, **daemonargs):
        self.q = queue.Queue()
        self.logfp = StringIO()
        daemonargs["logfp"] = self.logfp
        self.thread = _PaThread(self.IFACE, self.q, ssl, daemonargs)
        self.thread.start()
        self.port = self.q.get(True, 5)
        self.urlbase = "%s://%s:%s" % ("https" if ssl else "http", self.IFACE,
                                       self.port)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.logfp.truncate(0)
        self.shutdown()
        return False

    def p(self, spec):
        """
            Return a URL that will render the response in spec.
        """
        return "%s/p/%s" % (self.urlbase, spec)

    def text_log(self):
        return self.logfp.getvalue()

    def wait_for_silence(self, timeout=5):
        self.thread.server.wait_for_silence(timeout=timeout)

    def expect_log(self, n, timeout=5):
        l = []
        start = time.time()
        while True:
            l = self.log()
            if time.time() - start >= timeout:
                return None
            if len(l) >= n:
                break
        return l

    def last_log(self):
        """
            Returns the last logged request, or None.
        """
        l = self.expect_log(1)
        if not l:
            return None
        return l[-1]

    def log(self):
        """
            Return the log buffer as a list of dictionaries.
        """
        return self.thread.server.get_log()

    def clear_log(self):
        """
            Clear the log.
        """
        return self.thread.server.clear_log()

    def shutdown(self):
        """
            Shut the daemon down, return after the thread has exited.
        """
        self.thread.server.shutdown()
        self.thread.join()
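
# A hedged usage sketch for the Daemon wrapper above, assuming a working
# pathod installation; the response spec "200" passed to p() is illustrative.
with Daemon(ssl=None) as d:
    url = d.p("200")          # URL that renders the spec "200"
    print(url)
    print(d.text_log())       # log text accumulated by the daemon so far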
    def run(self, args=None):
        """Execute the script."""
        from dxtbx.model.experiment_list import ExperimentListDumper
        from dials.array_family import flex
        from dials.util.options import flatten_experiments
        from time import time
        from dials.util import log

        start_time = time()

        # Parse the command line
        params, options = self.parser.parse_args(args=args,
                                                 show_diff_phil=False)

        if __name__ == "__main__":
            # Configure the logging
            log.config(params.verbosity,
                       info=params.output.log,
                       debug=params.output.debug_log)

        from dials.util.version import dials_version

        logger.info(dials_version())

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != "":
            logger.info("The following parameters have been modified:\n")
            logger.info(diff_phil)
        '''
        Note: any line of code marked with the "JAD7" comment below was
        added by me, JORGE DIAZ JR (JAD7).
        '''
        # JAD7
        logger.info("JAD7: {}".format(params.input.experiments))

        # Ensure we have a data block
        experiments = flatten_experiments(params.input.experiments)

        # JAD7
        logger.info("JAD7: {}".format(experiments))

        if len(experiments) == 0:
            self.parser.print_help()
            return

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table.from_observations(
            experiments, params)

        # JAD7
        logger.info("JAD7: {}".format(reflections))

        # Add n_signal column - before deleting shoeboxes
        from dials.algorithms.shoebox import MaskCode

        good = MaskCode.Foreground | MaskCode.Valid
        reflections["n_signal"] = reflections["shoebox"].count_mask_values(
            good)

        # Delete the shoeboxes
        if not params.output.shoeboxes:
            del reflections["shoebox"]

        # ascii spot count per image plot
        from dials.util.ascii_art import spot_counts_per_image_plot

        for i, experiment in enumerate(experiments):
            ascii_plot = spot_counts_per_image_plot(
                reflections.select(reflections["id"] == i))
            if len(ascii_plot):
                logger.info(
                    "\nHistogram of per-image spot count for imageset %i:" % i)
                logger.info(ascii_plot)

        # Save the reflections to file
        logger.info("\n" + "-" * 80)
        reflections.as_file(params.output.reflections)
        logger.info("Saved {} reflections to {}".format(
            len(reflections), params.output.reflections))

        # Save the experiments
        if params.output.experiments:
            logger.info("Saving experiments to {}".format(
                params.output.experiments))
            dump = ExperimentListDumper(experiments)
            dump.as_file(params.output.experiments)

        # Print some per image statistics
        if params.per_image_statistics:
            from dials.algorithms.spot_finding import per_image_analysis
            from six.moves import cStringIO as StringIO

            s = StringIO()
            for i, experiment in enumerate(experiments):
                print("Number of centroids per image for imageset %i:" % i,
                      file=s)
                imageset = experiment.imageset
                stats = per_image_analysis.stats_imageset(
                    imageset,
                    reflections.select(reflections["id"] == i),
                    resolution_analysis=False,
                )
                per_image_analysis.print_table(stats, out=s)
            logger.info(s.getvalue())

        # Print the time
        logger.info("Time Taken: %f" % (time() - start_time))

        if params.output.experiments:
            return experiments, reflections
        else:
            return reflections
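
# The per-image statistics block above accumulates its report in a StringIO
# (print(..., file=s) and print_table(out=s)) and then emits the whole text as
# a single logger.info call. A minimal sketch of that accumulate-then-log-once
# pattern, with made-up rows:
import logging
from io import StringIO

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

s = StringIO()
print("Number of centroids per image for imageset 0:", file=s)
print("  image 1: 42", file=s)   # illustrative values
print("  image 2: 37", file=s)
logger.info(s.getvalue())        # one multi-line log record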
 def test_no_args(self):
     out = StringIO()
     management.call_command(self.command_name,
                             stdout=out,
                             stderr=StringIO())
     self.assertIn(populate_history.Command.COMMAND_HINT, out.getvalue())
Example #44
0
 def test_print_formatted_data(self):
     capturedOut = StringIO()
     sys.stdout = capturedOut
     awsumepy.print_formatted_data('some-formatted-data')
     sys.stdout = sys.__stdout__
     self.assertNotEqual(capturedOut.getvalue(), '')
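
# The test above swaps sys.stdout by hand; contextlib.redirect_stdout from the
# standard library performs the same capture and restores sys.stdout even if
# the wrapped call raises. A minimal sketch:
import contextlib
from io import StringIO

buf = StringIO()
with contextlib.redirect_stdout(buf):
    print('some-formatted-data')
assert buf.getvalue() == 'some-formatted-data\n'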
Example #45
0
def exercise():
    import libtbx.utils
    if (libtbx.utils.detect_multiprocessing_problem() is not None):
        print("multiprocessing not available, skipping this test")
        return
    if (os.name == "nt"):
        print(
            "easy_mp fixed_func not supported under Windows, skipping this test"
        )
        return
    from mmtbx.validation.sequence import validation, get_sequence_n_copies, \
      get_sequence_n_copies_from_files
    import iotbx.bioinformatics
    import iotbx.pdb
    from iotbx import file_reader
    import libtbx.load_env  # import dependency
    from libtbx.test_utils import Exception_expected, contains_lines, approx_equal
    from six.moves import cStringIO as StringIO
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM      2  CA  ARG A  10      -6.299  36.344   7.806  1.00 55.20           C
ATOM     25  CA  TYR A  11      -3.391  33.962   7.211  1.00 40.56           C
ATOM     46  CA  ALA A  12      -0.693  34.802   4.693  1.00 67.95           C
ATOM     56  CA  ALA A  13       0.811  31.422   3.858  1.00 57.97           C
ATOM     66  CA  GLY A  14       4.466  31.094   2.905  1.00 49.24           C
ATOM     73  CA  ALA A  15       7.163  28.421   2.671  1.00 54.70           C
ATOM     83  CA  ILE A  16       6.554  24.685   2.957  1.00 51.79           C
ATOM    102  CA  LEU A  17       7.691  23.612   6.406  1.00 42.30           C
ATOM    121  CA  PTY A  18       7.292  19.882   5.861  1.00 36.68           C
ATOM    128  CA  PHE A  19       5.417  16.968   4.327  1.00 44.99           C
ATOM    148  CA  GLY A  20       3.466  14.289   6.150  1.00 41.99           C
ATOM    155  CA  GLY A  21       1.756  11.130   4.965  1.00 35.77           C
ATOM    190  CA  ALA A  24       1.294  19.658   3.683  1.00 47.02           C
ATOM    200  CA  VAL A  24A      2.361  22.009   6.464  1.00 37.13           C
ATOM    216  CA  HIS A  25       2.980  25.633   5.535  1.00 42.52           C
ATOM    234  CA  LEU A  26       4.518  28.425   7.577  1.00 47.63           C
ATOM    253  CA  ALA A  27       2.095  31.320   7.634  1.00 38.61           C
ATOM    263  CA  ARG A  28       1.589  34.719   9.165  1.00 37.04           C
END""")
    seq1 = iotbx.bioinformatics.sequence(
        "MTTPSHLSDRYELGEILGFGGMSEVHLARD".lower())
    v = validation(pdb_hierarchy=pdb_in.construct_hierarchy(),
                   sequences=[seq1],
                   log=null_out(),
                   nproc=1)
    out = StringIO()
    v.show(out=out)
    assert contains_lines(
        out.getvalue(), """\
  sequence identity: 76.47%
  13 residue(s) missing from PDB chain (9 at start, 1 at end)
  2 gap(s) in chain
  4 mismatches to sequence
    residue IDs:  12 13 15 24""")
    cif_block = v.sequence_as_cif_block()
    assert list(cif_block['_struct_ref.pdbx_seq_one_letter_code']) == [
        ';MTTPSHLSDRYELGEILGFGGMSEVHLARD\n;'
    ]
    # assert approx_equal(cif_block['_struct_ref_seq.pdbx_auth_seq_align_beg'],
    #                     ['10', '14', '16', '19', '24'])
    # assert approx_equal(cif_block['_struct_ref_seq.pdbx_auth_seq_align_end'],
    #                     ['11', '14', '17', '21', '28'])
    # assert approx_equal(cif_block['_struct_ref_seq.db_align_beg'],
    #                     ['10', '14', '16', '19', '25'])
    # assert approx_equal(cif_block['_struct_ref_seq.db_align_end'],
    #                     ['11', '14', '17', '21', '29'])
    # assert cif_block['_struct_ref_seq.pdbx_seq_align_beg_ins_code'][4] == 'A'
    seq2 = iotbx.bioinformatics.sequence("MTTPSHLSDRYELGEILGFGGMSEVHLA")
    v = validation(pdb_hierarchy=pdb_in.construct_hierarchy(),
                   sequences=[seq2],
                   log=null_out(),
                   nproc=1)
    out = StringIO()
    v.show(out=out)
    assert contains_lines(
        out.getvalue(), """\
  1 residues not found in sequence
    residue IDs:  28""")
    try:
        v = validation(pdb_hierarchy=pdb_in.construct_hierarchy(),
                       sequences=[],
                       log=null_out(),
                       nproc=1)
    except AssertionError:
        pass
    else:
        raise Exception_expected
    cif_block = v.sequence_as_cif_block()
    print(list(cif_block['_struct_ref.pdbx_seq_one_letter_code']))
    assert list(cif_block['_struct_ref.pdbx_seq_one_letter_code']) == [
        ';MTTPSHLSDRYELGEILGFGGMSEVHLA\n;'
    ]
    # assert approx_equal(cif_block['_struct_ref_seq.pdbx_auth_seq_align_end'],
    #                     ['11', '14', '17', '21', '27'])
    # assert approx_equal(cif_block['_struct_ref_seq.db_align_end'],
    #                     ['11', '14', '17', '21', '28'])
    #
    pdb_in2 = iotbx.pdb.input(source_info=None,
                              lines="""\
ATOM      2  CA  ARG A  10      -6.299  36.344   7.806  1.00 55.20           C
ATOM     25  CA  TYR A  11      -3.391  33.962   7.211  1.00 40.56           C
ATOM     46  CA  ALA A  12      -0.693  34.802   4.693  1.00 67.95           C
ATOM     56  CA  ALA A  13       0.811  31.422   3.858  1.00 57.97           C
ATOM     66  CA  GLY A  14       4.466  31.094   2.905  1.00 49.24           C
ATOM     73  CA  ALA A  15       7.163  28.421   2.671  1.00 54.70           C
ATOM     83  CA  ILE A  16       6.554  24.685   2.957  1.00 51.79           C
ATOM    102  CA  LEU A  17       7.691  23.612   6.406  1.00 42.30           C
TER
ATOM   1936  P     G B   2     -22.947 -23.615  15.323  1.00123.20           P
ATOM   1959  P     C B   3     -26.398 -26.111  19.062  1.00110.06           P
ATOM   1979  P     U B   4     -29.512 -30.638  21.164  1.00101.06           P
ATOM   1999  P     C B   5     -30.524 -36.109  21.527  1.00 92.76           P
ATOM   2019  P     U B   6     -28.684 -41.458  21.223  1.00 87.42           P
ATOM   2062  P     G B   8     -18.396 -45.415  21.903  1.00 80.35           P
ATOM   2085  P     A B   9     -13.852 -43.272  24.156  1.00 77.76           P
ATOM   2107  P     G B  10      -8.285 -44.242  26.815  1.00 79.86           P
END
""")
    seq3 = iotbx.bioinformatics.sequence("AGCUUUGGAG")
    v = validation(pdb_hierarchy=pdb_in2.construct_hierarchy(),
                   sequences=[seq2, seq3],
                   log=null_out(),
                   nproc=1,
                   extract_coordinates=True)
    out = StringIO()
    v.show(out=out)
    cif_block = v.sequence_as_cif_block()
    assert approx_equal(cif_block['_struct_ref.pdbx_seq_one_letter_code'],
                        [';MTTPSHLSDRYELGEILGFGGMSEVHLA\n;', ';AGCUUUGGAG\n;'])
    # assert approx_equal(cif_block['_struct_ref_seq.pdbx_auth_seq_align_beg'],
    #                     ['10', '14', '16', '2', '6', '8'])
    # assert approx_equal(cif_block['_struct_ref_seq.pdbx_auth_seq_align_end'],
    #                     ['11', '14', '17', '4', '6', '10'])
    assert (len(v.chains[0].get_outliers_table()) == 3)
    assert (len(v.get_table_data()) == 4)
    assert approx_equal(
        v.chains[0].get_mean_coordinate_for_alignment_range(11, 11),
        (-0.693, 34.802, 4.693))
    assert approx_equal(
        v.chains[0].get_mean_coordinate_for_alignment_range(11, 14),
        (2.93675, 31.43475, 3.53175))
    assert (v.chains[0].get_highlighted_residues() == [11, 12, 14])
    assert contains_lines(
        out.getvalue(), """\
  3 mismatches to sequence
    residue IDs:  12 13 15""")
    assert contains_lines(
        out.getvalue(), """\
  sequence identity: 87.50%
  2 residue(s) missing from PDB chain (1 at start, 0 at end)
  1 gap(s) in chain
  1 mismatches to sequence
    residue IDs:  5""")
    s = easy_pickle.dumps(v)
    seq4 = iotbx.bioinformatics.sequence("")
    try:
        v = validation(pdb_hierarchy=pdb_in2.construct_hierarchy(),
                       sequences=[seq4],
                       log=null_out(),
                       nproc=1,
                       extract_coordinates=True)
    except AssertionError:
        pass
    else:
        raise Exception_expected
    # check that nucleic acid chain doesn't get aligned against protein sequence
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM  18932  P  B DG D   1     -12.183  60.531  25.090  0.50364.79           P
ATOM  18963  P  B DG D   2      -9.738  55.258  20.689  0.50278.77           P
ATOM  18994  P  B DA D   3     -10.119  47.855  19.481  0.50355.17           P
ATOM  19025  P  B DT D   4     -13.664  42.707  21.119  0.50237.06           P
ATOM  19056  P  B DG D   5     -19.510  39.821  21.770  0.50255.45           P
ATOM  19088  P  B DA D   6     -26.096  40.001  21.038  0.50437.49           P
ATOM  19120  P  B DC D   7     -31.790  41.189  18.413  0.50210.00           P
ATOM  19149  P  B DG D   8     -34.639  41.306  12.582  0.50313.99           P
ATOM  19179  P  B DA D   9     -34.987  38.244   6.813  0.50158.92           P
ATOM  19210  P  B DT D  10     -32.560  35.160   1.082  0.50181.38           P
HETATM19241  P  BTSP D  11     -27.614  30.137   0.455  0.50508.17           P
""")
    sequences, _ = iotbx.bioinformatics.fasta_sequence_parse.parse(
        """>4GFH:A|PDBID|CHAIN|SEQUENCE
MSTEPVSASDKYQKISQLEHILKRPDTYIGSVETQEQLQWIYDEETDCMIEKNVTIVPGLFKIFDEILVNAADNKVRDPS
MKRIDVNIHAEEHTIEVKNDGKGIPIEIHNKENIYIPEMIFGHLLTSSNYDDDEKKVTGGRNGYGAKLCNIFSTEFILET
ADLNVGQKYVQKWENNMSICHPPKITSYKKGPSYTKVTFKPDLTRFGMKELDNDILGVMRRRVYDINGSVRDINVYLNGK
SLKIRNFKNYVELYLKSLEKKRQLDNGEDGAAKSDIPTILYERINNRWEVAFAVSDISFQQISFVNSIATTMGGTHVNYI
TDQIVKKISEILKKKKKKSVKSFQIKNNMFIFINCLIENPAFTSQTKEQLTTRVKDFGSRCEIPLEYINKIMKTDLATRM
FEIADANEENALKKSDGTRKSRITNYPKLEDANKAGTKEGYKCTLVLTEGDSALSLAVAGLAVVGRDYYGCYPLRGKMLN
VREASADQILKNAEIQAIKKIMGLQHRKKYEDTKSLRYGHLMIMTDQDHDGSHIKGLIINFLESSFPGLLDIQGFLLEFI
TPIIKVSITKPTKNTIAFYNMPDYEKWREEESHKFTWKQKYYKGLGTSLAQEVREYFSNLDRHLKIFHSLQGNDKDYIDL
AFSKKKADDRKEWLRQYEPGTVLDPTLKEIPISDFINKELILFSLADNIRSIPNVLDGFKPGQRKVLYGCFKKNLKSELK
VAQLAPYVSECTAYHHGEQSLAQTIIGLAQNFVGSNNIYLLLPNGAFGTRATGGKDAAAARYIYTELNKLTRKIFHPADD
PLYKYIQEDEKTVEPEWYLPILPMILVNGAEGIGTGWSTYIPPFNPLEIIKNIRHLMNDEELEQMHPWFRGWTGTIEEIE
PLRYRMYGRIEQIGDNVLEITELPARTWTSTIKEYLLLGLSGNDKIKPWIKDMEEQHDDNIKFIITLSPEEMAKTRKIGF
YERFKLISPISLMNMVAFDPHGKIKKYNSVNEILSEFYYVRLEYYQKRKDHMSERLQWEVEKYSFQVKFIKMIIEKELTV
TNKPRNAIIQELENLGFPRFNKEGKPYYGSPNDEIAEQINDVKGATSDEEDEESSHEDTENVINGPEELYGTYEYLLGMR
IWSLTKERYQKLLKQKQEKETELENLLKLSAKDIWNTDLKAFEVGYQEFLQRDAEAR
>4GFH:D|PDBID|CHAIN|SEQUENCE
GGATGACGATX
""")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=sequences,
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing == 0
    assert v.chains[0].n_missing_end == 0
    assert v.chains[0].n_missing_start == 0
    assert len(v.chains[0].alignment.matches()) == 11
    #
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM      2  CA  GLY A   1       1.367   0.551   0.300  1.00  7.71           C
ATOM      6  CA  CYS A   2       2.782   3.785   1.683  1.00  5.18           C
ATOM     12  CA  CYS A   3      -0.375   5.128   3.282  1.00  5.21           C
ATOM     18  CA  SER A   4      -0.870   2.048   5.492  1.00  7.19           C
ATOM     25  CA  LEU A   5       2.786   2.056   6.642  1.00  6.78           C
ATOM     33  CA  PRO A   6       3.212   4.746   9.312  1.00  7.03           C
ATOM     40  CA  PRO A   7       6.870   5.690   8.552  1.00  7.97           C
ATOM     47  CA  CYS A   8       6.021   6.070   4.855  1.00  6.48           C
ATOM     53  CA  ALA A   9       2.812   8.041   5.452  1.00  7.15           C
ATOM     58  CA  LEU A  10       4.739  10.382   7.748  1.00  8.36           C
ATOM     66  CA  SER A  11       7.292  11.200   5.016  1.00  7.00           C
ATOM     73  CA  ASN A  12       4.649  11.435   2.264  1.00  5.40           C
ATOM     81  CA  PRO A  13       1.879  13.433   3.968  1.00  5.97           C
ATOM     88  CA  ASP A  14       0.485  15.371   0.986  1.00  7.70           C
ATOM     96  CA  TYR A  15       0.565  12.245  -1.180  1.00  6.55           C
ATOM    108  CA  CYS A  16      -1.466  10.260   1.363  1.00  7.32           C
ATOM    113  N   NH2 A  17      -2.612  12.308   2.058  1.00  8.11           N
""")
    seq = iotbx.bioinformatics.sequence("GCCSLPPCALSNPDYCX")
    # match last residue
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=[seq],
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing == 0
    assert v.chains[0].n_missing_end == 0
    assert v.chains[0].n_missing_start == 0
    assert len(v.chains[0].alignment.matches()) == 17
    # ignore non-protein residue
    v = validation(pdb_hierarchy=pdb_in.construct_hierarchy(),
                   sequences=[seq],
                   log=null_out(),
                   nproc=1,
                   ignore_hetatm=True)
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing == 1
    assert v.chains[0].n_missing_end == 1
    assert v.chains[0].n_missing_start == 0
    assert len(v.chains[0].alignment.matches()) == 17
    #
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM   2518  CA  PRO C   3      23.450  -5.848  45.723  1.00 85.24           C
ATOM   2525  CA  GLY C   4      20.066  -4.416  44.815  1.00 79.25           C
ATOM   2529  CA  PHE C   5      19.408  -0.913  46.032  1.00 77.13           C
ATOM   2540  CA  GLY C   6      17.384  -1.466  49.208  1.00 83.44           C
ATOM   2544  CA  GLN C   7      17.316  -5.259  49.606  1.00 89.25           C
ATOM   2553  CA  GLY C   8      19.061  -6.829  52.657  1.00 90.67           C
""")
    sequences, _ = iotbx.bioinformatics.fasta_sequence_parse.parse(
        """>1JN5:A|PDBID|CHAIN|SEQUENCE
MASVDFKTYVDQACRAAEEFVNVYYTTMDKRRRLLSRLYMGTATLVWNGNAVSGQESLSEFFEMLPSSEFQISVVDCQPV
HDEATPSQTTVLVVICGSVKFEGNKQRDFNQNFILTAQASPSNTVWKIASDCFRFQDWAS
>1JN5:B|PDBID|CHAIN|SEQUENCE
APPCKGSYFGTENLKSLVLHFLQQYYAIYDSGDRQGLLDAYHDGACCSLSIPFIPQNPARSSLAEYFKDSRNVKKLKDPT
LRFRLLKHTRLNVVAFLNELPKTQHDVNSFVVDISAQTSTLLCFSVNGVFKEVDGKSRDSLRAFTRTFIAVPASNSGLCI
VNDELFVRNASSEEIQRAFAMPAPTPSSSPVPTLSPEQQEMLQAFSTQSGMNLEWSQKCLQDNNWDYTRSAQAFTHLKAK
GEIPEVAFMK
>1JN5:C|PDBID|CHAIN|SEQUENCE
GQSPGFGQGGSV
""")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=sequences,
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing_start == 3
    assert v.chains[0].n_missing_end == 3
    assert v.chains[0].identity == 1.0
    assert v.chains[0].alignment.match_codes == 'iiimmmmmmiii'
    #
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM      2  CA  ALA A   2      -8.453  57.214 -12.754  1.00 52.95           C
ATOM      7  CA  LEU A   3      -8.574  59.274  -9.471  1.00 24.33           C
ATOM     15  CA  ARG A   4     -12.178  60.092  -8.575  1.00 28.40           C
ATOM     26  CA  GLY A   5     -14.170  61.485  -5.667  1.00 26.54           C
ATOM     30  CA  THR A   6     -17.784  60.743  -4.783  1.00 31.78           C
ATOM     37  CA  VAL A   7     -19.080  64.405  -4.464  1.00 21.31           C
""")
    seq = iotbx.bioinformatics.sequence("XALRGTV")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=[seq],
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing_start == 1
    assert v.chains[0].n_missing_end == 0
    assert v.chains[0].identity == 1.0
    assert v.chains[0].alignment.match_codes == 'immmmmm'
    #
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM   2171  CA  ASP I 355       5.591 -11.903   1.133  1.00 41.60           C
ATOM   2175  CA  PHE I 356       7.082  -8.454   0.828  1.00 39.82           C
ATOM   2186  CA  GLU I 357       5.814  -6.112  -1.877  1.00 41.12           C
ATOM   2195  CA  GLU I 358       8.623  -5.111  -4.219  1.00 42.70           C
ATOM   2199  CA  ILE I 359      10.346  -1.867  -3.363  1.00 43.32           C
ATOM   2207  CA  PRO I 360      11.658   0.659  -5.880  1.00 44.86           C
ATOM   2214  CA  GLU I 361      14.921  -0.125  -7.592  1.00 44.32           C
ATOM   2219  CA  GLU I 362      15.848   3.489  -6.866  1.00 44.27           C
HETATM 2224  CA  TYS I 363      16.482   2.005  -3.448  1.00 44.52           C
""")
    seq = iotbx.bioinformatics.sequence("NGDFEEIPEEYL")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=[seq],
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].n_missing_start == 2
    assert v.chains[0].n_missing_end == 1
    assert v.chains[0].identity == 1.0
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM    450  CA  ASN A   1      37.242  41.665  44.160  1.00 35.89           C
ATOM    458  CA  GLY A   2      37.796  38.269  42.523  1.00 30.13           C
HETATM  463  CA AMSE A   3      35.878  39.005  39.326  0.54 22.83           C
HETATM  464  CA BMSE A   3      35.892  39.018  39.323  0.46 22.96           C
ATOM    478  CA  ILE A   4      37.580  38.048  36.061  1.00 22.00           C
ATOM    486  CA  SER A   5      37.593  40.843  33.476  1.00 18.73           C
ATOM    819  CA  ALA A   8      25.982  34.781  27.220  1.00 18.43           C
ATOM    824  CA  ALA A   9      23.292  32.475  28.614  1.00 19.60           C
HETATM  830  CA BMSE A  10      22.793  30.814  25.223  0.41 22.60           C
HETATM  831  CA CMSE A  10      22.801  30.850  25.208  0.59 22.54           C
ATOM    845  CA  GLU A  11      26.504  30.054  24.966  1.00 25.19           C
ATOM    854  CA  GLY A  12      25.907  28.394  28.320  1.00 38.88           C
""")
    seq = iotbx.bioinformatics.sequence("NGMISAAAAMEG")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=[seq],
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)
    assert v.chains[0].alignment.a == 'NGMISXXAAMEG'
    assert v.chains[0].alignment.b == 'NGMISAAAAMEG'
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM   4615  CA  ALA C   1       1.000   1.000   1.000  1.00 10.00
ATOM   4622  CA  ALA C   2       1.000   1.000   1.000  1.00 10.00
ATOM   4627  CA  ALA C   3       1.000   1.000   1.000  1.00 10.00
ATOM   4634  CA  ALA C   4       1.000   1.000   1.000  1.00 10.00
ATOM   4646  CA  ALA C   5       1.000   1.000   1.000  1.00 10.00
ATOM   4658  CA  ALA C   6       1.000   1.000   1.000  1.00 10.00
ATOM   4664  CA  ALA C   7       1.000   1.000   1.000  1.00 10.00
ATOM   4669  CA  ALA C   8       1.000   1.000   1.000  1.00 10.00
ATOM   4680  CA  ARG C   9       1.000   1.000   1.000  1.00 10.00
ATOM   4690  CA  GLY C  10       1.000   1.000   1.000  1.00 10.00
ATOM   4698  CA  PRO C  11       1.000   1.000   1.000  1.00 10.00
ATOM   4705  CA  LYS C  12       1.000   1.000   1.000  1.00 10.00
ATOM   4712  CA  TRP C  13       1.000   1.000   1.000  1.00 10.00
ATOM   4726  CA  GLU C  14       1.000   1.000   1.000  1.00 10.00
ATOM   4738  CA  SER C  15       1.000   1.000   1.000  1.00 10.00
ATOM   4744  CA  THR C  16       1.000   1.000   1.000  1.00 10.00
ATOM   4751  CA  GLY C  17       1.000   1.000   1.000  1.00 10.00
ATOM   4755  CA  TYR C  18       1.000   1.000   1.000  1.00 10.00
ATOM   4767  CA  PHE C  19       1.000   1.000   1.000  1.00 10.00
ATOM   4778  CA  ALA C  20       1.000   1.000   1.000  1.00 10.00
ATOM   4786  CA  ALA C  21       1.000   1.000   1.000  1.00 10.00
ATOM   4798  CA  TRP C  22       1.000   1.000   1.000  1.00 10.00
ATOM   4812  CA  GLY C  23       1.000   1.000   1.000  1.00 10.00
ATOM   4816  CA  GLN C  24       1.000   1.000   1.000  1.00 10.00
ATOM   4822  CA  GLY C  25       1.000   1.000   1.000  1.00 10.00
ATOM   4826  CA  THR C  26       1.000   1.000   1.000  1.00 10.00
ATOM   4833  CA  LEU C  27       1.000   1.000   1.000  1.00 10.00
ATOM   4841  CA  VAL C  28       1.000   1.000   1.000  1.00 10.00
ATOM   4848  CA  THR C  29       1.000   1.000   1.000  1.00 10.00
ATOM   4855  CA  VAL C  30       1.000   1.000   1.000  1.00 10.00
ATOM   4862  CA  SER C  31       1.000   1.000   1.000  1.00 10.00
ATOM   4868  CA  SER C  32       1.000   1.000   1.000  1.00 10.00
END
""")
    seq = iotbx.bioinformatics.sequence(
        "AAAAAAAARGKWESPAALLKKAAWCSGTLVTVSSASAPKWKSTSGCYFAAPWNKRALRVTVLQSS")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=[seq],
        log=null_out(),
        nproc=1,
    )
    out = StringIO()
    v.show(out=out)

    # check that shortest matching sequence is chosen
    # example from 6H4N, chain a, and I
    sequences, _ = iotbx.bioinformatics.fasta_sequence_parse.parse("""\
>6H4N:a|PDBID|CHAIN|SEQUENCE
AAUUGAAGAGUUUGAUCAUGGCUCAGAUUGAACGCUGGCGGCAGGCCUAACACAUGCAAGUCGAACGGUAACAGGAAGAA
GCUUGCUUCUUUGCUGACGAGUGGCGGACGGGUGAGUAAUGUCUGGGAAACUGCCUGAUGGAGGGGGAUAACUACUGGAA
ACGGUAGCUAAUACCGCAUAACGUCGCAAGACCAAAGAGGGGGACCUUCGGGCCUCUUGCCAUCGGAUGUGCCCAGAUGG
GAUUAGCUAGUAGGUGGGGUAACGGCUCACCUAGGCGACGAUCCCUAGCUGGUCUGAGAGGAUGACCAGCCACACUGGAA
CUGAGACACGGUCCAGACUCCUACGGGAGGCAGCAGUGGGGAAUAUUGCACAAUGGGCGCAAGCCUGAUGCAGCCAUGCC
GCGUGUAUGAAGAAGGCCUUCGGGUUGUAAAGUACUUUCAGCGGGGAGGAAGGGAGUAAAGUUAAUACCUUUGCUCAUUG
ACGUUACCCGCAGAAGAAGCACCGGCUAACUCCGUGCCAGCAGCCGCGGUAAUACGGAGGGUGCAAGCGUUAAUCGGAAU
UACUGGGCGUAAAGCGCACGCAGGCGGUUUGUUAAGUCAGAUGUGAAAUCCCCGGGCUCAACCUGGGAACUGCAUCUGAU
ACUGGCAAGCUUGAGUCUCGUAGAGGGGGGUAGAAUUCCAGGUGUAGCGGUGAAAUGCGUAGAGAUCUGGAGGAAUACCG
GUGGCGAAGGCGGCCCCCUGGACGAAGACUGACGCUCAGGUGCGAAAGCGUGGGGAGCAAACAGGAUUAGAUACCCUGGU
AGUCCACGCCGUAAACGAUGUCGACUUGGAGGUUGUGCCCUUGAGGCGUGGCUUCCGGAGCUAACGCGUUAAGUCGACCG
CCUGGGGAGUACGGCCGCAAGGUUAAAACUCAAAUGAAUUGACGGGGGCCCGCACAAGCGGUGGAGCAUGUGGUUUAAUU
CGAUGCAACGCGAAGAACCUUACCUGGUCUUGACAUCCACGGAAGUUUUCAGAGAUGAGAAUGUGCCUUCGGGAACCGUG
AGACAGGUGCUGCAUGGCUGUCGUCAGCUCGUGUUGUGAAAUGUUGGGUUAAGUCCCGCAACGAGCGCAACCCUUAUCCU
UUGUUGCCAGCGGUCCGGCCGGGAACUCAAAGGAGACUGCCAGUGAUAAACUGGAGGAAGGUGGGGAUGACGUCAAGUCA
UCAUGGCCCUUACGACCAGGGCUACACACGUGCUACAAUGGCGCAUACAAAGAGAAGCGACCUCGCGAGAGCAAGCGGAC
CUCAUAAAGUGCGUCGUAGUCCGGAUUGGAGUCUGCAACUCGACUCCAUGAAGUCGGAAUCGCUAGUAAUCGUGGAUCAG
AAUGCCACGGUGAAUACGUUCCCGGGCCUUGUACACACCGCCCGUCACACCAUGGGAGUGGGUUGCAAAAGAAGUAGGUA
GCUUAACCUUCGGGAGGGCGCUUACCACUUUGUGAUUCAUGACUGGGGUGAAGUCGUAACAAGGUAACCGUAGGGGAACC
UGCGGUUGGAUCAC
>6H4N:I|PDBID|CHAIN|SEQUENCE
CUCCU
""")
    pdb_in = iotbx.pdb.input(source_info=None,
                             lines="""\
ATOM  95502  P     C I1536     211.989 143.717 147.208  1.00 16.47           P
ATOM  95503  OP1   C I1536     213.292 143.696 146.494  1.00 16.47           O
ATOM  95504  OP2   C I1536     211.250 144.996 147.359  1.00 16.47           O
ATOM  95505  O5'   C I1536     211.021 142.666 146.541  1.00 16.47           O
ATOM  95506  C5'   C I1536     211.671 141.536 146.021  1.00 16.47           C
ATOM  95507  C4'   C I1536     211.059 140.260 146.502  1.00 16.47           C
ATOM  95508  O4'   C I1536     209.764 140.432 147.128  1.00 16.47           O
ATOM  95509  C3'   C I1536     210.818 139.353 145.303  1.00 16.47           C
ATOM  95510  O3'   C I1536     211.011 137.993 145.604  1.00 16.47           O
ATOM  95511  C2'   C I1536     209.372 139.646 144.938  1.00 16.47           C
ATOM  95512  O2'   C I1536     208.735 138.572 144.276  1.00 16.47           O
ATOM  95513  C1'   C I1536     208.757 139.866 146.316  1.00 16.47           C
ATOM  95514  N1    C I1536     207.618 140.788 146.322  1.00 16.47           N
ATOM  95515  C2    C I1536     206.610 140.626 145.378  1.00 16.47           C
ATOM  95516  O2    C I1536     206.712 139.721 144.535  1.00 16.47           O
ATOM  95517  N3    C I1536     205.560 141.463 145.396  1.00 16.47           N
ATOM  95518  C4    C I1536     205.492 142.420 146.320  1.00 16.47           C
ATOM  95519  N4    C I1536     204.429 143.227 146.302  1.00 16.47           N
ATOM  95520  C5    C I1536     206.496 142.595 147.306  1.00 16.47           C
ATOM  95521  C6    C I1536     207.522 141.754 147.283  1.00 16.47           C
ATOM  95522  P     U I1537     212.458 137.366 145.505  1.00 11.96           P
ATOM  95523  OP1   U I1537     212.292 135.894 145.567  1.00 11.96           O
ATOM  95524  OP2   U I1537     213.344 138.045 146.479  1.00 11.96           O
ATOM  95525  O5'   U I1537     212.962 137.720 144.038  1.00 11.96           O
ATOM  95526  C5'   U I1537     214.363 137.934 143.772  1.00 11.96           C
ATOM  95527  C4'   U I1537     214.522 138.678 142.472  1.00 11.96           C
ATOM  95528  O4'   U I1537     213.714 137.951 141.515  1.00 11.96           O
ATOM  95529  C3'   U I1537     213.970 140.098 142.549  1.00 11.96           C
ATOM  95530  O3'   U I1537     214.924 141.159 142.799  1.00 11.96           O
ATOM  95531  C2'   U I1537     212.939 140.210 141.413  1.00 11.96           C
ATOM  95532  O2'   U I1537     212.980 141.292 140.508  1.00 11.96           O
ATOM  95533  C1'   U I1537     212.990 138.848 140.714  1.00 11.96           C
ATOM  95534  N1    U I1537     211.632 138.324 140.509  1.00 11.96           N
ATOM  95535  C2    U I1537     211.212 138.082 139.216  1.00 11.96           C
ATOM  95536  O2    U I1537     211.943 138.228 138.252  1.00 11.96           O
ATOM  95537  N3    U I1537     209.897 137.730 139.076  1.00 11.96           N
ATOM  95538  C4    U I1537     208.966 137.602 140.074  1.00 11.96           C
ATOM  95539  O4    U I1537     207.834 137.203 139.798  1.00 11.96           O
ATOM  95540  C5    U I1537     209.473 137.843 141.382  1.00 11.96           C
ATOM  95541  C6    U I1537     210.749 138.206 141.544  1.00 11.96           C
ATOM  95542  P     C I1538     216.031 141.722 141.738  1.00 11.10           P
ATOM  95543  OP1   C I1538     216.814 142.772 142.428  1.00 11.10           O
ATOM  95544  OP2   C I1538     215.385 142.057 140.453  1.00 11.10           O
ATOM  95545  O5'   C I1538     217.081 140.541 141.538  1.00 11.10           O
ATOM  95546  C5'   C I1538     218.494 140.848 141.429  1.00 11.10           C
ATOM  95547  C4'   C I1538     218.962 140.916 139.986  1.00 11.10           C
ATOM  95548  O4'   C I1538     218.034 140.280 139.091  1.00 11.10           O
ATOM  95549  C3'   C I1538     219.276 142.298 139.408  1.00 11.10           C
ATOM  95550  O3'   C I1538     220.629 142.126 139.044  1.00 11.10           O
ATOM  95551  C2'   C I1538     218.657 142.315 138.005  1.00 11.10           C
ATOM  95552  O2'   C I1538     219.358 142.774 136.857  1.00 11.10           O
ATOM  95553  C1'   C I1538     218.164 140.883 137.832  1.00 11.10           C
ATOM  95554  N1    C I1538     216.943 140.702 137.064  1.00 11.10           N
ATOM  95555  C2    C I1538     217.041 140.096 135.813  1.00 11.10           C
ATOM  95556  O2    C I1538     218.163 139.770 135.401  1.00 11.10           O
ATOM  95557  N3    C I1538     215.932 139.850 135.093  1.00 11.10           N
ATOM  95558  C4    C I1538     214.748 140.195 135.580  1.00 11.10           C
ATOM  95559  N4    C I1538     213.670 139.968 134.827  1.00 11.10           N
ATOM  95560  C5    C I1538     214.617 140.827 136.842  1.00 11.10           C
ATOM  95561  C6    C I1538     215.722 141.024 137.566  1.00 11.10           C
ATOM  95562  P     C I1539     221.798 142.624 139.940  1.00 17.77           P
ATOM  95563  OP1   C I1539     221.300 143.669 140.865  1.00 17.77           O
ATOM  95564  OP2   C I1539     222.961 142.899 139.061  1.00 17.77           O
ATOM  95565  O5'   C I1539     222.148 141.341 140.812  1.00 17.77           O
ATOM  95566  C5'   C I1539     223.493 140.934 140.997  1.00 17.77           C
ATOM  95567  C4'   C I1539     223.633 139.444 140.845  1.00 17.77           C
ATOM  95568  O4'   C I1539     222.661 138.972 139.877  1.00 17.77           O
ATOM  95569  C3'   C I1539     224.967 138.959 140.300  1.00 17.77           C
ATOM  95570  O3'   C I1539     225.970 138.853 141.295  1.00 17.77           O
ATOM  95571  C2'   C I1539     224.602 137.629 139.658  1.00 17.77           C
ATOM  95572  O2'   C I1539     224.482 136.616 140.642  1.00 17.77           O
ATOM  95573  C1'   C I1539     223.209 137.924 139.109  1.00 17.77           C
ATOM  95574  N1    C I1539     223.219 138.333 137.681  1.00 17.77           N
ATOM  95575  C2    C I1539     223.353 137.370 136.683  1.00 17.77           C
ATOM  95576  O2    C I1539     223.476 136.178 136.982  1.00 17.77           O
ATOM  95577  N3    C I1539     223.342 137.742 135.392  1.00 17.77           N
ATOM  95578  C4    C I1539     223.202 139.017 135.059  1.00 17.77           C
ATOM  95579  N4    C I1539     223.202 139.332 133.762  1.00 17.77           N
ATOM  95580  C5    C I1539     223.059 140.033 136.041  1.00 17.77           C
ATOM  95581  C6    C I1539     223.067 139.642 137.318  1.00 17.77           C
ATOM  95582  P     U I1540     227.517 139.071 140.915  1.00 25.44           P
ATOM  95583  OP1   U I1540     228.321 138.910 142.156  1.00 25.44           O
ATOM  95584  OP2   U I1540     227.626 140.309 140.102  1.00 25.44           O
ATOM  95585  O5'   U I1540     227.868 137.833 139.978  1.00 25.44           O
ATOM  95586  C5'   U I1540     228.014 136.524 140.520  1.00 25.44           C
ATOM  95587  C4'   U I1540     228.308 135.503 139.447  1.00 25.44           C
ATOM  95588  O4'   U I1540     227.513 135.808 138.268  1.00 25.44           O
ATOM  95589  C3'   U I1540     229.761 135.445 138.980  1.00 25.44           C
ATOM  95590  O3'   U I1540     230.104 134.098 138.659  1.00 25.44           O
ATOM  95591  C2'   U I1540     229.740 136.281 137.705  1.00 25.44           C
ATOM  95592  O2'   U I1540     230.767 135.976 136.785  1.00 25.44           O
ATOM  95593  C1'   U I1540     228.360 135.950 137.145  1.00 25.44           C
ATOM  95594  N1    U I1540     227.809 136.996 136.268  1.00 25.44           N
ATOM  95595  C2    U I1540     227.053 136.589 135.186  1.00 25.44           C
ATOM  95596  O2    U I1540     226.815 135.418 134.956  1.00 25.44           O
ATOM  95597  N3    U I1540     226.574 137.600 134.393  1.00 25.44           N
ATOM  95598  C4    U I1540     226.781 138.951 134.566  1.00 25.44           C
ATOM  95599  O4    U I1540     226.286 139.746 133.765  1.00 25.44           O
ATOM  95600  C5    U I1540     227.583 139.293 135.701  1.00 25.44           C
ATOM  95601  C6    U I1540     228.061 138.329 136.493  1.00 25.44           C
END
""")
    v = validation(
        pdb_hierarchy=pdb_in.construct_hierarchy(),
        sequences=sequences,
        log=null_out(),
        nproc=1,
    )
    assert (v.chains[0].get_alignment() == ['CUCCU', 'CUCCU'])

    # all tests below here have additional dependencies
    if (not libtbx.env.has_module("ksdssp")):
        print("Skipping advanced tests (require ksdssp module)")
        return
    pdb_file = libtbx.env.find_in_repositories(
        relative_path="phenix_regression/pdb/1ywf.pdb", test=os.path.isfile)
    if (pdb_file is not None):
        seq = iotbx.bioinformatics.sequence(
            "MGSSHHHHHHSSGLVPRGSHMAVRELPGAWNFRDVADTATALRPGRLFRSSELSRLDDAGRATLRRLGITDVADLRSSREVARRGPGRVPDGIDVHLLPFPDLADDDADDSAPHETAFKRLLTNDGSNGESGESSQSINDAATRYMTDEYRQFPTRNGAQRALHRVVTLLAAGRPVLTHCFAGKDRTGFVVALVLEAVGLDRDVIVADYLRSNDSVPQLRARISEMIQQRFDTELAPEVVTFTKARLSDGVLGVRAEYLAAARQTIDETYGSLGGYLRDAGISQATVNRMRGVLLG"
        )
        pdb_in = file_reader.any_file(pdb_file, force_type="pdb")
        hierarchy = pdb_in.file_object.hierarchy
        v = validation(pdb_hierarchy=hierarchy,
                       sequences=[seq],
                       log=null_out(),
                       nproc=1,
                       include_secondary_structure=True,
                       extract_coordinates=True)
        out = StringIO()
        v.show(out=out)
        aln1, aln2, ss = v.chains[0].get_alignment(include_sec_str=True)
        assert ("HHH" in ss) and ("LLL" in ss) and ("---" in ss)
        cif_block = v.sequence_as_cif_block()
        assert cif_block[
            '_struct_ref.pdbx_seq_one_letter_code'] == seq.sequence
        # assert list(
        #   cif_block['_struct_ref_seq.pdbx_auth_seq_align_beg']) == ['4', '117']
        # assert list(
        #   cif_block['_struct_ref_seq.pdbx_auth_seq_align_end']) == ['85', '275']
        # assert list(cif_block['_struct_ref_seq.seq_align_beg']) == ['1', '114']
        # assert list(cif_block['_struct_ref_seq.seq_align_end']) == ['82', '272']
        # determine relative counts of sequences and chains
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq] * 4,
                                      copies_from_xtriage=4,
                                      out=null_out())
        assert (n_seq == 1)
        hierarchy = hierarchy.deep_copy()
        chain2 = hierarchy.only_model().chains()[0].detached_copy()
        hierarchy.only_model().append_chain(chain2)
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq] * 4,
                                      copies_from_xtriage=2,
                                      out=null_out())
        assert (n_seq == 1)
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq],
                                      copies_from_xtriage=2,
                                      out=null_out())
        assert (n_seq == 4)
        try:
            n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                          sequences=[seq] * 3,
                                          copies_from_xtriage=2,
                                          out=null_out())
        except Sorry as s:
            assert ("round number" in str(s))
        else:
            raise Exception_expected
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq] * 3,
                                      copies_from_xtriage=2,
                                      force_accept_composition=True,
                                      out=null_out())
        assert (n_seq == 1)
        try:
            n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                          sequences=[seq] * 4,
                                          copies_from_xtriage=1,
                                          out=null_out())
        except Sorry as s:
            assert ("less than" in str(s))
        else:
            raise Exception_expected
        n_seq = get_sequence_n_copies(
            pdb_hierarchy=hierarchy,
            sequences=[seq] * 4,
            copies_from_xtriage=1,
            assume_xtriage_copies_from_sequence_file=True,
            out=null_out())
        assert (n_seq == 0.5)
        hierarchy = hierarchy.deep_copy()
        chain2 = hierarchy.only_model().chains()[0].detached_copy()
        hierarchy.only_model().append_chain(chain2)
        try:
            n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                          sequences=[seq] * 2,
                                          copies_from_xtriage=2,
                                          out=null_out())
        except Sorry as s:
            assert ("round number" in str(s))
        else:
            raise Exception_expected
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq],
                                      copies_from_xtriage=1,
                                      out=null_out())
        assert (n_seq == 3)
        hierarchy = hierarchy.deep_copy()
        chain2 = hierarchy.only_model().chains()[0].detached_copy()
        hierarchy.only_model().append_chain(chain2)
        n_seq = get_sequence_n_copies(pdb_hierarchy=hierarchy,
                                      sequences=[seq] * 2,
                                      copies_from_xtriage=2,
                                      out=null_out())
        assert (n_seq == 4)
        # now with files as input
        seq_file = "tmp_mmtbx_validation_sequence.fa"
        with open(seq_file, "w") as f:
            f.write(">1ywf\n%s" % seq.sequence)
        n_seq = get_sequence_n_copies_from_files(pdb_file=pdb_file,
                                                 seq_file=seq_file,
                                                 copies_from_xtriage=4,
                                                 out=null_out())
        try:
            assert (n_seq == 4)
        finally:
            os.remove(seq_file)
def run():
  for code, lines in pdbs.items():
    #if code!='VAL': continue
    print(('-%s- ' % code)*10)
    with open("tst_symmetric_flips_%s.pdb" % code, "w") as f:
      f.write(lines)
    cmd = "phenix.pdb_interpretation tst_symmetric_flips_%s.pdb" % code
    cmd += ' flip_symmetric_amino_acids=False'
    print(cmd)
    ero = easy_run.fully_buffered(command=cmd)
    out = StringIO()
    ero.show_stdout(out=out)
    i=0
    if code in ['LEU', 'VAL']:
      d = largest_chiral(out.getvalue(),
                         geo_strs[code],
                         i+2)
      print('chiral',d)
      assert abs(d-values[code][i])<.1, '%s %s' % (d, values[code][i])
    else:
      d = largest_torsion(out.getvalue(),
                          geo_strs[code],
                          i+1)
      print('torsion',d)
      assert abs(d-values[code][i])<1, '%s %s' % (d, values[code][i])
    i+=1
    if code in ['LEU', 'VAL']:
      d = largest_chiral(out.getvalue(),
                         geo_strs[code],
                         i+2)
      print('delta',d)
      assert abs(d-values[code][i])<.1, '%s %s' % (d, values[code][i])
    else:
      d = largest_torsion(out.getvalue(),
                          geo_strs[code],
                          i+1)
      print('delta',d)
      assert abs(d-values[code][i])<1, '%s %s' % (d, values[code][i])

    pdb_inp = iotbx.pdb.input('tst_symmetric_flips_%s.pdb' % code)
    hierarchy = pdb_inp.construct_hierarchy()
    rc = hierarchy.flip_symmetric_amino_acids()
    print(rc)
    hierarchy.write_pdb_file("tst_symmetric_flips_%s.pdb" % code)
    cmd = "phenix.pdb_interpretation tst_symmetric_flips_%s.pdb" % code
    cmd += ' flip_symmetric_amino_acids=False'
    print(cmd)
    ero = easy_run.fully_buffered(command=cmd)
    out = StringIO()
    ero.show_stdout(out=out)

    i+=1
    if code in ['LEU', 'VAL']:
      d = largest_chiral(out.getvalue(),
                         geo_strs[code],
                         i)
      print('chiral',d)
      assert abs(d-values[code][i])<.1, '%s %s' % (d, values[code][i])
    else:
      d = largest_torsion(out.getvalue(),
                          geo_strs[code],
                          i-1)
      print('torsion',d)
      assert abs(d-values[code][i])<1, '%s %s' % (d, values[code][i])
    i+=1
    if code in ['LEU', 'VAL']:
      d = largest_chiral(out.getvalue(),
                         geo_strs[code],
                         i)
      print('delta',d)
      assert abs(d-values[code][i])<.1, '%s %s' % (d, values[code][i])
    else:
      d = largest_torsion(out.getvalue(),
                          geo_strs[code],
                          i-1)
      print('delta',d)
      assert abs(d-values[code][i])<1, '%s %s' % (d, values[code][i])
Example #47
0
 def setUp(self):
     self.output = StringIO()
     self.error = StringIO()
     self.input = StringIO()
 def test_no_historical(self):
     out = StringIO()
     with replace_registry({"test_place": Place}):
         management.call_command(self.command_name, auto=True, stdout=out)
     self.assertIn(clean_duplicate_history.Command.NO_REGISTERED_MODELS,
                   out.getvalue())
Example #49
0
    def apply_significance_filter(self, experiments, reflections):

        self.logger.log_step_time("SIGNIFICANCE_FILTER")

        # Apply an I/sigma filter ... accept resolution bins only if they
        #   have significant signal; tends to screen out higher resolution observations
        #   if the integration model doesn't quite fit
        target_symm = symmetry(
            unit_cell=self.params.scaling.unit_cell,
            space_group_info=self.params.scaling.space_group)

        new_experiments = ExperimentList()
        new_reflections = flex.reflection_table()

        for experiment in experiments:
            exp_reflections = reflections.select(
                reflections['exp_id'] == experiment.identifier)

            N_obs_pre_filter = exp_reflections.size()

            N_bins_small_set = N_obs_pre_filter // self.params.select.significance_filter.min_ct
            N_bins_large_set = N_obs_pre_filter // self.params.select.significance_filter.max_ct

            # Ensure there is at least one bin.
            N_bins = max([
                min([
                    self.params.select.significance_filter.n_bins,
                    N_bins_small_set
                ]), N_bins_large_set, 1
            ])

            #print ("\nN_obs_pre_filter %d"%N_obs_pre_filter)
            #print >> out, "Total obs %d Choose n bins = %d"%(N_obs_pre_filter,N_bins)
            #if indices_to_edge is not None:
            #  print >> out, "Total preds %d to edge of detector"%indices_to_edge.size()

            # Build a miller array for the experiment reflections
            exp_miller_indices = miller.set(
                target_symm, exp_reflections['miller_index_asymmetric'], True)
            exp_observations = miller.array(
                exp_miller_indices, exp_reflections['intensity.sum.value'],
                flex.sqrt(exp_reflections['intensity.sum.variance']))

            assert exp_observations.size() == exp_reflections.size()

            out = StringIO()
            bin_results = show_observations(exp_observations,
                                            out=out,
                                            n_bins=N_bins)

            if self.params.output.log_level == 0:
                self.logger.log(out.getvalue())

            acceptable_resolution_bins = [
                bin.mean_I_sigI > self.params.select.significance_filter.sigma
                for bin in bin_results
            ]

            acceptable_nested_bin_sequences = [
                i for i in range(len(acceptable_resolution_bins))
                if False not in acceptable_resolution_bins[:i + 1]
            ]

            if len(acceptable_nested_bin_sequences) == 0:
                continue
            else:
                N_acceptable_bins = max(acceptable_nested_bin_sequences) + 1

                imposed_res_filter = float(bin_results[N_acceptable_bins -
                                                       1].d_range.split()[2])

                imposed_res_sel = exp_observations.resolution_filter_selection(
                    d_min=imposed_res_filter)

                assert imposed_res_sel.size() == exp_reflections.size()

                new_exp_reflections = exp_reflections.select(imposed_res_sel)

                if new_exp_reflections.size() > 0:
                    new_experiments.append(experiment)
                    new_reflections.extend(new_exp_reflections)

                #self.logger.log("N acceptable bins %d"%N_acceptable_bins)
                #self.logger.log("Old n_obs: %d, new n_obs: %d"%(N_obs_pre_filter, exp_observations.size()))
                #if indices_to_edge is not None:
                #  print >> out, "Total preds %d to edge of detector"%indices_to_edge.size()

        removed_reflections = len(reflections) - len(new_reflections)
        removed_experiments = len(experiments) - len(new_experiments)

        self.logger.log(
            "Reflections rejected because of significance filter: %d" %
            removed_reflections)
        self.logger.log(
            "Experiments rejected because of significance filter: %d" %
            removed_experiments)

        # MPI-reduce total counts
        comm = self.mpi_helper.comm
        MPI = self.mpi_helper.MPI
        total_removed_reflections = comm.reduce(removed_reflections, MPI.SUM,
                                                0)
        total_removed_experiments = comm.reduce(removed_experiments, MPI.SUM,
                                                0)

        # rank 0: log total counts
        if self.mpi_helper.rank == 0:
            self.logger.main_log(
                "Total reflections rejected because of significance filter: %d"
                % total_removed_reflections)
            self.logger.main_log(
                "Total experiments rejected because of significance filter: %d"
                % total_removed_experiments)

        self.logger.log_step_time("SIGNIFICANCE_FILTER", True)

        return new_experiments, new_reflections
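
# The heart of the significance filter above: resolution bins are accepted
# only while every earlier bin also passed the I/sigma cutoff, and the last
# accepted bin sets the imposed resolution limit. A minimal sketch of that
# leading-prefix logic with made-up per-bin values (not dials API):
def n_acceptable_bins(mean_i_sig_i_per_bin, sigma_cutoff):
    # Count leading bins whose mean I/sigma exceeds the cutoff; stop at the
    # first failing bin, matching the nested-bin test above.
    n = 0
    for value in mean_i_sig_i_per_bin:
        if value <= sigma_cutoff:
            break
        n += 1
    return n

assert n_acceptable_bins([12.0, 8.0, 3.5, 0.9, 1.2], sigma_cutoff=1.0) == 3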
Example #50
0
 def display(self, add_comments=False):
     '''Display options in a config file form.'''
     output = StringIO()
     keys = sorted(self._options.keys())
     currentSection = None
     for sect, opt in keys:
         if sect != currentSection:
             if currentSection is not None:
                 output.write('\n')
             output.write('[')
             output.write(sect)
             output.write("]\n")
             currentSection = sect
         if add_comments:
             doc = self._options[sect, opt].doc()
             if not doc:
                 doc = "No information available, sorry."
             doc = re.sub(r"\s+", " ", doc)
             output.write("\n# %s\n" % ("\n# ".join(wrap(doc)),))
         self._options[sect, opt].write_config(output)
     return output.getvalue()
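
# The display() method above hand-writes an INI-style dump into a StringIO.
# For comparison, a minimal sketch with the standard-library configparser,
# which renders the same sectioned layout into an in-memory buffer (the
# section and option names here are illustrative):
from configparser import ConfigParser
from io import StringIO

config = ConfigParser()
config["Tokens"] = {"spam_bias": "0.5"}
buf = StringIO()
config.write(buf)               # produces "[Tokens]\nspam_bias = 0.5\n\n"
print(buf.getvalue())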
Example #51
0
    def test_deep_analysis_error_iprint(self):

        class ImplCompTwoStatesAE(ImplicitComponent):

            def setup(self):
                self.add_input('x', 0.5)
                self.add_output('y', 0.0)
                self.add_output('z', 2.0, lower=1.5, upper=2.5)

                self.maxiter = 10
                self.atol = 1.0e-12

                self.declare_partials(of='*', wrt='*')

                self.counter = 0

            def apply_nonlinear(self, inputs, outputs, residuals):
                """
                Don't solve; just calculate the residual.
                """

                x = inputs['x']
                y = outputs['y']
                z = outputs['z']

                residuals['y'] = y - x - 2.0*z
                residuals['z'] = x*z + z - 4.0

                self.counter += 1
                if self.counter > 5 and self.counter < 11:
                    raise AnalysisError('catch me')

            def linearize(self, inputs, outputs, jac):
                """
                Analytical derivatives.
                """

                # Output equation
                jac[('y', 'x')] = -1.0
                jac[('y', 'y')] = 1.0
                jac[('y', 'z')] = -2.0

                # State equation
                jac[('z', 'z')] = -inputs['x'] + 1.0
                jac[('z', 'x')] = -outputs['z']


        top = Problem()
        top.model = Group()
        top.model.add_subsystem('px', IndepVarComp('x', 7.0))

        sub = top.model.add_subsystem('sub', Group())
        sub.add_subsystem('comp', ImplCompTwoStatesAE())

        top.model.connect('px.x', 'sub.comp.x')

        top.model.nonlinear_solver = NewtonSolver()
        top.model.nonlinear_solver.options['maxiter'] = 2
        top.model.nonlinear_solver.options['solve_subsystems'] = True
        top.model.linear_solver = ScipyIterativeSolver()

        sub.nonlinear_solver = NewtonSolver()
        sub.nonlinear_solver.options['maxiter'] = 2
        sub.linear_solver = ScipyIterativeSolver()

        ls = top.model.nonlinear_solver.linesearch = ArmijoGoldsteinLS(bound_enforcement='wall')
        ls.options['maxiter'] = 5
        ls.options['alpha'] = 10.0
        ls.options['retry_on_analysis_error'] = True
        ls.options['c'] = 10000.0

        top.setup(check=False)
        top.set_solver_print(level=2)

        stdout = sys.stdout
        strout = StringIO()

        sys.stdout = strout
        try:
            top.run_model()
        finally:
            sys.stdout = stdout

        output = strout.getvalue().split('\n')
        self.assertTrue(output[26].startswith('|  LS: AG 5'))
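The manual sys.stdout swap above can also be written with contextlib.redirect_stdout. A small sketch of that pattern, independent of OpenMDAO:

# Capture printed solver output with redirect_stdout instead of swapping sys.stdout by hand.
from contextlib import redirect_stdout
from io import StringIO

strout = StringIO()
with redirect_stdout(strout):
    print("|  LS: AG 5 ...")   # stand-in for the output produced by top.run_model()

lines = strout.getvalue().split("\n")
assert lines[0].startswith("|  LS: AG 5")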
示例#52
0
class CLITest(TestCase):
    def setUp(self):
        self.output = StringIO()
        self.error = StringIO()
        self.input = StringIO()

    def test_cli_reading_web_form_password_with_multiple_password_attempts(self):
        password_attempts = (i for i in ("incorrect", "badger"))
        cli = self.build_cli(
            getpass=lambda prompt: next(password_attempts).encode("utf-8"),
            arguments=("--path", self.keychain_path, "onetosix",),
        )
        cli.run()

        self.assert_output("123456\n")
        self.assert_no_error_output()

    def test_cli_with_bad_item_name(self):
        cli = self.build_cli(
            getpass=lambda prompt: "badger".encode("utf-8"),
            arguments=("--path", self.keychain_path, "onetos",),
        )

        self.assert_exit_status(EX_DATAERR, cli.run)
        self.assert_no_output()
        self.assert_error_output("1pass: Could not find an item named 'onetos'\n")

    def test_cli_with_fuzzy_matching(self):
        cli = self.build_cli(
            getpass=lambda prompt: "badger".encode("utf-8"),
            arguments=("--fuzzy", "--path", self.keychain_path, "onetos",),
        )
        cli.run()

        self.assert_output("123456\n")
        self.assert_no_error_output()

    def test_cli_cancelled_password_prompt(self):
        def keyboard_interrupt(prompt):
            raise KeyboardInterrupt()
        cli = self.build_cli(
            getpass=keyboard_interrupt,
            arguments=("--path", self.keychain_path, "onetosix",),
        )

        self.assert_exit_status(0, cli.run)
        self.assert_output("\n")
        self.assert_no_error_output()

    def test_correct_password_from_stdin(self):
        def flunker(prompt):
            self.fail("Password prompt was invoked")
        self.input.write("badger\n")
        self.input.seek(0)
        cli = self.build_cli(
            getpass=flunker,
            arguments=("--no-prompt", "--path", self.keychain_path, "onetosix",),
        )
        cli.run()

        self.assert_output("123456\n")
        self.assert_no_error_output()

    def test_incorrect_password_from_stdin(self):
        def flunker(prompt):
            self.fail("Password prompt was invoked")
        self.input.write("wrong-password\n")
        self.input.seek(0)
        cli = self.build_cli(
            getpass=flunker,
            arguments=("--no-prompt", "--path", self.keychain_path, "onetosix",),
        )

        self.assert_exit_status(EX_DATAERR, cli.run)
        self.assert_no_output()
        self.assert_error_output("1pass: Incorrect master password\n")

    def build_cli(self, **kwargs):
        cli_kwargs = {
            "stdin": self.input,
            "stdout": self.output,
            "stderr": self.error,
        }
        cli_kwargs.update(kwargs)
        return CLI(**cli_kwargs)

    def assert_exit_status(self, expected_status, func):
        try:
            func()
        except SystemExit as exit:
            self.assertEqual(expected_status, exit.code)
        else:
            self.fail("Expected a SystemExit to be raised")

    def assert_output(self, expected_output):
        self.assertEqual(expected_output, self.output.getvalue())

    def assert_no_output(self):
        self.assert_output("")

    def assert_error_output(self, expected_output):
        self.assertEqual(expected_output, self.error.getvalue())

    def assert_no_error_output(self):
        self.assert_error_output("")

    @property
    def keychain_path(self):
        return os.path.join(os.path.dirname(__file__), "data", "1Password.agilekeychain")
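The tests above work because the CLI takes its streams as constructor arguments. A minimal sketch of that injection pattern with a hypothetical CLI class (not the 1pass implementation):

# Hypothetical CLI that accepts its streams, so tests can inject StringIO objects.
import sys
from io import StringIO

class EchoCLI(object):
    def __init__(self, stdin=None, stdout=None, stderr=None):
        self.stdin = stdin or sys.stdin
        self.stdout = stdout or sys.stdout
        self.stderr = stderr or sys.stderr

    def run(self):
        # Echo the first line of stdin back to stdout.
        self.stdout.write(self.stdin.readline())

fake_in, fake_out = StringIO("badger\n"), StringIO()
EchoCLI(stdin=fake_in, stdout=fake_out).run()
assert fake_out.getvalue() == "badger\n"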
示例#53
0
def exercise_cbetadev():
    regression_pdb = libtbx.env.find_in_repositories(
        relative_path="phenix_regression/pdb/pdb1jxt.ent", test=os.path.isfile)
    if (regression_pdb is None):
        print(
            "Skipping exercise_cbetadev(): input pdb (pdb1jxt.ent) not available"
        )
        return
    from mmtbx.validation import cbetadev
    from iotbx import file_reader
    pdb_in = file_reader.any_file(file_name=regression_pdb)
    hierarchy = pdb_in.file_object.hierarchy
    validation = cbetadev.cbetadev(pdb_hierarchy=hierarchy, outliers_only=True)
    assert approx_equal(validation.get_weighted_outlier_percent(),
                        4.40420846587)
    for unpickle in [False, True]:
        if unpickle:
            validation = loads(dumps(validation))
        assert (validation.n_outliers == len(validation.results) == 6)
        assert ([cb.id_str() for cb in validation.results] == [
            ' A   7 AILE', ' A   8 BVAL', ' A   8 CVAL', ' A  30 BTHR',
            ' A  39 BTHR', ' A  43 BASP'
        ])
        assert approx_equal([cb.deviation for cb in validation.results], [
            0.25977096732623106, 0.2577218834868609, 0.6405578498280606,
            0.81238828498566, 0.9239566035292618, 0.5001892640352836
        ])
        out = StringIO()
        validation.show_old_output(out=out, verbose=True)
        assert not show_diff(
            out.getvalue(), """\
pdb:alt:res:chainID:resnum:dev:dihedralNABB:Occ:ALT:
pdb :A:ile: A:   7 :  0.260: -46.47:   0.45:A:
pdb :B:val: A:   8 :  0.258:  80.92:   0.30:B:
pdb :C:val: A:   8 :  0.641: -53.98:   0.20:C:
pdb :B:thr: A:  30 :  0.812: -76.98:   0.30:B:
pdb :B:thr: A:  39 :  0.924:  56.41:   0.30:B:
pdb :B:asp: A:  43 :  0.500:   7.56:   0.25:B:
SUMMARY: 6 C-beta deviations >= 0.25 Angstrom (Goal: 0)
""")

    # Now with all residues
    validation = cbetadev.cbetadev(pdb_hierarchy=hierarchy,
                                   outliers_only=False)
    for unpickle in [False, True]:
        if unpickle:
            validation = loads(dumps(validation))
        for outlier in validation.results:
            assert (len(outlier.xyz) == 3)
        assert (validation.n_outliers == 6)
        assert (len(validation.results) == 51)
        out = StringIO()
        validation.show_old_output(out=out, verbose=True)
        assert not show_diff(
            out.getvalue(), """\
pdb:alt:res:chainID:resnum:dev:dihedralNABB:Occ:ALT:
pdb : :thr: A:   1 :  0.102:  11.27:   1.00: :
pdb :A:thr: A:   2 :  0.022: -49.31:   0.67:A:
pdb : :cys: A:   3 :  0.038: 103.68:   1.00: :
pdb : :cys: A:   4 :  0.047:-120.73:   1.00: :
pdb : :pro: A:   5 :  0.069:-121.41:   1.00: :
pdb : :ser: A:   6 :  0.052: 112.87:   1.00: :
pdb :A:ile: A:   7 :  0.260: -46.47:   0.45:A:
pdb :B:ile: A:   7 :  0.153: 122.97:   0.55:B:
pdb :A:val: A:   8 :  0.184:-155.36:   0.50:A:
pdb :B:val: A:   8 :  0.258:  80.92:   0.30:B:
pdb :C:val: A:   8 :  0.641: -53.98:   0.20:C:
pdb : :ala: A:   9 :  0.061: -82.84:   1.00: :
pdb :A:arg: A:  10 :  0.023: 172.25:   1.00:A:
pdb : :ser: A:  11 :  0.028:-129.11:   1.00: :
pdb :A:asn: A:  12 :  0.021: -80.80:   0.50:A:
pdb :B:asn: A:  12 :  0.199:  50.01:   0.50:B:
pdb :A:phe: A:  13 :  0.067: -37.32:   0.65:A:
pdb :B:phe: A:  13 :  0.138:  19.24:   0.35:B:
pdb : :asn: A:  14 :  0.065: -96.35:   1.00: :
pdb : :val: A:  15 :  0.138: -96.63:   1.00: :
pdb : :cys: A:  16 :  0.102: -28.64:   1.00: :
pdb : :arg: A:  17 :  0.053:-106.79:   1.00: :
pdb : :leu: A:  18 :  0.053:-141.51:   1.00: :
pdb : :pro: A:  19 :  0.065:-146.95:   1.00: :
pdb : :thr: A:  21 :  0.086:  53.80:   1.00: :
pdb :A:pro: A:  22 :  0.092: -83.39:   0.55:A:
pdb :A:glu: A:  23 :  0.014:-179.53:   0.50:A:
pdb :B:glu: A:  23 :  0.050:-179.78:   0.50:B:
pdb : :ala: A:  24 :  0.056: -88.96:   1.00: :
pdb : :leu: A:  25 :  0.084:-106.42:   1.00: :
pdb : :cys: A:  26 :  0.074: -94.70:   1.00: :
pdb : :ala: A:  27 :  0.056: -62.15:   1.00: :
pdb : :thr: A:  28 :  0.056:-114.82:   1.00: :
pdb :A:tyr: A:  29 :  0.068:   0.22:   0.65:A:
pdb :A:thr: A:  30 :  0.180: 103.27:   0.70:A:
pdb :B:thr: A:  30 :  0.812: -76.98:   0.30:B:
pdb : :cys: A:  32 :  0.029: -84.07:   1.00: :
pdb : :ile: A:  33 :  0.048:-119.17:   1.00: :
pdb : :ile: A:  34 :  0.045:  99.02:   1.00: :
pdb : :ile: A:  35 :  0.052:-128.24:   1.00: :
pdb : :pro: A:  36 :  0.084:-142.29:   1.00: :
pdb : :ala: A:  38 :  0.039:  50.01:   1.00: :
pdb :A:thr: A:  39 :  0.093: -96.63:   0.70:A:
pdb :B:thr: A:  39 :  0.924:  56.41:   0.30:B:
pdb : :cys: A:  40 :  0.013:-144.11:   1.00: :
pdb : :pro: A:  41 :  0.039: -97.09:   1.00: :
pdb :A:asp: A:  43 :  0.130:-146.91:   0.75:A:
pdb :B:asp: A:  43 :  0.500:   7.56:   0.25:B:
pdb : :tyr: A:  44 :  0.085:-143.63:   1.00: :
pdb : :ala: A:  45 :  0.055:  33.32:   1.00: :
pdb : :asn: A:  46 :  0.066: -50.46:   1.00: :
SUMMARY: 6 C-beta deviations >= 0.25 Angstrom (Goal: 0)
""")

    # Auxiliary function: extract_atoms_from_residue_group
    from mmtbx.validation.cbetadev import extract_atoms_from_residue_group
    from iotbx import pdb
    pdb_1 = pdb.input(source_info=None,
                      lines="""\
ATOM   1185  N  ASER A 146      24.734  37.097  16.303  0.50 16.64           N
ATOM   1186  N  BSER A 146      24.758  37.100  16.337  0.50 16.79           N
ATOM   1187  CA ASER A 146      24.173  37.500  17.591  0.50 16.63           C
ATOM   1188  CA BSER A 146      24.237  37.427  17.662  0.50 16.87           C
ATOM   1189  C  ASER A 146      22.765  36.938  17.768  0.50 15.77           C
ATOM   1190  C  BSER A 146      22.792  36.945  17.783  0.50 15.94           C
ATOM   1191  O  ASER A 146      22.052  36.688  16.781  0.50 14.91           O
ATOM   1192  O  BSER A 146      22.091  36.741  16.779  0.50 15.17           O
ATOM   1193  CB ASER A 146      24.118  39.035  17.649  0.50 16.93           C
ATOM   1194  CB BSER A 146      24.321  38.940  17.904  0.50 17.48           C
ATOM   1195  OG ASER A 146      23.183  39.485  18.611  0.50 17.56           O
ATOM   1196  OG BSER A 146      23.468  39.645  17.028  0.50 18.32           O  """
                      ).construct_hierarchy()
    pdb_2 = pdb.input(source_info=None,
                      lines="""\
ATOM   1185  N   SER A 146      24.734  37.097  16.303  0.50 16.64           N
ATOM   1187  CA  SER A 146      24.173  37.500  17.591  0.50 16.63           C
ATOM   1189  C   SER A 146      22.765  36.938  17.768  0.50 15.77           C
ATOM   1191  O   SER A 146      22.052  36.688  16.781  0.50 14.91           O
ATOM   1193  CB ASER A 146      24.118  39.035  17.649  0.50 16.93           C
ATOM   1194  CB BSER A 146      24.321  38.940  17.904  0.50 17.48           C
ATOM   1195  OG ASER A 146      23.183  39.485  18.611  0.50 17.56           O
ATOM   1196  OG BSER A 146      23.468  39.645  17.028  0.50 18.32           O  """
                      ).construct_hierarchy()
    pdb_3 = pdb.input(source_info=None,
                      lines="""\
ATOM   1185  N   SER A 146      24.734  37.097  16.303  0.50 16.64           N
ATOM   1187  CA  SER A 146      24.173  37.500  17.591  0.50 16.63           C
ATOM   1189  C   SER A 146      22.765  36.938  17.768  0.50 15.77           C
ATOM   1191  O   SER A 146      22.052  36.688  16.781  0.50 14.91           O
ATOM   1193  CB  SER A 146      24.118  39.035  17.649  0.50 16.93           C
ATOM   1195  OG ASER A 146      23.183  39.485  18.611  0.50 17.56           O
ATOM   1196  OG BSER A 146      23.468  39.645  17.028  0.50 18.32           O  """
                      ).construct_hierarchy()
    rg1 = pdb_1.only_model().only_chain().only_residue_group()
    rg2 = pdb_2.only_model().only_chain().only_residue_group()
    rg3 = pdb_3.only_model().only_chain().only_residue_group()
    all_relevant_atoms_1 = extract_atoms_from_residue_group(rg1)
    all_relevant_atoms_2 = extract_atoms_from_residue_group(rg2)
    all_relevant_atoms_3 = extract_atoms_from_residue_group(rg3)
    keys_1 = [sorted([k for k in a.keys()]) for a in all_relevant_atoms_1]
    keys_2 = [sorted([k for k in a.keys()]) for a in all_relevant_atoms_2]
    keys_3 = [sorted([k for k in a.keys()]) for a in all_relevant_atoms_3]
    assert keys_1 == [[' C  ', ' CA ', ' CB ', ' N  '],
                      [' C  ', ' CA ', ' CB ', ' N  ']]
    assert keys_2 == [[' C  ', ' CA ', ' CB ', ' N  '],
                      [' C  ', ' CA ', ' CB ', ' N  ']]
    assert keys_3 == [[' C  ', ' CA ', ' CB ', ' N  ']]
    print("OK")
示例#54
0
 def __init__(self):
     self._out = StringIO()
     self.indentation = 0
示例#55
0
def exercise(verbose=0):
  distance_ideal = 1.8
  default_vdw_distance = 3.6
  vdw_1_4_factor = 3.5/3.6
  sites_cart_manual = flex.vec3_double([
    (1,3,0), (2,3,0), (3,2,0), (3,1,0), (4,1,0), (3,4,0), (4,3,0), (5,3,0),
    (6,2,0), (7,2,0), (8,3,0), (7,4,0), (6,4,0), (7,5,0), (6,6,0), (8,6,0)])
  bond_proxies = geometry_restraints.bond_sorted_asu_proxies(asu_mappings=None)
  for i_seqs in [(0,1),(1,2),(2,3),(3,4),(1,5),(2,6),(5,6),
                 (6,7),(7,8),(8,9),(9,10),(10,11),(11,12),
                 (12,7),(11,13),(13,14),(14,15),(15,13)]:
    bond_proxies.process(geometry_restraints.bond_simple_proxy(
      i_seqs=i_seqs, distance_ideal=distance_ideal, weight=100))
  angle_proxies = geometry_restraints.shared_angle_proxy()
  for i_seqs,angle_ideal in [[(0,1,2),135],
                             [(0,1,5),135],
                             [(1,2,3),135],
                             [(3,2,6),135],
                             [(2,3,4),120],
                             [(1,2,6),90],
                             [(2,6,5),90],
                             [(6,5,1),90],
                             [(5,1,2),90],
                             [(2,6,7),135],
                             [(5,6,7),135],
                             [(6,7,8),120],
                             [(6,7,12),120],
                             [(7,8,9),120],
                             [(8,9,10),120],
                             [(9,10,11),120],
                             [(10,11,12),120],
                             [(11,12,7),120],
                             [(12,7,8),120],
                             [(10,11,13),120],
                             [(12,11,13),120],
                             [(11,13,15),150],
                             [(11,13,14),150],
                             [(13,15,14),60],
                             [(15,14,13),60],
                             [(14,13,15),60]]:
    angle_proxies.append(geometry_restraints.angle_proxy(
      i_seqs=i_seqs, angle_ideal=angle_ideal, weight=1))
  if (0 or verbose):
    dump_pdb(file_name="manual.pdb", sites_cart=sites_cart_manual)
  for traditional_convergence_test in [True,False]:
    for sites_cart_selection in [True, False]:
      sites_cart = sites_cart_manual.deep_copy()
      if sites_cart_selection:
        sites_cart_selection = flex.bool(sites_cart.size(), True)
        sites_cart_selection[1] = False
      assert bond_proxies.asu.size() == 0
      bond_params_table = geometry_restraints.extract_bond_params(
        n_seq=sites_cart.size(),
        bond_simple_proxies=bond_proxies.simple)
      manager = geometry_restraints.manager.manager(
        bond_params_table=bond_params_table,
        angle_proxies=angle_proxies)
      minimized = geometry_restraints.lbfgs.lbfgs(
        sites_cart=sites_cart,
        geometry_restraints_manager=manager,
        lbfgs_termination_params=scitbx.lbfgs.termination_parameters(
          traditional_convergence_test=traditional_convergence_test,
          drop_convergence_test_max_drop_eps=1.e-20,
          drop_convergence_test_iteration_coefficient=1,
          max_iterations=1000),
        sites_cart_selection=sites_cart_selection,
        )
      assert minimized.minimizer.iter() > 100
      sites_cart_minimized_1 = sites_cart.deep_copy()
      if (0 or verbose):
        dump_pdb(
          file_name="minimized_1.pdb", sites_cart=sites_cart_minimized_1)
      bond_deltas = geometry_restraints.bond_deltas(
        sites_cart=sites_cart_minimized_1,
        proxies=bond_proxies.simple)
      angle_deltas = geometry_restraints.angle_deltas(
        sites_cart=sites_cart_minimized_1,
        proxies=angle_proxies)
      if (0 or verbose):
        for proxy,delta in zip(bond_proxies.simple, bond_deltas):
          print("bond:", proxy.i_seqs, delta)
        for proxy,delta in zip(angle_proxies, angle_deltas):
          print("angle:", proxy.i_seqs, delta)
      assert is_below_limit(
        value=flex.max(flex.abs(bond_deltas)), limit=0, eps=1.e-6)
      assert is_below_limit(
        value=flex.max(flex.abs(angle_deltas)), limit=0, eps=2.e-6)
  sites_cart += matrix.col((1,1,0)) - matrix.col(sites_cart.min())
  unit_cell_lengths = list(  matrix.col(sites_cart.max())
                           + matrix.col((1,-1.2,4)))
  unit_cell_lengths[1] *= 2
  unit_cell_lengths[2] *= 2
  xray_structure = xray.structure(
    crystal_symmetry=crystal.symmetry(
      unit_cell=unit_cell_lengths,
      space_group_symbol="P112"))
  for serial,site in zip(count(1), sites_cart):
    xray_structure.add_scatterer(xray.scatterer(
      label="C%02d"%serial,
      site=xray_structure.unit_cell().fractionalize(site)))
  if (0 or verbose):
    xray_structure.show_summary().show_scatterers()
  p1_structure = (xray_structure
    .apply_shift((-.5,-.5,0))
    .expand_to_p1()
    .apply_shift((.5,.5,0)))
  for shift in [(1,0,0), (0,1,0), (0,0,1)]:
    p1_structure.add_scatterers(p1_structure.apply_shift(shift).scatterers())
  if (0 or verbose):
    open("p1_structure.pdb", "w").write(p1_structure.as_pdb_file())
  nonbonded_cutoff = 6.5
  asu_mappings = xray_structure.asu_mappings(
    buffer_thickness=nonbonded_cutoff)
  bond_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
  geometry_restraints.add_pairs(bond_asu_table, bond_proxies.simple)
  shell_asu_tables = crystal.coordination_sequences.shell_asu_tables(
    pair_asu_table=bond_asu_table,
    max_shell=3)
  shell_sym_tables = [shell_asu_table.extract_pair_sym_table()
    for shell_asu_table in shell_asu_tables]
  bond_params_table = geometry_restraints.extract_bond_params(
    n_seq=sites_cart.size(),
    bond_simple_proxies=bond_proxies.simple)
  atom_energy_types = flex.std_string(sites_cart.size(), "Default")
  nonbonded_params = geometry_restraints.nonbonded_params(
    factor_1_4_interactions=vdw_1_4_factor,
    const_shrink_1_4_interactions=0,
    default_distance=default_vdw_distance)
  nonbonded_params.distance_table.setdefault(
    "Default")["Default"] = default_vdw_distance
  pair_proxies = geometry_restraints.pair_proxies(
    bond_params_table=bond_params_table,
    shell_asu_tables=shell_asu_tables,
    model_indices=None,
    conformer_indices=None,
    nonbonded_params=nonbonded_params,
    nonbonded_types=atom_energy_types,
    nonbonded_distance_cutoff_plus_buffer=nonbonded_cutoff)
  if (0 or verbose):
    print("pair_proxies.bond_proxies.n_total():", \
           pair_proxies.bond_proxies.n_total(), end=' ')
    print("simple:", pair_proxies.bond_proxies.simple.size(), end=' ')
    print("sym:", pair_proxies.bond_proxies.asu.size())
    print("pair_proxies.nonbonded_proxies.n_total():", \
           pair_proxies.nonbonded_proxies.n_total(), end=' ')
    print("simple:", pair_proxies.nonbonded_proxies.simple.size(), end=' ')
    print("sym:", pair_proxies.nonbonded_proxies.asu.size())
    print("min_distance_nonbonded: %.2f" % flex.min(
      geometry_restraints.nonbonded_deltas(
        sites_cart=sites_cart,
        sorted_asu_proxies=pair_proxies.nonbonded_proxies)))
  s = StringIO()
  pair_proxies.bond_proxies.show_histogram_of_model_distances(
    sites_cart=sites_cart,
    f=s,
    prefix="[]")
  assert s.getvalue().splitlines()[0] == "[]Histogram of bond lengths:"
  assert s.getvalue().splitlines()[5].startswith("[]      1.80 -     1.80:")
  s = StringIO()
  pair_proxies.bond_proxies.show_histogram_of_deltas(
    sites_cart=sites_cart,
    f=s,
    prefix="][")
  assert s.getvalue().splitlines()[0] == "][Histogram of bond deltas:"
  assert s.getvalue().splitlines()[5].startswith("][     0.000 -    0.000:")
  s = StringIO()
  pair_proxies.bond_proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    max_items=3,
    f=s,
    prefix=":;")
  l = s.getvalue().splitlines()
  assert l[0] == ":;Bond restraints: 18"
  assert l[1] == ":;Sorted by residual:"
  assert l[2].startswith(":;bond ")
  assert l[3].startswith(":;     ")
  assert l[4] == ":;  ideal  model  delta    sigma   weight residual"
  for i in [5,-2]:
    assert l[i].startswith(":;  1.800  1.800 ")
  assert l[-1] == ":;... (remaining 15 not shown)"
  s = StringIO()
  pair_proxies.nonbonded_proxies.show_histogram_of_model_distances(
    sites_cart=sites_cart,
    f=s,
    prefix="]^")
  assert not show_diff(s.getvalue(), """\
]^Histogram of nonbonded interaction distances:
]^      2.16 -     3.03: 3
]^      3.03 -     3.89: 12
]^      3.89 -     4.75: 28
]^      4.75 -     5.61: 44
]^      5.61 -     6.48: 54
""")
  s = StringIO()
  pair_proxies.nonbonded_proxies.show_sorted(
    by_value="delta",
    sites_cart=sites_cart,
    max_items=7,
    f=s,
    prefix=">,")
  assert not show_diff(s.getvalue(), """\
>,Nonbonded interactions: 141
>,Sorted by model distance:
>,nonbonded 15
>,          15
>,   model   vdw sym.op.
>,   2.164 3.600 -x+2,-y+1,z
...
>,nonbonded 4
>,          8
>,   model   vdw
>,   3.414 3.600
>,... (remaining 134 not shown)
""",
    selections=[range(6), range(-5,0)])
  vdw_1_sticks = []
  vdw_2_sticks = []
  for proxy in pair_proxies.nonbonded_proxies.simple:
    if (proxy.vdw_distance == default_vdw_distance):
      vdw_1_sticks.append(pml_stick(
        begin=sites_cart[proxy.i_seqs[0]],
        end=sites_cart[proxy.i_seqs[1]]))
    else:
      vdw_2_sticks.append(pml_stick(
        begin=sites_cart[proxy.i_seqs[0]],
        end=sites_cart[proxy.i_seqs[1]]))
  mps = asu_mappings.mappings()
  for proxy in pair_proxies.nonbonded_proxies.asu:
    if (proxy.vdw_distance == default_vdw_distance):
      vdw_1_sticks.append(pml_stick(
        begin=mps[proxy.i_seq][0].mapped_site(),
        end=mps[proxy.j_seq][proxy.j_sym].mapped_site()))
    else:
      vdw_2_sticks.append(pml_stick(
        begin=mps[proxy.i_seq][0].mapped_site(),
        end=mps[proxy.j_seq][proxy.j_sym].mapped_site()))
  if (0 or verbose):
    pml_write(f=open("vdw_1.pml", "w"), label="vdw_1", sticks=vdw_1_sticks)
    pml_write(f=open("vdw_2.pml", "w"), label="vdw_2", sticks=vdw_2_sticks)
  #
  i_pdb = count(2)
  for use_crystal_symmetry in [False, True]:
    if (not use_crystal_symmetry):
      crystal_symmetry = None
      site_symmetry_table = None
    else:
      crystal_symmetry = xray_structure
      site_symmetry_table = xray_structure.site_symmetry_table()
    for sites_cart in [sites_cart_manual.deep_copy(),
                       sites_cart_minimized_1.deep_copy()]:
      manager = geometry_restraints.manager.manager(
        crystal_symmetry=crystal_symmetry,
        site_symmetry_table=site_symmetry_table,
        nonbonded_params=nonbonded_params,
        nonbonded_types=atom_energy_types,
        nonbonded_function=geometry_restraints.prolsq_repulsion_function(),
        bond_params_table=bond_params_table,
        shell_sym_tables=shell_sym_tables,
        nonbonded_distance_cutoff=nonbonded_cutoff,
        nonbonded_buffer=1,
        angle_proxies=angle_proxies,
        plain_pairs_radius=5)
      manager = manager.select(selection=flex.bool(sites_cart.size(), True))
      manager = manager.select(
        iselection=flex.size_t_range(stop=sites_cart.size()))
      pair_proxies = manager.pair_proxies(sites_cart=sites_cart)
      minimized = geometry_restraints.lbfgs.lbfgs(
        sites_cart=sites_cart,
        geometry_restraints_manager=manager,
        lbfgs_termination_params=scitbx.lbfgs.termination_parameters(
          max_iterations=1000))
      if (0 or verbose):
        minimized.final_target_result.show()
        print("number of function evaluations:", minimized.minimizer.nfun())
        print("n_updates_pair_proxies:", manager.n_updates_pair_proxies)
      if (not use_crystal_symmetry):
        assert minimized.final_target_result.bond_residual_sum < 1.e-3
        assert minimized.final_target_result.nonbonded_residual_sum < 0.1
      else:
        assert minimized.final_target_result.bond_residual_sum < 1.e-2
        assert minimized.final_target_result.nonbonded_residual_sum < 0.1
      assert minimized.final_target_result.angle_residual_sum < 1.e-3
      if (0 or verbose):
        pdb_file_name = "minimized_%d.pdb" % next(i_pdb)
        print("Writing file:", pdb_file_name)
        dump_pdb(file_name=pdb_file_name, sites_cart=sites_cart)
      if (manager.site_symmetry_table is None):
        additional_site_symmetry_table = None
      else:
        additional_site_symmetry_table = sgtbx.site_symmetry_table()
      assert manager.new_including_isolated_sites(
        n_additional_sites=0,
        site_symmetry_table=additional_site_symmetry_table,
        nonbonded_types=flex.std_string()).plain_pairs_radius \
          == manager.plain_pairs_radius
      if (crystal_symmetry is not None):
        assert len(manager.plain_pair_sym_table) == 16
        if (0 or verbose):
          manager.plain_pair_sym_table.show()
  #
  xray_structure.set_u_iso(values=flex.double([
    0.77599982480241358, 0.38745781137212021, 0.20667558236418682,
    0.99759840171302094, 0.8917287406687805, 0.64780251325379845,
    0.24878590382983534, 0.59480621182194615, 0.58695637792905142,
    0.33997130213653637, 0.51258699130743735, 0.79760289141276675,
    0.39996577657875021, 0.4329328819341467, 0.70422156561726479,
    0.87260110626999332]))
  class parameters: pass
  parameters.sphere_radius = 5
  parameters.distance_power = 0.7
  parameters.average_power = 0.9
  parameters.wilson_b_weight = 1.3952
  parameters.wilson_b_weight_auto = False
  adp_energies = adp_restraints.energies_iso(
    plain_pair_sym_table=manager.plain_pair_sym_table,
    xray_structure=xray_structure,
    parameters=parameters,
    wilson_b=None,
    use_hd=False,
    use_u_local_only = False,
    compute_gradients=False,
    gradients=None,
    normalization=False,
    collect=True)
  assert adp_energies.number_of_restraints == 69
  assert approx_equal(adp_energies.residual_sum, 6.24865382467)
  assert adp_energies.gradients is None
  assert adp_energies.u_i.size() == adp_energies.number_of_restraints
  assert adp_energies.u_j.size() == adp_energies.number_of_restraints
  assert adp_energies.r_ij.size() == adp_energies.number_of_restraints
  for wilson_b in [None, 10, 100]:
    finite_difference_gradients = flex.double()
    eps = 1.e-6
    for i_scatterer in range(xray_structure.scatterers().size()):
      rs = []
      for signed_eps in [eps, -eps]:
        xray_structure_eps = xray_structure.deep_copy_scatterers()
        xray_structure_eps.scatterers()[i_scatterer].u_iso += signed_eps
        adp_energies = adp_restraints.energies_iso(
          plain_pair_sym_table=manager.plain_pair_sym_table,
          xray_structure=xray_structure_eps,
          parameters=parameters,
          wilson_b=wilson_b,
          use_u_local_only = False,
          use_hd=False,
          compute_gradients=True,
          gradients=None,
          normalization=False,
          collect=False)
        rs.append(adp_energies.residual_sum)
        assert adp_energies.gradients.size() \
            == xray_structure.scatterers().size()
        assert adp_energies.u_i is None
        assert adp_energies.u_j is None
        assert adp_energies.r_ij is None
      finite_difference_gradients.append((rs[0]-rs[1])/(2*eps))
    sel = flex.bool(xray_structure.scatterers().size(), True)
    xray_structure.scatterers().flags_set_grad_u_iso(sel.iselection())
    adp_energies = adp_restraints.energies_iso(
      plain_pair_sym_table=manager.plain_pair_sym_table,
      xray_structure=xray_structure,
      parameters=parameters,
      wilson_b=wilson_b,
      use_u_local_only = False,
      use_hd=False,
      compute_gradients=True,
      gradients=None,
      normalization=False,
      collect=False)
    assert approx_equal(adp_energies.gradients, finite_difference_gradients)
  print("OK")
示例#56
0
文件: tst.py 项目: dials/cctbx
def exercise_with_tst_input_map(use_mrcfile=None, file_name=None):
    if not file_name:
        file_name = libtbx.env.under_dist(module_name="iotbx",
                                          path="ccp4_map/tst_input.map")

    print("\nTesting read of input map with axis order 3 1 2 "+\
           "\n and use_mrcfile=",use_mrcfile)
    if use_mrcfile:
        m = mrcfile.map_reader(file_name=file_name, verbose=True)
        for label in m.labels:
            print(label)
    else:
        m = iotbx.ccp4_map.map_reader(file_name=file_name)
    assert approx_equal(m.header_min, -0.422722190619)
    assert approx_equal(m.header_max, 0.335603952408)
    assert approx_equal(m.header_mean, 0)
    assert approx_equal(m.header_rms, 0.140116646886)
    assert m.unit_cell_grid == (16, 8, 16)
    assert approx_equal(m.unit_cell().parameters(),
                        (82.095001220703125, 37.453998565673828,
                         69.636001586914062, 90.0, 101.47599792480469, 90.0))
    assert m.unit_cell_crystal_symmetry().space_group_number() == 5
    assert m.map_data().origin() == (0, 0, 0)
    assert m.map_data().all() == (16, 8, 16)
    assert approx_equal(
        m.pixel_sizes(),
        (5.130937576293945, 4.6817498207092285, 4.352250099182129))
    assert not m.map_data().is_padded()
    out = StringIO()
    m.show_summary(out=out)
    assert ("map cell grid: (16, 8, 16)" in out.getvalue())
    uc = m.unit_cell_crystal_symmetry().unit_cell()
    assert approx_equal(m.unit_cell().parameters(), uc.parameters())
    assert approx_equal(m.grid_unit_cell().parameters(),
                        (5.13094, 4.68175, 4.35225, 90, 101.476, 90))

    # Read and write map with offset

    print("\nReading and writing map with origin not at zero, use_mrcfile=",
          use_mrcfile)

    from scitbx.array_family.flex import grid
    from scitbx.array_family import flex
    from cctbx import uctbx, sgtbx

    real_map = m.map_data().as_double()
    grid_start = (5, 5, 5)
    grid_end = (9, 10, 11)

    if use_mrcfile:
        iotbx.mrcfile.write_ccp4_map(
            file_name="shifted_map.mrc",
            unit_cell=uctbx.unit_cell(m.unit_cell().parameters()),
            space_group=sgtbx.space_group_info("P1").group(),
            gridding_first=grid_start,
            gridding_last=grid_end,
            map_data=real_map,
            labels=flex.std_string(["iotbx.ccp4_map.tst"]),
            verbose=True)
        m = mrcfile.map_reader(file_name='shifted_map.mrc', verbose=True)

    else:
        iotbx.ccp4_map.write_ccp4_map(
            file_name="shifted_map.ccp4",
            unit_cell=uctbx.unit_cell(m.unit_cell().parameters()),
            space_group=sgtbx.space_group_info("P1").group(),
            gridding_first=grid_start,
            gridding_last=grid_end,
            map_data=real_map,
            labels=flex.std_string(["iotbx.ccp4_map.tst"]))
        m = iotbx.ccp4_map.map_reader(file_name='shifted_map.ccp4')

    print(m.map_data().origin(), m.map_data().all())
    print("GRID:", m.unit_cell_grid)
    assert m.unit_cell_grid == (16, 8, 16)
    assert approx_equal(m.unit_cell().parameters(),
                        (82.095001220703125, 37.453998565673828,
                         69.636001586914062, 90.0, 101.47599792480469, 90.0))
    assert m.unit_cell_crystal_symmetry().space_group_number() == 1
    print(m.map_data().origin(), m.map_data().all())
    assert m.map_data().origin() == (5, 5, 5)
    assert m.map_data().all() == (5, 6, 7)
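For comparison only, the origin written into shifted_map.mrc can also be inspected with the standalone mrcfile package (an assumption; the test itself uses the iotbx readers):

# Sketch only: read back shifted_map.mrc with the standalone mrcfile package (assumed installed).
import mrcfile

with mrcfile.open("shifted_map.mrc") as mrc:
    print(mrc.data.shape)      # number of sections/rows/columns in the written block
    # Grid start of the block, stored in the header (corresponds to the (5, 5, 5) origin above).
    print(mrc.header.nxstart, mrc.header.nystart, mrc.header.nzstart)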
示例#57
0
def exercise_na_restraints_output_to_geo(verbose=False):
    for dependency in ("chem_data", "ksdssp"):
        if not libtbx.env.has_module(dependency):
            print(
                "Skipping exercise_na_restraints_output_to_geo(): %s not available"
                % (dependency))
            return
    pdb_str_1dpl_cutted = """\
CRYST1   24.627   42.717   46.906  90.00  90.00  90.00 P 21 21 21    8
ATOM    184  P    DG A   9       9.587  13.026  19.037  1.00  6.28           P
ATOM    185  OP1  DG A   9       9.944  14.347  19.602  1.00  8.07           O
ATOM    186  OP2  DG A   9      10.654  12.085  18.639  1.00  8.27           O
ATOM    187  O5'  DG A   9       8.717  12.191  20.048  1.00  5.88           O
ATOM    188  C5'  DG A   9       7.723  12.833  20.854  1.00  5.45           C
ATOM    189  C4'  DG A   9       7.145  11.818  21.807  1.00  5.40           C
ATOM    190  O4'  DG A   9       6.435  10.777  21.087  1.00  5.77           O
ATOM    191  C3'  DG A   9       8.142  11.036  22.648  1.00  5.10           C
ATOM    192  O3'  DG A   9       8.612  11.838  23.723  1.00  5.90           O
ATOM    193  C2'  DG A   9       7.300   9.857  23.068  1.00  5.97           C
ATOM    194  C1'  DG A   9       6.619   9.536  21.805  1.00  5.97           C
ATOM    195  N9   DG A   9       7.390   8.643  20.931  1.00  5.97           N
ATOM    196  C8   DG A   9       8.074   8.881  19.775  1.00  6.62           C
ATOM    197  N7   DG A   9       8.647   7.820  19.249  1.00  6.57           N
ATOM    198  C5   DG A   9       8.308   6.806  20.141  1.00  6.22           C
ATOM    199  C6   DG A   9       8.620   5.431  20.136  1.00  6.03           C
ATOM    200  O6   DG A   9       9.297   4.803  19.296  1.00  7.21           O
ATOM    201  N1   DG A   9       8.101   4.773  21.247  1.00  6.10           N
ATOM    202  C2   DG A   9       7.365   5.351  22.260  1.00  6.24           C
ATOM    203  N2   DG A   9       6.948   4.569  23.241  1.00  7.88           N
ATOM    204  N3   DG A   9       7.051   6.652  22.257  1.00  6.53           N
ATOM    205  C4   DG A   9       7.539   7.295  21.184  1.00  5.69           C
ATOM    206  P    DC A  10      10.081  11.538  24.300  1.00  5.91           P
ATOM    207  OP1  DC A  10      10.273  12.645  25.291  1.00  7.27           O
ATOM    208  OP2  DC A  10      11.063  11.363  23.228  1.00  6.84           O
ATOM    209  O5'  DC A  10       9.953  10.128  25.026  1.00  5.75           O
ATOM    210  C5'  DC A  10       9.077   9.959  26.149  1.00  5.87           C
ATOM    211  C4'  DC A  10       9.188   8.549  26.672  1.00  5.56           C
ATOM    212  O4'  DC A  10       8.708   7.612  25.667  1.00  5.70           O
ATOM    213  C3'  DC A  10      10.580   8.059  27.007  1.00  5.27           C
ATOM    214  O3'  DC A  10      11.010   8.447  28.315  1.00  5.83           O
ATOM    215  C2'  DC A  10      10.422   6.549  26.893  1.00  5.34           C
ATOM    216  C1'  DC A  10       9.436   6.405  25.754  1.00  5.23           C
ATOM    217  N1   DC A  10      10.113   6.168  24.448  1.00  5.30           N
ATOM    218  C2   DC A  10      10.514   4.871  24.152  1.00  5.28           C
ATOM    219  O2   DC A  10      10.283   3.972  25.000  1.00  5.75           O
ATOM    220  N3   DC A  10      11.131   4.627  22.965  1.00  5.65           N
ATOM    221  C4   DC A  10      11.395   5.628  22.138  1.00  5.80           C
ATOM    222  N4   DC A  10      12.034   5.327  21.005  1.00  6.75           N
ATOM    223  C5   DC A  10      11.029   6.970  22.449  1.00  5.99           C
ATOM    224  C6   DC A  10      10.394   7.203  23.612  1.00  5.56           C
ATOM    226  O5'  DG B  11      12.424  -4.393  18.427  1.00 22.70           O
ATOM    227  C5'  DG B  11      12.380  -5.516  19.282  1.00 14.75           C
ATOM    228  C4'  DG B  11      11.969  -5.112  20.676  1.00 10.42           C
ATOM    229  O4'  DG B  11      12.972  -4.192  21.210  1.00 10.51           O
ATOM    230  C3'  DG B  11      10.649  -4.394  20.782  1.00  8.57           C
ATOM    231  O3'  DG B  11       9.618  -5.363  20.846  1.00  8.69           O
ATOM    232  C2'  DG B  11      10.822  -3.597  22.051  1.00  8.63           C
ATOM    233  C1'  DG B  11      12.236  -3.233  21.980  1.00  9.81           C
ATOM    234  N9   DG B  11      12.509  -1.902  21.305  1.00  8.66           N
ATOM    235  C8   DG B  11      13.175  -1.667  20.135  1.00  9.57           C
ATOM    236  N7   DG B  11      13.255  -0.407  19.824  1.00  9.04           N
ATOM    237  C5   DG B  11      12.613   0.235  20.869  1.00  7.63           C
ATOM    238  C6   DG B  11      12.388   1.612  21.119  1.00  7.05           C
ATOM    239  O6   DG B  11      12.723   2.590  20.419  1.00  7.81           O
ATOM    240  N1   DG B  11      11.715   1.819  22.317  1.00  6.27           N
ATOM    241  C2   DG B  11      11.264   0.828  23.159  1.00  6.05           C
ATOM    242  N2   DG B  11      10.611   1.219  24.248  1.00  5.85           N
ATOM    243  N3   DG B  11      11.483  -0.457  22.942  1.00  6.55           N
ATOM    244  C4   DG B  11      12.150  -0.687  21.797  1.00  6.84           C
ATOM    245  P    DC B  12       8.134  -5.009  20.350  1.00  8.13           P
ATOM    246  OP1  DC B  12       7.367  -6.252  20.459  1.00 10.02           O
ATOM    247  OP2  DC B  12       8.172  -4.307  19.052  1.00  9.79           O
ATOM    248  O5'  DC B  12       7.564  -3.912  21.389  1.00  8.18           O
ATOM    249  C5'  DC B  12       7.275  -4.296  22.719  1.00  8.00           C
ATOM    250  C4'  DC B  12       6.856  -3.057  23.487  1.00  8.01           C
ATOM    251  O4'  DC B  12       8.006  -2.146  23.615  1.00  7.35           O
ATOM    252  C3'  DC B  12       5.763  -2.208  22.890  1.00  7.04           C
ATOM    253  O3'  DC B  12       4.456  -2.800  23.100  1.00  9.82           O
ATOM    254  C2'  DC B  12       6.019  -0.916  23.630  1.00  6.50           C
ATOM    255  C1'  DC B  12       7.467  -0.808  23.608  1.00  7.35           C
ATOM    256  N1   DC B  12       8.040  -0.143  22.396  1.00  6.64           N
ATOM    257  C2   DC B  12       8.017   1.257  22.382  1.00  5.68           C
ATOM    258  O2   DC B  12       7.524   1.832  23.357  1.00  6.32           O
ATOM    259  N3   DC B  12       8.543   1.930  21.312  1.00  6.18           N
ATOM    260  C4   DC B  12       9.009   1.236  20.266  1.00  6.48           C
ATOM    261  N4   DC B  12       9.518   1.926  19.243  1.00  7.43           N
ATOM    262  C5   DC B  12       9.012  -0.198  20.248  1.00  6.83           C
ATOM    263  C6   DC B  12       8.502  -0.825  21.311  1.00  6.80           C
  """
    identical_portions = [
        """\
  Histogram of bond lengths:
        1.23 -     1.31: 5
        1.31 -     1.39: 25
        1.39 -     1.46: 27
        1.46 -     1.54: 25
        1.54 -     1.61: 5
  Bond restraints: 87""",
        '''\
  Histogram of bond angle deviations from ideal:
       99.49 -   105.87: 23
      105.87 -   112.26: 36
      112.26 -   118.65: 28
      118.65 -   125.04: 30
      125.04 -   131.42: 13
  Bond angle restraints: 130''',
    ]
    open("tst_cctbx_geometry_restraints_2_na.pdb",
         "w").write(pdb_str_1dpl_cutted)
    out1 = StringIO()
    out2 = StringIO()
    from mmtbx.monomer_library.server import MonomerLibraryServerError
    try:
        processed_pdb_file = pdb_interpretation.run(
            args=["tst_cctbx_geometry_restraints_2_na.pdb"],
            strict_conflict_handling=False,
            log=out1)
    except MonomerLibraryServerError:
        print(
            "Skipping exercise_na_restraints_output_to_geo(): Encountered MonomerLibraryServerError.\n"
        )
        print(
            "Is the CCP4 monomer library installed and made available through environment variables MMTBX_CCP4_MONOMER_LIB or CLIBD_MON?"
        )
        return
    geo1 = processed_pdb_file.geometry_restraints_manager()
    hbp = geo1.get_n_hbond_proxies()
    from mmtbx import monomer_library
    params = monomer_library.pdb_interpretation.master_params.extract()
    params.secondary_structure.enabled = True
    processed_pdb_file = pdb_interpretation.run(
        args=["tst_cctbx_geometry_restraints_2_na.pdb"],
        params=params,
        strict_conflict_handling=False,
        log=out2)
    geo2 = processed_pdb_file.geometry_restraints_manager()
    hbp = geo2.get_n_hbond_proxies()
    v_out1 = out1.getvalue()
    v_out2 = out2.getvalue()
    assert v_out2.find("""\
    Restraints generated for nucleic acids:
      6 hydrogen bonds
      12 hydrogen bond angles
      0 basepair planarities
      2 basepair parallelities
      2 stacking parallelities""") > 0
    for v in [v_out1, v_out2]:
        for portion in identical_portions:
            if not v.find(portion) > 0:
                print("This portion was not found:\n%s\n=====End of portion." %
                      portion)
                assert 0, "the portion above does not match expected portion."
    # check .geo output
    geo_identical_portions = [
        "Bond restraints: 87", "Bond angle restraints: 130",
        "Dihedral angle restraints: 33", "Chirality restraints: 15",
        "Planarity restraints: 4"
    ]
    ss_geo_portions = [
        "Bond-like restraints: 6",
        'Secondary Structure restraints around h-bond angle restraints: 12',
        "Parallelity restraints: 4", "Nonbonded interactions: 504"
    ]
    non_ss_geo_portions = [
        #"Bond-like restraints: 0",
        #'Secondary Structure restraints around h-bond angle restraints: 0',
        "Parallelity restraints: 0",
        "Nonbonded interactions: 526"
    ]
    acp = processed_pdb_file.all_chain_proxies
    sites_cart = acp.sites_cart_exact()
    site_labels = [atom.id_str() for atom in acp.pdb_atoms]
    geo_out1 = StringIO()
    geo_out2 = StringIO()
    geo1.show_sorted(sites_cart=sites_cart,
                     site_labels=site_labels,
                     f=geo_out1)
    geo2.show_sorted(sites_cart=sites_cart,
                     site_labels=site_labels,
                     f=geo_out2)
    v_geo_out_noss = geo_out1.getvalue()
    v_geo_out_ss = geo_out2.getvalue()
    for portion in geo_identical_portions + ss_geo_portions:
        assert v_geo_out_ss.find(portion) >= 0
    for portion in geo_identical_portions + non_ss_geo_portions:
        assert v_geo_out_noss.find(portion) >= 0
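The portion checks above boil down to requiring that every expected block appears verbatim in the captured log. A tiny sketch of that helper pattern:

# Require that every expected multi-line portion occurs verbatim in a captured log.
def assert_portions_present(log_text, portions):
    for portion in portions:
        if log_text.find(portion) < 0:
            raise AssertionError("This portion was not found:\n%s" % portion)

log = "Bond restraints: 87\nBond angle restraints: 130\n"
assert_portions_present(log, ["Bond restraints: 87", "Bond angle restraints: 130"])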
示例#58
0
def exercise():
    from six.moves import cStringIO as StringIO
    assert approx_equal(1, 1)
    out = StringIO()
    assert not approx_equal(1, 0, out=out)
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
approx_equal eps: 1e-06
approx_equal multiplier: 10000000000.0
1 approx_equal ERROR
0 approx_equal ERROR

""")
    out = StringIO()
    assert not approx_equal(1, 2, out=out)
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
approx_equal eps: 1e-06
approx_equal multiplier: 10000000000.0
1 approx_equal ERROR
2 approx_equal ERROR

""")
    out = StringIO()
    assert not approx_equal(1, 1 + 1.e-5, out=out)
    assert approx_equal(1, 1 + 1.e-6)
    out = StringIO()
    assert not approx_equal(0, 1.e-5, out=out)
    assert approx_equal(0, 1.e-6)
    out = StringIO()
    assert not approx_equal(
        [[0, 1], [2j, 3]], [[0, 1], [complex(0, -2), 3]], out=out, prefix="$%")
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
$%approx_equal eps: 1e-06
$%approx_equal multiplier: 10000000000.0
$%    0
$%    0
$%
$%    1
$%    1
$%
$%    real 0.0
$%    real 0.0
$%    real
$%    imag 2.0 approx_equal ERROR
$%    imag -2.0 approx_equal ERROR
$%    imag
$%    3
$%    3
$%
""")
    assert eps_eq(1, 1)
    out = StringIO()
    assert not eps_eq(1, 0, out=out)
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
eps_eq eps: 1e-06
1 eps_eq ERROR
0 eps_eq ERROR

""")
    out = StringIO()
    assert not eps_eq(1, 2, out=out)
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
eps_eq eps: 1e-06
1 eps_eq ERROR
2 eps_eq ERROR

""")
    out = StringIO()
    assert not eps_eq(1, 1 + 1.e-5, out=out)
    assert eps_eq(1, 1 + 1.e-6)
    out = StringIO()
    assert not eps_eq(0, 1.e-5, out=out)
    assert eps_eq(0, 1.e-6)
    out = StringIO()
    assert not eps_eq(
        [[0, 1], [2j, 3]], [[0, 1], [complex(0, -2), 3]], out=out, prefix="$%")
    assert not show_diff(
        out.getvalue().replace("1e-006", "1e-06"), """\
$%eps_eq eps: 1e-06
$%    0
$%    0
$%
$%    1
$%    1
$%
$%    real 0.0
$%    real 0.0
$%    real
$%    imag 2.0 eps_eq ERROR
$%    imag -2.0 eps_eq ERROR
$%    imag
$%    3
$%    3
$%
""")
    assert is_below_limit(value=5, limit=10, eps=2)
    out = StringIO()
    assert is_below_limit(value=5, limit=10, eps=2, info_low_eps=1, out=out)
    assert not show_diff(out.getvalue(),
                         """\
INFO LOW VALUE: is_below_limit(value=5, limit=10, info_low_eps=1)
""",
                         selections=[[-1]],
                         expected_number_of_lines=3)
    out = StringIO()
    assert not is_below_limit(value=15, limit=10, eps=2, out=out)
    assert not show_diff(
        out.getvalue(), """\
ERROR: is_below_limit(value=15, limit=10, eps=2)
""")
    out = StringIO()
    assert not is_below_limit(value=None, limit=3, eps=1, out=out)
    assert not is_below_limit(value=None, limit=-3, eps=1, out=out)
    assert not show_diff(
        out.getvalue(), """\
ERROR: is_below_limit(value=None, limit=3, eps=1)
ERROR: is_below_limit(value=None, limit=-3, eps=1)
""")
    assert is_above_limit(value=10, limit=5, eps=2)
    out = StringIO()
    assert is_above_limit(value=10, limit=5, eps=2, info_high_eps=1, out=out)
    assert not show_diff(out.getvalue(),
                         """\
INFO HIGH VALUE: is_above_limit(value=10, limit=5, info_high_eps=1)
""",
                         selections=[[-1]],
                         expected_number_of_lines=3)
    out = StringIO()
    assert not is_above_limit(value=10, limit=15, eps=2, out=out)
    assert not show_diff(
        out.getvalue(), """\
ERROR: is_above_limit(value=10, limit=15, eps=2)
""")
    out = StringIO()
    assert not is_above_limit(value=None, limit=-3, eps=1, out=out)
    assert not is_above_limit(value=None, limit=3, eps=1, out=out)
    assert not show_diff(
        out.getvalue(), """\
ERROR: is_above_limit(value=None, limit=-3, eps=1)
ERROR: is_above_limit(value=None, limit=3, eps=1)
""")
    #
    import pickle
    from six.moves import cPickle
    for p in [pickle, cPickle]:
        d = pickle_detector()
        assert d.unpickled_counter is None
        assert d.counter == 0
        s = p.dumps(d, 1)
        assert d.unpickled_counter is None
        assert d.counter == 1
        l = p.loads(s)
        assert l.unpickled_counter == 1
        assert l.counter == 0
        p.dumps(d, 1)
        assert d.counter == 2
        assert l.counter == 0
        p.dumps(l, 1)
        assert l.counter == 1
        s = p.dumps(l, 1)
        assert l.counter == 2
        k = p.loads(s)
        assert k.unpickled_counter == 2
        assert k.counter == 0
    #
    assert precision_approx_equal(0.799999, 0.800004, precision=17) == True
    assert precision_approx_equal(0.799999, 0.800004, precision=18) == False
    print("OK")
示例#59
0
def run(args, command_name="phenix.fobs_minus_fobs_map", log=None):
    if (len(args) == 0): args = ["--help"]
    examples = """J BAXTER EDITED 2020: Examples:

phenix.fobs_minus_fobs_map f_obs_1_file=data1.mtz f_obs_2_file=data2.sca \
f_obs_1_label=FOBS1 f_obs_2_label=FOBS2 model.pdb

phenix.fobs_minus_fobs_map f_obs_1_file=data.mtz f_obs_2_file=data.mtz \
f_obs_1_label=FOBS1 f_obs_2_label=FOBS2 phase_source=model.pdb \
high_res=2.0 sigma_cutoff=2 scattering_table=neutron"""

    command_line = (iotbx_option_parser(
        usage="%s [options]" % command_name,
        description=examples).option("--silent",
                                     action="store_true",
                                     help="Suppress output to the screen.").
                    enable_symmetry_comprehensive()).process(args=args)
    #
    if (log is None):
        log = sys.stdout
    if (not command_line.options.silent):
        utils.print_header("phenix.fobs_minus_fobs_map", out=log)
        print("Command line arguments: ", file=log)
        print(args, file=log)
        print(file=log)
    #
    processed_args = utils.process_command_line_args(
        args=command_line.args,
        cmd_cs=command_line.symmetry,
        master_params=fo_minus_fo_master_params(),
        absolute_angle_tolerance=5,
        absolute_length_tolerance=1,
        log=log,
        suppress_symmetry_related_errors=True)
    working_phil = processed_args.params
    if (not command_line.options.silent):
        print("*** Parameters:", file=log)
        working_phil.show(out=log)
        print(file=log)
    params = working_phil.extract()
    consensus_symmetry = None
    if (params.ignore_non_isomorphous_unit_cells):
        if (None in [
                params.f_obs_1_file_name, params.f_obs_2_file_name,
                params.phase_source
        ]):
            raise Sorry(
                "The file parameters (f_obs_1_file_name, f_obs_2_file_name, " +
                "phase_source) must be specified explicitly when " +
                "ignore_non_isomorphous_unit_cells=True.")
        symm_manager = iotbx.symmetry.manager()
        pdb_in = iotbx.file_reader.any_file(params.phase_source,
                                            force_type="pdb")
        symm_manager.process_pdb_file(pdb_in)
        hkl_in_1 = iotbx.file_reader.any_file(params.f_obs_1_file_name,
                                              force_type="hkl")
        sg_err_1, uc_err_1 = symm_manager.process_reflections_file(hkl_in_1)
        hkl_in_2 = iotbx.file_reader.any_file(params.f_obs_2_file_name,
                                              force_type="hkl")
        sg_err_2, uc_err_2 = symm_manager.process_reflections_file(hkl_in_2)
        out = StringIO()
        symm_manager.show(out=out)
        if (sg_err_1) or (sg_err_2):
            raise Sorry((
                "Incompatible space groups in input files:\n%s\nAll files " +
                "must have the same point group (and ideally the same space group). "
                +
                "Please note that any symmetry information in the PDB file will be "
                + "used first.") % out.getvalue())
        elif (uc_err_1) or (uc_err_2):
            libtbx.call_back(
                message="warn",
                data=
                ("Crystal symmetry mismatch:\n%s\nCalculations will continue "
                 +
                 "using the symmetry in the PDB file (or if not available, the "
                 +
                 "first reflection file), but the maps should be treated with "
                 + "extreme suspicion.") % out.getvalue())
        crystal_symmetry = symm_manager.as_symmetry_object()
    else:
        processed_args = utils.process_command_line_args(
            args=command_line.args,
            cmd_cs=command_line.symmetry,
            master_params=fo_minus_fo_master_params(),
            suppress_symmetry_related_errors=False,
            absolute_angle_tolerance=5,
            absolute_length_tolerance=1,
            log=StringIO())
        crystal_symmetry = processed_args.crystal_symmetry
    #
    pdb_file_names = processed_args.pdb_file_names
    if (len(processed_args.pdb_file_names) == 0):
        if (params.phase_source is not None):
            pdb_file_names = [params.phase_source]
        else:
            raise Sorry("No PDB file found.")
    # Extract Fobs1, Fobs2
    f_obss = []
    if (len(processed_args.reflection_files) == 2):
        for reflection_file in processed_args.reflection_files:
            reflection_file_server = reflection_file_utils.reflection_file_server(
                crystal_symmetry=crystal_symmetry,
                force_symmetry=True,
                reflection_files=[reflection_file],
                err=null_out())
            # XXX UGLY !!!
            try:
                parameters = utils.data_and_flags_master_params().extract()
                if (params.f_obs_1_label is not None):
                    parameters.labels = [params.f_obs_1_label]
                determine_data_and_flags_result = utils.determine_data_and_flags(
                    reflection_file_server=reflection_file_server,
                    keep_going=True,
                    parameters=parameters,
                    log=null_out())
            except:  # intentional
                parameters = utils.data_and_flags_master_params().extract()
                if (params.f_obs_2_label is not None):
                    parameters.labels = [params.f_obs_2_label]
                determine_data_and_flags_result = utils.determine_data_and_flags(
                    reflection_file_server=reflection_file_server,
                    keep_going=True,
                    parameters=parameters,
                    log=null_out())
            f_obss.append(determine_data_and_flags_result.f_obs)
    else:
        if ([params.f_obs_1_file_name,
             params.f_obs_2_file_name].count(None) == 2):
            raise Sorry("No reflection data file found.")
        for file_name, label in zip(
            [params.f_obs_1_file_name, params.f_obs_2_file_name],
            [params.f_obs_1_label, params.f_obs_2_label]):
            reflection_file = reflection_file_reader.any_reflection_file(
                file_name=file_name, ensure_read_access=False)
            reflection_file_server = reflection_file_utils.reflection_file_server(
                crystal_symmetry=crystal_symmetry,
                force_symmetry=True,
                reflection_files=[reflection_file],
                err=null_out())
            parameters = utils.data_and_flags_master_params().extract()
            if (label is not None):
                parameters.labels = [label]
            determine_data_and_flags_result = utils.determine_data_and_flags(
                reflection_file_server=reflection_file_server,
                parameters=parameters,
                keep_going=True,
                log=null_out())
            f_obss.append(determine_data_and_flags_result.f_obs)
    if (len(f_obss) != 2):
        raise Sorry(" ".join(errors))
    if (not command_line.options.silent):
        for ifobs, fobs in enumerate(f_obss):
            print("*** Summary for data set %d:" % ifobs, file=log)
            fobs.show_comprehensive_summary(f=log)
            print(file=log)
    pdb_combined = combine_unique_pdb_files(file_names=pdb_file_names)
    pdb_combined.report_non_unique(out=log)
    if (len(pdb_combined.unique_file_names) == 0):
        raise Sorry("No coordinate file given.")
    #
    raw_recs = flex.std_string()
    for rec in pdb_combined.raw_records:
        if (rec.upper().count("CRYST1") == 0):
            raw_recs.append(rec)
    raw_recs.append(
        iotbx.pdb.format_cryst1_record(crystal_symmetry=crystal_symmetry))
    #
    pdb_in = iotbx.pdb.input(source_info=None, lines=raw_recs)
    model = mmtbx.model.manager(model_input=pdb_in)
    d_min = min(f_obss[0].d_min(), f_obss[1].d_min())
    model.setup_scattering_dictionaries(
        scattering_table=params.scattering_table, d_min=d_min)
    xray_structure = model.get_xray_structure()
    hierarchy = model.get_hierarchy()
    #
    omit_sel = flex.bool(hierarchy.atoms_size(), False)
    if (params.advanced.omit_selection is not None):
        print("Will omit selection from phasing model:", file=log)
        print("  " + params.advanced.omit_selection, file=log)
        omit_sel = hierarchy.atom_selection_cache().selection(
            params.advanced.omit_selection)
        print("%d atoms selected for removal" % omit_sel.count(True), file=log)
    del hierarchy
    xray_structure = xray_structure.select(~omit_sel)
    if (not command_line.options.silent):
        print("*** Model summary:", file=log)
        xray_structure.show_summary(f=log)
        print(file=log)
    info0 = f_obss[0].info()
    info1 = f_obss[1].info()
    f_obss[0] = f_obss[0].resolution_filter(
        d_min=params.high_resolution,
        d_max=params.low_resolution).set_info(info0)
    f_obss[1] = f_obss[1].resolution_filter(
        d_min=params.high_resolution,
        d_max=params.low_resolution).set_info(info1)
    if (params.sigma_cutoff is not None):
        for i in [0, 1]:
            if (f_obss[i].sigmas() is not None):
                sel = f_obss[i].data(
                ) > f_obss[i].sigmas() * params.sigma_cutoff
                f_obss[i] = f_obss[i].select(sel).set_info(info0)
    for k, f_obs in enumerate(f_obss):
        if (f_obs.indices().size() == 0):
            raise Sorry(
                "No data left in array %d (labels=%s) after filtering!" %
                (k + 1, f_obs.info().label_string()))
    output_file_name = params.output_file
    if (output_file_name is None) and (params.file_name_prefix is not None):
        output_file_name = "%s_%s.mtz" % (params.file_name_prefix,
                                          params.job_id)
    output_files = compute_fo_minus_fo_map(
        data_arrays=f_obss,
        xray_structure=xray_structure,
        log=log,
        silent=command_line.options.silent,
        output_file=output_file_name,
        peak_search=params.find_peaks_holes,
        map_cutoff=params.map_cutoff,
        peak_search_params=params.peak_search,
        multiscale=params.advanced.multiscale,
        anomalous=params.advanced.anomalous).file_names
    return output_files
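The sigma_cutoff branch keeps only reflections whose data exceed sigma times the cutoff. The same selection in plain Python terms, with stand-in lists instead of miller arrays:

# Stand-in for the sigma_cutoff selection applied to each f_obs array above.
data   = [10.0, 3.0, 0.5, 8.0]
sigmas = [ 1.0, 2.0, 1.0, 0.5]
sigma_cutoff = 2.0

selection = [d > s * sigma_cutoff for d, s in zip(data, sigmas)]
filtered = [d for d, keep in zip(data, selection) if keep]
assert filtered == [10.0, 8.0]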
示例#60
0
def test_subtest_failure(capfd):
    # Tests that a test fails if a subtest fails

    # Set up the handler.
    output = StringIO()
    logger = structuredlog.StructuredLogger("test_a")
    formatter = ChromiumFormatter()
    logger.add_handler(handlers.StreamHandler(output, formatter))

    # Run a test with some subtest failures.
    logger.suite_start(["t1"], run_info={}, time=123)
    logger.test_start("t1")
    logger.test_status("t1",
                       status="FAIL",
                       subtest="t1_a",
                       message="t1_a_message")
    logger.test_status("t1",
                       status="PASS",
                       subtest="t1_b",
                       message="t1_b_message")
    logger.test_status("t1",
                       status="TIMEOUT",
                       subtest="t1_c",
                       message="t1_c_message")

    # Make sure the test name was added to the set of tests with subtest fails
    assert "t1" in formatter.tests_with_subtest_fails

    # The test status is reported as a pass here because the harness was able to
    # run the test to completion.
    logger.test_end("t1",
                    status="PASS",
                    expected="PASS",
                    message="top_message")
    logger.suite_end()

    # check nothing got output to stdout/stderr
    # (note that mozlog outputs exceptions during handling to stderr!)
    captured = capfd.readouterr()
    assert captured.out == ""
    assert captured.err == ""

    # check the actual output of the formatter
    output.seek(0)
    output_json = json.load(output)

    test_obj = output_json["tests"]["t1"]
    t1_artifacts = test_obj["artifacts"]
    assert t1_artifacts["log"] == [
        "[t1]\n  expected: PASS\n  message: top_message\n",
        "  [t1_a]\n    expected: FAIL\n    message: t1_a_message\n",
        "  [t1_b]\n    expected: PASS\n    message: t1_b_message\n",
        "  [t1_c]\n    expected: TIMEOUT\n    message: t1_c_message\n",
    ]
    assert t1_artifacts["wpt_subtest_failure"] == ["true"]
    # The status of the test in the output is a failure because subtests failed,
    # despite the harness reporting that the test passed. But the harness status
    # is logged as an artifact.
    assert t1_artifacts["wpt_actual_status"] == ["PASS"]
    assert test_obj["actual"] == "FAIL"
    assert test_obj["expected"] == "PASS"
    # Also ensure that the formatter cleaned up its internal state
    assert "t1" not in formatter.tests_with_subtest_fails