Example #1
def processBiGrams(stemList, wordList=None, biGramSet=beerNGrams):
    """ Transforms all BiGrams in word list from pairs of tokens into single tokens.
    :param stemList: the list of tokens
    :type stemList: list of unicode
    :param wordList: the list of words
    :type wordList: list of unicode
    :param biGramSet: the set of biGrams to evaluate against. They are strings, not tuples
    :type biGramSet: set of unicode
    :rtype: tuple """
    newSList = []
    newWList = []
    consumed = False
    if wordList is None:
        wordList = stemList
    if isgenerator(stemList):
        stemList = list(stemList)
    if isgenerator(wordList):
        wordList = list(wordList)
    if len(stemList):
        for s1, s2, w1, w2 in izip(stemList[:-1], stemList[1:], wordList[:-1], wordList[1:]):
            if consumed:
                consumed = False
                continue
            if u'{} {}'.format(s1, s2) in biGramSet:
                newSList.append(u'{} {}'.format(s1, s2))
                newWList.append(u'{} {}'.format(w1, w2))
                consumed = True
            else:
                newSList.append(s1)
                newWList.append(w1)
        if not consumed:
            newSList.append(stemList[-1])
            newWList.append(wordList[-1])
    return newSList, newWList
Example #2
File: xml.py Project: mahdi-b/spyne
    def gen_members_parent(self, ctx, cls, inst, parent, tag_name, subelts):
        if isinstance(parent, etree._Element):
            elt = etree.SubElement(parent, tag_name)
            elt.extend(subelts)
            ret = self._get_members_etree(ctx, cls, inst, elt)

            if isgenerator(ret):
                try:
                    while True:
                        y = (yield) # may throw Break
                        ret.send(y)

                except Break:
                    try:
                        ret.throw(Break())
                    except StopIteration:
                        pass

        else:
            with parent.element(tag_name):
                for e in subelts:
                    parent.write(e)
                ret = self._get_members_etree(ctx, cls, inst, parent)
                if isgenerator(ret):
                    try:
                        while True:
                            y = (yield)
                            ret.send(y)

                    except Break:
                        try:
                            ret.throw(Break())
                        except StopIteration:
                            pass
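
The `while True: y = (yield); ret.send(y)` blocks that recur throughout these spyne examples are a pre-`yield from` idiom for delegating to a sub-coroutine: the outer generator forwards every value pushed into it to the inner one until a `Break` is thrown, then propagates that `Break` downstream. A minimal, self-contained sketch of the idiom (the `Break` class here is a stand-in for spyne's own exception; the priming `next()` calls happen inside spyne's machinery):

from inspect import isgenerator


class Break(Exception):
    """Stand-in for spyne's Break: signals that the producer is done."""


def inner(label):
    collected = []
    try:
        while True:
            collected.append((yield))   # receive a value from whoever drives us
    except Break:
        print(label, "received", collected)


def outer():
    ret = inner("inner")
    next(ret)                           # prime the sub-generator
    if isgenerator(ret):
        try:
            while True:
                y = (yield)             # receive from our own driver ...
                ret.send(y)             # ... and forward it downstream
        except Break as b:
            try:
                ret.throw(b)            # propagate the stop signal
            except StopIteration:
                pass


g = outer()
next(g)                                 # prime the outer generator
for value in ("a", "b", "c"):
    g.send(value)
try:
    g.throw(Break())                    # tells both levels to wrap up
except StopIteration:
    pass                                # prints: inner received ['a', 'b', 'c']
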
Example #3
    def test_get_instances_to_sync(self):
        fake_context = 'fake_context'

        call_info = {'get_all': 0, 'shuffle': 0}

        def random_shuffle(_list):
            call_info['shuffle'] += 1

        @staticmethod
        def instance_get_all_by_filters(context, filters,
                sort_key, sort_dir, limit, marker):
            # Pretend we return a full list the first time otherwise we loop
            # infinitely
            if marker is not None:
                return []
            self.assertEqual(fake_context, context)
            self.assertEqual('deleted', sort_key)
            self.assertEqual('asc', sort_dir)
            call_info['got_filters'] = filters
            call_info['get_all'] += 1
            instances = [fake_instance.fake_db_instance() for i in range(3)]
            return instances

        self.stubs.Set(compute.InstanceList, 'get_by_filters',
                instance_get_all_by_filters)
        self.stubs.Set(random, 'shuffle', random_shuffle)

        instances = cells_utils.get_instances_to_sync(fake_context)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(3, len([x for x in instances]))
        self.assertEqual(1, call_info['get_all'])
        self.assertEqual({}, call_info['got_filters'])
        self.assertEqual(0, call_info['shuffle'])

        instances = cells_utils.get_instances_to_sync(fake_context,
                                                      shuffle=True)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(3, len([x for x in instances]))
        self.assertEqual(2, call_info['get_all'])
        self.assertEqual({}, call_info['got_filters'])
        self.assertEqual(1, call_info['shuffle'])

        instances = cells_utils.get_instances_to_sync(fake_context,
                updated_since='fake-updated-since')
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(3, len([x for x in instances]))
        self.assertEqual(3, call_info['get_all'])
        self.assertEqual({'changes-since': 'fake-updated-since'},
                         call_info['got_filters'])
        self.assertEqual(1, call_info['shuffle'])

        instances = cells_utils.get_instances_to_sync(fake_context,
                project_id='fake-project',
                updated_since='fake-updated-since', shuffle=True)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(3, len([x for x in instances]))
        self.assertEqual(4, call_info['get_all'])
        self.assertEqual({'changes-since': 'fake-updated-since',
                 'project_id': 'fake-project'}, call_info['got_filters'])
        self.assertEqual(2, call_info['shuffle'])
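
The stub in Example #3 hints at how the function under test pages through the database: it calls get_by_filters repeatedly, passing the last row as a marker, and yields instances until an empty page comes back. A simplified, hypothetical re-creation of such a generator (not nova's actual implementation; `fetch_page` stands in for InstanceList.get_by_filters):

import inspect
import random


def get_instances_to_sync(context, fetch_page, shuffle=False, updated_since=None):
    # hypothetical sketch: page through results and yield them one by one
    filters = {}
    if updated_since is not None:
        filters['changes-since'] = updated_since
    marker = None
    while True:
        page = fetch_page(context, filters, marker=marker)
        if not page:
            return
        if shuffle:
            random.shuffle(page)
        for instance in page:
            yield instance
        marker = page[-1]


def fake_fetch(context, filters, marker=None):
    # first page has three rows, the second page is empty (mirrors the test stub)
    return [] if marker is not None else ['inst1', 'inst2', 'inst3']


gen = get_instances_to_sync('fake_context', fake_fetch)
print(inspect.isgenerator(gen))   # True
print(list(gen))                  # ['inst1', 'inst2', 'inst3']
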
Example #4
File: to_parent.py Project: rtindru/spyne
    def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs):
        name = cls.get_type_name()
        if isinstance(inst, PushBase):
            while True:
                sv = (yield)
                print(sv)
                ret = self.to_parent(ctx, cls, sv, parent, name, from_arr=True,
                                                                       **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass

        else:
            for sv in inst:
                ret = self.to_parent(ctx, cls, sv, parent, name, from_arr=True,
                                                                       **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass
Example #5
File: table.py Project: jpunwin/spyne
    def complex_model_to_parent(self, ctx, cls, inst, parent, name, 
                                                      tr_child=False, **kwargs):
        attrs = {}
        if tr_child is False:
            with parent.element('tr', attrs):
                ret = self._get_members(ctx, cls, inst, parent, 
                                                    tr_child=True, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as b:
                        try:
                            ret.throw(b)
                        except StopIteration:
                            pass

        else:
            if self.table_name_attr is not None:
                attrs[self.table_name_attr] = name
            with parent.element('td', attrs):
                ret = self.subserialize(ctx, cls, inst, parent, None, name)
                if isgenerator(ret):
                    while True:
                        sv2 = (yield)
                        ret.send(sv2)
Example #6
    def _gen_table(self, ctx, cls, inst, parent, name, gen_rows, **kwargs):
        logger.debug("Generate table for %r", cls)

        attrib = {}
        if self.table_name_attr is not None:
            attrib[self.table_name_attr] = cls.get_type_name()

        with parent.element('table', attrib, nsmap=NSMAP):
            if self.produce_header:
                self._gen_header(ctx, cls, name, parent)

            with parent.element('tbody'):
                ret = gen_rows(ctx, cls, inst, parent, name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as b:
                        try:
                            ret.throw(b)
                        except StopIteration:
                            pass

                ret = self.extend_table(ctx, cls, parent, name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as b:
                        try:
                            ret.throw(b)
                        except StopIteration:
                            pass
Example #7
    def wrap_table(self, ctx, cls, inst, parent, name, gen_rows, **kwargs):
        # If this is a direct child of an array, the table is already set up
        # in array_to_parent.
        if self.label:
            div_attrib = self._gen_label_wrapper_class(ctx, cls, name)
            label = self._gen_label_for(ctx, cls, name)
            with parent.element('div', attrib=div_attrib):
                parent.write(label)
                ret = self._gen_table(ctx, cls, inst, parent, name, gen_rows,
                                                                       **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as b:
                        try:
                            ret.throw(b)
                        except StopIteration:
                            pass
        else:
            ret = self._gen_table(ctx, cls, inst, parent, name, gen_rows,
                                                                       **kwargs)

            if isgenerator(ret):
                try:
                    while True:
                        sv2 = (yield)
                        ret.send(sv2)
                except Break as b:
                    try:
                        ret.throw(b)
                    except StopIteration:
                        pass
Example #8
        def decorated(*args, **kwargs):
            auth = UserInfo(get_config_parameter('IRODSUSER'),
                            get_config_parameter('IRODSPASS'))
            conn = connection_pool.get_connection(auth)
            if conn is None:
                raise NotAuthorizedException('Invalid credentials')

            kwargs.update({'conn': conn.connection})
            try:
                res = f(*args, **kwargs)
            except:
                connection_pool.release_connection(conn)
                raise

            if isgenerator(res):
                #current_app.logger.debug('typical ls() case encountered')
                return wrap_generator(res, connection_pool, conn)
            elif isinstance(res, tuple):
                #current_app.logger.debug('typical read() case encountered')
                if not any(map(isgenerator, res)):
                    connection_pool.release_connection(conn)
                    return res
                else:  # generator is in the result tuple
                    wrapped_res = [wrap_generator(i, connection_pool, conn)
                                   if isgenerator(i) else i
                                   for i in res]
                    return wrapped_res
            else:
                #current_app.logger.debug('other case encountered')
                connection_pool.release_connection(conn)
                return res
Example #9
    def test_builtin_render_indirect(self):

      """ Test indirect template render path with `Templates.render` """

      l = self.test_construct()
      result = l.render(self._spawn_handler(),
                        self._spawn_config(app={
                          'paths': {
                            'templates': {
                              'source': template._FRAMEWORK_TEMPLATE_SOURCES,
                              'compiled': 'canteen.templates.compiled'}}}),
                        'base.html',
                        {'var': 'hi'})

      assert inspect.isgenerator(result)
      chunks = [i for i in result]
      assert '<html></html>' in chunks[0]

      snippet = l.render(self._spawn_handler(),
                        self._spawn_config(app={
                          'paths': {
                            'templates': {
                              'source': template._FRAMEWORK_TEMPLATE_SOURCES,
                              'compiled': 'canteen.templates.compiled'}}}),
                          'snippets/test.html',
                          {'var': 'hi'})

      assert inspect.isgenerator(snippet)
      chunks = [i for i in snippet]
      assert '<b></b>' in chunks[0]
Example #10
    def test_multiplex(self):
        def double(inputs):
            return (2*x for x in inputs)

        def square(inputs):
            return (x*x for x in inputs)

        def fail(_):
            raise Exception("Failing before becoming a generator")

        def fail_later(inputs):
            for x in inputs:
                if x % 2:
                    raise Exception
                else:
                    yield 100 + x

        inputs = range(4)
        processors = [double, square]
        outputs = multiplex(inputs, processors)
        self.assertTrue(inspect.isgenerator(outputs))
        self.assertEquals(list(outputs), [0, 0, 2, 1, 4, 4, 6, 9])

        processors = [double, fail, fail_later]
        outputs = multiplex(inputs, processors, logger=getLogger(self.id()))
        self.assertTrue(inspect.isgenerator(outputs))
        with LogCapture(self.id()) as log:
            self.assertEquals(list(outputs), [0, 100, 2, 4, 6])
            self.assertEquals(len(log.records), 2)
Example #11
File: _base.py Project: jpunwin/spyne
    def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs):
        if issubclass(cls, Array):
            cls, = cls._type_info.values()

        name = cls.get_type_name()
        if isinstance(inst, PushBase):
            while True:
                sv = (yield)
                ret = self.to_parent(ctx, cls, sv, parent, name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass

        else:
            for sv in inst:
                ret = self.to_parent(ctx, cls, sv, parent, name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass
Example #12
File: table.py Project: jpunwin/spyne
    def subserialize(self, ctx, cls, inst, parent, ns=None, name=None):
        attrs = {}
        if self.table_name_attr is not None:
            attrs[self.table_name_attr] = name

        with parent.element('table', attrs):
            with parent.element('tbody'):
                if cls.Attributes.max_occurs > 1:
                    ret = self.array_to_parent(ctx, cls, inst, parent, name)
                    if isgenerator(ret):
                        try:
                            while True:
                                sv2 = (yield)
                                ret.send(sv2)
                        except Break as b:
                            try:
                                ret.throw(b)
                            except StopIteration:
                                pass

                else:
                    with parent.element('tr'):
                        ret = self.to_parent(ctx, cls, inst, parent, name)
                        if isgenerator(ret):
                            try:
                                while True:
                                    sv2 = (yield)
                                    ret.send(sv2)
                            except Break as b:
                                try:
                                    ret.throw(b)
                                except StopIteration:
                                    pass
Example #13
def _test_match_basic(matcher, query):
    extracts = dict(
        xpaths={'text': 'text()'},
        csss={'favorite': '.class1::text'}
    )
    m = matcher(query, **extracts)

    mg = m(dict(response="<div></div>"))
    ok_(inspect.isgenerator(mg))
    eq_(list(mg), [])  # there should be no hits

    mg = m(dict(response=sample1.response))
    ok_(inspect.isgenerator(mg))
    hits = list(mg)
    eq_(len(hits), 3)
    for hit, a_html, a_text, class1_text in zip(
            hits, sample1.a_htmls, sample1.a_texts, sample1.class1_texts):
        ok_(hit['response'])
        eq_(hit['match'], a_html)
        eq_(hit['text'], a_text)
        eq_(hit.get('favorite', None), class1_text)

    m = matcher(query, min_count=4, **extracts)
    mg = m(dict(response=sample1.response))
    ok_(inspect.isgenerator(mg))
    assert_raises(ValueError, list, mg)

    m = matcher(query, max_count=2, **extracts)
    mg = m(dict(response=sample1.response))
    ok_(inspect.isgenerator(mg))
    assert_raises(ValueError, list, mg)
Example #14
File: to_cloth.py Project: arskom/spyne
    def array_to_cloth(self, ctx, cls, inst, cloth, parent, name=None, **kwargs):
        if isinstance(inst, PushBase):
            while True:
                sv = (yield)
                ret = self.to_cloth(ctx, cls, sv, cloth, parent,
                                             name=name, from_arr=True, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass

        else:
            for sv in inst:
                ret = self.to_cloth(ctx, cls, sv, cloth, parent,
                                             from_arr=True, name=name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass
Example #15
    def tick(self, max_time=None):
        '''
        Process queued messages.
        
        :Parameters:
            - `max_time`: processing time limit so that the event processing does not take too long.
              Not all messages are guaranteed to be processed when this limit is in effect.

        :Return:
            - true: if all messages ready for processing were completed
            - false: otherwise (i.e.: processing took more than max_time)
        '''
        # swap queues and clear the active_queue
        self.active_queue, self.processing_queue = self.processing_queue, self.active_queue
        self.active_queue.clear()
        coroutines_to_add = []
        startTime = time.time()
        
        # first process each existing coroutine once exactly
        self.coroutines = [x for x in self.coroutines if self.process_coroutine(x)]
        
        while len(self.processing_queue):
            # always pop the message off the queue; if there are no listeners for this message yet
            # then the message will be dropped off the queue
            msg = self.processing_queue.popleft()
            # for receivers that handle all messages let them handle this
            for r in self.message_receiver_map[WildCardMessageType]:
                res = r.handle_message(msg)
                # coroutines will be run 
                if inspect.isgenerator(res):
                    c = self.process_coroutine(res)
                    if c:
                        coroutines_to_add.append(c)
            # now pass msg to message receivers that subscribed to this message type
            for r in self.message_receiver_map.get(msg.message_type, []):
                if not self.designated_to_handle(r, msg):
                    continue
                res = r.handle_message(msg)
                if inspect.isgenerator(res):
                    c = self.process_coroutine(res)
                    if c:
                        coroutines_to_add.append(c)
                else:
                    # finish this message if it was handled or had a designated receiver
                    if res or msg.receiverID:
                        break
            if max_time and time.time() - startTime > max_time:
                break
            
        # queue up all pending coroutines
        self.coroutines.extend(coroutines_to_add)
            
        flushed = len(self.processing_queue) == 0
        # push any left over messages to the active queue
        # bottom-up on the processQueue and push to the front of active_queue
        if not flushed:
            while len(self.processing_queue):
                self.active_queue.appendleft(self.processing_queue.pop())
        return flushed
Example #16
File: row.py Project: ashleysommer/spyne
    def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs):
        with parent.element('div'):
            if issubclass(cls, ComplexModelBase):
                ret = super(HtmlRowTable, self).array_to_parent(
                                         ctx, cls, inst, parent, name, **kwargs)
                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as b:
                        try:
                            ret.throw(b)
                        except StopIteration:
                            pass
            else:
                table_attrib = {}
                if self.table_name_attr:
                    table_attrib = {self.table_name_attr: name}
                if self.table_width is not None:
                    table_attrib['width'] = self.table_width

                with parent.element('table', table_attrib):
                    tr_attrib = {}
                    if self.row_class is not None:
                        tr_attrib['class'] = self.row_class
                    with parent.element('tr', tr_attrib):
                        if self.header:
                            parent.write(E.th(self.trc(cls, ctx.locale,
                                                          cls.get_type_name())))
                        td_attrs = {}

                        if self.cell_class is not None:
                            self.add_html_attr('class', td_attrs,
                                                                self.cell_class)

                        self.add_field_attrs(td_attrs, name, cls)

                        cls_attrs = self.get_cls_attrs(cls)

                        if cls_attrs.hidden:
                            self.add_style(td_attrs, 'display:None')

                        with parent.element('td', td_attrs):
                            with parent.element('table'):
                                ret = super(HtmlRowTable, self) \
                                    .array_to_parent(ctx, cls, inst, parent,
                                                                 name, **kwargs)
                                if isgenerator(ret):
                                    try:
                                        while True:
                                            sv2 = (yield)
                                            ret.send(sv2)
                                    except Break as b:
                                        try:
                                            ret.throw(b)
                                        except StopIteration:
                                            pass
Example #17
    def test_get_instances_to_sync(self):
        fake_context = 'fake_context'

        call_info = {'get_all': 0, 'shuffle': 0}

        def random_shuffle(_list):
            call_info['shuffle'] += 1

        @staticmethod
        def instance_get_all_by_filters(context, filters,
                sort_key, sort_dir):
            self.assertEqual(context, fake_context)
            self.assertEqual(sort_key, 'deleted')
            self.assertEqual(sort_dir, 'asc')
            call_info['got_filters'] = filters
            call_info['get_all'] += 1
            return ['fake_instance1', 'fake_instance2', 'fake_instance3']

        self.stubs.Set(objects.InstanceList, 'get_by_filters',
                instance_get_all_by_filters)
        self.stubs.Set(random, 'shuffle', random_shuffle)

        instances = cells_utils.get_instances_to_sync(fake_context)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info['get_all'], 1)
        self.assertEqual(call_info['got_filters'], {})
        self.assertEqual(call_info['shuffle'], 0)

        instances = cells_utils.get_instances_to_sync(fake_context,
                                                      shuffle=True)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info['get_all'], 2)
        self.assertEqual(call_info['got_filters'], {})
        self.assertEqual(call_info['shuffle'], 1)

        instances = cells_utils.get_instances_to_sync(fake_context,
                updated_since='fake-updated-since')
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info['get_all'], 3)
        self.assertEqual(call_info['got_filters'],
                {'changes-since': 'fake-updated-since'})
        self.assertEqual(call_info['shuffle'], 1)

        instances = cells_utils.get_instances_to_sync(fake_context,
                project_id='fake-project',
                updated_since='fake-updated-since', shuffle=True)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info['get_all'], 4)
        self.assertEqual(call_info['got_filters'],
                {'changes-since': 'fake-updated-since',
                 'project_id': 'fake-project'})
        self.assertEqual(call_info['shuffle'], 2)
Example #18
    def _handle_request_coroutine(self):
        ctx = self.ctx

        # TODO: Errors are not going through the context and middleware.

        try:
            for m in middleware:
                result = m.start(ctx)
                if result and inspect.isgenerator(result):
                    # The middleware function is a generator.
                    yield from result

            if not self.route:
                raise HttpError(404, ctx.url)

            ctx.response.body = yield from self.route(self.match, ctx)

        except HttpError as ex:
            ctx.response.status = ex.code
            self.error('HTTP error %s %s url=%s', ex.code, str(ex), ctx.url)
            self._complete_with_error(ex.code)

        except:
            ctx.response.status = 500
            self.error('Unhandled error in %s', self.route, exc_info=True)
            self._complete_with_error(500)

        for m in reversed(middleware):
            result = m.complete(ctx)
            if result and inspect.isgenerator(result):
                # The middleware function is a generator.
                yield from result

        try:
            ctx.response._send(ctx, self.transport)
        except:
            # An error happened while we were sending.  (Try to do as little as
            # possible in _send so there is less chance of getting an exception
            # there.)  At this point our best bet is to abort.
            logger.error('An error occurred while trying to send: %r', ctx, exc_info=True)

        # Reset everything
        self.state = _STATE_READING_HEADERS

        self.request_length = None
        self.method  = None
        self.url     = None
        self.headers = None
        self.route   = None
        self.match   = None # Can we put this into the route or request?
        self.ctx     = None

        if self.buffer:
            self._process_buffer()
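
Example #18 lets each middleware hook be either a plain function or a generator; generator results are delegated with `yield from`, everything else is used as-is. A compact sketch of that dual handling outside the server (the middleware class and its hooks are invented for illustration):

import inspect


class TimingMiddleware:
    def start(self, ctx):
        # a generator hook: the caller will `yield from` it
        print("start:", ctx)
        yield                       # pretend to await something here

    def complete(self, ctx):
        # a plain hook: no delegation needed
        print("complete:", ctx)


def drive(middleware, ctx):
    for m in middleware:
        result = m.start(ctx)
        if result and inspect.isgenerator(result):
            yield from result       # same check as in the handler above
    for m in reversed(middleware):
        result = m.complete(ctx)
        if result and inspect.isgenerator(result):
            yield from result


list(drive([TimingMiddleware()], {"url": "/"}))   # prints start: ... then complete: ...
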
Example #19
    def _handle_call(self, request, fn, m, protocol):
        if fn != '__batch__':
            r = self._handle_single_call(request, m)
        else:
            # Batch calls
            r = []
            for call in m['calls']:
                _r = self._handle_single_call(request, call)

                # If the func invoked above is a streaming function, then fail
                # this operation as we don't handle streaming functions in batch mode
                if inspect.isgenerator(_r.get('result')):
                    raise APIException('Cannot invoke streaming API fn in batch mode')

                if isinstance(_r, dict) and 'success' in _r:
                    _r = _r['result'] if _r['success'] else None
                r.append(_r)

        if self.get_status() == 304:
            return

        # Get the API function object
        fnobj = self._get_apifn(fn) if fn != '__batch__' else (lambda: 0)

        # Set response header based on chosen serialization mechanism
        mime = getattr(fnobj, 'mime', self.get_mime(protocol))
        self.set_header('Content-Type', mime)

        is_raw = 'raw' in get_fn_tags(fnobj)
        serializer = (lambda x: x) if is_raw else self.get_serializer(protocol)

        if fn == '__batch__' or not r['success']:
            r = serializer(r)
            self.set_header('Content-Length', len(r))
            self.write(r)
            return

        result = r['result']

        if not inspect.isgenerator(result):
            # Full response is available - Write it out in one shot
            r = serializer(r)
            self.set_header('Content-Length', len(r))
            self.write(r)
            return

        # Streaming response - iterate and write out
        for part in result:
            part = serializer(part)
            self.write(part)
            sep = '\n' if is_raw else self.get_record_separator(protocol)
            if sep: self.write(sep)
            self.flush()
Example #20
    def test_get_instances_to_sync(self):
        fake_context = "fake_context"

        call_info = {"get_all": 0, "shuffle": 0}

        def random_shuffle(_list):
            call_info["shuffle"] += 1

        def instance_get_all_by_filters(context, filters, sort_key, sort_order):
            self.assertEqual(context, fake_context)
            self.assertEqual(sort_key, "deleted")
            self.assertEqual(sort_order, "asc")
            call_info["got_filters"] = filters
            call_info["get_all"] += 1
            return ["fake_instance1", "fake_instance2", "fake_instance3"]

        self.stubs.Set(db, "instance_get_all_by_filters", instance_get_all_by_filters)
        self.stubs.Set(random, "shuffle", random_shuffle)

        instances = cells_utils.get_instances_to_sync(fake_context)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info["get_all"], 1)
        self.assertEqual(call_info["got_filters"], {})
        self.assertEqual(call_info["shuffle"], 0)

        instances = cells_utils.get_instances_to_sync(fake_context, shuffle=True)
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info["get_all"], 2)
        self.assertEqual(call_info["got_filters"], {})
        self.assertEqual(call_info["shuffle"], 1)

        instances = cells_utils.get_instances_to_sync(fake_context, updated_since="fake-updated-since")
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info["get_all"], 3)
        self.assertEqual(call_info["got_filters"], {"changes-since": "fake-updated-since"})
        self.assertEqual(call_info["shuffle"], 1)

        instances = cells_utils.get_instances_to_sync(
            fake_context, project_id="fake-project", updated_since="fake-updated-since", shuffle=True
        )
        self.assertTrue(inspect.isgenerator(instances))
        self.assertEqual(len([x for x in instances]), 3)
        self.assertEqual(call_info["get_all"], 4)
        self.assertEqual(
            call_info["got_filters"], {"changes-since": "fake-updated-since", "project_id": "fake-project"}
        )
        self.assertEqual(call_info["shuffle"], 2)
Example #21
def print_item(item, level=0, indent=' '):
    if isfunction(item):
        lazy = item()
        print_item(lazy, level+1)
    elif isgenerator(item):
        print_recursive(item, level+1)
    elif isinstance(item, tuple):
        if isfunction(item[-1]) or isgenerator(item[-1]) or isinstance(item[-1], tuple):
            print(indent*level+repr(item[:-1]))
            print_item(item[-1], level+1)
        else:
            print(indent*level+repr(item))
    else:
        print(indent*level+repr(item))
Example #22
File: wsgi.py Project: vishvananda/cinder
    def post_process_extensions(self, extensions, resp_obj, request,
                                action_args):
        for ext in extensions:
            response = None
            if inspect.isgenerator(ext):
                # If it's a generator, run the second half of
                # processing
                try:
                    with ResourceExceptionHandler():
                        response = ext.send(resp_obj)
                except StopIteration:
                    # Normal exit of generator
                    continue
                except Fault as ex:
                    response = ex
            else:
                # Regular functions get post-processing...
                try:
                    with ResourceExceptionHandler():
                        response = ext(req=request, resp_obj=resp_obj,
                                       **action_args)
                except Fault as ex:
                    response = ex

            # We had a response...
            if response:
                return response

        return None
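
Example #22 expects "two-phase" extensions: a generator runs its pre-processing up to a yield and is later resumed with the response object via send(), which is exactly what post_process_extensions does. A hedged sketch of such an extension being driven by hand (the names are illustrative, not the real cinder API):

import inspect


def sample_extension(req):
    # first half: runs before the action is dispatched
    print("pre-processing", req)
    resp_obj = yield                  # paused here until post-processing time
    # second half: runs when post_process_extensions sends the response in
    print("post-processing", resp_obj)


ext = sample_extension({"path": "/volumes"})
next(ext)                             # run the first half up to the yield
assert inspect.isgenerator(ext)
try:
    ext.send({"status": 200})         # second half, as ext.send(resp_obj) above
except StopIteration:
    pass                              # normal exit of the generator
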
Example #23
    def test_filter_all_recursive_yields(self):
        # Test filter_all() allows generators from previous filter_all()s.
        # filter_all() yields results.  We want to make sure that we can
        # call filter_all() with generators returned from previous calls
        # to filter_all().
        filter_obj_list = ['obj1', 'obj2', 'obj3']
        filter_properties = 'fake_filter_properties'
        base_filter = filters.BaseFilter()

        self.mox.StubOutWithMock(base_filter, '_filter_one')

        total_iterations = 200

        # The order that _filter_one is going to get called gets
        # confusing because we will be recursively yielding things..
        # We are going to simulate the first call to filter_all()
        # returning False for 'obj2'.  So, 'obj1' will get yielded
        # 'total_iterations' number of times before the first filter_all()
        # call gets to processing 'obj2'.  We then return 'False' for it.
        # After that, 'obj3' gets yielded 'total_iterations' number of
        # times.
        for x in xrange(total_iterations):
            base_filter._filter_one('obj1', filter_properties).AndReturn(True)
        base_filter._filter_one('obj2', filter_properties).AndReturn(False)
        for x in xrange(total_iterations):
            base_filter._filter_one('obj3', filter_properties).AndReturn(True)
        self.mox.ReplayAll()

        objs = iter(filter_obj_list)
        for x in xrange(total_iterations):
            # Pass in generators returned from previous calls.
            objs = base_filter.filter_all(objs, filter_properties)
        self.assertTrue(inspect.isgenerator(objs))
        self.assertEqual(['obj1', 'obj3'], list(objs))
Example #24
    def test_data_stream_exc(self):
        fut = asyncio.Future(loop=self.loop)

        def gen():
            yield b'binary data'
            yield from fut
            return b' result'

        req = HttpRequest(
            'POST', 'http://python.org/', data=gen(), loop=self.loop)
        self.assertTrue(req.chunked)
        self.assertTrue(inspect.isgenerator(req.body))
        self.assertEqual(req.headers['transfer-encoding'], 'chunked')

        @asyncio.coroutine
        def exc():
            yield from asyncio.sleep(0.01, loop=self.loop)
            fut.set_exception(ValueError)

        asyncio.async(exc(), loop=self.loop)

        req.send(self.transport)
        self.assertRaises(
            ValueError, self.loop.run_until_complete, req._writer)
        self.assertTrue(self.transport.close.called)
Example #25
def _call():
    result = getattr(self.client, method)(*args, **kwargs)
    if inspect.isgenerator(result):
        # Convert generator results to a list, so that we can
        # catch any potential exceptions now and retry the call.
        return list(result)
    return result
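
Example #25 materialises generator results with list() so that any failure surfaces while the retry wrapper is still on the stack; if the generator were returned as-is, the exception would only appear when the caller finally iterates it. A minimal illustration of the difference (the failing backend is made up for the demo):

import inspect


def flaky():
    yield 1
    raise RuntimeError("backend went away")


def call_lazily():
    return flaky()                 # nothing has failed yet


def call_eagerly():
    result = flaky()
    if inspect.isgenerator(result):
        return list(result)        # the RuntimeError is raised right here
    return result


gen = call_lazily()                # no exception so far...
try:
    list(gen)                      # ...it only shows up when someone iterates
except RuntimeError as exc:
    print("too late to retry:", exc)

try:
    call_eagerly()
except RuntimeError as exc:
    print("caught where a retry could happen:", exc)
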
Example #26
File: parser.py Project: arch2/irc
    def set_parser(self, parser):
        """set parser to stream. return parser's DataQueue."""
        if self._parser:
            self.unset_parser()

        output = DataQueue(loop=self._loop)
        if self._exception:
            output.set_exception(self._exception)
            return output

        # init parser
        p = parser(output, self._input)
        assert inspect.isgenerator(p), 'Generator is required'

        try:
            # initialize parser with data and parser buffers
            next(p)
        except StopIteration:
            pass
        except Exception as exc:
            output.set_exception(exc)
        else:
            # parser still requires more data
            self._parser = p
            self._output = output

            if self._eof:
                self.unset_parser()

        return output
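
Example #26 primes the parser generator with next(p) so that it runs up to its first "need more data" suspension point, then keeps it around to feed further input. A generic sketch of that handshake, with the DataQueue and feeding machinery simplified away:

import inspect


def line_parser(output):
    buf = ''
    while True:
        chunk = yield              # suspend until more data is sent in
        buf += chunk
        while '\n' in buf:
            line, buf = buf.split('\n', 1)
            output.append(line)


out = []
p = line_parser(out)
assert inspect.isgenerator(p), 'Generator is required'
next(p)                            # initialize the parser, as set_parser does
p.send('hel')
p.send('lo\nwor')
p.send('ld\n')
print(out)                         # ['hello', 'world']
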
Example #27
def test_flatten():
    assert isgenerator(flatten([]))
    assert list(flatten([1])) != [[1]]

    assert list(flatten([[]])) == []
    assert list(flatten([1, [2], [3]])) == [1, 2, 3]
    assert list(flatten([1, [2, 3], [[4]]])) == [1, 2, 3, 4]
Example #28
 def advising_generator_wrapper_py3(*args, **kwargs):
     if bind:
         advisor = advising_function(cutpoint_function, *args, **kwargs)
     else:
         advisor = advising_function(*args, **kwargs)
     if not isgenerator(advisor):
         raise ExpectedGenerator("advising_function %s did not return a generator." % advising_function)
     try:
         advice = next(advisor)
         while True:
             logdebug('Got advice %r from %s', advice, advising_function)
             if advice is Proceed or advice is None or isinstance(advice, Proceed):
                 if isinstance(advice, Proceed):
                     args = advice.args
                     kwargs = advice.kwargs
                 gen = cutpoint_function(*args, **kwargs)
                 try:
                     result = yield from gen
                 except BaseException:
                     advice = advisor.throw(*sys.exc_info())
                 else:
                     try:
                         advice = advisor.send(result)
                     except StopIteration:
                         return
                 finally:
                     gen.close()
             elif advice is Return:
                 return
             elif isinstance(advice, Return):
                 raise StopIteration(advice.value)
             else:
                 raise UnacceptableAdvice("Unknown advice %s" % advice)
     finally:
         advisor.close()
Example #29
    def handle_request(self, message, payload):
        """Handle a single HTTP request"""
        now = self._loop.time()

        if self.readpayload:
            wsgiinput = io.BytesIO()
            wsgiinput.write((yield from payload.read()))
            wsgiinput.seek(0)
            payload = wsgiinput

        environ = self.create_wsgi_environ(message, payload)
        response = self.create_wsgi_response(message)

        riter = self.wsgi(environ, response.start_response)
        if isinstance(riter, asyncio.Future) or inspect.isgenerator(riter):
            riter = yield from riter

        resp = response.response
        try:
            for item in riter:
                if isinstance(item, asyncio.Future):
                    item = yield from item
                yield from resp.write(item)

            yield from resp.write_eof()
        finally:
            if hasattr(riter, 'close'):
                riter.close()

        if resp.keep_alive():
            self.keep_alive(True)

        self.log_access(
            message, environ, response.response, self._loop.time() - now)
Example #30
File: amqp.py Project: DragonDM/ceilometer
    def _process_data(self, ctxt, version, method, namespace, args):
        """Process a message in a new thread.

        If the proxy object we have has a dispatch method
        (see rpc.dispatcher.RpcDispatcher), pass it the version,
        method, and args and let it dispatch as appropriate.  If not, use
        the old behavior of magically calling the specified method on the
        proxy we have here.
        """
        ctxt.update_store()
        try:
            rval = self.proxy.dispatch(ctxt, version, method, namespace,
                                       **args)
            # Check if the result was a generator
            if inspect.isgenerator(rval):
                for x in rval:
                    ctxt.reply(x, None, connection_pool=self.connection_pool)
            else:
                ctxt.reply(rval, None, connection_pool=self.connection_pool)
            # This final None tells multicall that it is done.
            ctxt.reply(ending=True, connection_pool=self.connection_pool)
        except rpc_common.ClientException as e:
            LOG.debug(_('Expected exception during message handling (%s)') %
                      e._exc_info[1])
            ctxt.reply(None, e._exc_info,
                       connection_pool=self.connection_pool,
                       log_failure=False)
        except Exception:
            # sys.exc_info() is deleted by LOG.exception().
            exc_info = sys.exc_info()
            LOG.error(_('Exception during message handling'),
                      exc_info=exc_info)
            ctxt.reply(None, exc_info, connection_pool=self.connection_pool)
Example #31
def _check_if_gen(val):
    test = val()
    if inspect.isgenerator(test):
        return test
    else:
        return val
Example #32
def _return_executable(val):
    if inspect.isgenerator(val):
        return lambda: next(val)
    return val
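
Examples #31 and #32 are small adapters in opposite directions: one calls a value to see whether it produces a generator, the other turns a generator into a "call me for the next value" thunk. A quick self-contained demonstration of the Example #32 behaviour (the helper is restated so the snippet runs on its own):

import inspect


def _return_executable(val):
    # same logic as Example #32
    if inspect.isgenerator(val):
        return lambda: next(val)
    return val


nums = _return_executable(n * n for n in range(3))
print(nums(), nums(), nums())      # 0 1 4

plain = _return_executable("not a generator")
print(plain)                       # passed through unchanged
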
Example #33
    def to_cloth(self,
                 ctx,
                 cls,
                 inst,
                 cloth,
                 parent,
                 name=None,
                 from_arr=False,
                 as_attr=False,
                 as_data=False,
                 **kwargs):

        prot_name = self.__class__.__name__

        if issubclass(cls, XmlAttribute):
            cls = cls.type
            as_attr = True

        elif issubclass(cls, XmlData):
            cls = cls.type
            as_data = True

        pushed = False
        if cloth is None:
            logger_c.debug("No cloth fround, switching to to_parent...")
            ret = self.to_parent(ctx, cls, inst, parent, name, **kwargs)

        else:
            cls, _ = self.get_polymorphic_target(cls, inst)
            cls_attrs = self.get_cls_attrs(cls)

            inst = self._sanitize(cls_attrs, inst)

            # if instance is None, use the default factory to generate one
            _df = cls_attrs.default_factory
            if inst is None and callable(_df):
                inst = _df()

            # if instance is still None, use the default value
            if inst is None:
                inst = cls_attrs.default

            # if there's a subprotocol, switch to it
            subprot = cls_attrs.prot
            if subprot is not None and not (subprot is self):
                # we can't do this because subprotocols don't accept cloths.
                # so we need to enter the cloth, which makes it too late to
                # set attributes.
                assert not as_attr, "No subprot supported for fields " \
                    "to be serialized as attributes, use type casting with "  \
                    "customized serializers in the current protocol instead."

                self._enter_cloth(ctx,
                                  cloth,
                                  parent,
                                  method=cls_attrs.method,
                                  skip=as_data)

                ret = subprot.subserialize(ctx,
                                           cls,
                                           inst,
                                           parent,
                                           name,
                                           as_attr=as_attr,
                                           as_data=as_data,
                                           **kwargs)

            # if there is no subprotocol, try rendering the value
            else:
                ret = None

                # try rendering the null value
                if inst is None:
                    if cls_attrs.min_occurs > 0:
                        attrs = {}
                        if as_attr:
                            # FIXME: test needed
                            attrs[name] = ''

                        self._enter_cloth(ctx,
                                          cloth,
                                          parent,
                                          attrib=attrs,
                                          method=cls_attrs.method)
                        identifier = "%s.%s" % (prot_name, "null_to_cloth")
                        logger_s.debug("Writing '%s' using %s type: %s.", name,
                                       identifier, cls.get_type_name())
                        parent.write(cloth)

                    else:
                        logger_s.debug("Skipping '%s' type: %s because empty.",
                                       name, cls.get_type_name())
                        self._enter_cloth(ctx,
                                          cloth,
                                          parent,
                                          skip=True,
                                          method=cls_attrs.method)

                elif as_data:
                    # we only support XmlData of a primitive. Is this a
                    # problem?
                    ret = self.to_unicode(cls, inst)
                    if ret is not None:
                        parent.write(ret)

                elif as_attr:
                    sub_name = cls_attrs.sub_name
                    if sub_name is None:
                        sub_name = name
                    attrs = {sub_name: self.to_unicode(cls, inst)}

                    self._enter_cloth(ctx,
                                      cloth,
                                      parent,
                                      attrib=attrs,
                                      method=cls_attrs.method)

                else:
                    # push the instance at hand to instance stack. this makes it
                    # easier for protocols to make decisions based on parents of
                    # instances at hand.
                    pushed = True
                    logger_c.debug("%s %r pushed %r %r", R("#"), self, cls,
                                   inst)
                    ctx.outprot_ctx.inst_stack.append((cls, inst, from_arr))

                    # try rendering the array value
                    if not from_arr and cls.Attributes.max_occurs > 1:
                        ret = self.array_to_cloth(ctx,
                                                  cls,
                                                  inst,
                                                  cloth,
                                                  parent,
                                                  as_attr=as_attr,
                                                  name=name)
                    else:
                        # try rendering anything else
                        handler = self.rendering_handlers[cls]

                        # disabled for performance reasons
                        # identifier = "%s.%s" % (prot_name, handler.__name__)
                        # from spyne.util.web import log_repr
                        # logger_s.debug("Writing %s using %s for %s. Inst: %r",
                        #              name, identifier, cls.get_type_name(),
                        #              log_repr(inst, cls, from_array=from_arr))

                        ret = handler(ctx,
                                      cls,
                                      inst,
                                      cloth,
                                      parent,
                                      name=name,
                                      as_attr=as_attr)

        if isgenerator(ret):
            try:
                while True:
                    sv2 = (yield)
                    ret.send(sv2)
            except Break as e:
                try:
                    ret.throw(e)
                except (Break, StopIteration, GeneratorExit):
                    pass
                finally:
                    if pushed:
                        logger_c.debug("%s %r popped %r %r", B("#"), self, cls,
                                       inst)
                        ctx.outprot_ctx.inst_stack.pop()

        else:
            if pushed:
                logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst)
                ctx.outprot_ctx.inst_stack.pop()
Example #34
    def _step(self, exc=None):
        assert not self.done(), f'_step(): already done: {self!r}, {exc!r}'
        if self._must_cancel:
            if not isinstance(exc, futures.CancelledError):
                exc = futures.CancelledError()
            self._must_cancel = False
        coro = self._coro
        self._fut_waiter = None

        _enter_task(self._loop, self)
        # Call either coro.throw(exc) or coro.send(None).
        try:
            if exc is None:
                # We use the `send` method directly, because coroutines
                # don't have `__iter__` and `__next__` methods.
                result = coro.send(None)
            else:
                result = coro.throw(exc)
        except StopIteration as exc:
            if self._must_cancel:
                # Task is cancelled right before coro stops.
                self._must_cancel = False
                self.set_exception(futures.CancelledError())
            else:
                self.set_result(exc.value)
        except futures.CancelledError:
            super().cancel()  # I.e., Future.cancel(self).
        except Exception as exc:
            self.set_exception(exc)
        except BaseException as exc:
            self.set_exception(exc)
            raise
        else:
            blocking = getattr(result, '_asyncio_future_blocking', None)
            if blocking is not None:
                # Yielded Future must come from Future.__iter__().
                if result._loop is not self._loop:
                    new_exc = RuntimeError(
                        f'Task {self!r} got Future '
                        f'{result!r} attached to a different loop')
                    self._loop.call_soon(self._step, new_exc)
                elif blocking:
                    if result is self:
                        new_exc = RuntimeError(
                            f'Task cannot await on itself: {self!r}')
                        self._loop.call_soon(self._step, new_exc)
                    else:
                        result._asyncio_future_blocking = False
                        result.add_done_callback(self._wakeup)
                        self._fut_waiter = result
                        if self._must_cancel:
                            if self._fut_waiter.cancel():
                                self._must_cancel = False
                else:
                    new_exc = RuntimeError(
                        f'yield was used instead of yield from '
                        f'in task {self!r} with {result!r}')
                    self._loop.call_soon(self._step, new_exc)

            elif result is None:
                # Bare yield relinquishes control for one event loop iteration.
                self._loop.call_soon(self._step)
            elif inspect.isgenerator(result):
                # Yielding a generator is just wrong.
                new_exc = RuntimeError(
                    f'yield was used instead of yield from for '
                    f'generator in task {self!r} with {result}')
                self._loop.call_soon(self._step, new_exc)
            else:
                # Yielding something else is an error.
                new_exc = RuntimeError(f'Task got bad yield: {result!r}')
                self._loop.call_soon(self._step, new_exc)
        finally:
            _leave_task(self._loop, self)
            self = None  # Needed to break cycles when an exception occurs.
Example #35
def is_generator(obj):
    """Return True if ``obj`` is a generator
    """
    return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj)
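
For reference, `inspect.isgenerator` and `inspect.isgeneratorfunction` answer different questions: the former is true only for a generator object, the latter only for the function that produces one, which is why helpers like the one above check both. A quick standalone check:

import inspect


def countdown(n):
    # generator function: calling it returns a generator object
    while n > 0:
        yield n
        n -= 1


gen = countdown(3)

print(inspect.isgeneratorfunction(countdown))  # True  (the function itself)
print(inspect.isgenerator(countdown))          # False (not a generator object)
print(inspect.isgenerator(gen))                # True  (the object returned by calling it)
print(list(gen))                               # [3, 2, 1]
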
Example #36
import inspect


# The snippet starts inside a decorator; `check` and `wrapper` below are
# reconstructed from the `@check  # add1 = check(add1)` usage further down.
def check(fn):
    def wrapper(*args, **kwargs):
        sig = inspect.signature(add1)
        parameters = sig.parameters
        print(
            parameters
        )  #OrderedDict([('args', <Parameter "*args">), ('kwargs', <Parameter "**kwargs">)])
        ret = fn(*args, **kwargs)
        return ret

    return wrapper


@check  #add1 = check(add1)
def add1(x: int, y: str = 6) -> int:  # the return annotation of add1 is int
    if not (isinstance(x, int) and isinstance(y, str)):
        return 'your input is wrong'
    return x + y


print(add1(2, 4))
print(add1.__annotations__)  # __annotations__ holds the function's annotation dict


def fib(n):
    yield from range(10)


print(inspect.isgenerator(fib))  # check whether fib is a generator (False: fib is a generator function, not a generator object)
print(inspect.isgeneratorfunction(add1))  # check whether add1 is a generator function
print(inspect.signature(fib))  # inspect the signature of fib
print(inspect.signature(add1))  # inspect the signature of add1
Example #37
def is_iterable(obj):
    """
    :param obj: the object we want to test
    :return: True if obj is a generator or a generator function
    """
    return inspect.isgenerator(obj) or inspect.isgeneratorfunction(obj)
Example #38
File: utils.py Project: Ocupe/restruc_toga
def process_callback(callback_result):
    "Handle generators in actions"
    if inspect.isgenerator(callback_result):
        task = LongRunningTask.alloc().init()
        task.__dict__['interface'] = callback_result
        task.performIteration_(None)
Example #39
def _schedule_proc_now(self, ev):
    assert isinstance(ev, (Action, Event)) or isgenerator(ev), ev
    self._current_event_list.append(ev)
Example #40
    def run(self, until: int, extraProcesses=[]) -> None:
        """
        Run simulation for a specified time

        :note: Can be used to resume the simulation from the time at which it previously ended.
        :note: A full simulator restart requires instantiating a new simulator.
        """

        assert until >= self.now, (until, self.now)
        if until == self.now:
            return

        now = self.now
        time_slot = SimTimeSlot()
        time_slot.write_only = []
        for proc in extraProcesses:
            assert isgenerator(proc), proc
            time_slot.write_only.append(proc)
        # add handle to stop simulation
        self.schedule(now, time_slot)

        end_time_slot = SimTimeSlot()
        end_time_slot.write_only = [raise_StopSimulation(self), ]
        self.schedule(now + until, end_time_slot)

        next_time_slot = self._events.pop
        rtl_sim = self.rtl_simulator
        _run_event_list = self._run_event_list
        END = rtl_sim.END_OF_STEP
        try:
            # for all events
            while True:
                now, time_slot = next_time_slot()
                self._current_time_slot = time_slot
                assert now >= self.now, (now, self.now, time_slot, "Can not go back in time")
                rtl_sim.time = self.now = now

                # run preinitialization of sim. environment
                _run_event_list(time_slot.timeslot_begin)
                time_slot.timeslot_begin = DONE

                # run resolution of combinational loops
                first_run = True
                while first_run or time_slot.write_only:
                    _run_event_list(time_slot.write_only)
                    time_slot.write_only = None
                    s = rtl_sim.eval()

                    assert s == rtl_sim.COMB_UPDATE_DONE, (self.now, s)
                    if time_slot.comb_read is None:
                        self._current_event_list = time_slot.comb_read = []
                    else:
                        self._current_event_list = time_slot.comb_read
                    self._eval_rtl_events()

                    _run_event_list(time_slot.comb_read)
                    time_slot.comb_read = None

                    if time_slot.write_only is not None:
                        # we have to re-evaluate the combinational logic
                        # if a write occurred in this time slot
                        rtl_sim.reset_eval()
                    first_run = False

                time_slot.write_only = DONE
                time_slot.comb_read = DONE

                # run evaluation of rest of the circuit
                while not rtl_sim.read_only_not_write_only:
                    rtl_sim.eval()
                    if rtl_sim.pending_event_list:
                        if time_slot.comb_stable is None:
                            self._current_event_list = time_slot.comb_stable = []
                        else:
                            self._current_event_list = time_slot.comb_stable
                        self._eval_rtl_events()

                _run_event_list(time_slot.comb_stable)
                time_slot.comb_stable = DONE

                while True:
                    ret = rtl_sim.eval()
                    if rtl_sim.pending_event_list:
                        if time_slot.mem_stable is None:
                            self._current_event_list = time_slot.mem_stable = []
                        else:
                            self._current_event_list = time_slot.mem_stable
                        self._eval_rtl_events()
                    if ret == END:
                        break
                _run_event_list(time_slot.mem_stable)
                time_slot.mem_stable = DONE

                _run_event_list(time_slot.timeslot_end)
                time_slot.timeslot_end = DONE
                rtl_sim.set_write_only()

        except StopSimumulation:
            pass
        finally:
            rtl_sim.finalize()
        # allow testbenches to peek into the DUT after the simulation has ended
        rtl_sim.read_only_not_write_only = True
예제 #41
0
파일: __init__.py 프로젝트: lceames/dagster
def generator(obj):
    if not inspect.isgenerator(obj):
        raise ParameterCheckError(
            'Not a generator (return value of function that yields) Got {obj} instead'
            .format(obj=obj))
    return obj
예제 #42
0
파일: glob.py 프로젝트: jan-g/psh
 def ff(gen, *args, **kwargs):
     if not inspect.isgenerator(gen):
         gen = (gen,)
     return f(gen, *args, **kwargs)
예제 #43
0
 def __init__(self, gen, func):
     assert inspect.isgenerator(gen), gen
     self.gen = gen
     self.func = func
예제 #44
0
    def evaluate(cls, estimator, dataset, output_dir=None, name=None):
        """Evaluates an `estimator` on the `mode` benchmark dataset.

    Args:
      estimator: `lambda x: mu_x, uncertainty_x`, an uncertainty estimation
        function, which returns `mean_x` and predictive `uncertainty_x`.
      dataset: `tf.data.Dataset`, the dataset on which to perform the evaluation.
      output_dir: (optional) `str`, directory to save figures.
      name: (optional) `str`, the name of the method.
    """
        import inspect
        import tqdm
        import numpy as np
        import tensorflow_datasets as tfds
        import matplotlib.pyplot as plt
        COLORS = plt.rcParams['axes.prop_cycle'].by_key()['color']

        # Containers used for caching performance evaluation
        y_true = list()
        y_pred = list()
        y_uncertainty = list()

        # Convert to NumPy iterator if necessary
        ds = dataset if inspect.isgenerator(dataset) else tfds.as_numpy(
            dataset)

        for x, y in tqdm.tqdm(ds):
            # Sample from probabilistic model
            mean, uncertainty = estimator(x)
            # Cache predictions
            y_true.append(y)
            y_pred.append(mean)
            y_uncertainty.append(uncertainty)

        # Use vectorized NumPy containers
        y_true = np.concatenate(y_true).flatten()
        y_pred = np.concatenate(y_pred).flatten()
        y_uncertainty = np.concatenate(y_uncertainty).flatten()
        fractions = np.asarray([0.5, 0.6, 0.7, 0.8, 0.9, 1.0])

        # Metrics for evaluation
        metrics = zip(["accuracy", "auc"], cls.metrics())

        # evaluate
        evaluation = {
            metric: cls._evaluate_metric(
                y_true,
                y_pred,
                y_uncertainty,
                fractions,
                lambda y_true, y_pred: metric_fn(y_true, y_pred).numpy(),
                name,
            )
            for (metric, metric_fn) in metrics
        }

        # save JSON of evaluation
        if output_dir is not None:
            json_evals = dict((key, val.to_dict(orient='records'))
                              for key, val in evaluation.items())
            os.makedirs(output_dir, exist_ok=True)
            json.dump(json_evals,
                      open(os.path.join(output_dir, 'evaluation.json'), 'w'))

        # print evaluation
        for metric, evals in evaluation.items():
            print(metric)
            print(evals)

        return evaluation
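A hedged usage sketch: any callable mapping a batch x to a (mean, uncertainty) pair can serve as `estimator`; the constant model and the `SomeBenchmark` class name below are hypothetical stand-ins, not part of the benchmark code.

import numpy as np

def dummy_estimator(x):
    x = np.asarray(x)
    mean = np.zeros(len(x))         # constant point prediction
    uncertainty = np.ones(len(x))   # uniform, uninformative uncertainty
    return mean, uncertainty

# evaluation = SomeBenchmark.evaluate(dummy_estimator, dataset, name="constant-baseline")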
예제 #45
0
 def test_start_requests(self):
     spider = self.spider_class('example.com')
     start_requests = spider.start_requests()
     self.assertTrue(inspect.isgenerator(start_requests))
     self.assertEqual(list(start_requests), [])
예제 #46
0
def opt_generator(obj):
    if obj is not None and not inspect.isgenerator(obj):
        raise ParameterCheckError(
            "Not a generator (return value of function that yields) Got {obj} instead"
            .format(obj=obj))
    return obj
예제 #47
0
def test_check_estimator_generate_only():
    all_instance_gen_checks = check_estimator(LogisticRegression(),
                                              generate_only=True)
    assert isgenerator(all_instance_gen_checks)
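With generate_only=True, scikit-learn's check_estimator returns a generator of (estimator, check) pairs instead of running the checks; a minimal consumption sketch, assuming the imports from the test module above and a scikit-learn version that still supports this flag:

for estimator, check in check_estimator(LogisticRegression(), generate_only=True):
    check(estimator)  # run each yielded check individually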
예제 #48
0
def Initializer(init,
                allow_generators=False,
                treat_sequences_as_mappings=True,
                arg_not_specified=None):
    """Standardized processing of Component keyword arguments

    Component keyword arguments accept a number of possible inputs, from
    scalars to dictionaries, to functions (rules) and generators.  This
    function standardizes the processing of keyword arguments and
    returns "initializer classes" that are specialized to the specific
    data type provided.
    """
    if init.__class__ in native_types:
        if init is arg_not_specified:
            return None
        return ConstantInitializer(init)
    elif inspect.isfunction(init) or inspect.ismethod(init):
        if not allow_generators and inspect.isgeneratorfunction(init):
            raise ValueError("Generator functions are not allowed")
        # Historically pyomo.core.base.misc.apply_indexed_rule
        # accepted rules that took only the parent block (even for
        # indexed components).  We will preserve that functionality
        # here.
        _args = inspect.getfullargspec(init)
        _nargs = len(_args.args)
        if inspect.ismethod(init) and init.__self__ is not None:
            # Ignore 'self' for bound instance methods and 'cls' for
            # @classmethods
            _nargs -= 1
        if _nargs == 1 and _args.varargs is None:
            return ScalarCallInitializer(init)
        else:
            return IndexedCallInitializer(init)
    elif isinstance(init, Mapping):
        return ItemInitializer(init)
    elif isinstance(init, Sequence) and not isinstance(init, str):
        if treat_sequences_as_mappings:
            return ItemInitializer(init)
        else:
            return ConstantInitializer(init)
    elif inspect.isgenerator(init) or (
        (hasattr(init, 'next') or hasattr(init, '__next__'))
            and not hasattr(init, '__len__')):
        # This catches generators and iterators (like enumerate()), but
        # skips "reusable" iterators like range() as well as Pyomo
        # (finite) Set objects.
        if not allow_generators:
            raise ValueError("Generators are not allowed")
        # Deepcopying generators is problematic (e.g., it generates a
        # segfault in pypy3 7.3.0).  We will immediately expand the
        # generator into a tuple and then store it as a constant.
        return ConstantInitializer(tuple(init))
    elif isinstance(init, PyomoObject):
        # TODO: Should IndexedComponent inherit from collections.abc.Mapping?
        if init.is_component_type() and init.is_indexed():
            return ItemInitializer(init)
        else:
            return ConstantInitializer(init)
    elif type(init) is functools.partial:
        _args = inspect.getfullargspec(init.func)
        if len(_args.args) - len(init.args) == 1 and _args.varargs is None:
            return ScalarCallInitializer(init)
        else:
            return IndexedCallInitializer(init)
    elif callable(init) and not isinstance(init, type):
        # We assume any callable thing could be a functor; but, we must
        # filter out types, as isfunction() and ismethod() both return
        # False for type.__call__
        return Initializer(
            init.__call__,
            allow_generators=allow_generators,
            treat_sequences_as_mappings=treat_sequences_as_mappings,
            arg_not_specified=arg_not_specified,
        )
    elif isinstance(init, _ndarray):
        if init.size == 1:
            return ConstantInitializer(init[0])
        else:
            return ItemInitializer(init)
    else:
        return ConstantInitializer(init)
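Illustrative dispatch outcomes implied by the branches above; a sketch of what each call would return, not output taken from the Pyomo test suite.

Initializer(5)                     # native scalar             -> ConstantInitializer(5)
Initializer({1: 10, 2: 20})        # Mapping                   -> ItemInitializer
Initializer([10, 20])              # Sequence                  -> ItemInitializer, or ConstantInitializer
                                   #    when treat_sequences_as_mappings=False
Initializer(lambda m, i: i ** 2)   # rule taking (block, index) -> IndexedCallInitializer
Initializer(iter(range(3)),        # generator/iterator: expanded eagerly
            allow_generators=True)  #                          -> ConstantInitializer((0, 1, 2))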
예제 #49
0
 def isgenerator(o):
     if isinstance(o, UnboundMethod):
         o = o._func
     return inspect.isgeneratorfunction(o) or inspect.isgenerator(o)
예제 #50
0
 def __init__(self, result_proxies):
     if not isgenerator(result_proxies):
         result_proxies = iter((result_proxies, ))
     self.result_proxies = result_proxies
     self._iter = None
예제 #51
0
def create_events(pulse_or_list, author=False, server=False, key=False, misp=False, distribution=0, threat_level=4,
                  analysis=2, publish=True, tlp=True, discover_tags=False, to_ids=False, author_tag=False,
                  bulk_tag=None, dedup_titles=False):
    """
    Parse a Pulse or a list of Pulses and add it/them to MISP if server and key are present

    :param pulse_or_list: a Pulse or list of Pulses as returned by `get_pulses`
    :param author: Prepend the author to the Pulse name
    :type author: Boolean
    :param server: MISP server URL
    :param key: MISP API key
    :param misp: MISP connection object
    :type misp: :class:`pymisp.PyMISP`
    :param distribution: distribution of the MISP event (0-4)
    :param threat_level: threat level of the MISP object (1-4)
    :param analysis: analysis state of the MISP object (0-2)
    :param publish: Should the MISP event be published?
    :type publish: Boolean
    :param tlp: Add TLP level tag to event
    :type tlp: Boolean
    :param discover_tags: discover MISP tags from Pulse tags
    :type discover_tags: Boolean
    :param to_ids: Flag pulse attributes as being sent to an IDS
    :type to_ids: Boolean
    :param author_tag: Add the pulse author as an event tag
    :type author_tag: Boolean
    :param bulk_tag: A tag that will be added to all events for categorization (e.g. OTX)
    :type bulk_tag: String
    :param dedup_titles: Search MISP for an existing event title and update it, rather than create a new one
    :type dedup_titles: Boolean
    :return: a dict or a list of dict with the selected attributes
    """
    if not misp and (server and key):
        log.debug("Connection to MISP instance: {}".format(server))
        try:
            misp = pymisp.PyMISP(server, key, ssl=False, out_type='json', debug=False)
        except pymisp.PyMISPError as ex:
            raise ImportException("Cannot connect to MISP instance: {}".format(ex.message))
        except Exception as ex:
            raise ImportException("Cannot connect to MISP instance, unknown exception: {}".format(ex.message))
    if discover_tags:
        def get_tag_name(complete):
            parts = complete.split('=')
            if not len(parts):
                return complete
            last = parts[-1]
            if last[0] == '"':
                last = last[1:]
            if last[-1] == '"':
                last = last[:-1]
            return last.lower()
        raw_tags = misp.get_all_tags()
        tags = dict()
        for tag in raw_tags['Tag']:
            tags[get_tag_name(tag['name'])] = tag['name']
        misp.discovered_tags = tags

    if isinstance(pulse_or_list, (list, tuple)) or inspect.isgenerator(pulse_or_list):
        result_event = []
        for pulse in pulse_or_list:
          try:
            result = create_events(pulse, author=author, server=server, key=key, misp=misp, distribution=distribution,
                              threat_level=threat_level, analysis=analysis, publish=publish, tlp=tlp, to_ids=to_ids, 
                              author_tag=author_tag, bulk_tag=bulk_tag, dedup_titles=dedup_titles)
            result_event.append(result)
          except:
            pass
          
        return result_event


    pulse = pulse_or_list
    if author:
        event_name = pulse['author_name'] + ' | ' + pulse['name']
    else:
        event_name = pulse['name']
    try:
        dt = date_parser.parse(pulse['modified'])
    except (ValueError, OverflowError):
        log.error("Cannot parse Pulse 'modified' date.")
        dt = datetime.utcnow()
    event_date = dt.strftime('%Y-%m-%d')
    log.info("## {name} - {date}".format(name=event_name, date=event_date))
    result_event = {
        'name': event_name,
        'date': event_date,
        'tags': list(),
        'attributes': {
            'hashes': {
                'md5': list(),
                'sha1': list(),
                'sha256': list(),
                'imphash': list(),
                'pehash': list()
            },
            'hostnames': list(),
            'domains': list(),
            'urls': list(),
            'ips': list(),
            'emails': list(),
            'mutexes': list(),
            'references': list(),
            'cves': list()
        },
    }

    if misp:
        if not dedup_titles:
            event = misp.new_event(distribution, threat_level, analysis, event_name, date=event_date, published=publish)
        else:
            event=''
            # Check if username is added to title
            # Build the title
            if author:
                event_name = pulse['author_name'] + ' | ' + pulse['name']
            else:
                event_name = pulse['name']
            
            # Search MISP for the title
            result = misp.search_index(eventinfo=event_name)
            if 'message' in result:
                if result['message'] == "No matches.":
                    event = misp.new_event(distribution, threat_level, analysis, event_name, date=event_date,
                                           published=publish)
            else:
                for evt in result['response']:
                    # If it exists, set 'event' to the event
                    if evt['info'] == event_name:
                        event = {'Event': evt}
                        break
                if event == '':
                    # Event not found, even though search results were returned
                    # Build new event
                    event = misp.new_event(distribution, threat_level, analysis, event_name, date=event_date,
                                           published=publish)
            
        time.sleep(0.2)
        if tlp and 'TLP' in pulse:
            tag = "tlp:{}".format(pulse['TLP'])
            log.info("\t - Adding tag: {}".format(tag))
            tag_event(misp, event, tag)
            result_event['tags'].append(tag)

        if author_tag:
            tag_event(misp, event, pulse['author_name'])

        if bulk_tag is not None:
            tag_event(misp, event, bulk_tag)

    if misp and hasattr(misp, 'discovered_tags') and 'tags' in pulse:
        for pulse_tag in pulse['tags']:
            if pulse_tag.lower() in misp.discovered_tags:
                tag = misp.discovered_tags[pulse_tag.lower()]
                log.info("\t - Adding tag: {}".format(tag))
                tag_event(misp, event, tag)
                result_event['tags'].append(tag)

    if 'references' in pulse:
        for reference in pulse['references']:
            if reference:
                log.info("\t - Adding external analysis link: {}".format(reference))
                if misp:
                    misp.add_named_attribute(event, 'link', reference, category='External analysis')
                result_event['attributes']['references'].append(reference)

    if misp and 'description' in pulse and isinstance(pulse['description'], six.text_type) and pulse['description']:
        log.info("\t - Adding external analysis comment")
        misp.add_named_attribute(event, 'comment', pulse['description'], category='External analysis')

    for ind in pulse['indicators']:
        ind_type = ind['type']
        ind_val = ind['indicator']
        ind_kwargs = {'to_ids': to_ids}

        if 'description' in ind and isinstance(ind['description'], six.text_type) and ind['description']:
            ind_kwargs['comment'] = ind['description']

        if ind_type == 'FileHash-SHA256':
            log.info("\t - Adding SH256 hash: {}".format(ind_val))
            if misp:
                misp.add_hashes(event, sha256=ind_val, **ind_kwargs)
            result_event['attributes']['hashes']['sha256'].append(ind_val)

        elif ind_type == 'FileHash-SHA1':
            log.info("\t - Adding SHA1 hash: {}".format(ind_val))
            if misp:
                misp.add_hashes(event, sha1=ind_val, **ind_kwargs)
            result_event['attributes']['hashes']['sha1'].append(ind_val)

        elif ind_type == 'FileHash-MD5':
            log.info("\t - Adding MD5 hash: {}".format(ind_val))
            if misp:
                misp.add_hashes(event, md5=ind_val, **ind_kwargs)
            result_event['attributes']['hashes']['md5'].append(ind_val)

        elif ind_type == 'URI' or ind_type == 'URL':
            log.info("\t - Adding URL: {}".format(ind_val))
            if misp:
                misp.add_url(event, ind_val, **ind_kwargs)
            result_event['attributes']['urls'].append(ind_val)

        elif ind_type == 'domain':
            log.info("\t - Adding domain: {}".format(ind_val))
            if misp:
                misp.add_domain(event, ind_val, **ind_kwargs)
            result_event['attributes']['domains'].append(ind_val)

        elif ind_type == 'hostname':
            log.info("\t - Adding hostname: {}".format(ind_val))
            if misp:
                misp.add_hostname(event, ind_val, **ind_kwargs)
            result_event['attributes']['hostnames'].append(ind_val)

        elif ind_type == 'IPv4' or ind_type == 'IPv6':
            log.info("\t - Adding ip: {}".format(ind_val))
            if misp:
                misp.add_ipdst(event, ind_val, **ind_kwargs)
            result_event['attributes']['ips'].append(ind_val)

        elif ind_type == 'email':
            log.info("\t - Adding email: {}".format(ind_val))
            if misp:
                misp.add_email_dst(event, ind_val, **ind_kwargs)
            result_event['attributes']['emails'].append(ind_val)

        elif ind_type == 'Mutex':
            log.info("\t - Adding mutex: {}".format(ind_val))
            if misp:
                misp.add_mutex(event, ind_val, **ind_kwargs)
            result_event['attributes']['mutexes'].append(ind_val)

        elif ind_type == 'CVE':
            log.info("\t - Adding CVE: {}".format(ind_val))
            if misp:
                misp.add_named_attribute(event, 'vulnerability', ind_val, category='External analysis', **ind_kwargs)
            result_event['attributes']['cves'].append(ind_val)

        elif ind_type == 'FileHash-IMPHASH':
            log.info("\t - Adding IMPHASH hash: {}".format(ind_val))
            if misp:
                misp.add_named_attribute(event, 'imphash', ind_val, category='Artifacts dropped', **ind_kwargs)
            result_event['attributes']['hashes']['imphash'].append(ind_val)

        elif ind_type == 'FileHash-PEHASH':
            log.info("\t - Adding PEHASH hash: {}".format(ind_val))
            if misp:
                misp.add_named_attribute(event, 'pehash', ind_val, category='Artifacts dropped', **ind_kwargs)
            result_event['attributes']['hashes']['pehash'].append(ind_val)

        else:
            log.warning("Unsupported indicator type: %s" % ind_type)

    if misp and publish:
        event['Event']['published'] = False
        misp.publish(event)
    return result_event
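A hedged usage sketch; the pulse source, server URL, and API key below are placeholders, not values from this project.

pulses = get_pulses(otx_api_key)  # hypothetical call to the get_pulses helper named in the docstring
events = create_events(pulses,
                       server="https://misp.example.org",
                       key="<MISP API key>",
                       distribution=0,
                       threat_level=4,
                       publish=False,
                       dedup_titles=True)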
예제 #52
0
 def __init__(self, stream: Iterable[bytes]):
     self._stream = stream
     self._is_stream_consumed = False
     self._is_generator = inspect.isgenerator(stream)
예제 #53
0
def opt_generator_param(obj, param_name):
    if obj is not None and not inspect.isgenerator(obj):
        raise ParameterCheckError((
            'Param "{name}" is not a generator (return value of function that yields) Got '
            "{obj} instead").format(name=param_name, obj=obj))
    return obj
예제 #54
0
    def complex_to_cloth(self,
                         ctx,
                         cls,
                         inst,
                         cloth,
                         parent,
                         name=None,
                         as_attr=False,
                         **kwargs):
        fti = cls.get_flat_type_info(cls)
        cls_attrs = self.get_cls_attrs(cls)

        # It's actually an odict but that's irrelevant here.
        fti_check = dict(fti.items())
        elt_check = set()

        attrib = self._gen_attrib_dict(inst, fti)
        self._enter_cloth(ctx,
                          cloth,
                          parent,
                          attrib=attrib,
                          method=cls_attrs.method)

        for elt in self._get_elts(cloth, self.MRPC_ID):
            self._actions_to_cloth(ctx, cls, inst, elt)

        if self._is_tagbag(cloth):
            logger_c.debug("%r(%r) IS a tagbag", cloth, cloth.attrib)
            elts = self._get_elts(cloth)
        else:
            logger_c.debug("%r(%r) is NOT a tagbag", cloth, cloth.attrib)
            elts = self._get_outmost_elts(cloth)

        # Check for xmldata after entering the cloth.
        as_data_field = cloth.attrib.get(self.DATA_ATTR_NAME, None)
        if as_data_field is not None:
            self._process_field(ctx, cls, inst, parent, cloth, fti,
                                as_data_field, as_attr, True, fti_check,
                                elt_check, **kwargs)

        for elt in elts:
            for k_attr, as_attr, as_data in (
                    (self.ID_ATTR_NAME, False, False),
                    (self.ATTR_ATTR_NAME, True, False),
                    (self.DATA_ATTR_NAME, False, True)):
                field_name = elt.attrib.get(k_attr, None)
                if field_name is None:
                    continue

                if elt.tag == self.DATA_TAG_NAME:
                    as_data = True

                ret = self._process_field(ctx,
                                          cls,
                                          inst,
                                          parent,
                                          elt,
                                          fti,
                                          field_name,
                                          as_attr=as_attr,
                                          as_data=as_data,
                                          fti_check=fti_check,
                                          elt_check=elt_check,
                                          **kwargs)

                if isgenerator(ret):
                    try:
                        while True:
                            sv2 = (yield)
                            ret.send(sv2)
                    except Break as e:
                        try:
                            ret.throw(e)
                        except StopIteration:
                            pass
                        finally:
                            # cf below
                            if not (as_attr or as_data):
                                break
                else:
                    # this is here so that attribute on complex model doesn't get
                    # mixed with in-line attr inside complex model. if an element
                    # has spyne-id, all other attrs are ignored and are processed
                    # by the object's serializer not its parent.
                    if not (as_attr or as_data):
                        break

        if len(fti_check) > 0:
            logger_s.debug("No element found for the following fields: %r",
                           list(fti_check.keys()))
        if len(elt_check) > 0:
            logger_s.debug(
                "No field found for the following "
                "elements: %r", list(elt_check))
예제 #55
0
 def test_stream(self):
     self.assertTrue(inspect.isgenerator(serialize.csv([])))
예제 #56
0
    def _step(self, value=None, exc=None):
        assert not self.done(), \
            '_step(): already done: {!r}, {!r}, {!r}'.format(self, value, exc)
        if self._must_cancel:
            if not isinstance(exc, futures.CancelledError):
                exc = futures.CancelledError()
            self._must_cancel = False
        coro = self._coro
        self._fut_waiter = None

        self.__class__._current_tasks[self._loop] = self
        # Call either coro.throw(exc) or coro.send(value).
        try:
            if exc is not None:
                result = coro.throw(exc)
            elif value is not None:
                result = coro.send(value)
            else:
                result = next(coro)
        except StopIteration as exc:
            self.set_result(exc.value)
        except futures.CancelledError as exc:
            super().cancel()  # I.e., Future.cancel(self).
        except Exception as exc:
            self.set_exception(exc)
        except BaseException as exc:
            self.set_exception(exc)
            raise
        else:
            if isinstance(result, futures.Future):
                # Yielded Future must come from Future.__iter__().
                if result._blocking:
                    result._blocking = False
                    result.add_done_callback(self._wakeup)
                    self._fut_waiter = result
                    if self._must_cancel:
                        if self._fut_waiter.cancel():
                            self._must_cancel = False
                else:
                    self._loop.call_soon(
                        self._step, None,
                        RuntimeError('yield was used instead of yield from '
                                     'in task {!r} with {!r}'.format(
                                         self, result)))
            elif result is None:
                # Bare yield relinquishes control for one event loop iteration.
                self._loop.call_soon(self._step)
            elif inspect.isgenerator(result):
                # Yielding a generator is just wrong.
                self._loop.call_soon(
                    self._step, None,
                    RuntimeError('yield was used instead of yield from for '
                                 'generator in task {!r} with {}'.format(
                                     self, result)))
            else:
                # Yielding something else is an error.
                self._loop.call_soon(
                    self._step, None,
                    RuntimeError('Task got bad yield: {!r}'.format(result)))
        finally:
            self.__class__._current_tasks.pop(self._loop)
            self = None  # Needed to break cycles when an exception occurs.
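A minimal, self-contained sketch (not asyncio code) of what the isgenerator(result) branch above guards against: with pre-async/await coroutines, `yield from` delegates to a sub-generator, while a bare `yield` hands the generator object itself back to the task.

import inspect

def sub():
    yield 'sub result'

def wrong():
    got = yield sub()        # yields the generator object itself

def right():
    got = yield from sub()   # delegates; the caller sees 'sub result'

print(inspect.isgenerator(next(wrong())))  # True  -> _step would raise "yield was used instead of yield from"
print(inspect.isgenerator(next(right())))  # False -> the delegated value reaches the caller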
예제 #57
0
def iscoroutine(obj):
    """Return True if obj is a coroutine object."""
    return isinstance(obj, CoroWrapper) or inspect.isgenerator(obj)
예제 #58
0
 def coro(*args, **kw):
     res = func(*args, **kw)
     if isinstance(res, futures.Future) or inspect.isgenerator(res):
         res = yield from res
     return res
예제 #59
0
params = sig.parameters  # an ordered mapping of parameter names to Parameter objects
print(params)
print('~' * 30)
print(1, sig.parameters['x'])
print(sig.parameters['x'].annotation)  # annotation info for parameter x
print(2, sig.parameters['y'])
print(sig.parameters['y'].annotation)
print(3, sig.parameters['args'])
print(sig.parameters['args'].annotation)
print(4, sig.parameters['kwargs'])
print(sig.parameters['kwargs'].annotation)

# the inspect module
inspect.isfunction(object)  # is it a function?
inspect.ismethod(object)  # is it a (bound) method?
inspect.isgenerator(object)  # is it a generator object?
inspect.isgeneratorfunction(object)  # is it a generator function?
inspect.isclass(object)  # is it a class?
inspect.ismodule(inspect)  # is it a module?
inspect.isbuiltin(print)  # is it a built-in?

# Parameter objects (as opposed to the parameters mapping)
# read-only objects
# attributes: name, annotation, default, empty (a special class used to mark a missing default or annotation)
# kind: how an argument binds to the parameter, i.e. the parameter's kind, one of:
#  POSITIONAL_ONLY        value must be supplied as a positional argument
#  POSITIONAL_OR_KEYWORD  value may be supplied positionally or by keyword
#  VAR_POSITIONAL         variable positional arguments (*args)
#  VAR_KEYWORD            variable keyword arguments (**kwargs)
#  KEYWORD_ONLY           keyword-only arguments
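A small self-contained demo (my addition) of the parameter kinds listed above:

import inspect

def demo(a, /, b, *args, c, **kwargs):  # positional-only syntax requires Python 3.8+
    pass

for name, param in inspect.signature(demo).parameters.items():
    print(name, param.kind.name)
# a POSITIONAL_ONLY
# b POSITIONAL_OR_KEYWORD
# args VAR_POSITIONAL
# c KEYWORD_ONLY
# kwargs VAR_KEYWORD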
예제 #60
0
    def subserialize(self, ctx, cls, inst, parent, ns=None, name=None):
        attrs = {}
        if self.table_name_attr is not None:
            attrs[self.table_name_attr] = name

        locale = ctx.locale
        with parent.element('table', attrs):
            fti = None
            if issubclass(cls, ComplexModelBase):
                fti = cls.get_flat_type_info(cls)
            if self.produce_header:
                with parent.element('thead'):
                    header_row = E.tr()

                    th = {}
                    if self.header_cell_class is not None:
                        th['class'] = self.header_cell_class

                    # fti is None when the type inside Array is not a ComplexModel.
                    if fti is None:
                        if self.field_name_attr is not None:
                            th[self.field_name_attr] = name
                        header_name = self.translate(cls, ctx.locale, name)
                        header_row.append(E.th(header_name, **th))

                    else:
                        if self.field_name_attr is None:
                            for k, v in fti.items():
                                header_name = self.translate(v, ctx.locale, k)
                                header_row.append(E.th(header_name, **th))

                        else:
                            for k, v in fti.items():
                                th[self.field_name_attr] = k
                                header_name = self.translate(v, ctx.locale, k)
                                header_row.append(E.th(header_name, **th))

                    parent.write(header_row)

            with parent.element('tbody'):
                if cls.Attributes.max_occurs > 1:
                    ret = self.array_to_parent(ctx, cls, inst, parent, name)
                    if isgenerator(ret):
                        try:
                            while True:
                                y = (yield)
                                ret.send(y)
                        except Break as b:
                            try:
                                ret.throw(b)
                            except StopIteration:
                                pass

                else:
                    with parent.element('tr'):
                        ret = self.to_parent(ctx, cls, inst, parent, name)
                        if isgenerator(ret):
                            try:
                                while True:
                                    y = (yield)
                                    ret.send(y)
                            except Break as b:
                                try:
                                    ret.throw(b)
                                except StopIteration:
                                    pass