def __init__(self, *args, **kwargs):
    """Set up the step and make sure ``item_class`` is usable.

    Raises:
        exceptions.ArgumentError: if ``item_class`` is unset, or is not
            a subclass of ``scrapy.Item``.
    """
    super(ItemStep, self).__init__(*args, **kwargs)
    item_class = self.item_class
    # The step cannot build items without a concrete scrapy Item class.
    if not item_class:
        raise exceptions.ArgumentError(
            u'You must define an item_class attribute')
    if not issubclass(item_class, scrapy_item.Item):
        raise exceptions.ArgumentError(
            u'The item_class must be a subclass of scrapy.Item')
def __init__(self, spider, *args, **kwargs):
    """Bind the step to *spider* and apply keyword overrides.

    ``parent_step`` is popped from ``kwargs`` first (defaulting to None).
    Every remaining keyword must name an attribute that already exists on
    the instance/class; unknown keys raise ArgumentError so that typos do
    not silently create new attributes.

    Raises:
        exceptions.ArgumentError: for any keyword that does not match an
            existing attribute.
    """
    self.spider = spider
    self.parent_step = kwargs.pop('parent_step', None)
    # dict.items() instead of the Python-2-only iteritems(): identical
    # behavior on Python 2 (kwargs is small, the copy is negligible) and
    # keeps this module importable on Python 3.
    for key, value in kwargs.items():
        if not hasattr(self, key):
            raise exceptions.ArgumentError(
                u'Attribute {} not allowed'.format(key))
        # No 'else' needed: raise above already exits the loop body.
        setattr(self, key, value)
def __init__(self, *args, **kwargs):
    """Derive a default name and validate ``initial_step`` before delegating.

    Raises:
        exceptions.ArgumentError: if ``initial_step`` is not a subclass
            of ``scrapy_venom.steps.InitStep``.
    """
    if not self.name:
        # Fall back to a slug derived from the concrete class name.
        self.name = utils.slugify_name(self.__class__.__name__)
    if not issubclass(self.initial_step, steps.InitStep):
        raise exceptions.ArgumentError(
            u'The initial_step attribute must'
            u' be a subclass of scrapy_venom.steps.InitStep')
    super(SpiderFlow, self).__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """Initialize the step and require a search URL to be configured.

    Raises:
        exceptions.ArgumentError: if ``search_url`` is unset/falsy.
    """
    super(SearchStep, self).__init__(*args, **kwargs)
    if not self.search_url:
        # Fixed the article in the message: "a search_url", not "an".
        raise exceptions.ArgumentError(
            u'You must define a search_url or get_search_url()')
def __init__(self, *args, **kwargs):
    """Initialize the step and require an initial URL to be configured.

    Raises:
        exceptions.ArgumentError: if ``initial_url`` is unset/falsy.
    """
    super(InitStep, self).__init__(*args, **kwargs)
    # NOTE(review): the message mentions get_request_url() while the
    # attribute checked is initial_url; the sibling SearchStep pairs
    # search_url with get_search_url(). Confirm whether this should say
    # get_initial_url() — message kept verbatim to preserve behavior.
    if not self.initial_url:
        raise exceptions.ArgumentError(
            u'You must define an initial_url or get_request_url()')
def __init__(self, *args, **kwargs):
    """Reject construction when ``http_method`` is not an allowed verb.

    Raises:
        exceptions.ArgumentError: if ``http_method`` is not listed in
            ``ALLOWED_METHODS``.
    """
    super(HttpMixin, self).__init__(*args, **kwargs)
    method = self.http_method
    if method not in self.ALLOWED_METHODS:
        raise exceptions.ArgumentError(u'This http_method is not allowed')