def _setup(self, archive_mode=True, max_blobs=100, archive_some=False):
    """Create an Archiver over a fresh DDFS stub for the tests.

    When *archive_some* is true, pre-populate one processed tag with a
    single batch of blob URLs so archiving behaviour can be exercised.
    """
    ddfs = DDFS()
    if archive_some:
        preloaded = ('/b13.1', '/b13.2', '/b13.3')
        # One batch (list of one blob-tuple) under an already-archived tag.
        ddfs.ddfs['processed:data:chunk:2011-11-13'] = [preloaded]
    self.archiver = Archiver(
        ddfs=ddfs,
        archive_prefix='processed',
        archive_mode=archive_mode,
        max_blobs=max_blobs,
        tags=['incoming:data:chunk'],
    )
def _determine_job_blobs(self):
    """Build and return an Archiver describing this job's input blobs.

    Combines the rule-configured tags/urls with any urls attached to this
    instance, logs what will be processed, and constructs an Archiver from
    the rule's archiving settings.

    Returns:
        Archiver: configured with the resolved tags and urls.
    """
    self._notify(JOB_BLOBS)
    tags = self.job_options.tags
    # Parenthesized for clarity: instance urls (if any) are appended to
    # the rule-configured urls; otherwise only the configured urls are used.
    urls = (self.job_options.urls + self.urls) if self.urls else self.job_options.urls
    if tags or urls:
        # Slice keeps the log line bounded for very large input lists.
        log.info('Processing input: %s...', (tags + urls)[:1000])
    else:
        # Fixed: use lazy %-args like the branch above, instead of eager
        # '%'-interpolation, so formatting is skipped when INFO is disabled.
        log.info('No input available for %s.', self.rule.name)
    archiver = Archiver(
        ddfs=self.ddfs,
        archive_prefix=self.rule.archive_tag_prefix,
        archive_mode=self.rule.archive,
        max_blobs=self.rule.max_blobs,
        tags=tags,
        urls=urls,
        newest_first=self.rule.newest_first,
    )
    return archiver
def _setup(self, tags=()):
    """Create an Archiver over a fresh DDFS stub for the given *tags*."""
    self.archiver = Archiver(
        ddfs=DDFS(),
        archive_prefix='processed',
        archive_mode=True,
        max_blobs=100,
        tags=tags,
    )