Example #1
    def run(self):
        """See `IRunnableJob`."""
        self.blob.file_alias.open()
        parser = FileBugDataParser(self.blob.file_alias)
        parsed_data = parser.parse()

        # We transform the parsed_data object into a dict, because
        # that's easier to store in JSON.
        parsed_data_dict = parsed_data.asDict()

        # If there are attachments, we loop over them and push them to
        # the Librarian, since it's easier than trying to serialize file
        # data to the ApportJob table.
        attachments = parsed_data_dict.get('attachments', [])
        if attachments:
            attachments_to_store = []

            for attachment in attachments:
                file_content = attachment['content'].read()
                file_alias = getUtility(ILibraryFileAliasSet).create(
                    name=attachment['filename'], size=len(file_content),
                    file=StringIO(file_content),
                    contentType=attachment['content_type'])
                attachments_to_store.append({
                    'file_alias_id': file_alias.id,
                    'description': attachment['description']})

            # We cheekily overwrite the 'attachments' value in the
            # parsed_data_dict so as to avoid trying to serialize file
            # objects to JSON.
            parsed_data_dict['attachments'] = attachments_to_store

        metadata = self.metadata
        metadata.update({'processed_data': parsed_data_dict})
        self.metadata = metadata
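
The key step in run() is making the parsed data JSON-safe: file-like attachment contents are pushed to the Librarian and replaced in the dict by plain references (a file alias id plus a description). The standalone sketch below illustrates that pattern with an in-memory store standing in for the Librarian; FakeLibrarian, make_json_safe and the sample data are illustrative assumptions, not Launchpad APIs.

import io
import json


class FakeLibrarian:
    """Stand-in for ILibraryFileAliasSet: stores content, hands back an id."""

    def __init__(self):
        self._files = {}
        self._next_id = 1

    def create(self, name, size, file, contentType):
        file_id = self._next_id
        self._next_id += 1
        self._files[file_id] = (name, contentType, file.read())
        return file_id


def make_json_safe(parsed_data_dict, librarian):
    """Replace file-like attachment content with stored-file references."""
    attachments_to_store = []
    for attachment in parsed_data_dict.get('attachments', []):
        content = attachment['content'].read()
        file_id = librarian.create(
            name=attachment['filename'], size=len(content),
            file=io.BytesIO(content), contentType=attachment['content_type'])
        attachments_to_store.append({
            'file_alias_id': file_id,
            'description': attachment['description']})
    parsed_data_dict['attachments'] = attachments_to_store
    return parsed_data_dict


parsed = {
    'subject': 'Crash in frobnicator',
    'attachments': [{
        'filename': 'Traceback.txt',
        'content': io.BytesIO(b'Traceback (most recent call last): ...'),
        'content_type': 'text/plain',
        'description': 'Traceback'}],
}
print(json.dumps(make_json_safe(parsed, FakeLibrarian())))

Keeping only the file alias id in the stored dict means the job's metadata stays small and serializable, while the actual file content lives in the Librarian; this is the trade-off the comment in run() alludes to when it says serializing file data to the ApportJob table would be harder.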
Example #2
    def test_run(self):
        # IProcessApportBlobJobSource.run() extracts salient data from an
        # Apport BLOB and stores it in the job's metadata attribute.
        job = getUtility(IProcessApportBlobJobSource).create(self.blob)
        job.run()
        transaction.commit()

        # Once the job has been run, its metadata will contain a dict
        # called processed_data, which will contain the data parsed from
        # the BLOB.
        processed_data = job.metadata.get('processed_data', None)
        self.assertIsNotNone(
            processed_data,
            "processed_data should not be None after the job has run.")

        # The items in the processed_data dict represent the salient
        # information parsed out of the BLOB. We can use our
        # FileBugDataParser to check that the items recorded in the
        # processed_data dict are correct.
        self.blob.file_alias.open()
        data_parser = FileBugDataParser(self.blob.file_alias)
        filebug_data = data_parser.parse()
        self._assertFileBugDataMatchesDict(filebug_data, processed_data)
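
The helper _assertFileBugDataMatchesDict is referenced but not shown in this example. A rough sketch of the comparison such a helper might perform, assuming FileBugData exposes attributes like initial_summary, initial_tags and attachments that mirror the keys of the stored dict (the attribute names here are an assumption, not something the snippet confirms):

def assert_filebug_data_matches_dict(filebug_data, processed_data):
    """Sketch: compare freshly parsed FileBugData against the stored dict."""
    assert filebug_data.initial_summary == processed_data['initial_summary']
    assert filebug_data.initial_tags == processed_data['initial_tags']
    # The stored attachments hold Librarian references rather than file
    # objects, so only metadata such as the description can be compared.
    parsed_attachments = filebug_data.attachments
    stored_attachments = processed_data['attachments']
    assert len(parsed_attachments) == len(stored_attachments)
    for parsed_item, stored_item in zip(parsed_attachments, stored_attachments):
        assert parsed_item['description'] == stored_item['description']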