def testDuplicate(self):
        """Use client API to store near duplicates and find both.

        Stores the same file twice (the second time with
        allow_duplicate_filename=True), then verifies a filename search
        returns two documents with distinct ids.
        """
        doc_id = document_store(document=self.tempfile.name,
                                document_type='longitudinal')
        duplicate_id = document_store(document=self.tempfile.name,
                                      document_type='longitudinal',
                                      allow_duplicate_filename=True)
        # Track stored ids -- presumably so fixture teardown can
        # remove them; confirm against the test fixture
        self.doc_list.append(doc_id)
        self.doc_list.append(duplicate_id)

        criteria = {'filename': os.path.basename(self.tempfile.name)}
        results = document_find(criteria)
        # assertEqual/assertNotEqual replace the deprecated
        # assertEquals/assertNotEquals aliases
        self.assertEqual(len(results), 2)
        self.assertNotEqual(results[0]['_id'], results[1]['_id'])
    def testLimit(self):
        """Use client API to store two docs and limit search results.

        With two matching documents stored, an unlimited find returns
        both; a find with limit=1 returns the document contents
        directly rather than a result list.
        """
        doc_id = document_store(document=self.tempfile.name,
                                document_type='longitudinal')
        duplicate_id = document_store(document=self.tempfile.name,
                                      document_type='longitudinal',
                                      allow_duplicate_filename=True)
        self.doc_list.append(doc_id)
        self.doc_list.append(duplicate_id)

        criteria = {'filename': os.path.basename(self.tempfile.name)}
        results = document_find(criteria)
        # assertEqual replaces the deprecated assertEquals alias
        self.assertEqual(len(results), 2)
        result = document_find(criteria, limit=1)
        # single result returns doc contents
        self.assertEqual(result, self.test_text)
    def testDocumentArchiveAPI(self):
        """Store a document via the client API, then fetch its metadata.

        (Deletion is driven through the self.doc_list bookkeeping.)
        """
        doc_id = document_store(self.tempfile.name, 'longitudinal')
        self.assertTrue(doc_id)
        self.doc_list.append(doc_id)

        # Round-trip: confirm the stored document_type survived
        fetched = document_fetch_metadata(doc_id)
        self.assertEqual(fetched.get('report_type', None), 'longitudinal')
    def testFindOne(self):
        """Use client API to find a single doc after storing.

        A find that matches exactly one document returns the document
        contents directly (not a result list).
        """
        doc_id = document_store(document=self.tempfile.name,
                                document_type='longitudinal')
        self.doc_list.append(doc_id)

        # Pull from the store by filename; the stale "see if compressed"
        # comment from a sibling test has been corrected
        filename = os.path.basename(self.tempfile.name)
        criteria = {'filename': filename}
        document = document_find(criteria)
        # assertEqual replaces the deprecated assertEquals alias
        self.assertEqual(document, self.test_text)
 def testUnZippedUpload(self):
     """Store a document without compression and transfer it."""
     scratch = tempfile.NamedTemporaryFile('w')
     scratch.write(lorem_ipsum)
     scratch.seek(0)
     stored_id = document_store(scratch.name,
                                document_type='essence',
                                compress_with=None)
     self.assertTrue(stored_id)
     # Hand the uncompressed document to the transfer agent
     client = PHINMS_client(zip_first=False)
     self.assertFalse(client.transfer_file(stored_id))
 def testUnZippedUpload(self):
     """Store a document with reportable_region metadata and transfer it.

     NOTE(review): the previous docstring ("Can we upload an unzipped
     copy") was copy-pasted from a sibling test; this variant actually
     exercises the metadata path through Distribute_client.
     NOTE(review): the method name duplicates another
     testUnZippedUpload in this file -- if both end up in the same
     class, the later definition shadows the earlier one and only one
     test runs; confirm and rename.
     """
     tmpfile = tempfile.NamedTemporaryFile('w')
     tmpfile.write(lorem_ipsum)
     # seek(0) -- presumably to flush the buffered write before the
     # store reads the file by name; confirm
     tmpfile.seek(0)
     metadata = {'reportable_region': 'wasc', }
     doc_id = document_store(tmpfile.name,
                             document_type='essence',
                             **metadata)
     self.assertTrue(doc_id)
     agent = Distribute_client(zip_first=False)
     # transfer_file is expected to return a falsy value here --
     # presumably indicating no-op/failure is the expected outcome in
     # this test environment; verify against the client API
     self.assertFalse(agent.transfer_file(doc_id))
    def testZipCompression(self):
        """Store a doc compressed with zip; check metadata and content."""
        stored = document_store(document=self.tempfile.name,
                                document_type='longitudinal',
                                compress_with='zip')
        self.doc_list.append(stored)

        # Metadata should record the compression scheme used
        meta = document_fetch_metadata(stored)
        self.assertEqual(meta.get('compression', None), 'zip')

        # The archived filename gains a .zip suffix; fetching by that
        # name still yields the original text
        zipped_name = os.path.basename(self.tempfile.name) + '.zip'
        self.assertEqual(document_find({'filename': zipped_name}),
                         self.test_text)
    def testDocumentArchiveMetaData(self):
        """Persist extra metadata with a document and read it back."""
        now = datetime.now()
        extra = {'include_updates': True, 'jon': 'bon',
                 'report_type': 'foo', 'time_of_day': now}
        doc_id = document_store(self.tempfile.name, 'longitudinal',
                                **extra)
        self.doc_list.append(doc_id)
        self.assertTrue(doc_id)

        # Fetch back and verify each value survived; datetimes are
        # expected to come back as ISO-format strings
        fetched = document_fetch_metadata(doc_id)
        self.assertEqual(fetched['include_updates'], True)
        self.assertEqual(fetched['jon'], 'bon')
        self.assertEqual(fetched['time_of_day'], now.isoformat())
# Exemple #9
# 0
    def _write_report(self, save_report=False):
        """ Write out and potentially store the results.

        Generate results via database queries and write the results to
        self.output.

        :param save_report: If set, persist the document and related
          metadata to the mbds archive.

        returns the document ID, the mbds archive key, if saved
        (implicitly returns None when save_report is False)

        """
        out = self.output
        # Python 2 "print chevron" syntax: write the header line to out
        print >> out, self._header()
        # Prepare whatever state the essence-view query depends on
        self._build_join_tables()
        self._select_diagnosis()
        self._select_vitals()
        cursor = self.access.raw_query(self._select_from_essence_view())
        for row in cursor.fetchall():
            # Each row is the columns up to the diagnosis + the
            # comma separated diagnosis + the rest of the columns
            # and finally with vitals if configured for such
            visit_pk = row[self.diagnosis_column_index]  # yuck, but true
            # Emit one pipe-delimited line; strSansNone presumably
            # stringifies columns while handling None -- confirm its def
            print >> out,\
                '|'.join([strSansNone(column) for column in
                          row[:self.diagnosis_column_index]] +
                         self._diagnosis(visit_pk) +
                         [strSansNone(column) for column in
                          row[self.diagnosis_column_index + 1:]] +
                         self._vitals_for_visit(visit_pk))

        # Close the file and persist to the document archive if
        # requested
        self.output.close()
        if save_report:
            # NOTE(review): reaches into the criteria object's private
            # _crit dict; consider a public accessor.  Drops None values.
            metadata = {
                k: v
                for k, v in self.criteria._crit.items() if v is not None
            }

            # At this point, all documents are of 'essence' type
            return document_store(document=self.output.name,
                                  allow_duplicate_filename=True,
                                  document_type='essence',
                                  **metadata)
    def _write_report(self, save_report=False):
        """ Write out and potentially store the results.

        Generate results via database queries and write the results to
        self.output.

        :param save_report: If set, persist the document and related
          metadata to the mbds archive.

        returns the document ID, the mbds archive key, if saved
        (implicitly returns None when save_report is False)

        NOTE(review): near-duplicate of another _write_report in this
        file; consider consolidating.
        """
        out = self.output
        # Python 2 "print chevron" syntax: write the header line to out
        print >> out, self._header()
        # Prepare whatever state the essence-view query depends on
        self._build_join_tables()
        self._select_diagnosis()
        self._select_vitals()
        cursor = self.access.raw_query(self._select_from_essence_view())
        for row in cursor.fetchall():
            # Each row is the columns up to the diagnosis + the
            # comma separated diagnosis + the rest of the columns
            # and finally with vitals if configured for such
            visit_pk = row[self.diagnosis_column_index]  # yuck, but true
            # Emit one pipe-delimited line; strSansNone presumably
            # stringifies columns while handling None -- confirm its def
            print >> out,\
                '|'.join([strSansNone(column) for column in
                          row[:self.diagnosis_column_index]] +
                         self._diagnosis(visit_pk) +
                         [strSansNone(column) for column in
                          row[self.diagnosis_column_index + 1:]] +
                         self._vitals_for_visit(visit_pk))

        # Close the file and persist to the document archive if
        # requested
        self.output.close()
        if save_report:
            # NOTE(review): reaches into the criteria object's private
            # _crit dict; consider a public accessor.  Drops None values.
            metadata = {k: v for k, v in self.criteria._crit.items() if v
                        is not None}

            # At this point, all documents are of 'essence' type
            return document_store(document=self.output.name,
                                  allow_duplicate_filename=True,
                                  document_type='essence', **metadata)