def test_gunzip_file_to_file(self):
    """A gzip archive expanded with output='file' matches the source text."""
    archive = self.create_test_file(compression='gzip')
    out_path = expand_file(filename=archive, zip_protocol='gzip',
                           output='file')
    with open(out_path, 'rb') as fh:
        self.assertEqual(fh.read(), self.test_text)
def search(self, criteria, limit=0):
    """Search for documents matching criteria

    :param criteria: dictionary defining search terms
    :param limit: curtail length of result set (0 means no limit)

    Returns empty string on no match, document contents on a
    perfect match or with limit=1, and a list of document
    meta-data on multiple matches.

    """
    cursor = self.request.document_store.find(criteria).limit(limit)
    count = cursor.count(with_limit_and_skip=True)
    if count == 0:
        return ''
    elif count == 1:
        # With a single document, return its contents, decompressing
        # first if the stored metadata says it was compressed.
        document = next(cursor)  # builtin next() over Py2-style .next()
        content = self.request.fs.get(document.get('_id'))
        compression = document.get('compression')
        if compression:
            content = expand_file(fileobj=content,
                                  zip_protocol=compression)
        return content.read()
    # Multiple matches: return the raw metadata documents.
    return list(cursor)
def display_reports(context, request): """View callable method for displaying one or more reports If the request included a filename or object id, display the contents of the file. This is picked up in the traversal process, set to context.filename. NB - it may define either the filename used when persisting a report, or the document (Object) ID. Otherwise, display table of metadata about persisted reports. If the context is a subclass of BaseReport, the list will be limited to those reports of like type. """ # If traversal included a filename, display file contents if hasattr(context, 'filename'): try: # Attempt to access 'filename' as the document ID try: oid = ObjectId(context.filename) except InvalidId: raise NoFile content = request.fs.get(oid) document = request.document_store.find_one(oid) compression = document.get('compression') except NoFile: # If the oid was not found, query filename of this type, # if the context provided adequate data try: document = request.document_store.\ find_one({'filename': context.filename, 'report_type': context.report_type}) except AttributeError: document = None if not document: raise NotFound content = request.fs.get(document['_id']) compression = document.get('compression') if compression: content = expand_file(fileobj=content, zip_protocol=compression) return {'document': content.read()} # Otherwise, query for all reports of this type documents = ({ 'filename': doc['filename'], 'uploadDate': doc['uploadDate'], 'length': doc['length'], 'id': doc['_id'] } for doc in request.document_store.find({ 'report_type': context.report_type }) if 'filename' in doc) return {'documents': documents, 'report_type': context.report_type}
def upload(self, filename, filedate=None):
    """Upload the file to the PHEME_http_receiver channel

    :param filename: batch filename to upload
    :param filedate: needs to be defined for files that have been
      archived, as the date is necessary to locate the archived
      file.

    """
    src = os.path.join(self.phinms_receiving_dir, filename)
    if os.path.exists(src):
        # Common case, the file is available in the receiving_dir
        # as it hasn't yet been archived
        self._feed(src, filename)
    else:
        # See if we can find the archived version.  If so, it
        # needs to be expanded before adding to the channelPath
        try:
            archive_dir = archive_by_date(self.phinms_archive_dir,
                                          filedate)
            src = os.path.join(archive_dir, filename + '.gz')
            if os.path.exists(src):
                expanded_file = expand_file(filename=src,
                                            zip_protocol='gzip',
                                            output='file')
                self._feed(expanded_file, filename)
                # remove the expanded_file, providing the source
                # is still intact
                if os.path.exists(src):
                    os.remove(expanded_file)
                else:
                    # NOTE: caught by the handler below (pre-existing
                    # behavior) — logged rather than propagated.
                    raise RuntimeError("Archived batch file gone "
                                       "after expansion")
            else:
                logging.error("Couldn't locate hl7 batch file "
                              "'%s' using date %s", filename,
                              str(filedate))
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt and hid the traceback.  Narrowed
            # to Exception; logging.exception records the traceback.
            logging.exception("failed to locate hl7 batch file "
                              "'%s'", filename)
def upload(self, filename, filedate=None):
    """Upload the file to the PHEME_http_receiver channel

    :param filename: batch filename to upload
    :param filedate: needs to be defined for files that have been
      archived, as the date is necessary to locate the archived
      file.

    """
    src = os.path.join(self.phinms_receiving_dir, filename)
    if os.path.exists(src):
        # Common case, the file is available in the receiving_dir
        # as it hasn't yet been archived
        self._feed(src, filename)
    else:
        # See if we can find the archived version.  If so, it
        # needs to be expanded before adding to the channelPath
        try:
            archive_dir = archive_by_date(self.phinms_archive_dir,
                                          filedate)
            src = os.path.join(archive_dir, filename + '.gz')
            if os.path.exists(src):
                expanded_file = expand_file(filename=src,
                                            zip_protocol='gzip',
                                            output='file')
                self._feed(expanded_file, filename)
                # remove the expanded_file, providing the source
                # is still intact
                if os.path.exists(src):
                    os.remove(expanded_file)
                else:
                    # NOTE: caught by the handler below (pre-existing
                    # behavior) — logged rather than propagated.
                    raise RuntimeError("Archived batch file gone "
                                       "after expansion")
            else:
                logging.error(
                    "Couldn't locate hl7 batch file "
                    "'%s' using date %s", filename, str(filedate))
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt and hid the traceback.  Narrowed
            # to Exception; logging.exception records the traceback.
            logging.exception("failed to locate hl7 batch file "
                              "'%s'", filename)
def test_unzip_file(self):
    """Expanding a zip archive by path yields the original text."""
    archive = self.create_test_file(compression='zip')
    inflated = expand_file(filename=archive, zip_protocol='zip')
    self.assertEqual(inflated.read(), self.test_text)
def test_unzip_stream(self):
    """Expanding an open zip stream yields the original text.

    The input handle is explicitly closed after the read; the
    original version leaked it.
    """
    compressed = self.create_test_file(compression='zip')
    fileobj = open(compressed, 'rb')
    try:
        expanded = expand_file(fileobj=fileobj, zip_protocol='zip')
        # Read before closing: expand_file may wrap fileobj lazily.
        self.assertEqual(expanded.read(), self.test_text)
    finally:
        fileobj.close()
def test_gunzip_file_to_file(self):
    """Expanding a gzip archive with output="file" matches the source text."""
    archive = self.create_test_file(compression="gzip")
    out_path = expand_file(filename=archive, zip_protocol="gzip",
                           output="file")
    with open(out_path, "rb") as fh:
        self.assertEqual(fh.read(), self.test_text)
def test_gunzip_stream(self):
    """Expanding an open gzip stream yields the original text.

    The input handle is explicitly closed after the read; the
    original version leaked it.
    """
    compressed = self.create_test_file(compression="gzip")
    fileobj = open(compressed, "rb")
    try:
        expanded = expand_file(fileobj=fileobj, zip_protocol="gzip")
        # Read before closing: expand_file may wrap fileobj lazily.
        self.assertEqual(expanded.read(), self.test_text)
    finally:
        fileobj.close()
def test_gunzip_file(self):
    """Expanding a gzip archive by path yields the original text."""
    archive = self.create_test_file(compression='gzip')
    inflated = expand_file(filename=archive, zip_protocol='gzip')
    self.assertEqual(inflated.read(), self.test_text)
def test_gunzip_stream(self):
    """Expanding an open gzip stream yields the original text.

    The input handle is explicitly closed after the read; the
    original version leaked it.
    """
    compressed = self.create_test_file(compression='gzip')
    fileobj = open(compressed, 'rb')
    try:
        expanded = expand_file(fileobj=fileobj, zip_protocol='gzip')
        # Read before closing: expand_file may wrap fileobj lazily.
        self.assertEqual(expanded.read(), self.test_text)
    finally:
        fileobj.close()