def get(self):
    offset = int(request.args.get('offset', 0))
    limit = int(request.args.get('limit', 100))

    documents = []
    for document in DocumentDB.query(
            DocumentDB.all(),
            where=DocumentDB.q.archived == True,
            orderBy=(DocumentDB.q.id, DocumentDB.q.path, DESC(DocumentDB.q.created)),
            start=offset,
            end=offset + limit,
            distinctOn=DocumentDB.q.id,
            distinct=True):
        documents.append({
            'id': document.id,
            'uuid': document.uuid,
            'name': document.name,
            'url': document.url,
            'type': document.type,
            'path': document.path,
            'created': str(document.created),
            'published': document.published
        })

    return Response(response=json.dumps({
        'documents': documents,
        'meta': {
            'offset': offset,
            'limit': limit
        }
    }), status=200, content_type='application/json')
def _get_document_query(self, documents):
    uuids = []
    for item in documents:
        document = Document.selectBy(uuid=item['parent_id']).getOne(None)
        for doc in Document.select(LIKE(Document.q.path, '{0}%'.format(document.path))):
            uuids.append(doc.uuid)

    return IN(Document.q.uuid, uuids)
def get(self, document_id=None):
    documents = []
    for document in DocumentDB.query_as_dict(
            DocumentDB.all(),
            orderBy=DESC(DocumentDB.q.created),
            where=(DocumentDB.q.id == document_id)):
        document['created'] = str(document['created'])
        documents.append(document)

    if not documents:
        return Response(status=404)

    return Response(response=json.dumps({'documents': documents}),
                    content_type='application/json', status=200)
def navigation(document, depth=None):
    """
    Builds a navigation data structure that can be used to show navigation
    on a Page Type.

    :type document: dict
    :param document: A dictionary object that is stored in S3
    :param depth: Valid values are 'all' and 'current_depth'. 'all' shows
        every single page; 'current_depth' only returns documents within
        the current tree depth.
    :rtype: dict
    :return: dict
    """
    if depth not in [NAVIGATION_ALL, NAVIGATION_CURRENT_DEPTH]:
        pass  # depth = depth or NAVIGATION_ALL

    query = ((Document.q.archived == False) &
             (Document.q.show_in_menu == True) &
             (Document.q.published == True))

    if depth == NAVIGATION_CHILDREN and (
            document and not isinstance(document['document'], Undefined)):
        query &= (LIKE(Document.q.path, '{0}%'.format(document['document']['path'])))

    results = []
    parent = {}
    for page in Document.query(Document.all(), where=query,
                               orderBy=(Document.q.id, Document.q.path, DESC(Document.q.created)),
                               distinctOn=Document.q.id, distinct=True):
        current = False
        if document and not isinstance(document['document'], Undefined):
            current = document['document']['path'].startswith(page.path)

        record = {
            'url': '/' if page.url == 'index' else '/' + page.url,
            'menutitle': cgi.escape(page.menutitle),
            'current': current,
            'children': []
        }

        parent.update({page.id: record})
        if page.parent in parent:
            parent[page.parent]['children'].append(record)
        else:
            results.append(record)

    return results
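# Illustrative usage sketch (not part of the original source). The wrapper
# shape of `document` is an assumption based on how navigation() reads
# document['document']['path'], and the depth constants are assumed to be
# module-level values defined alongside navigation().
current_document = {
    'document': {
        'path': '1/4/',       # hypothetical materialised tree path
        'url': 'about/team',  # hypothetical url
    }
}

full_nav = navigation(current_document)                              # whole menu tree
subtree_nav = navigation(current_document, depth=NAVIGATION_CHILDREN)  # current subtree only

for item in full_nav:
    print('{0} current={1} children={2}'.format(
        item['url'], item['current'], len(item['children'])))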
def delete(self, document_id):
    DocumentDB.delete_document(doc_uuid=document_id)
    document = DocumentDB.selectBy(uuid=document_id).getOne(None)

    notification = {
        'title': 'Deleted',
        'message': '{0} has been deleted'.format(document.name.strip()),
        'type': 'success'
    }

    return Response(response=json.dumps({'notify_msg': notification}),
                    content_type='application/json', status=200)
def do_work(self, message=None):
    """
    :type message: boto.sqs.message.Message | None
    :param message:
    :return:
    """
    if not message:
        return

    conn = boto.connect_s3()
    bucket = conn.get_bucket(self.registry.get('files').get('bucket_name'))

    contents = json.loads(message.get_body())
    job_id = str(contents['Message'])
    job = JobDB.selectBy(uuid=job_id).getOne(None)
    if not job:
        log.error('Cannot find job %s', job_id)
        raise InvalidJobError('Invalid Job ID: {0}'.format(job_id))

    job.set(status='running')
    message = job.message

    document = Document.selectBy(uuid=job.message['document']).getOne(None)
    if not document:
        message['reason'] = 'No Document exists'
        job.set(status='failed', message=message)
        raise FatalJobError('No Document Exists')

    record = Document.get_document(document)
    fp = StringIO(S3.get_string(
        self.registry.get('storage').get('bucket_name'),
        record['file']['key']))

    with zipfile.ZipFile(fp, 'r') as zip_handle:
        for name in zip_handle.namelist():
            if name.endswith('/'):
                continue

            key_name = '{0}/{1}'.format(document.uuid, name)
            key = Key(bucket=bucket, name=key_name)
            key.content_type = mimetypes.guess_type(name)[0]
            key.set_contents_from_string(zip_handle.read(name))
            log.info('Uploaded %s', key_name)

    job.set(status='complete')
    if job.message.get('on_complete', {}).get('alter'):
        document.set(**job.message['on_complete']['alter'])

    log.info('Setting job=%s to complete', job_id)
def document_get(document_id):
    record = DocumentDB.selectBy(uuid=document_id).getOne(None)
    if not record:
        # todo handle 404 requests correctly
        return Response(response=json.dumps({}), status=404,
                        content_type='application/json')

    content = DocumentDB.get_document(record)
    content['id'] = record.id
    return Response(response=json.dumps(content), status=200,
                    content_type='application/json')
def put(self, document_id=None):
    try:
        document = DocumentDB.restore_document(document_id)
        return Response(response=json.dumps({
            'notify_msg': {
                'title': 'Restored',
                'message': 'Document {0} has been restored'.format(
                    str(document.name).strip()),
                'type': 'success'
            }
        }), content_type='application/json', status=200)
    except DocumentNotFound:
        return Response(response=json.dumps({
            'notify_msg': {
                'title': 'Cannot be found',
                'message': 'Cannot find document to restore.',
                'type': 'error'
            }
        }), content_type='application/json', status=404)
def test_db_save_no_parent(document_mock, arrow_mock, registry_mock):
    conn_s3 = boto.connect_s3()
    conn_s3.create_bucket('storage-bucket')

    registry = MagicMock()
    registry.get.return_value = {'bucket_name': 'storage-bucket'}
    registry_mock.return_value = registry

    arrow_mock.utcnow.return_value = arrow.get('2015-01-01 00:00:00', 'YYYY-MM-DD HH:mm:ss')

    record = {
        'document': {
            'url': '/',
            'name': 'Document Name',
            'show_in_menu': True
        }
    }

    document = {
        'id': 1,
        'uuid': 'some-id',
        'url': '/',
        'name': 'Document Name',
        'show_in_menu': True,
        'path': None
    }
    document_mock.return_value = MagicMock(**document)

    result = Document.save(record)

    assert result.uuid == document['uuid']
    assert result.url == document['url']
def route(path):
    """
    :param path:
    :return: Response
    """
    urls = [path]
    tmp_str = str(path)
    while tmp_str != '':
        tmp_str = '/'.join(tmp_str.split('/')[0:-1])
        if tmp_str:
            urls.append(tmp_str)

    log.debug('Attempting to get urls %s', urls)

    record = Document.select(
        AND(IN(Document.q.url, urls),
            Document.q.archived == False,
            Document.q.published == True),
        orderBy=(DESC(Document.q.url), DESC(Document.q.created)),
        limit=1).getOne(None)

    if not record and 'index' in urls and 'auth_user' not in session:
        return redirect('/login')

    if not record:
        registry_type = REGISTRY['Error']
        public = registry_type['public']
        document = {'status': 404}
        controller = common.load_class(public['document_module'],
                                       public['document_class'],
                                       document, registry_type)
        return controller.get()

    document = Document.get_document(record)
    registry_type = REGISTRY[record.type]
    page = registry_type['public']

    controller = getattr(
        __import__(page['document_module'], fromlist=page['document_class']),
        page['document_class'])
    record_controller = controller(document, registry_type)
    return getattr(record_controller, str(request.method).lower())()
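# Standalone illustration (not part of the original source) of the prefix
# expansion performed by the while loop in route(): the request path is
# expanded into the list of ancestor URLs that the Document query matches
# against. The path value here is hypothetical.
path = 'products/widgets/blue-widget'
urls = [path]
tmp_str = str(path)
while tmp_str != '':
    tmp_str = '/'.join(tmp_str.split('/')[0:-1])
    if tmp_str:
        urls.append(tmp_str)

# urls is now:
# ['products/widgets/blue-widget', 'products/widgets', 'products']
print(urls)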
def _get_document_parent_url(self, parent):
    """
    :param parent:
    :return:
    :rtype: hermes_cms.db.document.Document | None
    """
    if not parent:
        return None

    return Document.select(Document.q.id == parent,
                           orderBy=DESC(Document.q.created),
                           limit=1).getOne(None)
def put(self, document_id=None):
    document = DocumentDB.selectBy(uuid=document_id).getOne(None)
    if not document:
        return Response(status=404)

    document_data = DocumentDB.get_document(document)
    document_data['id'] = document.id
    document_data['document']['user_id'] = session['auth_user'].get('id', -1)

    document = DocumentDB.save(document_data)

    document_type = document_data['document']['type']
    helper_class = Registry().get('document').get(document_type, {}).get('admin_helper', {})
    if helper_class:
        common.load_class(
            helper_class.get('document_module'),
            helper_class.get('document_class'),
            document
        ).do_work()

    return Response(response=json.dumps(document_data), status=200,
                    content_type='application/json')
def _validate_manifest(documents):
    lookup = {}
    for document in documents:
        if not (document.get('parent_uuid') and document.get('parent_url')):
            lookup[document['uuid']] = document['url']
            continue

        if not lookup.get(document['parent_uuid']):
            parent_document = Document.selectBy(url=document['parent_url']).getOne(None)
            if not parent_document:
                return False

        lookup[document['uuid']] = document['url']

    return True
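# Illustrative sketch (not from the original source) of the manifest
# `documents` list this validator walks. The uuid/url values are
# hypothetical: the first entry is a root with no parent, and the second
# references it via parent_uuid/parent_url, so validation passes without
# needing a database lookup.
documents = [
    {'uuid': 'root-uuid', 'url': 'index',
     'parent_uuid': None, 'parent_url': None},
    {'uuid': 'child-uuid', 'url': 'index/about',
     'parent_uuid': 'root-uuid', 'parent_url': 'index'},
]
assert _validate_manifest(documents)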
def _save_document(self, user_id, contents):
    """
    :param user_id:
    :param contents:
    :return:
    :rtype: arrow.Arrow
    """
    created = arrow.get(contents['document']['created'])
    contents['document']['created'] = created.datetime
    contents['document']['user_id'] = user_id

    document = Document(**contents['document'])
    path = '{0}{1}/'.format(document.path, document.id)

    contents['document']['created'] = str(created)
    contents['document']['path'] = path

    return created
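# Illustrative sketch (not in the original source) of the materialised path
# computed above: the stored path is the inherited parent path plus the new
# record's id. The values here are hypothetical.
parent_path = '1/4/'
new_id = 7
path = '{0}{1}/'.format(parent_path, new_id)
assert path == '1/4/7/'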
def post(self):
    document_data = request.json

    validation = DocumentValidation(data=document_data)
    if not validation.validate():
        return Response(response=json.dumps({'fields': validation.errors()}),
                        status=400, content_type='application/json')

    if 'validate' in request.args:
        return Response(response=json.dumps(document_data), status=200,
                        content_type='application/json')

    # todo we should use Auth class to get this
    document_data['document']['user_id'] = session['auth_user'].get('id', -1)

    document = DocumentDB.save(document_data)

    document_type = document_data['document']['type']
    helper_class = Registry().get('document').get(document_type, {}).get('admin_helper', {})
    if helper_class:
        common.load_class(helper_class.get('document_module'),
                          helper_class.get('document_class'),
                          document).do_work()

    return Response(response=json.dumps({
        'notify_msg': {
            'title': 'Document Modified' if document_data.get('id') else 'Document Added',
            'message': '{0} has been {1}'.format(
                str(document.name).strip(),
                'modified' if document_data.get('id') else 'added'),
            'type': 'success'
        }
    }), status=200, content_type='application/json')
def test_get_document(registry_mock, os_mock):
    conn_s3 = boto.connect_s3()
    bucket = conn_s3.create_bucket('storage-bucket')
    key = Key(bucket=bucket, name='25/7/2015/some-id')
    key.set_contents_from_string(json.dumps({'document': {}}))

    registry = MagicMock()
    registry.get.return_value = {'bucket_name': 'storage-bucket'}
    registry_mock.return_value = registry

    def side_effect(value):
        return value == '/tmp/data'

    os_mock.path.exists = MagicMock(side_effect=side_effect)

    with patch.object(Document, '_write'):
        contents = Document.get_document(MagicMock(**{
            'uuid': 'some-id',
            'created': datetime(2015, 7, 25)
        }))

    assert {'document': {}} == contents
def _update_from_parent(self, contents, parent_url):
    document = Document.selectBy(url=parent_url,
                                 orderBy=DESC(Document.q.created),
                                 limit=1).getOne(None)

    contents['document']['parent'] = document.id
    contents['document']['path'] = document.path
def test_delete_document_exception(document_mock):
    document_mock.selectBy.return_value.getOne.return_value = None

    with pytest.raises(Exception):
        Document.delete_document('')
def do_work(self, message=None):
    """
    {
        "documents": [{
            "parent_id": "uuid"
        }],
        "all_documents": false
    }

    {
        "documents": [],
        "all_documents": true
    }

    uuid as filename

    {
        "document": {},
        "file": {},
    }

    full key name for file

    Manifest file structure

    {
        'documents': [
            {
                'uuid': 'some-uuid',
                'url': 'some-url',
                'parent_url': 'some-parent-url',
                'parent_uuid': 'some-parent-uuid'
            },
            ...
        ],
        'full': bool
    }

    :type message: boto.sqs.message.Message | None
    :param message:
    :return:
    """
    if not message:
        return

    contents = json.loads(message.get_body())
    job_id = str(contents['Message'])
    job = JobDB.selectBy(uuid=job_id).getOne(None)
    if not job:
        log.error('Cannot find job %s', job_id)
        raise InvalidJobError('Invalid Job ID: {0}'.format(job_id))

    job.set(status='running')

    and_ops = [Document.q.archived == False, Document.q.published == True]
    if not job.message.get('all_documents'):
        and_ops.append(self._get_document_query(job.message.get('document')))

    manifest = {
        'documents': [],
        'full': job.message.get('all_documents', False)
    }

    zip_contents = StringIO()
    zip_handle = zipfile.ZipFile(zip_contents, 'w', compression=zipfile.ZIP_DEFLATED)

    for document in Document.query(Document.all(), where=AND(*and_ops)):
        parent_document = self._get_document_parent_url(document.parent)
        manifest['documents'].append({
            'uuid': document.uuid,
            'url': document.url,
            'parent_url': None if not parent_document else parent_document.url,
            'parent_uuid': None if not parent_document else parent_document.uuid
        })

        self._handle_document(document, zip_handle)
        log.info('Adding document uuid=%s to zip archive', str(document.uuid))

    zip_handle.writestr('manifest', json.dumps(manifest))
    zip_handle.close()

    zip_key = Key(self.bucket, job_id)
    zip_key.content_type = 'application/zip'
    zip_key.set_contents_from_string(zip_contents.getvalue())
    log.info("Created ZIP for Job '%s'", str(job_id))

    message = job.message
    message['download'] = {
        'bucket': self.bucket.name,
        'key': job_id
    }
    job.set(status='complete', message=message)
    log.info('Setting job=%s to complete', job_id)
# setup application
sqlhub.threadConnection = connectionForURI(LocalConfig.DATABASE)

User.createTable(ifNotExists=True)
if not User.select().count():
    User(email='*****@*****.**',
         password='******',
         first_name='',
         last_name='',
         permissions='add_document,modify_document,delete_document,'
                     'restore_deleted_document,restore_version_document,'
                     'add_user,modify_user,delete_user,restore_user')

Document.createTable(ifNotExists=True)
Job.createTable(ifNotExists=True)
sqlhub.threadConnection.close()  # close for application to begin

app = create_app(blueprints=[
    {'name': 'hermes_cms.views.main', 'from': 'route'},
    {'name': 'hermes_cms.views.admin', 'from': 'route'}
])


@app.route('/assets/<path:filename>')
def public_static(filename):
    # The body is missing from the original snippet; a typical handler would
    # serve the requested file from the assets directory, for example:
    # return send_from_directory('assets', filename)
    pass