def _convert_queries(queries_data): queries = [] for query_data in queries_data: try: snippet = query_data['snippets'][0] if 'guid' in snippet['result']['handle']: # Not failed query original_query_id = '%s:%s' % struct.unpack(b"QQ", base64.decodestring(snippet['result']['handle']['guid'])) execution_time = snippet['result']['executionTime'] * 100 if snippet['status'] in ('available', 'expired') else -1 statement = ' '.join([line for line in _get_statement(query_data).strip().splitlines() if not line.strip().startswith('--')]) queries.append((original_query_id, execution_time, statement, snippet.get('database', 'default').strip())) except Exception, e: LOG.warning('Skipping upload of %s: %s' % (query_data['uuid'], e))
def apps(self, filters):
    """List the user's recent query-history documents as job-browser 'app' dicts.

    :param filters: dict; an optional 'states' key holds lowercase api
        statuses used to filter out non-matching entries.
    :return: {'apps': [...], 'total': <number of history docs fetched>}
    """
    # Copied, Document class should have a get_history method (via method or inheritance)
    tasks = Document2.objects.get_history(user=self.user).order_by('-last_modified')[:MAX_JOB_FETCH.get()]
    apps = []
    for app in tasks:
        notebook = Notebook(document=app).get_data()
        is_notification_manager = False  # Supposed SQL Editor query only right now
        if 'snippets' in notebook:
            statement = notebook['description'] if is_notification_manager else _get_statement(notebook)
            history = {
                'name': app.name,
                'id': app.id,
                'uuid': app.uuid,
                'type': app.type,
                # NOTE(review): [:1001] looks like an off-by-one for a 1000-char cap — confirm intent.
                'data': {
                    'statement': statement[:1001] if statement else '',
                    'lastExecuted': notebook['snippets'][0].get('lastExecuted', -1),
                    'status': notebook['snippets'][0]['status'],
                    'parentSavedQueryUuid': notebook.get('parentSavedQueryUuid', '')
                } if notebook['snippets'] else {},
                'absoluteUrl': app.get_absolute_url(),
            }
            api_status = self._api_status(history)
            # Skip entries excluded by the requested state filter.
            if filters.get('states') and api_status.lower() not in filters['states']:
                continue
            # NOTE(review): when notebook['snippets'] is an empty list, history['data']
            # is {} and the ['statement']/['status']/['lastExecuted'] lookups below
            # raise KeyError — presumably snippets is never empty here; verify.
            apps.append({
                'id': 'history-%010d' % history['id'],
                'name': history['data']['statement'],
                'status': history['data']['status'],
                'apiStatus': api_status,
                'type': 'history-%s' % history['type'],
                'user': self.user.username,
                'progress': 50,
                'queue': '',
                'canWrite': True,
                'duration': 1,
                'submitted': history['data']['lastExecuted']
            })
    return {'apps': apps, 'total': len(tasks)}
def _convert_queries(queries_data): queries = [] for query_data in queries_data: try: snippet = query_data['snippets'][0] if 'guid' in snippet['result']['handle']: # Not failed query original_query_id = '%s:%s' % struct.unpack(b"QQ", base64.decodestring(snippet['result']['handle']['guid'])) # unpack_guid uses '%016x:%016x' while optmizer api uses '%s:%s'. execution_time = snippet['result']['executionTime'] * 100 if snippet['status'] in ('available', 'expired') else -1 statement = _clean_query(_get_statement(query_data)) queries.append((original_query_id, execution_time, statement, snippet.get('database', 'default').strip())) except Exception as e: LOG.warning('Skipping upload of %s: %s' % (query_data['uuid'], e)) return queries
def _convert_queries(queries_data): queries = [] for query_data in queries_data: try: original_query_id = '%s:%s' % struct.unpack( b"QQ", base64.decodestring( query_data['snippets'][0]['result']['handle']['guid'])) execution_time = query_data['snippets'][0]['result'][ 'executionTime'] * 100 statement = ' '.join([ line for line in _get_statement(query_data).strip().splitlines() if not line.strip().startswith('--') ]) queries.append( (original_query_id, execution_time, statement, query_data['snippets'][0].get('database', 'default').strip())) except Exception, e: LOG.warning('Skipping upload of %s: %s' % (query_data['uuid'], e))
def run_sync_query(doc_id, user):
    """Independently run a saved query document as the given user.

    :param doc_id: uuid of the Document2 holding the query.
    :param user: User instance, or a username string to be looked up.
    :return: the submitted task dict, with 'uuid' set to the history uuid.
    """
    # TODO: Add INSERT INTO table if persist result
    # TODO: Add variable substitution
    # TODO: Send notifications: done/on failure
    if isinstance(user, str):  # BUG FIX: was `type(user) is str`, which breaks for str subclasses
        lookup = {orm_user_lookup(): user}
        user = User.objects.get(**lookup)
        user = rewrite_user(user)

    query_document = Document2.objects.get_by_uuid(user=user, uuid=doc_id)
    notebook = Notebook(document=query_document).get_data()
    snippet = notebook['snippets'][0]

    editor_type = snippet['type']
    sql = _get_statement(notebook)
    request = MockedDjangoRequest(user=user)
    # Current wall-clock time in epoch milliseconds.
    last_executed = time.mktime(datetime.datetime.now().timetuple()) * 1000

    notebook = make_notebook(
        name='Scheduled query %s at %s' % (query_document.name, last_executed),
        editor_type=editor_type,
        statement=sql,
        status='ready',
        last_executed=last_executed,
        is_task=True
    )

    task = notebook.execute(request, batch=True)
    task['uuid'] = task['history_uuid']

    # Poll until the task leaves the waiting/running states.
    status = check_status(task)
    while status['status'] in ('waiting', 'running'):
        status = check_status(task)
        time.sleep(3)

    return task