def activate_execution(self, uid: str, is_alive: bool = True) -> Response:
    """
    Registers a fake running command execution in the run status
    tracking structures so tests can observe and interact with it.
    """
    response = Response(identifier=uid)

    fake_thread = MagicMock()
    fake_thread.uid = uid
    # Mimic threading.Thread.is_alive with a zero-argument callable.
    fake_thread.is_alive = lambda: is_alive
    fake_thread.is_running = is_alive
    fake_thread.logs = []
    response.thread = fake_thread

    # Clear any previous registration for this uid before re-adding.
    self.deactivate_execution(uid)
    self.active_responses[uid] = response
    server_runner.active_execution_responses[uid] = response
    return response
def test_warn(self):
    """Should record a warning entry when warn is called."""
    response = Response()
    response.warn('FAKE_WARN', key='VALUE')

    warnings = response.warnings
    self.assertEqual(1, len(warnings))
    self.assertEqual('FAKE_WARN', warnings[0].message)
    self.assertEqual('VALUE', warnings[0].data['key'])
def test_echo_parented(self):
    """Should delegate echo calls to the consuming parent response."""
    child = Response()
    parent = Response().consume(child)

    mocked_echo = mock.MagicMock()
    with patch.object(parent, 'echo', mocked_echo):
        child.echo()
    mocked_echo.assert_any_call()
def sync_source_file():
    """
    Flask endpoint that writes an uploaded chunk of a source file into
    the currently open project's directory tree.

    Request arguments:
      - relative_path: file path relative to the project root (required).
      - chunk: packed chunk data to write (required).
      - index: chunk index; 0 starts a new file, >0 appends.
      - sync_time: timestamp stored in the sync status.
      - location: 'project' (default) or 'shared' library location.
      - offset: byte offset at which the chunk is written.
    """
    r = Response()
    args = arguments.from_request()
    relative_path = args.get('relative_path')
    chunk = args.get('chunk')
    index = args.get('index', 0)
    sync_time = args.get('sync_time', -1)
    location = args.get('location', 'project')
    offset = args.get('offset', 0)

    if None in [relative_path, chunk]:
        return r.fail(
            code='INVALID_ARGS',
            message='Missing or invalid arguments'
        ).response.flask_serialize()

    project = cd.project.get_internal_project()
    if not project:
        return r.fail(
            code='NO_OPEN_PROJECT',
            message='No project is open. Unable to sync'
        ).response.flask_serialize()

    # Normalize Windows-style separators before splitting into parts.
    parts = relative_path.replace('\\', '/').strip('/').split('/')

    root_directory = project.source_directory
    if location == 'shared':
        # Shared library files live in a sibling folder of the project.
        root_directory = os.path.realpath(os.path.join(
            root_directory, '..', '__cauldron_shared_libs'
        ))

    file_path = os.path.join(root_directory, *parts)
    parent_directory = os.path.dirname(file_path)

    # exist_ok avoids the race between a separate existence check and
    # directory creation when multiple chunks arrive concurrently.
    os.makedirs(parent_directory, exist_ok=True)

    sync.io.write_file_chunk(
        file_path=file_path,
        packed_chunk=chunk,
        append=index > 0,
        offset=offset
    )
    sync_status.update({}, time=sync_time)

    return r.notify(
        kind='SYNCED',
        code='SAVED_CHUNK',
        message='File chunk {} {}'.format(offset, file_path)
    ).console().response.flask_serialize()
def test_default_message_response(self, execute: MagicMock):
    """Should handle a ResponseMessage returned from commander.execute."""
    response = Response()
    execute.return_value = response.notify()

    command_shell = shell.CauldronShell()
    command_shell.default('run something')
    self.assertEqual(response, command_shell.last_response)
def sync_open_project():
    """
    Opens a remote project for synchronization by writing its definition
    into a fresh temporary container folder and delegating to the
    project opener. Responds with the opened project's serialized state.
    """
    r = Response()
    args = arguments.from_request()
    definition = args.get('definition')
    source_directory = args.get('source_directory')

    if None in [definition, source_directory]:
        return r.fail(
            code='INVALID_ARGS',
            message='Invalid arguments. Unable to open project'
        ).response.flask_serialize()

    # Remove any shared library folders from the library list. These will be
    # stored using the single shared library folder instead
    definition['library_folders'] = [
        lf
        for lf in definition.get('library_folders', ['libs'])
        if lf and not lf.startswith('..')
    ]
    definition['library_folders'] += ['../__cauldron_shared_libs']

    # Scaffold the container folder with sibling shared-library and
    # download directories next to the project folder itself.
    container_folder = tempfile.mkdtemp(prefix='cd-remote-project-')
    os.makedirs(os.path.join(container_folder, '__cauldron_shared_libs'))
    os.makedirs(os.path.join(container_folder, '__cauldron_downloads'))

    project_folder = os.path.join(container_folder, definition['name'])
    os.makedirs(project_folder)
    definition_path = os.path.join(project_folder, 'cauldron.json')
    writer.write_json_file(definition_path, definition)

    # Reset sync state before opening the newly written project.
    sync_status.update({}, time=-1, project=None)

    open_response = project_opener.open_project(project_folder, forget=True)
    open_response.join()

    project = cd.project.get_internal_project()
    project.remote_source_directory = source_directory

    sync_status.update({}, time=-1, project=project)

    return r.consume(open_response).update(
        source_directory=project.source_directory,
        project=project.kernel_serialize()
    ).notify(
        kind='OPENED',
        code='PROJECT_OPENED',
        message='Project opened'
    ).response.flask_serialize()
def server_status():
    """
    Ping endpoint that reports server connectivity information.

    :return:
        A flask JSON response containing the server data.
    """
    response = Response()
    response.update(success=True, server=server_runner.get_server_data())
    response.notify(
        kind='CONNECTED',
        code='RECEIVED_PING',
        message='Established remote connection'
    ).console(whitespace=1)
    return flask.jsonify(response.serialize())
def touch_project():
    """
    Touches the open project to trigger refreshing its cauldron.json
    state, failing the response when no project is open.
    """
    response = Response()
    project = cd.project.get_internal_project()

    if not project:
        response.fail(
            code='NO_PROJECT',
            message='No open project to refresh'
        )
    else:
        project.refresh()

    return response.update(
        sync_time=sync_status.get('time', 0)
    ).flask_serialize()
def project_status():
    """Reports the status of the currently opened project, if any."""
    response = Response()
    try:
        project = cauldron.project.get_internal_project()
        current_status = project.status() if project else None
        response.update(project=current_status)
    except Exception as err:
        response.fail(
            code='PROJECT_STATUS_ERROR',
            message='Unable to check status of currently opened project',
            error=err
        )

    response.update(server=server_runner.get_server_data())
    return flask.jsonify(response.serialize())
def fetch_synchronize_status():
    """
    Returns the synchronization status information for the currently
    opened project.
    """
    response = Response()
    project = cd.project.get_internal_project()

    if not project:
        response.fail(
            code='NO_PROJECT',
            message='No open project on which to retrieve status'
        )
        return response.flask_serialize()

    # Load the on-disk project definition to include in the payload.
    with open(project.source_path, 'r') as f:
        definition = json.load(f)

    response.update(
        sync_time=sync_status.get('time', 0),
        source_directory=project.source_directory,
        remote_source_directory=project.remote_source_directory,
        status=status.of_project(project),
        definition=definition
    )
    return response.flask_serialize()
def test_update_parented(self):
    """Should propagate update data to the consuming parent."""
    inner = Response()
    outer = Response()
    outer.consume(inner)

    inner.update(banana='orange')
    self.assertEqual('orange', outer.data['banana'])
def test_echo(self):
    """Should include warnings and errors in the echoed output."""
    response = Response()
    response.warn('WARNING', something=[1, 2, 3], value=False)
    response.fail('ERROR')

    echoed = response.echo()
    self.assertGreater(echoed.find('WARNING'), 0)
    self.assertGreater(echoed.find('ERROR'), 0)
def test_end_parented(self):
    """Should end the consuming parent when the child is ended."""
    inner = Response()
    outer = Response()
    outer.consume(inner)

    inner.end()
    self.assertTrue(outer.ended)
def clean_step(step_name: str):
    """
    Marks the specified step as not dirty within the currently opened
    project and returns the serialized project state.
    """
    response = Response()
    project = cauldron.project.get_internal_project()

    if not project:
        failed = response.fail(
            code='PROJECT_FETCH_ERROR',
            message='No project is currently open'
        )
        return flask.jsonify(failed.response.serialize())

    step = project.get_step(step_name)
    if not step:
        failed = response.fail(
            code='STEP_FETCH_ERROR',
            message='No such step "{}" found'.format(step_name)
        )
        return flask.jsonify(failed.response.serialize())

    step.mark_dirty(False, force=True)
    updated = response.update(project=project.kernel_serialize())
    return flask.jsonify(updated.response.serialize())
def test_grandparent(self):
    """Should attach to the topmost ancestor when already parented."""
    child = Response()
    middle = Response()
    top = Response()

    top.consume(middle)
    middle.consume(child)
    self.assertEqual(top, child.parent)
def test_notify_parented(self):
    """Should route notifications through the consuming parent."""
    inner = Response()
    outer = Response()
    outer.consume(inner)

    inner.notify('SUCCESS', 'Good Stuff', 'GO-CAULDRON')

    self.assertEqual(1, len(outer.messages))
    message = outer.messages[0]
    self.assertEqual('GO-CAULDRON', message.code)
    self.assertEqual('SUCCESS', message.kind)
    self.assertEqual('Good Stuff', message.message)
def sync_create_project():
    """
    Creates a new remote-synced project inside a fresh temporary
    container folder and responds with its definition and serialized
    project state.
    """
    r = Response()
    args = arguments.from_request()
    name = args.get('name')
    remote_source_directory = args.get('source_directory')
    optional_args = args.get('args', {})

    if None in [name, remote_source_directory]:
        return r.fail(
            code='INVALID_ARGS',
            message='Invalid arguments. Unable to create project'
        ).response.flask_serialize()

    # Scaffold shared-library and download folders alongside the project.
    container_folder = tempfile.mkdtemp(prefix='cd-remote-project-')
    os.makedirs(os.path.join(container_folder, '__cauldron_shared_libs'))
    os.makedirs(os.path.join(container_folder, '__cauldron_downloads'))

    # Delegate the actual creation to the CLI create command.
    r.consume(create_command.execute(
        cli.make_command_context('create'),
        project_name=name,
        directory=container_folder,
        forget=True,
        **optional_args
    ))
    if r.failed:
        return r.flask_serialize()

    # Reset sync state before binding it to the new project.
    sync_status.update({}, time=-1, project=None)

    project = cd.project.get_internal_project()
    project.remote_source_directory = remote_source_directory

    with open(project.source_path, 'r') as f:
        definition = json.load(f)

    sync_status.update({}, time=-1, project=project)

    return r.update(
        source_directory=project.source_directory,
        remote_source_directory=remote_source_directory,
        definition=definition,
        project=project.kernel_serialize()
    ).notify(
        kind='SUCCESS',
        code='PROJECT_CREATED',
        message='Project created'
    ).response.flask_serialize()
def test_logging(self):
    """Should preserve logged messages across serialization round-trip."""
    response = Response()
    message = response.notify(
        kind='TEST',
        code='TEST_MESSAGE',
        message='This is a test',
    )
    message.console_header('Harold').console('Holly').console_raw('Handy')

    log_output = response.get_notification_log()
    for expected in ('Harold', 'Holly', 'Handy'):
        self.assertGreater(log_output.find(expected), -1)

    restored = Response.deserialize(response.serialize())
    self.assertEqual(log_output, restored.get_notification_log())
def send(
        file_path: str,
        relative_path: str,
        file_kind: str = '',
        chunk_size: int = sync.io.DEFAULT_CHUNK_SIZE,
        remote_connection: 'environ.RemoteConnection' = None,
        newer_than: float = 0,
        progress_callback=None,
        sync_time: float = -1
) -> Response:
    """
    Sends the contents of the specified file in chunks to the remote
    connection, reporting progress through the optional callback.

    :param file_path: Absolute path of the local file to send.
    :param relative_path: Path of the file relative to the project root.
    :param file_kind: Optional label for the kind of file being sent.
    :param chunk_size: Maximum number of bytes per transmitted chunk.
    :param remote_connection: Connection over which chunks are sent.
    :param newer_than: Files not modified after this timestamp are skipped.
    :param progress_callback: Callable invoked with each progress message.
    :param sync_time: Sync timestamp; defaults to the current time when < 0.
    """
    response = Response()
    sync_time = time.time() if sync_time < 0 else sync_time
    callback = (progress_callback if progress_callback else (lambda x: x))

    modified_time = os.path.getmtime(file_path)
    if modified_time < newer_than:
        # Nothing to do when the file has not changed since newer_than.
        callback(
            response.notify(
                kind='SKIP',
                code='NOT_MODIFIED',
                message='No changes detected to "{}"'.format(relative_path),
                data=dict(file_path=file_path, relative_path=relative_path)))
        return response

    chunk_count = sync.io.get_file_chunk_count(file_path, chunk_size)
    chunks = sync.io.read_file_chunks(file_path, chunk_size)

    def get_progress(complete_count: int = 0) -> typing.Tuple[int, str]:
        """Returns percent complete and its zero-padded display string."""
        if chunk_count < 2:
            # Single-chunk transfers show no percentage display.
            return 0, ''
        progress_value = int(100 * complete_count / chunk_count)
        display = '({}%)'.format('{}'.format(progress_value).zfill(3))
        return progress_value, display

    progress_display = get_progress(0)[-1]
    callback(
        response.notify(
            kind='SYNC',
            code='STARTED',
            message='{} "{}"'.format(progress_display, relative_path),
            data=dict(
                progress=0,
                file_path=file_path,
                relative_path=relative_path)))

    for index, chunk in enumerate(chunks):
        response = send_chunk(
            chunk=chunk,
            index=index,
            relative_path=relative_path,
            file_kind=file_kind,
            remote_connection=remote_connection,
            sync_time=sync_time)
        if response.failed:
            # Abort on the first failed chunk and surface that response.
            return response

        progress, progress_display = get_progress(index + 1)
        if chunk_count > 1 and (index + 1) < chunk_count:
            # Emit intermediate progress only while more chunks remain.
            callback(
                response.notify(
                    kind='SYNC',
                    code='PROGRESS',
                    message='{} "{}"'.format(progress_display, relative_path),
                    data=dict(
                        progress=0.01 * progress,
                        chunk_count=chunk_count,
                        file_path=file_path,
                        relative_path=relative_path)))

    progress, progress_display = get_progress(chunk_count)
    callback(
        response.notify(
            kind='SYNC',
            code='DONE',
            message='{} "{}"'.format(progress_display, relative_path),
            data=dict(
                chunk_count=chunk_count,
                progress=progress,
                file_path=file_path,
                relative_path=relative_path)))
    return response
def test_join_nothing(self):
    """Should return False without doing anything when no thread exists."""
    response = Response()
    self.assertFalse(response.join())
def run_status(uid: str):
    """
    Reports the execution status of a command run.

    :param uid:
        Unique identifier of the command execution being checked.
    :return:
        A flask JSON response whose run_status is one of 'unknown',
        'running' or 'complete'.
    """
    try:
        r = server_runner.active_execution_responses.get(uid)

        if not r:
            # Unknown uid: include the list of active uids so the caller
            # can reconcile its tracking state.
            return flask.jsonify(
                Response().update(
                    run_log=[],
                    run_active_uids=list(
                        server_runner.active_execution_responses.keys()
                    ),
                    run_status='unknown',
                    run_multiple_updates=True,
                    run_uid=uid,
                    server=server_runner.get_server_data()
                ).serialize()
            )

        if r.thread.is_running:
            try:
                step_changes = server_runner.get_running_step_changes(True)
            except Exception:
                # Step changes are best-effort while the thread is running.
                step_changes = None

            return flask.jsonify(
                Response()
                .update(
                    run_log=r.get_thread_log(),
                    run_status='running',
                    run_multiple_updates=True,
                    run_uid=uid,
                    step_changes=step_changes,
                    server=server_runner.get_server_data()
                ).serialize()
            )

        # The run has finished: remove it from the active registry and
        # return the final accumulated result.
        del server_runner.active_execution_responses[uid]

        return flask.jsonify(
            r.update(
                run_log=r.get_thread_log(),
                run_status='complete',
                run_multiple_updates=True,
                run_uid=r.thread.uid
            ).serialize()
        )
    except Exception as err:
        return flask.jsonify(
            Response().fail(
                code='COMMAND_RUN_STATUS_FAILURE',
                message='Unable to check command execution status',
                error=err,
                run_uid=uid
            ).response.serialize()
        )
def project_data():
    """Returns kernel-serialized data for the open project, if any."""
    response = Response()
    try:
        project = cauldron.project.get_internal_project()
        serialized = project.kernel_serialize() if project else None
        response.update(project=serialized)
    except Exception as err:
        response.fail(
            code='PROJECT_FETCH_ERROR',
            message='Unable to check status of currently opened project',
            error=err
        )

    response.update(server=server_runner.get_server_data())
    return flask.jsonify(response.serialize())
mime_type='{}'.format(request.mimetype), request_data='{}'.format(request.data), request_args=request_args) return name, args def execute(async: bool = False): """ :param async: Whether or not to allow asynchronous command execution that returns before the command is complete with a run_uid that can be used to track the continued execution of the command until completion. """ r = Response() r.update(server=server_runner.get_server_data()) cmd, args = parse_command_args(r) if r.failed: return flask.jsonify(r.serialize()) try: commander.execute(cmd, args, r) if not r.thread: return flask.jsonify(r.serialize()) if not async: r.thread.join() server_runner.active_execution_responses[r.thread.uid] = r
def mock_send_request(*args, **kwargs):
    """Fake send_request returning the remote source directory."""
    result = Response().update(remote_source_directory=directory)
    return result.response
def test_self_consumption(self):
    """Should not consume itself and cause a regression error."""
    response = Response()
    # Consuming oneself must be a safe no-op.
    response.consume(response)
def test_consume_nothing(self):
    """Should abort consuming when given nothing to consume."""
    response = Response()
    # A None argument must be tolerated without raising.
    response.consume(None)
def test_self_consumption(self):
    """Should not consume itself and cause regression error."""
    subject = Response()
    subject.consume(subject)
def execute(asynchronous: bool = False):
    """
    Executes a parsed command from the request and returns its result.

    :param asynchronous:
        Whether or not to allow asynchronous command execution that
        returns before the command is complete with a run_uid that can
        be used to track the continued execution of the command until
        completion.
    """
    r = Response()
    r.update(server=server_runner.get_server_data())
    cmd, args = parse_command_args(r)

    if r.failed:
        return flask.jsonify(r.serialize())

    try:
        commander.execute(cmd, args, r)

        if not r.thread:
            # Commands without a thread complete synchronously.
            return flask.jsonify(r.serialize())

        if not asynchronous:
            # Block until the command thread finishes entirely.
            r.thread.join()

        server_runner.active_execution_responses[r.thread.uid] = r

        # Watch the thread for a bit to see if the command finishes in
        # that time. If it does the command result will be returned directly
        # to the caller. Otherwise, a waiting command will be issued
        count = 0
        while count < 5:
            count += 1
            r.thread.join(0.25)
            if not r.thread.is_alive():
                break

        if r.thread.is_alive():
            # Still running: hand back a run_uid so the caller can poll.
            return flask.jsonify(
                Response()
                .update(
                    run_log=r.get_thread_log(),
                    run_status='running',
                    run_uid=r.thread.uid,
                    step_changes=server_runner.get_running_step_changes(True),
                    server=server_runner.get_server_data()
                )
                .serialize()
            )

        # Finished within the watch window: deregister and report complete.
        del server_runner.active_execution_responses[r.thread.uid]

        r.update(
            run_log=r.get_thread_log(),
            run_status='complete',
            run_multiple_updates=False,
            run_uid=r.thread.uid
        )
    except Exception as err:
        r.fail(
            code='KERNEL_EXECUTION_FAILURE',
            message='Unable to execute command',
            cmd=cmd,
            args=args,
            error=err
        )

    return flask.jsonify(r.serialize())
def execute(exec_async: bool = False):
    """
    Executes a parsed command from the request and returns its result.

    :param exec_async:
        Whether or not to allow asynchronous command execution that
        returns before the command is complete with a run_uid that can
        be used to track the continued execution of the command until
        completion.
    """
    r = Response()
    r.update(server=server_runner.get_server_data())
    cmd, args = parse_command_args(r)

    if r.failed:
        return flask.jsonify(r.serialize())

    try:
        commander.execute(cmd, args, r)

        if not r.thread:
            # Commands without a thread complete synchronously.
            return flask.jsonify(r.serialize())

        if not exec_async:
            # Block until the command thread finishes entirely.
            r.thread.join()

        server_runner.active_execution_responses[r.thread.uid] = r

        # Watch the thread for a bit to see if the command finishes in
        # that time. If it does the command result will be returned directly
        # to the caller. Otherwise, a waiting command will be issued
        count = 0
        while count < 5:
            count += 1
            r.thread.join(0.25)
            if not r.thread.is_alive():
                break

        if r.thread.is_alive():
            # Still running: hand back a run_uid so the caller can poll.
            return flask.jsonify(Response().update(
                run_log=r.get_thread_log(),
                run_status='running',
                run_uid=r.thread.uid,
                step_changes=server_runner.get_running_step_changes(True),
                server=server_runner.get_server_data()).serialize())

        # Finished within the watch window: deregister and report complete.
        del server_runner.active_execution_responses[r.thread.uid]

        r.update(
            run_log=r.get_thread_log(),
            run_status='complete',
            run_multiple_updates=False,
            run_uid=r.thread.uid)
    except Exception as err:
        r.fail(
            code='KERNEL_EXECUTION_FAILURE',
            message='Unable to execute command',
            cmd=cmd,
            args=args,
            error=err)

    return flask.jsonify(r.serialize())
def test_join_thread(self):
    """Should join the attached thread and return True."""
    response = Response()
    response.thread = MagicMock()

    self.assertTrue(response.join())
    self.assertEqual(1, response.thread.join.call_count)
def test_get_response(self):
    """Should return the owning response from a response message."""
    response = Response()
    message = response.fail()
    self.assertEqual(response, message.get_response())
def test_debug_echo(self):
    """Should echo debug information without raising."""
    response = Response()
    response.debug_echo()
def mock_send_request(*args, **kwargs):
    """Fake send_request that always fails with a fake error code."""
    failed = Response().fail(code='FAKE-ERROR')
    return failed.response
def execute_replacement(name, args, response: Response):
    """Test stand-in for command execution that attaches the thread."""
    # 'thread' is captured from the enclosing (test) scope.
    response.thread = thread
def test_get_response(self):
    """Should get the response back from the response message."""
    r = Response()
    self.assertEqual(r.fail().get_response(), r)
def send(
        file_path: str,
        relative_path: str,
        file_kind: str = '',
        chunk_size: int = sync.io.DEFAULT_CHUNK_SIZE,
        remote_connection: 'environ.RemoteConnection' = None,
        newer_than: float = 0,
        progress_callback=None,
        sync_time: float = -1
) -> Response:
    """
    Sends the contents of the specified file in chunks to the remote
    connection, reporting progress through the optional callback.

    :param file_path: Absolute path of the local file to send.
    :param relative_path: Path of the file relative to the project root.
    :param file_kind: Optional label for the kind of file being sent.
    :param chunk_size: Maximum number of bytes per transmitted chunk.
    :param remote_connection: Connection over which chunks are sent.
    :param newer_than: Files not modified after this timestamp are skipped.
    :param progress_callback: Callable invoked with each progress message.
    :param sync_time: Sync timestamp; defaults to the current time when < 0.
    """
    response = Response()
    sync_time = time.time() if sync_time < 0 else sync_time
    callback = (
        progress_callback
        if progress_callback else
        (lambda x: x)
    )

    modified_time = os.path.getmtime(file_path)
    if modified_time < newer_than:
        # Nothing to do when the file has not changed since newer_than.
        callback(response.notify(
            kind='SKIP',
            code='NOT_MODIFIED',
            message='No changes detected to "{}"'.format(relative_path),
            data=dict(
                file_path=file_path,
                relative_path=relative_path
            )
        ))
        return response

    chunk_count = sync.io.get_file_chunk_count(file_path, chunk_size)
    chunks = sync.io.read_file_chunks(file_path, chunk_size)

    def get_progress(complete_count: int = 0) -> typing.Tuple[int, str]:
        """Returns percent complete and its zero-padded display string."""
        if chunk_count < 2:
            # Single-chunk transfers show no percentage display.
            return 0, ''
        progress_value = int(100 * complete_count / chunk_count)
        display = '({}%)'.format('{}'.format(progress_value).zfill(3))
        return progress_value, display

    progress_display = get_progress(0)[-1]
    callback(response.notify(
        kind='SYNC',
        code='STARTED',
        message='{} "{}"'.format(progress_display, relative_path),
        data=dict(
            progress=0,
            file_path=file_path,
            relative_path=relative_path
        )
    ))

    # Byte offset of the next chunk within the destination file.
    offset = 0
    for index, chunk in enumerate(chunks):
        response = send_chunk(
            chunk=chunk,
            index=index,
            offset=offset,
            relative_path=relative_path,
            file_kind=file_kind,
            remote_connection=remote_connection,
            sync_time=sync_time
        )
        offset += len(chunk)

        if response.failed:
            # Abort on the first failed chunk and surface that response.
            return response

        progress, progress_display = get_progress(index + 1)
        if chunk_count > 1 and (index + 1) < chunk_count:
            # Emit intermediate progress only while more chunks remain.
            callback(response.notify(
                kind='SYNC',
                code='PROGRESS',
                message='{} "{}"'.format(progress_display, relative_path),
                data=dict(
                    progress=0.01 * progress,
                    chunk_count=chunk_count,
                    file_path=file_path,
                    relative_path=relative_path
                )
            ))

    progress, progress_display = get_progress(chunk_count)
    callback(response.notify(
        kind='SYNC',
        code='DONE',
        message='{} "{}"'.format(progress_display, relative_path),
        data=dict(
            chunk_count=chunk_count,
            progress=progress,
            file_path=file_path,
            relative_path=relative_path
        )
    ))
    return response