def index_plots(out_dir: str, simulation_id: int, step_id: int) -> List[Artifact]:
    """Index all plot images found directly inside ``out_dir``.

    :param out_dir: directory containing the plot files
    :param simulation_id: id of the owning simulation
    :param step_id: id of the simulation step that produced the plots
    :return: one ``Artifact`` (file_type "PNG") per regular file in ``out_dir``
    """
    plot_artifacts: List[Artifact] = []
    for plot_name in os.listdir(out_dir):
        plot_path = join(out_dir, plot_name)
        # isfile() both skips sub-directories and guards against the entry
        # disappearing between listdir() and the stat calls below; the old
        # extra os.path.exists() check was redundant.
        if not isfile(plot_path):
            continue
        plot_artifacts.append(
            Artifact(path=plot_path,
                     name=plot_name,
                     # NOTE(review): getsize() returns bytes, but the field is
                     # named size_kb — confirm intended unit with the model.
                     size_kb=os.path.getsize(plot_path),
                     simulation_id=simulation_id,
                     step_id=step_id,
                     created_utc=datetime.datetime.fromtimestamp(
                         os.path.getmtime(plot_path)),
                     file_type="PNG"))
    return plot_artifacts
def test_artifact_owner(self):
    """A committed artifact with user_id=1 reports user 1 as its owner."""
    expected_owner = User.query.get(1)
    artifact = Artifact(name="Item", user_id=1)
    db.session.add(artifact)
    db.session.commit()
    self.assertEqual(artifact.owner, expected_owner)
def run_cli(self, conf: Artifact) -> None:
    """Run the SIMBAD-CLI binary with ``conf`` as its configuration.

    Pipes the binary's stdout to <workdir>/cli_out.csv, runs a thread that
    periodically updates runtime info (psutil) until the process exits, then
    records the output CSV and the simulator log as Artifacts on ``self``.

    :param conf: configuration Artifact; its path is passed to the binary
    """
    self.status.step_id = conf.step_id
    workdir = conf.get_workdir()
    out_path = '{}/cli_out.csv'.format(workdir)
    conf_path = conf.path

    # Open the output file once ('w' truncates any stale content) and let the
    # context manager close it even on error.  The previous implementation
    # opened the file twice ('w' then 'a') and leaked the second handle.
    with open(out_path, 'w') as cli_out:
        # stderr is discarded: the old stderr=PIPE was never drained, which
        # can deadlock the child once the OS pipe buffer fills.
        process = subprocess.Popen((self.executable_path, conf_path, out_path),
                                   stdout=cli_out,
                                   stderr=subprocess.DEVNULL)
        # update_progress polls until the process terminates; joining it
        # therefore also waits for the CLI run to finish.
        progress = threading.Thread(target=self.update_progress,
                                    args=[workdir, process])
        progress.start()
        progress.join()

    end_timestamp = datetime.datetime.utcnow()
    self.result = Artifact(
        created_utc=end_timestamp,
        size_kb=os.path.getsize(out_path),
        path=out_path,
        name='cli_out',
        step_id=conf.step_id,
        simulation_id=conf.simulation_id,
        file_type='CSV'
    )
    log_path = workdir + '/logs/simulator.log'
    self.log = Artifact(
        created_utc=end_timestamp,
        size_kb=os.path.getsize(log_path),
        path=log_path,
        name='simulator.log',
        step_id=conf.step_id,
        simulation_id=conf.simulation_id,
        file_type='LOG'
    )
    self.is_finished = True
def test_attach_many_artifacts(self):
    """All three artifacts committed for user 1 appear in user.artifacts."""
    owner = User.query.get(1)
    created = [Artifact(name=f"Item {n}", user_id=1) for n in range(1, 4)]
    db.session.add_all(created)
    db.session.commit()
    self.assertIsInstance(owner.artifacts, list)
    self.assertListEqual(owner.artifacts, created)
    self.assertEqual(len(owner.artifacts), 3)
def setup_workdir(request_data: dict) -> Artifact:
    """
    Creates a new simulation + CLI step in the DB, makes a working directory
    for the simulation and saves the request's configuration JSON into it.

    :param request_data: dict with 'configurationName' and 'configuration' keys
    :return: the persisted configuration Artifact (not a tuple — the old
        docstring was out of date)
    """
    conf_name = get_conf_name(request_data['configurationName'])
    conf = request_data['configuration']
    start_time = datetime.datetime.utcnow()
    # Create the Simulation row first and flush so simulation.id is assigned
    # (needed below for the step and for the workdir name).
    simulation = Simulation(started_utc=start_time, name="test_simulation",
                            current_step="CLI", status='ONGOING')
    db_session.add(simulation)
    db_session.flush()
    # Same pattern for the CLI step: flush to obtain step.id before use.
    step = SimulationStep(started_utc=start_time, origin="CLI",
                          simulation_id=simulation.id, status='ONGOING')
    db_session.add(step)
    db_session.flush()
    workdir_path = create_workdir(simulation.id)
    conf_path = '{}/{}'.format(workdir_path, conf_name)
    simulation.workdir = workdir_path
    simulation.current_step_id = step.id
    # Persist the configuration to disk so its size can be recorded below.
    with open(conf_path, 'w+') as f:
        json.dump(conf, f, indent=2)
    # NOTE(review): size_kb is populated with getsize() bytes — confirm unit.
    configuration = Artifact(size_kb=os.path.getsize(conf_path),
                             path=conf_path, created_utc=start_time,
                             step_id=step.id, name=conf_name,
                             file_type='JSON', simulation_id=simulation.id)
    # Wire up both sides of the relationships before the final commit.
    simulation.artifacts.append(configuration)
    step.artifacts.append(configuration)
    simulation.steps.append(step)
    db_session.begin()
    db_session.add_all([configuration, step, simulation])
    db_session.flush()
    db_session.commit()
    return configuration
def setUp(self):
    """Build an in-memory DB with two users, each owning three artifacts."""
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
    db.create_all()
    self.client = app.test_client()
    # Commit the users first so the artifacts' user_id values reference rows
    # that already exist; the original committed artifacts before their
    # owners, relying on SQLite not enforcing foreign keys by default.
    u1 = User(username="******", email="*****@*****.**")
    u2 = User(username="******", email="*****@*****.**")
    db.session.add_all([u1, u2])
    db.session.commit()
    for user_idx in range(0, 2):
        for item_idx in range(0, 3):
            # Distinct names per index; the original rebound its loop
            # variable `item` to the Artifact instance, shadowing the index.
            artifact = Artifact(name=f"Item {item_idx + 1}",
                                user_id=user_idx + 1)
            db.session.add(artifact)
    db.session.commit()
def index_reports(out_dir: str, simulation_id: int, step_id: int) -> List[Artifact]:
    """Index the known report files present in ``out_dir``.

    Only reports from the fixed candidate list (currently just the summary
    report) are considered; missing files are skipped silently.

    :param out_dir: directory expected to contain the report files
    :param simulation_id: id of the owning simulation
    :param step_id: id of the simulation step that produced the reports
    :return: one ``Artifact`` (file_type "PDF") per report found
    """
    artifacts = []
    for report_name in [SUMMARY_REPORT_NAME]:
        report_path = join(out_dir, report_name)
        if not isfile(report_path):
            continue
        modified = datetime.datetime.fromtimestamp(os.path.getmtime(report_path))
        artifacts.append(Artifact(path=report_path,
                                  name=report_name,
                                  size_kb=os.path.getsize(report_path),
                                  simulation_id=simulation_id,
                                  step_id=step_id,
                                  created_utc=modified,
                                  file_type="PDF"))
    return artifacts
def test_attach_artifact_to_user(self):
    """An artifact saved with user_id=1 resolves its owner to user 1."""
    expected = User.query.get(1)
    item = Artifact(name="item", user_id=1)
    db.session.add(item)
    db.session.commit()
    self.assertEqual(item.owner, expected)
def test_create_artifact(self):
    """Constructing an Artifact by name yields an Artifact instance."""
    created = Artifact(name="item")
    self.assertIsInstance(created, Artifact)
def analyzer_step(self, artifact_id: int) -> int:
    """Run the ANALYZER step of a simulation as a Celery task.

    Loads the CLI output artifact, registers a new ANALYZER SimulationStep,
    launches the analyzer executor, polls it while mirroring its progress
    into AnalyzerRuntimeInfo rows, then persists every result file as an
    Artifact.  On executor failure the step/simulation are marked FAILURE
    and the Celery chain is revoked.

    :param artifact_id: id of the CLI output Artifact to analyze
    :return: id of the simulation the step belongs to
    :raises RevokeChainRequested: when the executor reports an error
    """
    print('analyzer artifact id', artifact_id)
    cli_out: Artifact = db_session.query(Artifact).get(artifact_id)
    print('analyzer artifact id', cli_out.__dict__)
    simulation: Simulation = db_session.query(Simulation).get(cli_out.simulation_id)
    start_time = datetime.datetime.utcnow()
    # Create the step row and flush so step.id is available immediately.
    step: SimulationStep = SimulationStep(started_utc=start_time,
                                          origin="ANALYZER",
                                          simulation_id=simulation.id,
                                          status='ONGOING')
    db_session.add(step)
    db_session.flush()
    # self.request.id is the Celery task id (this runs as a bound task).
    step.celery_id = self.request.id
    simulation.current_step = "ANALYZER"
    simulation.current_step_id = step.id
    db_session.begin()
    db_session.add_all([simulation, step])
    db_session.commit()
    # Runtime-info row that the polling loop below keeps updating.
    db_session.begin()
    runtime_info: AnalyzerRuntimeInfo = AnalyzerRuntimeInfo(
        progress=0,
        is_finished=False,
        step_id=step.id
    )
    db_session.add(runtime_info)
    db_session.commit()
    executor: BaseExecutor = get_analyzer_executor()
    print('Starting executor')
    executor.execute(cli_out)
    print('Starting polling..')
    # Busy-poll the executor, committing a progress/error snapshot each tick.
    while executor.is_finished is not True:
        db_session.begin()
        print('status', executor.status.__dict__)
        runtime_info.progress = executor.status.progress
        runtime_info.error = executor.status.error
        db_session.commit()
        sleep(settings.SIMBAD_ANALYZER_POLLING_PERIOD)
    db_session.begin()
    # Wrap each result path in an Artifact, then fill in its metadata.
    result: List[Artifact] = list(map(lambda path: Artifact(path=path), executor.result))
    for artifact in result:
        artifact.step_id = step.id
        artifact.name = path_leaf(artifact.path)
        artifact.file_type = file_extension(artifact.path)
        artifact.created_utc = datetime.datetime.fromtimestamp(os.path.getmtime(artifact.path))
        artifact.simulation_id = step.simulation_id
        # NOTE(review): getsize() returns bytes though the field is size_kb.
        artifact.size_kb = os.path.getsize(artifact.path)
    if executor.status.error is not None:
        # Failure path: persist whatever results exist, mark everything
        # FAILURE, then abort the remaining Celery chain.
        print('Executor: ', executor.status.__dict__)
        print('Result', result)
        step.status = 'FAILURE'
        simulation.status = 'FAILURE'
        simulation.finished_utc = datetime.datetime.utcnow()
        step.finished_utc = datetime.datetime.utcnow()
        runtime_info.error = executor.status.error
        db_session.add_all(result)
        db_session.commit()
        sleep(settings.SIMBAD_ANALYZER_POLLING_PERIOD)
        raise RevokeChainRequested('Analyzer step failed')
    # Success path: close the step and persist the result artifacts.
    step.finished_utc = datetime.datetime.utcnow()
    step.status = 'SUCCESS'
    runtime_info.progress = 100
    db_session.add_all(result)
    db_session.commit()
    return simulation.id
from sqlalchemy import event
from sqlalchemy.orm import mapper

from app import db
from app.utils import is_json, setup_schema, update_object
from models.artifact import Artifact
from models.user import User

# NOTE(review): setup_schema(...) is *called* here, so its return value is
# what gets registered as the "after_configured" listener — presumably it is
# a factory returning a callback; confirm against app.utils.setup_schema.
event.listen(mapper, "after_configured", setup_schema(db.Model, db.session))

artifact_ns = Namespace(
    "artifacts", description="Ops on artifacts attached as evidence."
)

# Module-level model and schema instances shared by the resources below.
artifact = Artifact()
user = User()
artifact_schema = Artifact.Schema()
user_schema = User.Schema()


class ArtifactListAPI(Resource):
    # GET: serialize every artifact owned by the given user.
    def get(self, user_id):
        user = User.query.get(user_id)
        artifacts = [artifact_schema.dump(item) for item in user.artifacts]
        return {"length": len(artifacts), "items": artifacts}, 200


class ArtifactAPI(Resource):
    # Single-artifact resource; its body continues beyond this chunk.
    def get(self, user_id, id):