def visible_modules(request) -> Dict[str, ModuleZipfile]:
    """Load all ModuleZipfiles the user may use.

    Anonymous (unauthenticated) users are not shown "pythoncode".
    """
    modules = dict(MODULE_REGISTRY.all_latest())  # shallow copy
    if not request.user.is_authenticated:
        modules.pop("pythoncode", None)
    return modules
def load_from_workflow(cls, workflow: Workflow) -> DependencyGraph:
    """Create a DependencyGraph using the database.

    Must be called within a `workflow.cooperative_lock()`.

    Missing or deleted modules are deemed to have no dependencies.
    """
    from cjwstate.models.module_registry import MODULE_REGISTRY

    module_zipfiles = MODULE_REGISTRY.all_latest()

    tabs = []
    steps = {}
    for tab in workflow.live_tabs:
        tab_step_ids = []
        for step in tab.live_steps:
            tab_step_ids.append(step.id)
            try:
                module_zipfile = module_zipfiles[step.module_id_name]
            except KeyError:
                # Missing/deleted module: no dependencies.
                # (frozenset() for consistency: the tab-slugs branch below
                # also stores an immutable frozenset in cls.Step.)
                steps[step.id] = cls.Step(frozenset())
                continue
            module_spec = module_zipfile.get_spec()
            schema = module_spec.get_param_schema()
            # Optimization: don't migrate_params() if we know there are no
            # tab params. (get_migrated_params() invokes module code, and
            # we'd prefer for module code to execute only in the renderer.)
            if all(
                (
                    not isinstance(dtype, ParamDType.Tab)
                    and not isinstance(dtype, ParamDType.Multitab)
                )
                for dtype, v in schema.iter_dfs_dtype_values(schema.coerce(None))
            ):
                # There are no tab params.
                steps[step.id] = cls.Step(frozenset())
                continue

            from cjwstate.params import get_migrated_params

            params = get_migrated_params(step)
            # raises ValueError (and we don't handle that right now)
            schema.validate(params)

            # NOTE(review): this collects only ParamDType.Tab values;
            # presumably iter_dfs_dtype_values() also yields the inner Tab
            # dtypes of a Multitab param -- confirm.
            tab_slugs = frozenset(
                v
                for dtype, v in schema.iter_dfs_dtype_values(params)
                if isinstance(dtype, ParamDType.Tab)
            )
            steps[step.id] = cls.Step(tab_slugs)
        tabs.append(cls.Tab(tab.slug, tab_step_ids))
    return cls(tabs, steps)
def create_application() -> ProtocolTypeRouter:
    """Create an ASGI application."""
    # Load static modules on startup: start a kernel and validate every
    # static module. Two good reasons to pay this cost now:
    #
    # 1. In dev mode, module errors get reported ASAP -- during startup.
    # 2. In production, this work takes time -- better to spend it during
    #    startup than while responding to some random request.
    cjwstate.modules.init_module_system()

    if not settings.I_AM_TESTING:
        # Skip in the test environment: there, Django runs migrations
        # itself, and we can't use MODULE_REGISTRY until it has migrated.
        MODULE_REGISTRY.all_latest()

    websocket_app = AuthMiddlewareStack(
        SetCurrentLocaleAsgiMiddleware(create_url_router())
    )
    return ProtocolTypeRouter({"websocket": websocket_app})
def post(self, request: HttpRequest, workflow_id_or_secret_id: Union[int, str]):
    """Duplicate the workflow for the requesting user and return its JSON."""
    workflow = lookup_workflow_and_auth(
        authorized_read, workflow_id_or_secret_id, request
    )
    duplicate = workflow.duplicate(request.user)
    ctx = JsonizeContext(request.locale_id, MODULE_REGISTRY.all_latest())
    json_dict = jsonize_clientside_workflow(
        duplicate.to_clientside(), ctx, is_init=True
    )
    # Kick off a render of the newly-created workflow.
    async_to_sync(rabbitmq.queue_render)(duplicate.id, duplicate.last_delta_id)
    return JsonResponse(json_dict, status=status.CREATED)
def load_from_workflow(cls, workflow: Workflow) -> DependencyGraph:
    """Create a DependencyGraph using the database.

    Must be called within a `workflow.cooperative_lock()`.

    Missing or deleted modules are deemed to have no dependencies.
    """
    # Function-scope import -- presumably to avoid an import cycle with
    # cjwstate.models; TODO confirm.
    from cjwstate.models.module_registry import MODULE_REGISTRY

    module_zipfiles = MODULE_REGISTRY.all_latest()

    tabs = []
    steps = {}
    for tab in workflow.live_tabs:
        tab_step_ids = []
        for step in tab.live_steps:
            tab_step_ids.append(step.id)
            try:
                module_zipfile = module_zipfiles[step.module_id_name]
            except KeyError:
                # Missing or deleted module: deemed to have no dependencies.
                steps[step.id] = cls.Step(frozenset())
                continue
            module_spec = module_zipfile.get_spec()
            schema = module_spec.param_schema
            # Optimization: don't migrate_params() if we know there are no
            # tab params. (get_migrated_params() invokes module code, so we
            # prefer to wait and let it run in the renderer.)
            if not _schema_contains_tabs(schema):
                steps[step.id] = cls.Step(frozenset())
                continue
            from cjwstate.params import get_migrated_params

            params = get_migrated_params(step)
            # raises ValueError (and we don't handle that right now)
            schema.validate(params)
            steps[step.id] = cls.Step(gather_param_tab_slugs(schema, params))
        tabs.append(cls.Tab(tab.slug, tab_step_ids))
    return cls(tabs, steps)
def post(self, request: HttpRequest, workflow_id: int):
    """Duplicate the workflow for the requesting user and return its JSON."""
    workflow = lookup_workflow_and_auth(
        Workflow.request_authorized_read, workflow_id, request
    )
    duplicate = workflow.duplicate(request.user)
    ctx = _get_request_jsonize_context(request, MODULE_REGISTRY.all_latest())
    json_dict = jsonize_clientside_workflow(
        duplicate.to_clientside(), ctx, is_init=True
    )
    server.utils.log_user_event_from_request(
        request, "Duplicate Workflow", {"name": workflow.name}
    )
    # Kick off a render of the newly-created workflow.
    async_to_sync(rabbitmq.queue_render)(duplicate.id, duplicate.last_delta_id)
    return JsonResponse(json_dict, status=status.CREATED)
def from_workflow(cls, workflow: Workflow) -> Report.ReportWorkflow:
    """Build a ReportWorkflow from `workflow`, keeping only tabs with iframes."""
    module_zipfiles = MODULE_REGISTRY.all_latest()
    # prefetch would be nice, but it's tricky because A) we need to
    # filter out is_deleted; and B) we need to filter for modules that
    # have .html files.
    tabs = [
        candidate
        for candidate in (
            Report.TabWithIframes.from_tab(tab, module_zipfiles)
            for tab in workflow.live_tabs
        )
        if candidate.wf_modules
    ]
    return cls(
        id=workflow.id,
        name=workflow.name,
        owner_name=workbench_user_display(workflow.owner),
        updated_at=workflow.last_delta.datetime,
        tabs=tabs,
    )
def post(self, request: HttpRequest, workflow: Workflow):
    """Duplicate `workflow` for the requesting user and return its JSON."""
    duplicate = workflow.duplicate(request.user)
    ctx = JsonizeContext(
        request.user,
        request.session,
        request.locale_id,
        dict(MODULE_REGISTRY.all_latest()),
    )
    json_dict = jsonize_clientside_workflow(
        duplicate.to_clientside(), ctx, is_init=True
    )
    server.utils.log_user_event_from_request(
        request, "Duplicate Workflow", {"name": workflow.name}
    )
    # Kick off a render of the newly-created workflow.
    async_to_sync(rabbitmq.queue_render)(duplicate.id, duplicate.last_delta_id)
    return JsonResponse(json_dict, status=status.HTTP_201_CREATED)
def _load_tab_flows(workflow: Workflow, delta_id: int) -> List[TabFlow]:
    """Query `workflow` for each tab's `TabFlow` (ordered by tab position).

    Raise `ModuleError` or `ValueError` if migrate_params() fails. Failed
    migration means the whole execute can't happen.
    """
    tab_flows = []
    with workflow.cooperative_lock():  # reloads workflow
        if workflow.last_delta_id != delta_id:
            raise UnneededExecution

        module_zipfiles = MODULE_REGISTRY.all_latest()

        for tab_model in workflow.live_tabs.all():
            steps = []
            for step_model in tab_model.live_steps.all():
                steps.append(
                    _build_execute_step(step_model, module_zipfiles=module_zipfiles)
                )
            tab = Tab(tab_model.slug, tab_model.name)
            tab_flows.append(TabFlow(tab, steps))
    return tab_flows
def test_db_minio_all_latest_use_max_last_update_time(self):
    # Create an old version first...
    ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "regtest6",
            "name": "regtest6 v1",
            "category": "Clean",
            "parameters": [{"id_name": "url", "type": "string"}],
        },
        source_version_hash="b1c2d3",
    )
    time.sleep(0.000002)  # guarantee new timestamp
    # ...then a newer version with a later last_update_time.
    v2 = ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "regtest6",
            "name": "regtest6 v2",
            "category": "Clean",
            "parameters": [{"id_name": "url", "type": "string"}],
        },
        source_version_hash="b1c2d2",
    )
    zip_bytes = io.BytesIO()
    with zipfile.ZipFile(zip_bytes, mode="w") as zf:
        zf.writestr("regtest6.yaml", json.dumps(v2.spec).encode("utf-8"))
        zf.writestr("regtest6.py", b"def render(table, params):\n return table")
    minio.put_bytes(
        minio.ExternalModulesBucket,
        "regtest6/regtest6.b1c2d2.zip",
        bytes(zip_bytes.getbuffer()),
    )
    # all_latest() must pick the version with the max last_update_time.
    latest = MODULE_REGISTRY.all_latest()["regtest6"]
    self.assertEqual(latest.get_spec(), ModuleSpec(**v2.spec))
def test_all_latest_internal(self):
    module_zipfile = MODULE_REGISTRY.all_latest()["pythoncode"]
    spec = module_zipfile.get_spec()
    self.assertEqual(spec.id_name, "pythoncode")
def _load_latest_modules():
    """Return a shallow-copied plain dict of the latest module zipfiles."""
    latest = MODULE_REGISTRY.all_latest()
    return {module_id: zf for module_id, zf in latest.items()}