def wfmodule_render(request: HttpRequest, wf_module: WfModule, format=None):
    """
    Return rendered rows of `wf_module` as JSON.

    Row range comes from `startrow`/`endrow` query parameters; both default
    to "unbounded" when absent. Responds 400 on a non-integer row number.
    When no cached render result exists yet, returns an empty row set and
    expects the client to retry after the execute cycle finishes.
    """
    # Get first and last row from query parameters, or default to all if not
    # specified
    try:
        startrow = int_or_none(request.GET.get("startrow"))
        endrow = int_or_none(request.GET.get("endrow"))
    except ValueError:
        return Response(
            {"message": "bad row number", "status_code": 400},
            status=status.HTTP_400_BAD_REQUEST,
        )

    with wf_module.workflow.cooperative_lock():
        # Re-read under the lock so we serve a consistent cached result.
        wf_module.refresh_from_db()
        cached_result = wf_module.cached_render_result
        if cached_result is None:
            # assume we'll get another request after execute finishes
            return JsonResponse({"start_row": 0, "end_row": 0, "rows": []})

        try:
            startrow, endrow, records = _make_render_tuple(
                cached_result, startrow, endrow
            )
        except FileNotFoundError:
            # assume we'll get another request after execute finishes
            return JsonResponse({"start_row": 0, "end_row": 0, "rows": []})

    return JsonResponse({"start_row": startrow, "end_row": endrow, "rows": records})
def _do_set_notifications(scope, wf_module: WfModule, notifications: bool):
    """Persist the email-notifications flag on `wf_module`.

    When the flag is being turned on, also record a user event.
    """
    wf_module.notifications = notifications
    wf_module.save(update_fields=["notifications"])
    if not notifications:
        return
    # Only enabling notifications is logged as a user event.
    server.utils.log_user_event_from_scope(
        scope, "Enabled email notifications", {"wfModuleId": wf_module.id}
    )
async def commit_result(
    wf_module: WfModule,
    result: ProcessResult,
    stored_object_json: Optional[Dict[str, Any]] = None
) -> None:
    """
    Store fetched result, if it is a change from wfm's existing data.

    Save the WfModule's `status` and `fetch_error`. Set wfm.last_update_check,
    regardless.

    If there is no error and there is new data, create (and run) a
    ChangeDataVersionCommand. Notify the user.
    """
    if result.dataframe.empty and result.error:
        # Error and no data: record the error on the module; don't create a
        # new data version.
        workflow = wf_module.workflow
        with workflow.cooperative_lock():
            wf_module.last_update_check = timezone.now()
            wf_module.fetch_error = result.error
            wf_module.is_busy = False
            wf_module.save()
        # Tell clients to re-render after the write completes.
        await websockets.ws_client_rerender_workflow_async(workflow)
    else:
        # Data (or no error): delegate versioning + notification logic.
        await save_result_if_changed(
            wf_module, result, stored_object_json=stored_object_json
        )
def execute_wfmodule(wf_module: WfModule) -> ProcessResult: """ Process all WfModules until the given one; return its result. By default, this will both read and write each WfModule's cached render result. Pass nocache=True to avoid modifying the cache. You must call this within a workflow.cooperative_lock(). """ # Do we already have what we need? If so, return quickly. cached_result = _get_render_cache(wf_module) if cached_result: return cached_result.result # Recurse -- ensuring the smallest possible number of renders input_wf_module = wf_module.previous_in_stack() if input_wf_module: input_result = execute_wfmodule(input_wf_module) else: input_result = ProcessResult() result = dispatch.module_dispatch_render(wf_module, input_result.dataframe) wf_module.cache_render_result(wf_module.last_relevant_delta_id, result) wf_module.save() return result
def _wf_module_delete_secret_and_build_delta(
        workflow: Workflow, wf_module: WfModule,
        param: str) -> Optional[Dict[str, Any]]:
    """
    Remove the secret named `param` from `wf_module`, if it exists.

    Return a "delta" dict for websockets.ws_client_send_delta_async(), or
    `None` when the database was left untouched.

    Raise Workflow.DoesNotExist if the Workflow was deleted.
    """
    with workflow.cooperative_lock():  # raises Workflow.DoesNotExist
        try:
            wf_module.refresh_from_db()
        except WfModule.DoesNotExist:
            return None  # no-op: step was deleted
        if wf_module.secrets.get(param) is None:
            return None  # no-op: nothing to delete

        # Copy before mutating -- never edit the dict in place.
        remaining_secrets = dict(wf_module.secrets)
        del remaining_secrets[param]
        wf_module.secrets = remaining_secrets
        wf_module.save(update_fields=['secrets'])

        return {
            'updateWfModules': {
                str(wf_module.id): {
                    'secrets': wf_module.secret_metadata,
                }
            }
        }
def _maybe_add_version( workflow: Workflow, wf_module: WfModule, maybe_result: Optional[ProcessResult], stored_object_json: Optional[Dict[str, Any]] = None ) -> Optional[timezone.datetime]: """ Apply `result` to `wf_module`. Set `is_busy`, `fetch_error` and `last_update_check`. Write a new `StoredObject` and returns its `datetime` if the input `maybe_result` is non-``None`` and the result isn't the same as the previous one. Che caller may create a ``ChangeDataVersionCommand`` to set `wf_module`'s next data version. If the input Workflow or WfModule is deleted, return ``None``. """ # Use Django `update_fields` to only write the fields we're # editing. That's because every value in `wf_module` might be # stale, so we must ignore those stale values. fields = { 'is_busy': False, 'last_update_check': timezone.now(), } if maybe_result is not None: fields['fetch_error'] = maybe_result.error for k, v in fields.items(): setattr(wf_module, k, v) try: with wf_module.workflow.cooperative_lock(): if not WfModule.objects.filter(pk=wf_module.id, is_deleted=False, tab__is_deleted=False).exists(): return None if maybe_result is not None: version_added = wf_module.store_fetched_table_if_different( maybe_result.dataframe, # TODO store entire result metadata=json.dumps(stored_object_json)) else: version_added = None if version_added: enforce_storage_limits(wf_module) wf_module.save(update_fields=fields.keys()) return version_added except Workflow.DoesNotExist: return None
def _do_complete_multipart_upload(
    workflow: Workflow, wf_module: WfModule,
) -> Tuple[str, Dict[str, Any]]:
    """
    Finish `wf_module`'s in-progress upload; return (uuid, serialized step).

    NOTE(review): the previous return annotation was
    ``Tuple[UploadedFile, ...]``, but the first element is
    ``uploaded_file.uuid`` (a str), not the UploadedFile itself.
    """
    with workflow.cooperative_lock():
        wf_module.refresh_from_db()
        uploaded_file = (
            _write_uploaded_file_and_clear_inprogress_file_upload(wf_module)
        )
        return (
            uploaded_file.uuid,
            serializers.WfModuleSerializer(wf_module).data,
        )
def _wf_module_set_secret_and_build_delta(
        workflow: Workflow, wf_module: WfModule, param: str,
        secret: str) -> Optional[Dict[str, Any]]:
    """
    Write a new secret to `wf_module`, or raise.

    Return a "delta" for websockets.ws_client_send_delta_async(), or `None` if
    the database is not modified.

    Raise Workflow.DoesNotExist if the Workflow was deleted.
    Raise HandlerError if the module was deleted or `param` is not a
    secret-string parameter.
    """
    with workflow.cooperative_lock():  # raises Workflow.DoesNotExist
        try:
            wf_module.refresh_from_db()
        except WfModule.DoesNotExist:
            return None  # no-op
        if wf_module.secrets.get(param, {}).get('secret') == secret:
            return None  # no-op: same secret already stored

        module_version = wf_module.module_version
        if module_version is None:
            # (was an f-string with no placeholders -- plain literals now)
            raise HandlerError('BadRequest: ModuleVersion does not exist')
        if not any(p.type == 'secret' and p.secret_logic.provider == 'string'
                   for p in module_version.param_fields):
            raise HandlerError(
                'BadRequest: param is not a secret string parameter')

        created_at = timezone.now()
        # ISO-8601 with millisecond precision and a literal "Z" suffix.
        created_at_str = (
            created_at.strftime('%Y-%m-%dT%H:%M:%S')
            + '.'
            + created_at.strftime('%f')[0:3]  # milliseconds
            + 'Z'
        )
        # Replace (never mutate) the secrets dict.
        wf_module.secrets = {
            **wf_module.secrets,
            param: {
                'name': created_at_str,
                'secret': secret,
            }
        }
        wf_module.save(update_fields=['secrets'])

        return {
            'updateWfModules': {
                str(wf_module.id): {
                    'secrets': wf_module.secret_metadata,
                }
            }
        }
def get_workflow_as_delta_and_needs_render(self):
    """
    Return (apply-delta dict, needs_render), or raise Workflow.DoesNotExist

    needs_render is a (workflow_id, delta_id) pair.
    """
    with Workflow.authorized_lookup_and_cooperative_lock(
            'read',
            self.scope['user'],
            self.scope['session'],
            pk=self.workflow_id) as workflow:
        request = RequestWrapper(self.scope['user'], self.scope['session'])
        ret = {
            'updateWorkflow': (WorkflowSerializer(workflow, context={
                'request': request
            }).data),
        }
        # Serialize every live tab and step, keyed the way clients expect.
        tabs = list(workflow.live_tabs)
        ret['updateTabs'] = dict(
            (tab.slug, TabSerializer(tab).data)
            for tab in tabs)
        wf_modules = list(WfModule.live_in_workflow(workflow.id))
        ret['updateWfModules'] = dict(
            (str(wfm.id), WfModuleSerializer(wfm).data)
            for wfm in wf_modules)
        # Ask for a render only when some cached result is stale.
        if workflow.are_all_render_results_fresh():
            needs_render = None
        else:
            needs_render = (workflow.id, workflow.last_delta_id)
        return (ret, needs_render)
def _get_render_cache(wf_module: WfModule) -> CachedRenderResult: revision = wf_module.last_relevant_delta_id or 0 cached_result = wf_module.get_cached_render_result() if cached_result and cached_result.delta_id == revision: return cached_result else: return None
def are_all_render_results_fresh(self):
    """Query whether all live WfModules are rendered."""
    from .WfModule import WfModule

    # A single never-cached step is enough to answer "no".
    return all(
        wfm.cached_render_result is not None
        for wfm in WfModule.live_in_workflow(self)
    )
def post(
    self,
    request: HttpRequest,
    workflow_lock: WorkflowCooperativeLock,
    wf_module: WfModule,
    file_param_id_name: str,
    uuid: UUID,
):
    """
    Create an UploadedFile and delete the InProgressUpload.

    Authenticate request as documented in `loads_wf_module_for_api_upload`.
    (That means respond with 400, 403 or 404 on error.)

    Return 400 Bad Request unless the JSON body looks like:

        {"filename": "a-filename.csv"}
    """
    try:
        body_json = json.loads(request.body)  # assume UTF-8
    except UnicodeDecodeError:
        return ErrorResponse(400, "body-not-utf8")
    except json.JSONDecodeError:
        return ErrorResponse(400, "body-not-json")
    form = CompleteUploadForm(body_json)
    if form.errors:
        return ErrorResponse(
            400, "body-has-errors", {"errors": form.errors.get_json_data()}
        )
    filename = form.cleaned_data["filename"]
    try:
        # Only incomplete uploads may be completed.
        in_progress_upload = wf_module.in_progress_uploads.get(
            id=uuid, is_completed=False)
    except InProgressUpload.DoesNotExist:
        return ErrorResponse(404, "upload-not-found")
    try:
        uploaded_file = in_progress_upload.convert_to_uploaded_file(filename)
    except FileNotFoundError:
        # The client never PUT the bytes for this upload.
        return ErrorResponse(409, "file-not-uploaded")

    # After the cooperative lock ends, update the WfModule.
    want_params = {
        **wf_module.get_params(),
        file_param_id_name: uploaded_file.uuid
    }

    def create_change_parameters_command():
        workflow = workflow_lock.workflow
        # sends delta to Websockets clients and queues render.
        async_to_sync(ChangeParametersCommand.create)(
            workflow=workflow,
            wf_module=wf_module,
            new_values=want_params)

    # Defer the command until after COMMIT so clients see consistent data.
    workflow_lock.after_commit(create_change_parameters_command)
    return JsonResponse({
        "uuid": uploaded_file.uuid,
        "name": uploaded_file.name,
        "size": uploaded_file.size,
        "createdAt": uploaded_file.created_at,
    })
def wrapper(request: HttpRequest, workflow_id: int, wf_module_slug: str,
            *args, **kwargs):
    """
    Authenticate a Bearer-token upload request, then call `f`.

    Look up the Workflow and live WfModule, compare the Authorization
    Bearer token against the step's stored API token, and respond 403/404
    on any failure.
    """
    import hmac  # local import: constant-time comparison below

    auth_header = request.headers.get("Authorization", "")
    auth_header_match = AuthTokenHeaderRegex.match(auth_header)
    if not auth_header_match:
        return ErrorResponse(403, "authorization-bearer-token-not-provided")
    bearer_token = auth_header_match.group(1)
    try:
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow:
            try:
                wf_module = WfModule.live_in_workflow(workflow).get(
                    slug=wf_module_slug)
            except WfModule.DoesNotExist:
                return ErrorResponse(404, "step-not-found")
            api_token = wf_module.file_upload_api_token
            if not api_token:
                return ErrorResponse(403, "step-has-no-api-token")
            # Hash both sides (equal-length digests), then compare in constant
            # time. The old code compared digests -- and then the raw tokens --
            # with `!=`, which short-circuits and leaks timing information.
            bearer_token_hash = hashlib.sha256(
                bearer_token.encode("utf-8")).digest()
            api_token_hash = hashlib.sha256(
                api_token.encode("utf-8")).digest()
            if not hmac.compare_digest(bearer_token_hash, api_token_hash):
                return ErrorResponse(403, "authorization-bearer-token-invalid")
            return f(request, wf_module, *args, **kwargs)
    except Workflow.DoesNotExist:
        return ErrorResponse(404, "workflow-not-found")
def execute_wfmodule(wf_module: WfModule,
                     last_result: ProcessResult) -> CachedRenderResult:
    """
    Render a single WfModule; cache and return output.

    CONCURRENCY NOTES: This function is reasonably concurrency-friendly:

    * It locks the workflow, so two renders won't happen on the same workflow
      at the same time.
    * It returns a valid cache result immediately.
    * It checks with the database that `wf_module` hasn't been deleted from
      its workflow.
    * It checks with the database that `wf_module` hasn't been deleted from
      the database entirely.
    * It checks with the database that `wf_module` hasn't been modified. (It
      is very common for a user to request a module's output -- kicking off a
      sequence of `execute_wfmodule` -- and then change a param in a prior
      module, making all those calls obsolete.
    * It runs in a transaction (obviously -- FOR UPDATE and all), which will
      stall `models.Delta` as it tries to write last_relevant_delta_id,
      effectively stalling users' update HTTP requests until after the
      `wf_module`'s render is complete.

    These guarantees mean:

    * It's relatively cheap to render twice.
    * Users who modify a WfModule while it's rendering will be stalled -- for
      as short a duration as possible.
    * When a user changes a workflow significantly, all prior renders will end
      relatively cheaply.

    Raises `UnneededExecution` when the input WfModule should not be rendered.
    """
    with locked_wf_module(wf_module) as safe_wf_module:
        cached_render_result = wf_module.get_cached_render_result()

        # If the cache is good, just return it -- skipping the render() call
        if (
            cached_render_result
            and (cached_render_result.delta_id
                 == wf_module.last_relevant_delta_id)
        ):
            return cached_render_result

        result = dispatch.module_dispatch_render(safe_wf_module,
                                                 last_result.dataframe)
        cached_render_result = safe_wf_module.cache_render_result(
            safe_wf_module.last_relevant_delta_id, result
        )

        # Save safe_wf_module, not wf_module, because we know we've only
        # changed the cached_render_result columns. (We know because we
        # locked the row before fetching it.) `wf_module.save()` might
        # overwrite some newer values.
        safe_wf_module.save()

        return cached_render_result
def _do_finish_upload(workflow: Workflow, wf_module: WfModule,
                      uuid: uuidgen.UUID, filename: str) -> Dict[str, Any]:
    """
    Convert `wf_module`'s in-progress upload `uuid` into an UploadedFile.

    Return the serialized WfModule. Raise HandlerError when `uuid` does not
    name an incomplete upload, or when no bytes were actually uploaded.
    """
    with workflow.cooperative_lock():
        wf_module.refresh_from_db()
        try:
            upload = wf_module.in_progress_uploads.get(
                id=uuid, is_completed=False)
        except InProgressUpload.DoesNotExist:
            raise HandlerError(
                "BadRequest: key is not being uploaded for this WfModule right now. "
                "(Even a valid key becomes invalid after you create, finish or abort "
                "an upload on its WfModule.)")
        try:
            upload.convert_to_uploaded_file(filename)
        except FileNotFoundError:
            raise HandlerError(
                "BadRequest: file not found. "
                "You must upload the file before calling finish_upload.")
        return serializers.WfModuleSerializer(wf_module).data
def fetch(self, wf_module: WfModule) -> None: """Run `call_fetch(wf_module)` and write to `wf_module`. `wf_module` will be set to `busy` until the fetch completes. After, it will be either `ready` or `error`. """ # FIXME database writes probably belong in dispatch.py. Right now, # here, half is dispatch stuff and half is database stuff. if not hasattr(self.module, 'fetch'): return params = wf_module.create_parameter_dict(None) wf_module.set_busy() result = self.call_fetch(params) result.truncate_in_place_if_too_big() result.sanitize_in_place() ModuleImpl.commit_result(wf_module, result)
def _write_wf_module_position(workflow: Workflow, wf_module_id: int) -> None:
    """Write position in DB, or raise (Workflow|Tab|WfModule).DoesNotExist."""
    with workflow.cooperative_lock():  # raises Workflow.DoesNotExist
        # raises WfModule.DoesNotExist, e.g. if tab.is_deleted
        wf_module = WfModule.live_in_workflow(workflow).get(pk=wf_module_id)
        tab = wf_module.tab
        # Point the tab at this step, and the workflow at this tab.
        tab.selected_wf_module_position = wf_module.order
        workflow.selected_tab_position = tab.position
        tab.save(update_fields=['selected_wf_module_position'])
        workflow.save(update_fields=['selected_tab_position'])
def _do_complete_upload(
    workflow: Workflow, wf_module: WfModule, key: str
) -> Tuple[str, Dict[str, Any]]:
    """
    Finish the presigned (non-multipart) upload at `key`.

    Return (uploaded file UUID, serialized WfModule). Raise HandlerError when
    `key` does not match the step's current in-progress presigned upload.

    NOTE(review): the previous return annotation was
    ``Tuple[UploadedFile, ...]``, but the first element is
    ``uploaded_file.uuid``.
    """
    with workflow.cooperative_lock():
        wf_module.refresh_from_db()
        if (
            # A non-None upload id means a _multipart_ upload is in progress
            # (see _do_create_multipart_upload); it can't be completed here.
            wf_module.inprogress_file_upload_id is not None
            or wf_module.inprogress_file_upload_key != key
        ):
            raise HandlerError(
                'DoesNotExist: key must point to an incomplete upload'
            )
        uploaded_file = (
            _write_uploaded_file_and_clear_inprogress_file_upload(wf_module)
        )
        return (
            uploaded_file.uuid,
            serializers.WfModuleSerializer(wf_module).data,
        )
def _do_try_set_autofetch(
    scope, wf_module: WfModule, auto_update_data: bool, update_interval: int
):
    """
    Set `wf_module`'s autofetch settings, enforcing the user's fetch quota.

    Return ``{"isAutofetch": ..., "fetchInterval": ...}``. When the change
    would exceed the user's quota, the old settings are kept (the DB write is
    rolled back) and a ``"quotaExceeded"`` key is added.
    """
    # We may ROLLBACK; if we do, we need to remember the old values
    old_auto_update_data = wf_module.auto_update_data
    old_update_interval = wf_module.update_interval

    # Only _increases_ in fetch frequency need a quota check: turning
    # autofetch on, or shortening the interval while it is already on.
    check_quota = (
        auto_update_data
        and wf_module.auto_update_data
        and update_interval < wf_module.update_interval
    ) or (auto_update_data and not wf_module.auto_update_data)

    quota_exceeded = None
    try:
        # (the old `as trans` binding was never used -- dropped)
        with transaction.atomic():
            wf_module.auto_update_data = auto_update_data
            wf_module.update_interval = update_interval
            if auto_update_data:
                wf_module.next_update = timezone.now() + datetime.timedelta(
                    seconds=update_interval
                )
            else:
                wf_module.next_update = None
            wf_module.save(
                update_fields=["auto_update_data", "update_interval", "next_update"]
            )

            # Now before we commit, let's see if we've surpassed the user's
            # limit; roll back if we have.
            #
            # Only rollback if we're _increasing_ our fetch count. If we're
            # lowering it, allow that -- even if the user is over limit, we
            # still want to commit because it's an improvement.
            if check_quota:
                autofetches = autofetch.list_autofetches_json(scope)
                if autofetches["nFetchesPerDay"] > autofetches["maxFetchesPerDay"]:
                    # Raising inside transaction.atomic() rolls back the save.
                    raise AutofetchQuotaExceeded(autofetches)
    except AutofetchQuotaExceeded as err:
        # DB write was rolled back; restore the in-memory object to match.
        wf_module.auto_update_data = old_auto_update_data
        wf_module.update_interval = old_update_interval
        quota_exceeded = err.autofetches

    retval = {
        "isAutofetch": wf_module.auto_update_data,
        "fetchInterval": wf_module.update_interval,
    }
    if quota_exceeded is not None:
        retval["quotaExceeded"] = quota_exceeded  # a dict
    return retval
def _wf_module_delete_secret_and_build_delta(
    workflow: Workflow, wf_module: WfModule, param: str
) -> Optional[Dict[str, Any]]:
    """
    Write a new secret (or `None`) to `wf_module`, or raise.

    Return a "delta" for websockets.ws_client_send_delta_async(), or `None` if
    the database has not been modified.

    Raise Workflow.DoesNotExist if the Workflow was deleted.
    """
    with workflow.cooperative_lock():  # raises Workflow.DoesNotExist
        try:
            # refresh_from_db() raises WfModule.DoesNotExist when the step was
            # deleted -- it never returns None, so the old `wf_module is None`
            # check was dead code and a deleted step crashed this handler.
            wf_module.refresh_from_db()
        except WfModule.DoesNotExist:
            return None  # no-op
        if wf_module.secrets.get(param) is None:
            return None  # no-op
        wf_module.secrets = dict(wf_module.secrets)  # shallow copy, then edit
        del wf_module.secrets[param]
        wf_module.save(update_fields=['secrets'])
        return {
            'updateWfModules': {
                str(wf_module.id): {
                    'params': wf_module.get_params().as_dict()
                }
            }
        }
def wrapper(request: HttpRequest, workflow_id: int, wf_module_slug: str,
            *args, **kwargs):
    """
    Authenticate a Bearer-token upload request, then call `f`.

    Resolve the Workflow, its live WfModule and the step's file parameter;
    verify the Authorization Bearer token; respond 400/403/404 on any failure.
    """
    import hmac  # local import: constant-time comparison below

    auth_header = request.headers.get("Authorization", "")
    auth_header_match = AuthTokenHeaderRegex.match(auth_header)
    if not auth_header_match:
        return ErrorResponse(403, "authorization-bearer-token-not-provided")
    bearer_token = auth_header_match.group(1)
    try:
        with Workflow.lookup_and_cooperative_lock(
                id=workflow_id) as workflow_lock:
            workflow = workflow_lock.workflow
            try:
                wf_module = WfModule.live_in_workflow(workflow).get(
                    slug=wf_module_slug)
            except WfModule.DoesNotExist:
                return ErrorResponse(404, "step-not-found")
            module_version = wf_module.module_version
            if module_version is None:
                return ErrorResponse(400, "step-module-deleted")
            try:
                # First "file"-typed parameter. (next() on a generator --
                # the old iter() wrapper was redundant.)
                file_param_id_name = next(
                    pf.id_name for pf in module_version.param_fields
                    if pf.type == "file")
            except StopIteration:
                return ErrorResponse(400, "step-has-no-file-param")
            api_token = wf_module.file_upload_api_token
            if not api_token:
                return ErrorResponse(403, "step-has-no-api-token")
            # Hash both sides (equal-length digests), then compare in constant
            # time. The old `!=` comparisons short-circuited and leaked timing.
            bearer_token_hash = hashlib.sha256(
                bearer_token.encode("utf-8")).digest()
            api_token_hash = hashlib.sha256(
                api_token.encode("utf-8")).digest()
            if not hmac.compare_digest(bearer_token_hash, api_token_hash):
                return ErrorResponse(403, "authorization-bearer-token-invalid")
            return f(
                request,
                workflow_lock,
                wf_module,
                file_param_id_name,
                *args,
                **kwargs,
            )
    except Workflow.DoesNotExist:
        return ErrorResponse(404, "workflow-not-found")
def _write_uploaded_file_and_clear_inprogress_file_upload(
    wf_module: WfModule
) -> UploadedFile:
    """
    Read metadata from S3; write it to a new UploadedFile; save `wf_module`.

    Raise FileNotFoundError if `wf_module.inprogress_file_upload_key is None`
    or the file does not exist on minio.

    Assumptions:

    * You hold a cooperative lock on `wf_module.workflow`.
    * The client PUT a sensible Content-Disposition header. (Failure means
      icky filename, not crash.)
    """
    key = wf_module.inprogress_file_upload_key
    # Keys end in "<uuid>.<ext>" -- recover the uuid from the final segment.
    uuid: str = key.split('/')[-1].split('.')[0]
    # TODO raise FileNotFoundError
    head = minio.client.head_object(Bucket=minio.UserFilesBucket, Key=key)
    size = int(head['ContentLength'])
    # Filename is taken from the header's "UTF-8''<percent-encoded>" suffix
    # (presumably RFC 6266 / RFC 5987 encoding -- confirm against the client).
    name = urllib.parse.unquote(head['ContentDisposition'].split("UTF-8''")[-1])
    uploaded_file = wf_module.uploaded_files.create(
        name=name,
        size=size,
        uuid=uuid,
        bucket=minio.UserFilesBucket,
        key=key,
    )
    # Clear the in-progress markers so the same key can't be completed twice.
    wf_module.inprogress_file_upload_id = None
    wf_module.inprogress_file_upload_key = None
    wf_module.inprogress_file_upload_last_accessed_at = None
    wf_module.save(
        update_fields=['inprogress_file_upload_id',
                       'inprogress_file_upload_key',
                       'inprogress_file_upload_last_accessed_at']
    )
    return uploaded_file
def render(self, params: WfModule, table: Optional[DataFrame],
           fetch_result: Optional[ProcessResult]) -> ProcessResult:
    """Process `table` with module `render` method, for a ProcessResult.

    If the `render` method raises an exception, this method will return an
    error string. It is always an error for a module to raise an exception.

    NOTE(review): `params` is annotated ``WfModule`` but is only used for
    ``to_painful_dict()`` -- confirm the intended parameter type.
    """
    if table is None:
        return None  # TODO disallow?
    if not hasattr(self.module, 'render'):
        # Module defines no render(): fall back to the default behavior.
        return self._default_render(params, table, fetch_result)
    return self._call_method('render', table, params.to_painful_dict(table))
def render(self, wf_module: WfModule,
           table: Optional[DataFrame]) -> ProcessResult:
    """Process `table` with module `render` method, for a ProcessResult.

    If the `render` method raises an exception, this method will return an
    error string. It is always an error for a module to raise an exception.
    """
    if table is None:
        return None  # TODO disallow?
    if hasattr(self.module, 'render'):
        param_dict = wf_module.create_parameter_dict(table)
        return self._call_method('render', table, param_dict)
    # Module defines no render(): use the default implementation.
    return self._default_render(wf_module, table)
def _do_create_multipart_upload(
    workflow: Workflow, wf_module: WfModule, filename: str
) -> Dict[str, str]:
    """Begin a multipart upload for `wf_module`; return its key and uploadId."""
    key = _generate_key(wf_module, filename)
    with workflow.cooperative_lock():
        wf_module.refresh_from_db()
        wf_module.abort_inprogress_upload()  # in case there is one already
        upload_id = minio.create_multipart_upload(
            minio.UserFilesBucket, key, filename)
        # Record the new in-progress upload so it can be completed or aborted.
        wf_module.inprogress_file_upload_id = upload_id
        wf_module.inprogress_file_upload_key = key
        wf_module.inprogress_file_upload_last_accessed_at = timezone.now()
        wf_module.save(
            update_fields=[
                'inprogress_file_upload_id',
                'inprogress_file_upload_key',
                'inprogress_file_upload_last_accessed_at',
            ]
        )
    return {'key': key, 'uploadId': upload_id}
async def save_result_if_changed(
    wfm: WfModule,
    new_result: ProcessResult,
    stored_object_json: Optional[Dict[str, Any]] = None
) -> None:
    """
    Store fetched table, if it is a change from wfm's existing data.

    "Change" here means either a changed table or changed error message.

    Set `fetch_error` to `new_result.error`.

    Set wfm.is_busy to False.

    Set wfm.last_update_check.

    Create (and run) a ChangeDataVersionCommand if something changed. This
    will kick off an execute cycle, which will render each module and email
    the owner if data has changed and notifications are enabled.

    Otherwise, notify the user of the wfm.last_update_check.

    NOTE(review): this docstring used to promise a returned timestamp, but
    the function is annotated -> None and never returns a value -- confirm.
    """
    with wfm.workflow.cooperative_lock():
        wfm.last_update_check = timezone.now()

        # Store this data only if it's different from most recent data
        new_table = new_result.dataframe
        version_added = wfm.store_fetched_table_if_different(
            new_table,
            metadata=json.dumps(stored_object_json))
        if version_added:
            # New data stored: keep the user within their storage quota.
            enforce_storage_limits(wfm)

        wfm.is_busy = False
        # TODO store fetch_error along with the data
        wfm.fetch_error = new_result.error
        wfm.save()

    # un-indent: COMMIT so we notify the client _after_ COMMIT
    if version_added:
        # notifies client of status+error_msg+last_update_check
        await ChangeDataVersionCommand.create(wfm, version_added)
    else:
        # Nothing changed: just tell the client we checked.
        await websockets.ws_client_send_delta_async(
            wfm.workflow_id,
            {
                'updateWfModules': {
                    str(wfm.id): {
                        'status': wfm.status,
                        'error_msg': wfm.error_msg,
                        'last_update_check':
                            wfm.last_update_check.isoformat(),
                    }
                }
            })
def make_init_state(request, workflow=None, modules=None):
    """
    Build a dict to embed as JSON in `window.initState` in HTML.

    Raise Http404 if the workflow disappeared.
    """
    ret = {}
    if workflow:
        try:
            with workflow.cooperative_lock():  # raise DoesNotExist on race
                ret['workflowId'] = workflow.id
                ret['workflow'] = WorkflowSerializer(workflow, context={
                    'request': request
                }).data
                tabs = list(workflow.live_tabs)
                ret['tabs'] = dict(
                    (str(tab.slug), TabSerializer(tab).data)
                    for tab in tabs)
                wf_modules = list(WfModule.live_in_workflow(workflow))
                ret['wfModules'] = {
                    str(wfm.id): WfModuleSerializer(wfm).data
                    for wfm in wf_modules
                }
        except Workflow.DoesNotExist:
            raise Http404('Workflow was recently deleted')
        # Client-side upload credentials -- only the public access key.
        ret['uploadConfig'] = {
            'bucket': minio.UserFilesBucket,
            'accessKey': settings.MINIO_ACCESS_KEY,  # never _SECRET_KEY
            'server': settings.MINIO_EXTERNAL_URL
        }
        ret['user_files_bucket'] = minio.UserFilesBucket
    if modules:
        modules_data_list = ModuleSerializer(modules, many=True).data
        ret['modules'] = dict([(str(m['id_name']), m)
                               for m in modules_data_list])
    if request.user.is_authenticated:
        ret['loggedInUser'] = UserSerializer(request.user).data
    return ret
def render(wfm: WfModule, table: pd.DataFrame) -> Union[str, pd.DataFrame]:
    """
    Apply the step's 'celledits' to `table`.

    Return the edited DataFrame, or an error string when the 'celledits'
    param fails to parse. Edits addressing missing columns or out-of-range
    rows are silently dropped.
    """
    edits = parse_json(wfm.get_param_raw('celledits', 'custom'))
    if isinstance(edits, str):
        return edits  # parse error message

    # Ignore missing columns and rows: delete them from the Array of edits
    edits = [
        edit for edit in edits
        if edit.col in table.columns and 0 <= edit.row < len(table)
    ]

    # itertools.groupby only groups _adjacent_ items, so sort by column first;
    # otherwise one column's edits could be split across several groups and
    # the column would be re-read and re-assigned once per group. (Stable
    # sort preserves per-column edit order, so results are unchanged.)
    edits.sort(key=lambda e: e.col)
    for column, column_edits in groupby(edits, lambda e: e.col):
        series = table[column]
        series2 = apply_edits(series, list(column_edits))
        if series2 is not series:
            table[column] = series2

    return table
def _do_prepare_upload(
    workflow: Workflow, wf_module: WfModule, filename: str, n_bytes: int,
    base64Md5sum: str,
) -> Dict[str, str]:
    """Presign an upload for `wf_module`; return its key, URL and headers."""
    key = _generate_key(wf_module, filename)
    with workflow.cooperative_lock():
        wf_module.refresh_from_db()
        wf_module.abort_inprogress_upload()  # drop any stale upload first
        url, headers = minio.presign_upload(
            minio.UserFilesBucket, key, filename, n_bytes, base64Md5sum)
        # A presigned (non-multipart) upload has a key but no upload id.
        wf_module.inprogress_file_upload_id = None
        wf_module.inprogress_file_upload_key = key
        wf_module.inprogress_file_upload_last_accessed_at = timezone.now()
        wf_module.save(
            update_fields=[
                'inprogress_file_upload_id',
                'inprogress_file_upload_key',
                'inprogress_file_upload_last_accessed_at',
            ]
        )
    return {'key': key, 'url': url, 'headers': headers}
def make_init_state(request, workflow=None, modules=None):
    """
    Build a dict to embed as JSON in `window.initState` in HTML.

    Raise Http404 if the workflow disappeared.

    Side-effect: update workflow.last_viewed_at.
    """
    ret = {}
    if workflow:
        try:
            with workflow.cooperative_lock():  # raise DoesNotExist on race
                ret['workflowId'] = workflow.id
                ret['workflow'] = WorkflowSerializer(workflow, context={
                    'request': request
                }).data
                tabs = list(workflow.live_tabs)
                ret['tabs'] = dict(
                    (str(tab.slug), TabSerializer(tab).data)
                    for tab in tabs)
                wf_modules = list(WfModule.live_in_workflow(workflow))
                ret['wfModules'] = {
                    str(wfm.id): WfModuleSerializer(wfm).data
                    for wfm in wf_modules
                }
                # Record the visit while we still hold the lock.
                workflow.last_viewed_at = timezone.now()
                workflow.save(update_fields=['last_viewed_at'])
        except Workflow.DoesNotExist:
            raise Http404('Workflow was recently deleted')
    if modules:
        modules_data_list = ModuleSerializer(modules, many=True).data
        ret['modules'] = dict([(str(m['id_name']), m)
                               for m in modules_data_list])
    if request.user.is_authenticated:
        ret['loggedInUser'] = UserSerializer(request.user).data
    return ret