def save_samples(samples: Iterable[str], form: FormData, adapter: StatinaAdapter, user: User):
    """Function to update sample.comment and sample.include."""
    time_stamp: str = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
    for sample_id in samples:
        sample: DataBaseSample = find.sample(sample_id=sample_id, adapter=adapter)
        comment: str = form.get(f"comment_{sample_id}")
        include: bool = form.get(f"include_{sample_id}")
        if comment != sample.comment:
            sample.comment = comment
        if include and not sample.include:
            sample.include = True
            sample.change_include_date = f"{user.username} {time_stamp}"
        elif not include and sample.include:
            sample.include = False
        update.sample(adapter=adapter, sample=sample)

async def __handle_dev_env(self):
    """
    Inject a fake number for testing, so that we can test from the
    Twilio Autopilot Simulator through an SSH tunnel.
    """
    form = await super().form()
    new_form = FormData(dict(form.items()), UserIdentifier=config.FAKE_NUMBER)
    return new_form

async def resolve_data(cls, request: Request, data: FormData):
    ret = {}
    m2m_ret = {}
    for field in cls.get_fields(is_display=False):
        input_ = field.input
        if input_.context.get("disabled") or isinstance(input_, inputs.DisplayOnly):
            continue
        name = input_.context.get("name")
        if isinstance(input_, inputs.ManyToMany):
            v = data.getlist(name)
            value = await input_.parse_value(request, v)
            m2m_ret[name] = await input_.model.filter(pk__in=value)
        else:
            v = data.get(name)
            value = await input_.parse_value(request, v)
            ret[name] = value
    return ret, m2m_ret

async def __handle_non_dev_env(self):
    """
    In production or staging, validate that the request comes from Twilio.
    """
    validator = RequestValidator(config.TWILIO_AUTH_TOKEN)
    params = await super().form()
    x_twilio_signature = super().headers.get("X-Twilio-Signature", "no-header")
    is_valid = validator.validate(str(super().url), params, x_twilio_signature)
    if not is_valid:
        raise HTTPException(status_code=403)
    return FormData(dict(params.items()))

async def form(self) -> FormData:
    if not hasattr(self, "_form"):
        assert (
            parse_options_header is not None
        ), "The `python-multipart` library must be installed to use form parsing."
        content_type_header = self.headers.get("Content-Type")
        content_type, options = parse_options_header(content_type_header)
        if content_type == b"multipart/form-data":
            multipart_parser = MultiPartParser(self.headers, self.stream())
            self._form = await multipart_parser.parse()
        elif content_type == b"application/x-www-form-urlencoded":
            form_parser = FormParser(self.headers, self.stream())
            self._form = await form_parser.parse()
        else:
            self._form = FormData()
    return self._form

async def parse(self) -> FormData:
    # Callbacks dictionary.
    callbacks = {
        "on_field_start": self.on_field_start,
        "on_field_name": self.on_field_name,
        "on_field_data": self.on_field_data,
        "on_field_end": self.on_field_end,
        "on_end": self.on_end,
    }

    # Create the parser.
    parser = multipart.QuerystringParser(callbacks)
    field_name = b""
    field_value = b""

    items = []  # type: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]]

    # Feed the parser with data from the request.
    async for chunk in self.stream:
        if chunk:
            parser.write(chunk)
        else:
            parser.finalize()
        messages = list(self.messages)
        self.messages.clear()
        for message_type, message_bytes in messages:
            if message_type == FormMessage.FIELD_START:
                field_name = b""
                field_value = b""
            elif message_type == FormMessage.FIELD_NAME:
                field_name += message_bytes
            elif message_type == FormMessage.FIELD_DATA:
                field_value += message_bytes
            elif message_type == FormMessage.FIELD_END:
                name = unquote_plus(field_name.decode("latin-1"))
                value = unquote_plus(field_value.decode("latin-1"))
                items.append((name, value))
            elif message_type == FormMessage.END:
                pass

    return FormData(items)

async def form(self) -> FormData:
    if not hasattr(self, "_form"):
        assert (
            parse_options_header is not None
        ), "The `python-multipart` library must be installed to use form parsing."
        content_type_header = self.headers.get("Content-Type")
        content_type: bytes
        content_type, _ = parse_options_header(content_type_header)
        if content_type == b"multipart/form-data":
            try:
                multipart_parser = MultiPartParser(self.headers, self.stream())
                self._form = await multipart_parser.parse()
            except MultiPartException as exc:
                if "app" in self.scope:
                    raise HTTPException(status_code=400, detail=exc.message)
                raise exc
        elif content_type == b"application/x-www-form-urlencoded":
            form_parser = FormParser(self.headers, self.stream())
            self._form = await form_parser.parse()
        else:
            self._form = FormData()
    return self._form

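# A minimal usage sketch (not part of the snippets above): how an application
# endpoint might consume the FormData returned by Request.form(). The route
# path and the "username" field name are hypothetical, chosen for illustration.
from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.routing import Route


async def submit(request):
    form = await request.form()  # FormData: an immutable multidict
    # .get() returns the last value for a repeated key; .getlist() returns all of them.
    return JSONResponse({"username": form.get("username")})


app = Starlette(routes=[Route("/submit", submit, methods=["POST"])])
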
async def parse(self) -> FormData:
    # Parse the Content-Type header to get the multipart boundary.
    content_type, params = parse_options_header(self.headers["Content-Type"])
    charset = params.get(b"charset", "utf-8")
    if type(charset) == bytes:
        charset = charset.decode("latin-1")
    boundary = params[b"boundary"]

    # Callbacks dictionary.
    callbacks = {
        "on_part_begin": self.on_part_begin,
        "on_part_data": self.on_part_data,
        "on_part_end": self.on_part_end,
        "on_header_field": self.on_header_field,
        "on_header_value": self.on_header_value,
        "on_header_end": self.on_header_end,
        "on_headers_finished": self.on_headers_finished,
        "on_end": self.on_end,
    }

    # Create the parser.
    parser = multipart.MultipartParser(boundary, callbacks)
    header_field = b""
    header_value = b""
    content_disposition = None
    content_type = b""
    field_name = ""
    data = b""
    file: typing.Optional[UploadFile] = None

    items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = []
    item_headers: typing.List[typing.Tuple[bytes, bytes]] = []

    # Feed the parser with data from the request.
    async for chunk in self.stream:
        parser.write(chunk)
        messages = list(self.messages)
        self.messages.clear()
        for message_type, message_bytes in messages:
            if message_type == MultiPartMessage.PART_BEGIN:
                content_disposition = None
                content_type = b""
                data = b""
                item_headers = []
            elif message_type == MultiPartMessage.HEADER_FIELD:
                header_field += message_bytes
            elif message_type == MultiPartMessage.HEADER_VALUE:
                header_value += message_bytes
            elif message_type == MultiPartMessage.HEADER_END:
                field = header_field.lower()
                if field == b"content-disposition":
                    content_disposition = header_value
                elif field == b"content-type":
                    content_type = header_value
                item_headers.append((field, header_value))
                header_field = b""
                header_value = b""
            elif message_type == MultiPartMessage.HEADERS_FINISHED:
                disposition, options = parse_options_header(content_disposition)
                field_name = _user_safe_decode(options[b"name"], charset)
                if b"filename" in options:
                    filename = _user_safe_decode(options[b"filename"], charset)
                    file = UploadFile(
                        filename=filename,
                        content_type=content_type.decode("latin-1"),
                        headers=Headers(raw=item_headers),
                    )
                else:
                    file = None
            elif message_type == MultiPartMessage.PART_DATA:
                if file is None:
                    data += message_bytes
                else:
                    await file.write(message_bytes)
            elif message_type == MultiPartMessage.PART_END:
                if file is None:
                    items.append((field_name, _user_safe_decode(data, charset)))
                else:
                    await file.seek(0)
                    items.append((field_name, file))

    parser.finalize()
    return FormData(items)

def test_formdata():
    upload = io.BytesIO(b"test")
    form = FormData([("a", "123"), ("a", "456"), ("b", upload)])
    assert "a" in form
    assert "A" not in form
    assert "c" not in form
    assert form["a"] == "456"
    assert form.get("a") == "456"
    assert form.get("nope", default=None) is None
    assert form.getlist("a") == ["123", "456"]
    assert list(form.keys()) == ["a", "b"]
    assert list(form.values()) == ["456", upload]
    assert list(form.items()) == [("a", "456"), ("b", upload)]
    assert len(form) == 2
    assert list(form) == ["a", "b"]
    assert dict(form) == {"a": "456", "b": upload}
    assert (
        repr(form)
        == "FormData([('a', '123'), ('a', '456'), ('b', " + repr(upload) + ")])"
    )
    assert FormData(form) == form
    assert FormData({"a": "123", "b": "789"}) == FormData([("a", "123"), ("b", "789")])
    assert FormData({"a": "123", "b": "789"}) != {"a": "123", "b": "789"}

async def parse(self) -> FormData:
    # Parse the Content-Type header to get the multipart boundary.
    content_type, params = parse_options_header(self.headers["Content-Type"])
    boundary = params.get(b"boundary")

    # Callbacks dictionary.
    callbacks = {
        "on_part_begin": self.on_part_begin,
        "on_part_data": self.on_part_data,
        "on_part_end": self.on_part_end,
        "on_header_field": self.on_header_field,
        "on_header_value": self.on_header_value,
        "on_header_end": self.on_header_end,
        "on_headers_finished": self.on_headers_finished,
        "on_end": self.on_end,
    }

    # Create the parser.
    parser = multipart.MultipartParser(boundary, callbacks)
    header_field = b""
    header_value = b""
    raw_headers = []  # type: typing.List[typing.Tuple[bytes, bytes]]
    field_name = ""
    data = b""
    file = None  # type: typing.Optional[UploadFile]

    items = []  # type: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]]

    # Feed the parser with data from the request.
    async for chunk in self.stream:
        parser.write(chunk)
        messages = list(self.messages)
        self.messages.clear()
        for message_type, message_bytes in messages:
            if message_type == MultiPartMessage.PART_BEGIN:
                raw_headers = []
                data = b""
            elif message_type == MultiPartMessage.HEADER_FIELD:
                header_field += message_bytes
            elif message_type == MultiPartMessage.HEADER_VALUE:
                header_value += message_bytes
            elif message_type == MultiPartMessage.HEADER_END:
                raw_headers.append((header_field.lower(), header_value))
                header_field = b""
                header_value = b""
            elif message_type == MultiPartMessage.HEADERS_FINISHED:
                headers = Headers(raw=raw_headers)
                content_disposition = headers.get("Content-Disposition")
                content_type = headers.get("Content-Type", "")
                disposition, options = parse_options_header(content_disposition)
                field_name = options[b"name"].decode("latin-1")
                if b"filename" in options:
                    filename = options[b"filename"].decode("latin-1")
                    file = UploadFile(filename=filename, content_type=content_type)
                else:
                    file = None
            elif message_type == MultiPartMessage.PART_DATA:
                if file is None:
                    data += message_bytes
                else:
                    await file.write(message_bytes)
            elif message_type == MultiPartMessage.PART_END:
                if file is None:
                    items.append((field_name, data.decode("latin-1")))
                else:
                    await file.seek(0)
                    items.append((field_name, file))
            elif message_type == MultiPartMessage.END:
                pass

    parser.finalize()
    return FormData(items)

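# A minimal sketch of consuming an uploaded file parsed by the multipart parser
# above, via request.form(). The endpoint and the "upload" field name are
# hypothetical; text fields come back as str, file fields as UploadFile.
from starlette.datastructures import UploadFile
from starlette.responses import PlainTextResponse


async def upload(request):
    form = await request.form()
    field = form["upload"]
    if isinstance(field, UploadFile):
        contents = await field.read()  # bytes buffered by the parser
        return PlainTextResponse(f"{field.filename}: {len(contents)} bytes")
    return PlainTextResponse("no file submitted", status_code=400)
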
async def post_receive_json(self, form_data: dict):
    # When data= was passed in as a dict, InputRequired failed;
    # it apparently looks in formdata to see if an input was made.
    form = CreateSessionForm(formdata=FormData(form_data))
    if not form.validate():
        await self.send_json({'validation_errors': form.errors})
        return
    session_config_name = form.session_config.data
    is_mturk = form.is_mturk.data
    config = SESSION_CONFIGS_DICT[session_config_name]
    num_participants = form.num_participants.data
    if is_mturk:
        num_participants *= settings.MTURK_NUM_PARTICIPANTS_MULTIPLE
    modified_session_config_fields = {}
    for field in config.editable_fields():
        html_field_name = config.html_field_name(field)
        old_value = config[field]

        # To keep unit tests concise, any field we don't want to change can
        # simply be omitted -- EXCEPT for boolean fields, where omission means
        # the box was unchecked. Ideally omitted boolean fields would be
        # interpreted as unchanged and False as unchecked, but HTML &
        # serializeArray omit unchecked checkboxes from form data.
        if isinstance(old_value, bool):
            new_value = bool(form_data.get(html_field_name))
            if old_value != new_value:
                modified_session_config_fields[field] = new_value
        else:
            new_value_raw = form_data.get(html_field_name, '')
            if new_value_raw != '':
                # don't use isinstance because that will catch bool also
                if type(old_value) is int:
                    # in case someone enters 1.0 instead of 1
                    new_value = int(float(new_value_raw))
                else:
                    new_value = type(old_value)(new_value_raw)
                if old_value != new_value:
                    modified_session_config_fields[field] = new_value
    use_browser_bots = modified_session_config_fields.get(
        'use_browser_bots', config.get('use_browser_bots', False)
    )

    # if room_name is missing, it will be empty string
    room_name = form.room_name.data or None

    await self.create_session_then_send_start_link(
        session_config_name=session_config_name,
        num_participants=num_participants,
        is_demo=False,
        is_mturk=is_mturk,
        modified_session_config_fields=modified_session_config_fields,
        use_browser_bots=use_browser_bots,
        room_name=room_name,
    )

    if room_name:
        await channel_utils.group_send(
            group=channel_utils.room_participants_group_name(room_name),
            data=SESSION_READY_PAYLOAD,
        )

def advance_last_place_participants(self):
    """The problem with using the test client to make get/post requests is:
    (1) this request already has the global asyncio.lock
    (2) there are apparently some issues with async/await and event loops.
    """
    from otree.lookup import get_page_lookup
    from otree.api import WaitPage, Page

    participants = self.get_participants()

    # in case some participants haven't started
    unvisited_participants = False
    for p in participants:
        if p._index_in_pages == 0:
            unvisited_participants = True
            p.initialize(None)

    if unvisited_participants:
        # that's it -- just visit the start URL, advancing by 1
        return

    last_place_page_index = min([p._index_in_pages for p in participants])
    last_place_participants = [
        p for p in participants if p._index_in_pages == last_place_page_index
    ]

    for p in last_place_participants:
        page_index = p._index_in_pages
        if page_index >= p._max_page_index:
            return
        page = get_page_lookup(
            self.code, page_index
        ).page_class.instantiate_without_request()
        page.set_attributes(p)
        if isinstance(page, Page):
            from starlette.datastructures import FormData

            page._is_frozen = False
            page._form_data = FormData(
                {
                    otree.constants.admin_secret_code: ADMIN_SECRET_CODE,
                    otree.constants.timeout_happened: '1',
                }
            )
            # TODO: should we also call .get() so that _update_monitor_table
            #  will also get run?
            resp = page.post()
            if resp.status_code >= 400:
                msg = (
                    f'Submitting page {p._current_form_page_url} failed, '
                    f'returned HTTP status code {resp.status_code}. '
                    'Check the logs'
                )
                raise AssertionError(msg)
        else:
            # It's possible that the slowest user is on a wait page,
            # especially if their browser is closed, because they were waiting
            # for another user who then advanced past the wait page, but they
            # were never advanced themselves.
            resp = page.inner_dispatch(request=None)

        # Do the auto-advancing here, rather than in increment_index_in_pages,
        # because it's only needed here.
        otree.channels.utils.sync_group_send(
            group=auto_advance_group(p.code), data={'auto_advanced': True}
        )