Example No. 1
    def _preprocess_post_data(self, data):
        project_id = data["project_id"]
        info = data["info"]
        duplicate = task_repo.find_duplicate(project_id=project_id, info=info)
        if duplicate:
            message = {
                'reason': 'DUPLICATE_TASK',
                'task_id': duplicate
            }
            raise Conflict(json.dumps(message))

        if 'n_answers' not in data:
            project = project_repo.get(project_id)
            data['n_answers'] = project.get_default_n_answers()
        user_pref = data.get('user_pref', {})
        if user_pref.get('languages'):
            user_pref['languages'] = [s.lower() for s in user_pref.get('languages', [])]
        if user_pref.get('locations'):
            user_pref['locations'] = [s.lower() for s in user_pref.get('locations', [])]
        if user_pref.get('assign_user'):
            user_pref['assign_user'] = [s.lower() for s in user_pref.get('assign_user', [])]
        invalid_fields = validate_required_fields(info)
        if invalid_fields:
            raise BadRequest('Missing or incorrect required fields: {}'
                            .format(','.join(invalid_fields)))
        if data.get('gold_answers'):
            try:
                gold_answers = data['gold_answers']
                if isinstance(gold_answers, dict):
                    # A dict of gold answers marks the task for calibration
                    # and flags it as exported.
                    data['calibration'] = 1
                    data['exported'] = True
            except Exception:
                raise BadRequest('Invalid gold_answers')
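
For reference, a POST payload that exercises each branch of this preprocessor could look like the sketch below; the field names come from the code above, the values are invented.

    # Illustrative payload only: field names are taken from _preprocess_post_data
    # above, values are made up.
    data = {
        "project_id": 1,
        "info": {"question": "Is this a cat?", "url": "https://example.com/img.png"},
        "n_answers": 3,                        # optional; defaults to the project's value
        "user_pref": {
            "languages": ["EN", "es"],         # lower-cased by the preprocessor
            "locations": ["US"],
            "assign_user": ["Someone@Example.com"],
        },
        "gold_answers": {"answer": "yes"},     # a dict marks the task as calibration/exported
    }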
Example No. 2
    def _import_csv_tasks(self, csvreader):
        """Import CSV tasks."""
        fields = set(['state', 'quorum', 'calibration', 'priority_0',
                      'n_answers', 'user_pref', 'expiration'])
        self._headers = []
        self.field_header_index = []
        row_number = 0
        for row in csvreader:
            if not self._headers:
                self._headers = row
                self._check_no_duplicated_headers()
                self._check_no_empty_headers()
                self._check_required_headers()
                field_headers = set(self._headers) & fields
                for field in field_headers:
                    self.field_header_index.append(self._headers.index(field))
            else:
                row_number += 1
                self._check_valid_row_length(row, row_number)

                # check required fields
                fvals = {self._headers[idx]: cell for idx, cell in enumerate(row)}
                invalid_fields = validate_required_fields(fvals)
                if invalid_fields:
                    msg = gettext('The file you uploaded has incorrect/missing '
                                  'values for required header(s): {0}'
                                  .format(','.join(invalid_fields)))
                    raise BulkImportException(msg)
                task_data = self._convert_row_to_task_data(row, row_number)
                yield task_data
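
A minimal way to drive this generator in a test, assuming an importer instance (here just called importer) that defines the method above; the class name and its construction are not shown in this example:

    import csv

    csv_text = ('question,n_answers,data_access,data_source_id,data_owner\n'
                'Is this a cat?,3,"[""L4""]",123,456')
    csvreader = csv.reader(csv_text.splitlines())
    # importer is assumed to expose _import_csv_tasks as defined above; any
    # headers required by _check_required_headers must be present in csv_text.
    tasks = list(importer._import_csv_tasks(csvreader))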
Example No. 3
    def csv_validate_required_fields(self, config, callback):
        """Test validate_required_fields against csv data
        with data_access, data_source_id, data_owner."""
        with patch.dict(self.flask_app.config, config):
            fake_csv = ('line,data_access,data_source_id,data_owner\n'
                'test,"[""L4""]",123.0,456\n'
                'test,"[""L4""]",123.6,456\n'
                'test,"[""L4""]",abc,456\n'
                'test,"[""L4""]",,456\n'
                'test,"[""L4""]",123,456.0\n'
                'test,"[""L4""]",123,456.6\n'
                'test,"[""L4""]",123,abc\n'
                'test,"[""L4""]",123,\n'
                'test,,123,456\n'
                'test,"[""L4""]",123,456')
            csvreader = csv.reader(fake_csv.splitlines())
            csviterator = iter(csvreader)

            for index, row in enumerate(csviterator):
                if index == 0:
                    # Read csv header.
                    headers = row
                else:
                    # Read csv data and check required fields.
                    fvals = {headers[idx]: cell for idx, cell in enumerate(row)}
                    invalid_fields = util.validate_required_fields(fvals)

                    # Allow client to assert on result.
                    callback(index, invalid_fields)
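
One way to call this helper is with a callback that records the result for each data row, so the test can assert on the collected results afterwards; config would be a TASK_REQUIRED_FIELDS dict such as the one in the next example, and the exact expectations depend on validate_required_fields:

    results = {}

    def record(index, invalid_fields):
        # index 1..10 maps to the data rows of fake_csv above
        results[index] = invalid_fields

    self.csv_validate_required_fields(config, record)
    # afterwards, e.g.: assert not results[10]  (row with all fields present and integer-like)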
Example No. 4
    def test_integer_required_cast_string(self):
        """Test importing an integer (BBDS) with validate_required_fields."""
        config = {'TASK_REQUIRED_FIELDS': {
            'data_access': {'val': None, 'check_val': False},
            'data_owner': {'val': None, 'check_val': False, 'require_int': True},
            'data_source_id': {'val': None, 'check_val': False, 'require_int': True}}}

        # CSV imports yield string values; here data_owner is explicitly set as an integer
        # to exercise the cast.
        data = {'data_access': "1", 'data_owner': 5, 'data_source_id': '2'}

        with patch.dict(self.flask_app.config, config):
            invalid_fields = util.validate_required_fields(data)
            assert len(invalid_fields) == 0
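
Based only on the config keys these tests exercise ('val', 'check_val', 'require_int'), a minimal sketch of the kind of checks validate_required_fields performs might look like this; it is not the actual PYBOSSA implementation and may differ from it:

    def validate_required_fields_sketch(data, required_fields):
        """Illustrative only: mimic the checks the tests above exercise."""
        invalid = []
        for field, rule in required_fields.items():
            value = data.get(field)
            if value is None or value == '':
                # Required field missing or empty.
                invalid.append(field)
                continue
            if rule.get('check_val') and value != rule.get('val'):
                # Field must match a fixed expected value.
                invalid.append(field)
                continue
            if rule.get('require_int'):
                try:
                    # Accept "123" and "123.0" but reject "123.6" and "abc".
                    number = float(str(value))
                    if number != int(number):
                        invalid.append(field)
                except ValueError:
                    invalid.append(field)
        return invalid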
Example No. 5
    def _preprocess_post_data(self, data):
        project_id = data["project_id"]
        info = data["info"]
        duplicate = task_repo.find_duplicate(project_id=project_id, info=info)
        if duplicate:
            message = {'reason': 'DUPLICATE_TASK', 'task_id': duplicate}
            raise Conflict(json.dumps(message))
        if 'n_answers' not in data:
            project = Project(**get_project_data(project_id))
            data['n_answers'] = project.get_default_n_answers()
        invalid_fields = validate_required_fields(info)
        if invalid_fields:
            raise BadRequest('Missing or incorrect required fields: {}'.format(
                ','.join(invalid_fields)))
Example No. 6
    def _import_csv_tasks(self, csvreader):
        """Import CSV tasks."""
        headers = []
        fields = set([
            'state', 'quorum', 'calibration', 'priority_0', 'n_answers',
            'user_pref'
        ])
        field_header_index = []
        row_number = 0
        for row in csvreader:
            if not headers:
                headers = self._headers = row
                self._check_no_duplicated_headers(headers)
                self._check_no_empty_headers(headers)
                self._check_required_headers(headers)
                field_headers = set(headers) & fields
                for field in field_headers:
                    field_header_index.append(headers.index(field))
            else:
                row_number += 1
                self._check_valid_row_length(row, row_number, headers)

                # check required fields
                fvals = {headers[idx]: cell for idx, cell in enumerate(row)}
                invalid_fields = validate_required_fields(fvals)
                if invalid_fields:
                    msg = gettext(
                        'The file you uploaded has incorrect/missing '
                        'values for required header(s): {0}'.format(
                            ','.join(invalid_fields)))
                    raise BulkImportException(msg)

                task_data = {"info": {}}
                for idx, cell in enumerate(row):
                    if idx in field_header_index:
                        if headers[idx] == 'user_pref':
                            if len(cell) > 0:
                                task_data[headers[idx]] = json.loads(
                                    cell.lower())
                            else:
                                task_data[headers[idx]] = {}
                        else:
                            task_data[headers[idx]] = cell
                    else:
                        task_data["info"][headers[idx]] = cell
                yield task_data
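
For a concrete picture of what the conversion loop above produces, here is an illustrative input row and the task_data it would yield; headers outside the fields set land under "info", and the user_pref cell is lower-cased before json.loads:

    # CSV header:  question,n_answers,user_pref
    # CSV row:     Is this a cat?,3,"{""languages"": [""EN""]}"
    task_data = {
        "info": {"question": "Is this a cat?"},
        "n_answers": "3",                        # still a string; not cast here
        "user_pref": {"languages": ["en"]},      # "EN" lower-cased by cell.lower()
    }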
Example No. 7
        def task_generator():
            row_number = 0
            for row in csviterator:
                row_number += 1
                self._check_valid_row_length(row, row_number)

                # check required fields
                fvals = {
                    self._headers[idx]: cell
                    for idx, cell in enumerate(row)
                }
                invalid_fields = validate_required_fields(fvals)
                if invalid_fields:
                    msg = gettext(
                        'The file you uploaded has incorrect/missing '
                        'values for required header(s): {0}'.format(
                            ','.join(invalid_fields)))
                    raise BulkImportException(msg)
                task_data = self._convert_row_to_task_data(row, row_number)
                yield task_data