Example #1
    def test_no_input(self):
        """Test that no input returns expected error"""
        with self.assertRaises(Exception) as context:
            MetadataTable()

        #print("The exception is", context.exception)
        emsg = "MetadataTable requires a name"
        self.assertTrue(emsg in str(context.exception))
Example #2
    def test_no_module(self):
        """Test that __init__ with module=None returns expected error"""
        with self.assertRaises(Exception) as context:
            MetadataTable(table_name_in=None, table_type_in=None, dependencies=None, \
                relative_path=None, known_ddts=None, var_dict=None, module=None, \
                parse_object=None, logger=None)

        #print("The exception is", context.exception)
        emsg = "MetadataTable requires a name"
        self.assertTrue(emsg in str(context.exception))
Example #3
    def test_bad_header_type(self):
        """Test that __init__ with table_type_in=banana returns expected error"""
        with self.assertRaises(Exception) as context:
            MetadataTable(table_name_in="something", table_type_in="banana", dependencies=None, \
                relative_path=None, known_ddts=None, var_dict=None, module=None, \
                parse_object=None, logger=None)

        #print("The exception is", context.exception)
        emsg = "Invalid metadata arg table type, 'banana'"
        self.assertTrue(emsg in str(context.exception))
Example #4
    def test_no_table_type(self):
        """Test that __init__ with table_type_in=None returns expected error"""
        with self.assertRaises(Exception) as context:
            MetadataTable(self._DUMMY_RUN_ENV,
                          table_name_in="something",
                          table_type_in=None,
                          dependencies=None,
                          relative_path=None,
                          known_ddts=None,
                          var_dict=None,
                          module=None,
                          parse_object=None)

        #print("The exception is", context.exception)
        emsg = "MetadataTable requires a table type"
        self.assertTrue(emsg in str(context.exception))
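The four test methods above assume a unittest.TestCase subclass. A minimal harness for running them might look like the sketch below; the import path for MetadataTable and the _DUMMY_RUN_ENV fixture are assumptions inferred from how the tests use them.

import unittest

from metadata_table import MetadataTable  # assumed import path

class MetadataTableInitTests(unittest.TestCase):
    """Holds the __init__ error-path tests shown above."""

    # Placeholder for whatever run environment object the framework
    # expects; the real fixture is an assumption here.
    _DUMMY_RUN_ENV = None

    # ... the test_no_input, test_no_module, test_bad_header_type and
    # test_no_table_type methods from the examples above go here ...

if __name__ == '__main__':
    unittest.main()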
Example #5
def parse_type_def(statements, type_def, mod_name, pobj, logger):
    """Parse a type definition from <statements> and return the
    remaining statements along with a MetadataTable object representing
    the type's variables."""
    psrc = ParseSource(mod_name, 'ddt', pobj)
    seen_contains = False
    mheader = None
    var_dict = VarDictionary(type_def[0])
    inspec = True
    while inspec and (statements is not None):
        while len(statements) > 0:
            statement = statements.pop(0)
            # End of the type definition
            pmatch = _END_TYPE_RE.match(statement)
            if pmatch is not None:
                # We hit the end of the type, make a header
                mheader = MetadataTable(table_name_in=type_def[0],
                                        table_type_in='ddt',
                                        module=mod_name, var_dict=var_dict,
                                        logger=logger)
                inspec = False
            elif is_contains_statement(statement, inspec):
                seen_contains = True
            elif not seen_contains:
                # Comment or variable declaration
                if ((not is_comment_statement(statement)) and
                    (not parse_use_statement(statement, logger))):
                    dvars = parse_fortran_var_decl(statement, psrc,
                                                   logger=logger)
                    for var in dvars:
                        var_dict.add_variable(var)
                    # End for
                # End if
            else:
                # We are just skipping lines until the end type
                pass
            # End if
        # End while
        if inspec and (len(statements) == 0):
            statements = read_statements(pobj)
        # End if
    # End while
    return statements, mheader
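parse_type_def relies on a two-level loop: drain the current batch of statements, then call read_statements to refill when the batch runs dry. Here is a standalone sketch of that refill pattern; the batch iterator stands in for repeated read_statements(pobj) calls.

# Standalone sketch of the two-level refill loop used above. 'batches'
# stands in for repeated read_statements(pobj) calls.
def drain(batches):
    statements = next(batches, None)
    while statements is not None:
        while statements:
            statement = statements.pop(0)
            print(statement)          # stand-in for the real parsing work
        statements = next(batches, None)

drain(iter([["integer :: a", "real :: b"], ["end type demo_t"]]))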
Example #6
def add_constituent_vars(cap, host_model, suite_list, logger):
###############################################################################
    """Create a DDT library containing array reference variables
    for each constituent field for all suites in <suite_list>.
    Create and return a dictionary containing an index variable for each of the
    constituents as well as the variables from the DDT object.
    Also, write declarations for these variables to <cap>.
    Since the constituents are in a DDT (ccpp_constituent_properties_t),
    create a metadata table with the required information, then parse it
    to create the dictionary.
    """
    # First create a MetadataTable for the constituents DDT
    stdname_layer = "ccpp_constituents_num_layer_consts"
    stdname_interface = "ccpp_constituents_num_interface_consts"
    stdname_2d = "ccpp_constituents_num_2d_consts"
    horiz_dim = "horizontal_dimension"
    vert_layer_dim = "vertical_layer_dimension"
    vert_interface_dim = "vertical_interface_dimension"
    array_layer = "vars_layer"
    array_interface = "vars_interface"
    array_2d = "vars_2d"
    # Table preamble (leave off ccpp-table-properties header)
    ddt_mdata = [
        #"[ccpp-table-properties]",
        " name = {}".format(CONST_DDT_NAME), " type = ddt",
        "[ccpp-arg-table]",
        " name = {}".format(CONST_DDT_NAME), " type = ddt",
        "[ num_layer_vars ]",
        " standard_name = {}".format(stdname_layer),
        " units = count", " dimensions = ()", " type = integer",
        "[ num_interface_vars ]",
        " standard_name = {}".format(stdname_interface),
        " units = count", " dimensions = ()", " type = integer",
        "[ num_2d_vars ]",
        " standard_name = {}".format(stdname_2d),
        " units = count", " dimensions = ()", " type = integer",
        "[ {} ]".format(array_layer),
        " standard_name = ccpp_constituents_array_of_layer_consts",
        " units = none",
        " dimensions = ({}, {}, {})".format(horiz_dim, vert_layer_dim,
                                            stdname_layer),
        " type = real", " kind = kind_phys",
        "[ {} ]".format(array_interface),
        " standard_name = ccpp_constituents_array_of_interface_consts",
        " units = none",
        " dimensions = ({}, {}, {})".format(horiz_dim,
                                            vert_interface_dim,
                                            stdname_interface),
        " type = real", " kind = kind_phys",
        "[ {} ]".format(array_2d),
        " standard_name = ccpp_constituents_array_of_2d_consts",
        " units = none",
        " dimensions = ({}, {})".format(horiz_dim, stdname_2d),
        " type = real", " kind = kind_phys"]
    # Add entries for each constituent (once per standard name)
    const_stdnames = set()
    for suite in suite_list:
        if logger is not None:
            lmsg = "Adding constituents from {} to {}"
            logger.debug(lmsg.format(suite.name, host_model.name))
        # end if
        scdict = suite.constituent_dictionary()
        for cvar in scdict.variable_list():
            std_name = cvar.get_prop_value('standard_name')
            if std_name not in const_stdnames:
                # Add a metadata entry for this constituent
                # Check dimensions and figure vertical dimension
                # Currently, we only support variables with first dimension,
                #   horizontal_dimension, and second (optional) dimension,
                #   vertical_layer_dimension or vertical_interface_dimension
                dims = cvar.get_dimensions()
                if (len(dims) < 1) or (len(dims) > 2):
                    emsg = "Unsupported constituent dimensions, '{}'"
                    dimstr = "({})".format(", ".join(dims))
                    raise CCPPError(emsg.format(dimstr))
                # end if
                hdim = dims[0].split(':')[-1]
                if hdim != 'horizontal_dimension':
                    emsg = "Unsupported first constituent dimension, '{}', "
                    emsg += "must be 'horizontal_dimension'"
                    raise CCPPError(emsg.format(hdim))
                # end if
                if len(dims) > 1:
                    vdim = dims[1].split(':')[-1]
                    if vdim == vert_layer_dim:
                        cvar_array_name = array_layer
                    elif vdim == vert_interface_dim:
                        cvar_array_name = array_interface
                    else:
                        emsg = "Unsupported vertical constituent dimension, "
                        emsg += "'{}', must be '{}' or '{}'"
                        raise CCPPError(emsg.format(vdim, vert_layer_dim,
                                                    vert_interface_dim))
                    # end if
                else:
                    cvar_array_name = array_2d
                # end if
                # First, create an index variable for <cvar>
                ind_std_name = "index_of_{}".format(std_name)
                loc_name = "{}(:,:,{})".format(cvar_array_name, ind_std_name)
                ddt_mdata.append("[ {} ]".format(loc_name))
                ddt_mdata.append(" standard_name = {}".format(std_name))
                units = cvar.get_prop_value('units')
                ddt_mdata.append(" units = {}".format(units))
                dimstr = "({})".format(", ".join(dims))
                ddt_mdata.append(" dimensions = {}".format(dimstr))
                vtype = cvar.get_prop_value('type')
                vkind = cvar.get_prop_value('kind')
                ddt_mdata.append(" type = {} | kind = {}".format(vtype, vkind))
                const_stdnames.add(std_name)
            # end if
        # end for
    # end for
    # Parse this table using a fake filename
    parse_obj = ParseObject("{}_constituent_mod.meta".format(host_model.name),
                            ddt_mdata)
    ddt_table = MetadataTable(parse_object=parse_obj, logger=logger)
    ddt_name = ddt_table.sections()[0].title
    ddt_lib = DDTLibrary('{}_constituent_ddtlib'.format(host_model.name),
                         ddts=ddt_table.sections(), logger=logger)
    # A bit of cleanup
    del parse_obj
    del ddt_mdata
    # Now, create the "host constituent module" dictionary
    const_dict = VarDictionary("{}_constituents".format(host_model.name),
                               parent_dict=host_model)
    # Add in the constituents object
    prop_dict = {'standard_name' : "ccpp_model_constituents_object",
                 'local_name' : constituent_model_object_name(host_model),
                 'dimensions' : '()', 'units' : "None", 'ddt_type' : ddt_name}
    const_var = Var(prop_dict, _API_SOURCE)
    const_var.write_def(cap, 1, const_dict)
    ddt_lib.collect_ddt_fields(const_dict, const_var)
    # Declare variable for the constituent standard names array
    max_csname = max([len(x) for x in const_stdnames]) if const_stdnames else 0
    num_const_fields = len(const_stdnames)
    cs_stdname = constituent_model_const_stdnames(host_model)
    const_list = sorted(const_stdnames)
    if const_list:
        const_strs = ['"{}{}"'.format(x, ' '*(max_csname - len(x)))
                      for x in const_list]
        cs_stdname_initstr = " = (/ " + ", ".join(const_strs) + " /)"
    else:
        cs_stdname_initstr = ""
    # end if
    cap.write("character(len={}) :: {}({}){}".format(max_csname, cs_stdname,
                                                     num_const_fields,
                                                     cs_stdname_initstr), 1)
    # Declare variable for the constituent indices array
    array_name = constituent_model_const_indices(host_model)
    cap.write("integer :: {}({}) = -1".format(array_name, num_const_fields), 1)
    # Add individual variables for each index var to the const_dict
    for index, std_name in enumerate(const_list):
        ind_std_name = "index_of_{}".format(std_name)
        ind_loc_name = "{}({})".format(array_name, index + 1)
        prop_dict = {'standard_name' : ind_std_name,
                     'local_name' : ind_loc_name, 'dimensions' : '()',
                     'units' : 'index', 'protected' : "True",
                     'type' : 'integer', 'kind' : ''}
        ind_var = Var(prop_dict, _API_SOURCE)
        const_dict.add_variable(ind_var)
    # end for
    # Add vertical dimensions for DDT call strings
    pver = host_model.find_variable(standard_name=vert_layer_dim,
                                    any_scope=False)
    if pver is not None:
        prop_dict = {'standard_name' : vert_layer_dim,
                     'local_name' : pver.get_prop_value('local_name'),
                     'units' : 'count', 'type' : 'integer',
                     'protected' : 'True', 'dimensions' : '()'}
        if const_dict.find_variable(standard_name=vert_layer_dim,
                                    any_scope=False) is None:
            ind_var = Var(prop_dict, _API_SOURCE)
            const_dict.add_variable(ind_var)
        # end if
    # end if
    pver = host_model.find_variable(standard_name=vert_interface_dim,
                                    any_scope=False)
    if pver is not None:
        prop_dict = {'standard_name' : vert_interface_dim,
                     'local_name' : pver.get_prop_value('local_name'),
                     'units' : 'count', 'type' : 'integer',
                     'protected' : 'True', 'dimensions' : '()'}
        if const_dict.find_variable(standard_name=vert_interface_dim,
                                    any_scope=False) is None:
            ind_var = Var(prop_dict, _API_SOURCE)
            const_dict.add_variable(ind_var)
        # end if
    # end if

    return const_dict
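The key move in add_constituent_vars is assembling metadata as plain text and then parsing it back into a MetadataTable. Below is a reduced sketch of just that step, reusing the ParseObject/MetadataTable API from the snippet; the table content is illustrative.

# Reduced sketch: build metadata lines in memory, wrap them in a
# ParseObject with a fake filename, and parse into a MetadataTable.
mdata_lines = ["[ccpp-arg-table]",
               " name = demo_ddt", " type = ddt",
               "[ num_layer_vars ]",
               " standard_name = demo_num_layer_vars",
               " units = count", " dimensions = ()", " type = integer"]
parse_obj = ParseObject("demo_constituent_mod.meta", mdata_lines)
demo_table = MetadataTable(parse_object=parse_obj, logger=logger)
print(demo_table.sections()[0].title)   # expect: demo_ddt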
Example #7
def validateFileContents(study_id, portal_type, sess, form, req, web_app_user_id, data_access):
    """
    Process the uploaded archive. If valid, write files out to the filesystem
    and validate the contents of each.
    """
    
    # A nested FieldStorage instance holds the file
    fileitem = form['file']
    
    # Sample validation variables
    samples_missing = False
    
    # Set a default value for key_fields_changed
    key_fields_changed = False
    
    # Test if the file was uploaded
    if fileitem.filename:
        # strip leading path components from the file name to avoid
        # directory traversal attacks
        fname = form['output_fname'] + os.path.basename(fileitem.filename).strip().replace(" ", "")
        dir_path = os.path.join(os.path.dirname(req.filename), form['output_dir'])

        # write the zipped file on the server
        zippedf = open(os.path.join(dir_path, fname), 'wb')
        zippedf.write(fileitem.file.read())
        zippedf.close()
    
        # create a zipfile object
        t = zipfile.ZipFile(os.path.join(dir_path, fname),'r')
        
        # Do some error checking of the archive's contents
        errors = []
        templates = []
        
        sample_template_found = False
        prep_template_found = False
        timeseries_template_found = False
        
        sample_mdtable = None
        prep_mdtable = None
        
        # Figure out if this study has timeseries data if study_id > 0.
        # study_id = 0 means it's validation not associated to a study
        if study_id > 0:
            study_info = data_access.getStudyInfo(study_id, web_app_user_id)
            includes_timeseries = study_info['includes_timeseries']
        else:
            includes_timeseries = 0
        
        for fullname in t.namelist():
            # Ignore directories
            if fullname.endswith('/'):
                continue

            filename = os.path.basename(fullname).lower()
            # Ignore files that start with '.' - seem to be an artifact of the built-in
            # Mac file compression option within the Finder
            if filename.startswith('.'):
                continue

            # Only process files ending in .xls or .txt
            if not (filename.endswith('.xls') or filename.endswith('.txt')):
                continue

            # Validate that it's one of the three expected templates
            if 'sample_template' in filename:
                sample_template_found = True
            elif 'prep_template' in filename:
                prep_template_found = True
            elif 'timeseries_template' in filename:
                timeseries_template_found = True
            else:
                continue

            # Add the file to the list of templates
            templates.append(filename)
                            
            # Looks like we're good, write it out to the filesystem
            outfile_filename = os.path.join(dir_path, filename)
            try:
                outfile = open(outfile_filename, 'w')
                outfile.write(t.read(fullname))
                outfile.flush()
                outfile.close()
            except IOError as e:
                errors.append("""Could not open file "%s". The error was: %s""" % (filename, e))
                continue
                
            # Check to see if it's a binary file
            if is_binary(outfile_filename):
                errors.append('The file "%s" is not a tab-delimited text file. Please resave this file in this format and try again.' % filename)
                continue
                
            # Check to see if columns are valid in this file
            mdtable = MetadataTable(outfile_filename, study_id)
            table_errors, bad_columns = mdtable.validateColumnNames()
            errors.extend(table_errors)
            
            # Make sure there's at least one row of data in the file
            i = 0
            with open(outfile_filename, 'rU') as f:
                for line in f:
                    i += 1
            if i < 2:
                errors.append('The file "%s" contains no data.' % filename)
                continue

            # Perform specific validations
            if 'sample_template' in outfile_filename:
                sample_mdtable = mdtable
                sample_errors, samples_missing = validateSampleFile(mdtable, study_id, web_app_user_id, data_access)
                logErrors(errors, sample_errors)
            elif 'prep_template' in outfile_filename:
                prep_mdtable = mdtable
                prep_errors, key_fields_changed = validatePrepFile(mdtable, req, study_id, data_access)
                logErrors(errors, prep_errors)
            elif 'timeseries_template' in outfile_filename:
                timeseries_mdtable = mdtable
                prep_errors = validateTimeseriesFile(mdtable, req, study_id, data_access)
                logErrors(errors, prep_errors)

        # Make sure we have one of each template type
        if not sample_template_found:
            errors.append('Sample template was not found.')
            
        if portal_type != 'emp' and not prep_template_found:
            errors.append('Prep template was not found.')
            
        if includes_timeseries == 1 and not timeseries_template_found:
            errors.append('This study includes timeseries data; however, the timeseries file is missing from this upload.')
                
        # Perform multi-file validations
        if portal_type in ['emp', 'qiime'] and sample_mdtable and prep_mdtable:
            logErrors(errors, multiFileValidation(sample_mdtable, prep_mdtable))

        # Check that the archive contains the correct number of files:
        required_file_count = 1
        
        if includes_timeseries == 1:
            required_file_count += 1
            
        if portal_type == 'qiime':
            required_file_count += 1
                    
        if len(templates) < required_file_count:
            errors.append('One or more required files were not included in this upload.')
            errors.append('{0} files supplied. {1} files expected.'.format(len(templates), required_file_count))
        
        # If there were errors, report them and stop processing. Note that writing to the req
        # object is the signal for the JumpLoader to flag an error.
        if errors:
            req.write('<h3>The following errors were found:</h3><ul>')
            for e in errors:    
                req.write('<li style="color:#FF0000">%s</li>\n' % e)
            req.write('</ul>')
            
            return None, errors
        # study_id > 0 ensures that this is not the anonymous case; for anonymous
        # validation (study_id == 0) the next two checks are skipped.
        elif study_id > 0:            
            # Handle sample database validation issues
            if samples_missing:
                # Do not change this string. It's checked for on the response page.
                if study_info['locked'] == 1:
                    req.write('locked - missing samples')
                else:
                    req.write('missing samples')
                return templates, errors
            
            # Handle immutable field issues
            if key_fields_changed:
                # Do not change this string. It's checked for on the response page.
                if study_info['locked'] == 1:
                    req.write('locked - immutable fields changed')
                else:
                    req.write('immutable fields changed')
                return templates, errors
            
            # Delete the old files
            files = os.listdir(dir_path)
            for file_name in files:
                if file_name.endswith('.xls') or file_name.endswith('.zip'):
                    if os.path.basename(file_name) not in templates:
                        os.remove(os.path.join(dir_path, file_name))

            # Assuming all went well, return the list of templates
            return templates, errors
        elif study_id == 0:
            return templates, errors
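The per-member filtering above (skip directories, skip Finder dotfile artifacts, accept only .xls/.txt template files) is self-contained enough to pull into a helper. A minimal standalone sketch:

import os
import zipfile

def template_members(zip_path):
    """Yield (member_name, base_filename) pairs for archive entries that
    look like metadata templates, using the same filters as above."""
    with zipfile.ZipFile(zip_path, 'r') as archive:
        for fullname in archive.namelist():
            if fullname.endswith('/'):
                continue                      # skip directories
            filename = os.path.basename(fullname).lower()
            if filename.startswith('.'):
                continue                      # skip Finder artifacts
            if not filename.endswith(('.xls', '.txt')):
                continue                      # only template file types
            if ('sample_template' in filename or
                    'prep_template' in filename or
                    'timeseries_template' in filename):
                yield fullname, filename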
Example #8
def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger):
    "Parse dummy argument information from a subroutine"
    psrc = None
    mheader = None
    var_dict = None
    scheme_name = None
    # Find the subroutine line, should be first executable statement
    inpreamble = False
    insub = True
    if logger is not None:
        ctx = context_string(pobj, nodir=True)
        msg = "Parsing specification of {}{}"
        logger.debug(msg.format(table_name, ctx))
    # End if
    ctx = context_string(pobj) # Save initial context with directory
    vdict = None # Initialized when we parse the subroutine arguments
    while insub and (statements is not None):
        while statements:
            statement = statements.pop(0)
            smatch = _SUBROUTINE_RE.match(statement)
            esmatch = _END_SUBROUTINE_RE.match(statement)
            pmatch = _ENDMODULE_RE.match(statement)
            asmatch = _ARG_TABLE_START_RE.match(statement)
            if asmatch is not None:
                # We have run off the end of something, hope that is okay
                # Put this statement back for the caller to deal with
                statements.insert(0, statement)
                insub = False
                break
            # End if
            if pmatch is not None:
                # We have run off the end of the module, hope that is okay
                pobj.leave_region('MODULE', region_name=spec_name)
                insub = False
                break
            # End if
            if smatch is not None:
                scheme_name = smatch.group(1)
                inpreamble = scheme_name.lower() == table_name.lower()
                if inpreamble:
                    if smatch.group(2) is not None:
                        smstr = smatch.group(2).strip()
                        if len(smstr) > 0:
                            smlist = smstr.strip().split(',')
                        else:
                            smlist = list()
                        # End if
                        scheme_args = [x.strip().lower() for x in smlist]
                    else:
                        scheme_args = list()
                    # End if
                    # Create a dict template with all the scheme's arguments
                    # in the correct order
                    vdict = OrderedDict()
                    for arg in scheme_args:
                        if len(arg) == 0:
                            errmsg = 'Empty argument{}'
                            raise ParseInternalError(errmsg.format(pobj))
                        # End if
                        if arg in vdict:
                            errmsg = 'Duplicate dummy argument, {}'
                            raise ParseSyntaxError(errmsg.format(arg),
                                                   context=pobj)
                        # End if
                        vdict[arg] = None
                    # End for
                    psrc = ParseSource(scheme_name, 'scheme', pobj)
                # End if
            elif inpreamble:
                # Process a preamble statement (use or argument declaration)
                if esmatch is not None:
                    inpreamble = False
                    insub = False
                elif ((not is_comment_statement(statement)) and
                      (not parse_use_statement(statement, logger)) and
                      is_dummy_argument_statement(statement)):
                    dvars = parse_fortran_var_decl(statement, psrc,
                                                   logger=logger)
                    for var in dvars:
                        lname = var.get_prop_value('local_name').lower()
                        if lname in vdict:
                            if vdict[lname] is not None:
                                emsg = "Error: duplicate dummy argument, {}"
                                raise ParseSyntaxError(emsg.format(lname),
                                                       context=pobj)
                            # End if
                            vdict[lname] = var
                        else:
                            raise ParseSyntaxError('dummy argument',
                                                   token=lname, context=pobj)
                        # End if
                    # End for
                # End if
            # End if
        # End while
        if insub and (len(statements) == 0):
            statements = read_statements(pobj)
        # End if
    # End while
    # Check for missing declarations
    missing = list()
    if vdict is None:
        errmsg = 'Subroutine, {}, not found{}'
        raise CCPPError(errmsg.format(scheme_name, ctx))
    # End if
    for lname in vdict.keys():
        if vdict[lname] is None:
            missing.append(lname)
        # End if
    # End for
    if len(missing) > 0:
        errmsg = 'Missing local_variables, {} in {}'
        raise CCPPError(errmsg.format(missing, scheme_name))
    # End if
    var_dict = VarDictionary(scheme_name, variables=vdict)
    if (scheme_name is not None) and (var_dict is not None):
        mheader = MetadataTable(table_name_in=scheme_name,
                                table_type_in='scheme', module=spec_name,
                                var_dict=var_dict, logger=logger)
    # End if
    return statements, mheader
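The bookkeeping at the heart of parse_scheme_metadata is simple on its own: seed an OrderedDict with the dummy-argument names from the SUBROUTINE line, fill entries as declarations are parsed, and report any entry still None at the end. A minimal standalone sketch:

from collections import OrderedDict

# Seed the template with the dummy-argument names, in order.
scheme_args = ['ncol', 'temp', 'errmsg']   # as split from the SUBROUTINE line
vdict = OrderedDict((arg, None) for arg in scheme_args)

# Fill entries as declarations are parsed (strings stand in for Var objects).
vdict['ncol'] = 'integer, intent(in) :: ncol'
vdict['temp'] = 'real, intent(inout) :: temp(:)'

# Any entry still None is a dummy argument with no matching declaration.
missing = [name for name, var in vdict.items() if var is None]
print(missing)   # ['errmsg'] - these would trigger the CCPPError above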
Example #9
def parse_preamble_data(statements, pobj, spec_name, endmatch, logger):
    """Parse module variables or DDT definitions from a module preamble
    or parse program variables from the beginning of a program.
    """
    inspec = True
    mheaders = list()
    var_dict = VarDictionary(spec_name)
    psrc = ParseSource(spec_name, 'MODULE', pobj)
    active_table = None
    if logger is not None:
        ctx = context_string(pobj, nodir=True)
        msg = "Parsing preamble variables of {}{}"
        logger.debug(msg.format(spec_name, ctx))
    # End if
    while inspec and (statements is not None):
        while len(statements) > 0:
            statement = statements.pop(0)
            # End program or module
            pmatch = endmatch.match(statement)
            asmatch = _ARG_TABLE_START_RE.match(statement)
            type_def = fortran_type_definition(statement)
            if asmatch is not None:
                active_table = asmatch.group(1)
            elif (pmatch is not None) or is_contains_statement(statement,
                                                               inspec):
                # We are done with the specification
                inspec = False
                # Put statement back so caller knows where we are
                statements.insert(0, statement)
                # Add the header (even if we found no variables)
                mheader = MetadataTable(table_name_in=spec_name,
                                        table_type_in='module',
                                        module=spec_name,
                                        var_dict=var_dict, logger=logger)
                mheaders.append(mheader)
                if logger is not None:
                    ctx = context_string(pobj, nodir=True)
                    msg = 'Adding header {}{}'
                    logger.debug(msg.format(mheader.table_name, ctx))
                break
            elif ((type_def is not None) and (active_table is not None) and
                  (type_def[0].lower() == active_table.lower())):
                # Put statement back so caller knows where we are
                statements.insert(0, statement)
                statements, ddt = parse_type_def(statements, type_def,
                                                 spec_name, pobj, logger)
                if ddt is None:
                    ctx = context_string(pobj, nodir=True)
                    msg = "No DDT found at '{}'{}"
                    raise CCPPError(msg.format(statement, ctx))
                # End if
                mheaders.append(ddt)
                if logger is not None:
                    ctx = context_string(pobj, nodir=True)
                    msg = 'Adding DDT {}{}'
                    logger.debug(msg.format(ddt.table_name, ctx))
                # End if
                active_table = None
            elif active_table is not None:
                # We should have a variable definition to add
                if ((not is_comment_statement(statement)) and
                    (not parse_use_statement(statement, logger)) and
                    (active_table.lower() == spec_name.lower())):
                    dvars = parse_fortran_var_decl(statement, psrc,
                                                   logger=logger)
                    for var in dvars:
                        var_dict.add_variable(var)
                    # End for
                # End if
            # End if (else we are not in an active table so just skip)
        # End while
        if inspec and (len(statements) == 0):
            statements = read_statements(pobj)
        # End if
    # End while
    return statements, mheaders
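Both this parser and parse_scheme_metadata use a push-back convention: when a statement belongs to the caller (an end-match or a table start), it is re-inserted at the front of the list before returning, so the caller resumes exactly where parsing stopped. A standalone sketch:

# Standalone sketch of the push-back convention used above.
def parse_until_end(statements):
    body = []
    while statements:
        statement = statements.pop(0)
        if statement.lower().startswith('end module'):
            statements.insert(0, statement)   # hand it back to the caller
            break
        body.append(statement)
    return statements, body

remaining, body = parse_until_end(["integer :: n", "end module demo"])
print(remaining)   # ['end module demo']
print(body)        # ['integer :: n']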