Example no. 1
def entry_print(ret_object):
    """ 打印单条记录
  通过读取全局变量(来自配置文件装载)
  利用apply机制,执行每个打印配置项的gain元素方法进行打印控制
  具体输出时,使用"string format".format(**dict)方法
  其中"string format"内容,由两部分组成,组成方式与标题打印方法中实现的一致
  """
    _entry = {}
    _full_entry = {}
    pf_fmt, pf_obj = apply(GainOption('patchset.files'), [ret_object])
    for i in _fmt:
        _entry[i[0]] = apply(GainOption(i[1]), [ret_object])
    for pf in pf_obj:
        _full_entry = dict(_entry, **pf)
        fmtstr = sjoin(map(lambda x: sjoin(['"{', x[0], '}"'], ''), _fmt), ',')
        fullfmtstr = fmtstr + ',' + sjoin(
            map(lambda x: sjoin(['"{', x, '}"'], ''), pf_fmt), ',')
        _f = None
        try:
            _f = open(OUTFILE, 'a')
            _f.write(fullfmtstr.format(**_full_entry))
            _f.write('\r\n')
            _f.flush()
        except IOError as e:
            logger.error('Output Entry Error')
            logger.error(e)
        finally:
            if _f:
                _f.close()
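How the CSV-style format string is assembled is easier to see in isolation. A minimal sketch, assuming sjoin is string.join (as the import in Example no. 16 suggests) and that _fmt is a list of (column_name, option_path) pairs loaded from the configuration; the column names and values below are hypothetical:

from string import join as sjoin

_fmt = [('number', 'change.number'), ('owner', 'change.owner.email')]  # hypothetical
_entry = {'number': '826', 'owner': 'someone@example.com'}             # hypothetical

# one quoted placeholder per configured column: '"{number}","{owner}"'
fmtstr = sjoin(map(lambda x: sjoin(['"{', x[0], '}"'], ''), _fmt), ',')
print fmtstr.format(**_entry)   # prints: "826","someone@example.com"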
Example no. 2
 def _reviewsMinus(ret):
     _res = []
     if not (ret.get('currentPatchSet', None)
             and ret.get('currentPatchSet', None).get('approvals', None)):
         return None
     if not ret.get('comments', None):
         return sjoin(_res, '\n')
     _re = None
     for c in ret.get('comments', None):
         _re = re.compile('Code-Review\-2').search(c.get('message', None),
                                                   1)
         if _re:
             if c.get('reviewer', None).get('email', None):
                 if c.get('reviewer', None).get('email', None) in _ciowners:
                     continue
                 _res.append('' +
                             c.get('reviewer', None).get('email', None))
             else:
                 if c.get('reviewer',
                          None).get('username',
                                    None) in map(lambda x: x.split('@')[0],
                                                 _ciowners):
                     continue
                 _res.append('' +
                             c.get('reviewer', None).get('username', None))
     return sjoin(_res, '\n')
Example no. 3
def title_print(title):
    """ 标题打印,标题由两部分组成
  第一部分是配置文件装载的同时生成
  第二部分是根据文件列表现实样式,动态生成
  两部分合并后,生成新的标题。
  """
    _pft = _config['patchset']\
        ['files']\
        ['title']\
        [_config['patchset']['files']['using']]
    _k_pft = _pft.keys()
    _k_pft.sort()
    fulltitle = dict(title, **_pft)
    fmtstr = sjoin(map(lambda x: sjoin(['"{', x[0], '}"'], ''), _fmt), ',')
    fullfmtstr = fmtstr + ',' + sjoin(
        map(lambda x: sjoin(['"{', x, '}"'], ''), _k_pft), ',')
    _f = None
    try:
        _f = open(OUTFILE, 'w')
        _f.write(fullfmtstr.format(**fulltitle))
        _f.write('\r\n')
        _f.flush()
    except IOError as e:
        logger.error('Output Title Error')
        logger.error(e)
    finally:
        if _f:
            _f.close()
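The two title parts are merged with the dict(a, **b) idiom: a copy of a, extended and overridden by the keys of b. A minimal sketch with hypothetical values:

title = {'number': 'Number', 'owner': 'Owner'}        # hypothetical, built at config load
_pft = {'file': 'File', 'insertions': 'Insertions'}   # hypothetical, from the file-list style
fulltitle = dict(title, **_pft)
# {'insertions': 'Insertions', 'owner': 'Owner', 'number': 'Number', 'file': 'File'}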
Example no. 4
def dates_info(cf=CONF):
    '''
  Configuration start_date and end_date in the form yyyymmdd
  '''
    start_date, e = get_conf(cf, 'DATES', 'start_date', type=str)
    end_date, e = get_conf(cf, 'DATES', 'end_date', type=str)
    start_date = sjoin(start_date.split('-'), '')
    end_date = sjoin(end_date.split('-'), '')
    return dateu.parse_date(start_date), dateu.parse_date(end_date)
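The only string handling here is stripping the dashes from an ISO-style date before it is parsed. A minimal sketch, assuming sjoin is string.join and a hypothetical value read from the DATES section:

from string import join as sjoin

start_date = '2014-03-07'                  # hypothetical config value
print sjoin(start_date.split('-'), '')     # prints: 20140307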
Example no. 5
def dates_info(cf=CONF):
  '''
  Configuration start_date and end_date in the form yyyymmdd
  '''
  start_date,e = get_conf(cf,'DATES','start_date',type=str)
  end_date,e   = get_conf(cf,'DATES','end_date',type=str)
  start_date = sjoin(start_date.split('-'),'')
  end_date   = sjoin(end_date.split('-'),'')
  return dateu.parse_date(start_date),dateu.parse_date(end_date)
Example no. 6
def check_line(templet, tlist, mlist, output, opts):
    new_opts = copy.deepcopy(opts)
    check_mlist_with_key = filter(
        lambda x: x['msg'].find(templet['keywords']) >= 0, mlist)
    check_mlist_real = mlist if templet.get('keywords',
                                            '') == "" else check_mlist_with_key
    check_mlist = filter(lambda x: x['line'] in templet['rel_scope'],
                         check_mlist_real)
    new_opts.update({
        "key_repeated":
        len(check_mlist if templet.get('keywords', '') ==
            "" else check_mlist_with_key)
    })
    if len(templet['rel_scope']) > 1 and len(check_mlist) == 0:
        elog.error(
            str("{msg} [{ctg}] [{confidence}]" if debug_mode else "{msg}").
            format(msg="'{}' not found at context.".format(
                templet['keywords']),
                   ctg="scope/notfound",
                   confidence=7))
        return False
    pattern_check_result = filter(
        lambda x: x is not None,
        map(
            lambda x: check_line_with_pattern(x, templet, mlist, output,
                                              new_opts), check_mlist))
    if len(pattern_check_result) != 0:
        fields_output = list(
            set(
                reduce(
                    lambda x, y: x + y,
                    map(lambda x: x.get('field_output', ''),
                        pattern_check_result))))
        if len(fields_output) > 0:
            print sjoin(fields_output, ',')
    empty_templet = copy.deepcopy(templet)
    empty_templet.update({"message": {"line": -1}})
    check_result_list = map(
        lambda x: HandlerSetUp(x, output),
        list([empty_templet])
        if len(pattern_check_result) == 0 else pattern_check_result)
    # A template may match several msg lines, so re-check here against the scope and whether the check is mandatory
    max_confidence = max(
        map(lambda x: int(x["check_result"]["confidence"]), check_result_list))
    error_result = filter(
        lambda x: int(x["check_result"]["confidence"]) == max_confidence,
        check_result_list)[0]
    if max_confidence <= 6: return True
    elog.error(
        str("{msg} [{ctg}] [{confidence}]" if debug_mode else "{msg}").format(
            **error_result["check_result"]))
    return False
Example no. 7
 def _sumup(ret):
     l = []
     for i in ret.get('submitRecords', None)[0]['labels']:
         if i.get('status', None) == 'NEED':
             l.append(i.get('label', None))
     if l:
         return 'Wait For:' + sjoin(l)
     d = []
     if ret.get('dependsOn', None):
         for i in ret.get('dependsOn', None):
             if i.get('isCurrentPatchSet', None):
                 d.append(i.get('number', None))
     if d:
         return 'Wait For:' + sjoin(d)
     return ret.get('status', None)
Example no. 8
def parse_locations(files):
  '''Reads locations file(s)

     The first line of a locations file must be a label comment:
     #<label>, ex: #CITIES, #RIVERS
     Each following line should be:
     <name>, <country> <lat_deg> <lat_min> <N|S> <lon_deg> <lon_min> <E|W>
     (see the usage sketch after this example)
  '''

  if isinstance(files,basestring): files=[files]  

  locs=[]
  for c in files:
    f=open(c).readlines()
    for i in f:
      if i.strip().startswith('#'): continue
      tmp0=i.strip().split(',')
      name=tmp0[0].strip()

      tmp=tmp0[1].strip().split()
      country=tmp[1]

      for j in range(len(tmp)):
        if tmp[j].isdigit(): break

      country=sjoin(tmp[:j])

      lat=float(tmp[j])+float(tmp[j+1])/60.
      slat=tmp[j+2]
      if slat=='S': lat=-lat
      lon=float(tmp[j+3])+float(tmp[j+4])/60.
      slon=tmp[j+5]
      if slon=='W': lon=-lon

      locs+=[{'name': name,'country':country,'lon':lon,'lat':lat}]

  return locs 
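A minimal usage sketch for parse_locations, with a hypothetical locations file whose coordinates are written as degrees, minutes and a hemisphere letter (the layout the parser above expects):

# contents of a hypothetical cities.txt:
#   #CITIES
#   Lisbon, Portugal 38 43 N 9 8 W
#   Porto Alegre, Brazil 30 2 S 51 13 W

locs = parse_locations('cities.txt')
# -> [{'name': 'Lisbon', 'country': 'Portugal', 'lon': -9.13..., 'lat': 38.71...},
#     {'name': 'Porto Alegre', 'country': 'Brazil', 'lon': -51.21..., 'lat': -30.03...}]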
Example no. 9
 def render_a2s(self, string):
     p = Popen(['php', '/usr/share/asciitosvg/a2s'],
               stdout=PIPE, stdin=PIPE)
                 # , stderr=STDOUT)
     # r = '<svg width="237px" height="45px" version="1.1">'
     r = sjoin(p.communicate(input=string)[0].split('\n')[4:], '\n')
     return r
Example no. 10
    def gen_log(f, L):
        i = open(f, 'w')
        keys = L.keys()
        keys.sort()
        for d in keys:
            scontents = sjoin(L[d], ' + ')
            i.write('%s %s\n' % (d, scontents))

        i.close()
Example no. 11
  def gen_log(f,L):
    i=open(f,'w')
    keys=L.keys()
    keys.sort()
    for d in keys:
      scontents=sjoin(L[d],' + ')
      i.write('%s %s\n' % (d,scontents))

    i.close()
Example no. 12
def query():
  cmd = []
  cmd.append("ssh {}".format("192.168.9.142"))
  cmd.append("-p {}".format("29448"))
  cmd.append("-l {}".format("lutx0528"))
  cmd.append("gerrit query --current-patch-set")
  cmd.append("--format={}".format("JSON"))
  cmd.append('"{}"'.format('826'))
  print sjoin(cmd)
  changes = []
  try:
    p = Popen(sjoin(cmd),shell=True,stdout=PIPE,stderr=PIPE)
    _out,_err = p.communicate()
    _ret_objects = filter(lambda x:x.get('project',None)
      ,map(lambda x:encodeJson(json.loads(x,"UTF-8")),_out.splitlines()))
    changes = map(lambda x:changeAttribute('change',x),_ret_objects)
  except Exception as e:
    print e
  return changes
Example no. 13
def do_query(querydict=None, limit=-1):
    _limit = limit
    if _limit == 0: return
    current_ret_len = -1
    current_sortkey = None
    total_ret_len = 0
    query_str = None
    try:
        _vcmd = ['ssh', '-p', _PORT, '-l', _USER, _HOST, 'gerrit', 'version']
        p = Popen(sjoin(_vcmd), shell=True, stdout=PIPE, stderr=PIPE)
        _out, _err = p.communicate()
        _vstr = _out.replace('\n', '').split(' ')[2]
        if _vstr not in _version:
            raise Exception("version {} not have handler".format(_vstr))
    except Exception as e:
        logger.error("do_query[0]{}".format(e))
    while (current_ret_len != 0 or total_ret_len == _limit):
        vf = _version.get(_vstr, None)
        query_str = sjoin(globals()[vf](querydict, total_ret_len,
                                        current_sortkey))
        try:
            p = Popen(query_str, shell=True, stdout=PIPE, stderr=PIPE)
            _out, _err = p.communicate()
            query_ret_list = _out.splitlines()
            query_ret_json = map(lambda x: json.loads(x, "UTF-8"),
                                 query_ret_list)
            ret_stat = filter(lambda x: x.get('rowCount', None),
                              query_ret_json)
            if len(ret_stat) == 0: return
            ret_objs = filter(lambda x: x.get('project', None), query_ret_json)
            map(entry_handler, ret_objs)
            current_ret_len = len(ret_objs)
            tmp_objs = ret_objs
            tmp_objs.reverse()
            tmp_obj = tmp_objs.pop(0)
            current_sortkey = tmp_obj.get('sortKey', None)
            if total_ret_len is None: total_ret_len = 0
            total_ret_len = total_ret_len + current_ret_len
        except Exception as e:
            logger.error("do_query[x]{}".format(e))
            break
Example no. 14
def query():
    cmd = []
    cmd.append("ssh {}".format("192.168.9.142"))
    cmd.append("-p {}".format("29448"))
    cmd.append("-l {}".format("?????"))
    cmd.append("gerrit query --current-patch-set")
    cmd.append("--format={}".format("JSON"))
    cmd.append('"{}"'.format('826'))
    print sjoin(cmd)
    changes = []
    try:
        p = Popen(sjoin(cmd), shell=True, stdout=PIPE, stderr=PIPE)
        _out, _err = p.communicate()
        _ret_objects = filter(
            lambda x: x.get('project', None),
            map(lambda x: encodeJson(json.loads(x, "UTF-8")),
                _out.splitlines()))
        changes = map(lambda x: changeAttribute('change', x), _ret_objects)
    except Exception as e:
        print e
    return changes
Example no. 15
def get_version_info():
    row={}
    row['os'] = _get_os()
    row['nvidia'] = _get_nvrm()
    row['cuda_device'] = _get_cudadev()
    row['cuda_toolkit'] = _get_cudatoolkit()
    row['gcc'] = _get_gcc()
    row['python'] = PYVERSION.split()[0]
    row['numpy'] = numpy_version.version
    row['pycuda'] = _get_pycuda()
    row['pytables'] = getPyTablesVersion()
    row['code_git'] = sjoin( _gitVersionDetector(), ':')
    return row
Example no. 16
def _test_genParams():
    from string import join as sjoin
    print ' -----------------------------------------------------------------------'
    print ' |   blk,thd:: |    bestBlk  |  px_per_blk |    bstThd   |  px_per_thd | '
    print ' -----------------------------------------------------------------------'
    for blocks in range(1,2049):
        for threads in range(1,1025):
            try:
                ps = genParameters(blocks, threads, [2000,1000], silent=True, useOrigChk=False)
                ps = (ps[0], ps[2], ps[1], ps[3])
                psOut = [ '%4d x %4d'%(arr[0], arr[1]) for arr in ps ]
                print ' |%4d, %4d:: | %s |'%(blocks, threads, sjoin(psOut, ' | '))
            except ValueError:
                pass
Example no. 17
def CheckProgram(ctxt,progname,varname,moredirs=[],critical=1):
  ctxt.Message("Checking for Program " + progname + "...")
  if ctxt.env[varname] is not None and exists(ctxt.env[varname]):
    ret = 1
  else:
    paths = sjoin(moredirs,':') + ':' + ctxt.env['ENV']['PATH'] 
    fname = ctxt.env.WhereIs(progname,paths)
    ret = fname != None
    if ret:
      ctxt.env[varname] = fname
    else:
      ctxt.env[varname] = None
  ctxt.Result(ret)
  if critical and not ret:
    print "Required Program '" + progname + "' is missing."
    ctxt.env.Exit(1)
  return ret
Example no. 18
    def __config2cp(self):
        config = ConfigParser.RawConfigParser(dict_type=OrderedDict)

        keys = self.config.keys()
        keys.sort()
        print keys
        for k in keys:
            if k.find('.') > 0:
                tmp = k.split('.')
                section = tmp[0]
                name = sjoin(tmp[1:], '')
            else:
                section = 'unknown'
                name = k

            if not config.has_section(section):
                config.add_section(section)
                print 'adding sec ', section
            config.set(section, name, self.config[k])

        self.cp = config
Example no. 19
  def __config2cp(self):
    config=ConfigParser.RawConfigParser(dict_type=OrderedDict)


    keys=self.config.keys()
    keys.sort()
    for k in keys:
      if k.find('.')>0:
         tmp=k.split('.')
         section=tmp[0]
         name=sjoin(tmp[1:],'')
      else:
         section='unknown'
         name=k

      if not config.has_section(section):
        config.add_section(section)
        #print 'adding sec ',section
      config.set(section,name,self.config[k])

    self.cp=config
Example no. 20
def use(filename,varname,interface='auto',**kargs):
  nc,close=__open(filename,interface)

  if varname not in nc.varnames: return

  v=nc.vars[varname]
  shape=v.shape()

  if not shape:
    try: return v[:][0] # may be needed for dtype='|S1'
    except: return v[:]


  d=v.dims
  dimStr=[':' for i in d.keys()]
  for k in kargs.keys():
    dval=kargs[k]

    if k.isdigit(): k=d.keys()[int(k)] # allow use dim indice
    elif k.startswith('SEARCH'):  # allow *dimname or dimname*
      kk=k[len('SEARCH'):]
      for dn in nc.vars[varname].dimnames:
        if dn.find(kk)>=0:
          k=dn
          break
    elif k.endswith('SEARCH'):
      kk=k[:-len('SEARCH')]
      for dn in nc.vars[varname].dimnames:
        if dn.find(kk)>=0:
          k=dn
          break

    if k in d.keys():
      i=d.index(k)
      if isinstance(dval,basestring):
        dimStr[i]=dval
      elif isinstance(dval,int):
        if dval<0: dval=shape[i]+dval
        if  dval>shape[i]-1:
          print ':: max allowed '+k+' = '+str(shape[i]-1)
          return
        else:
          dimStr[i]=str(dval)
      elif calc.isiterable(dval):
        exec(k+'_val=dval')
        dimStr[i]=k+'_val'
      else:
        dimStr[i]=str(dval)

  cmd='res=v['+sjoin(dimStr,',')+']'
  exec(cmd)

  if interface in ('pycdf','scientific'):
    # about missing value:
    miss=False
    if   '_FillValue'    in v.attnames: miss = v.atts['_FillValue']['value']
    elif 'missing_value' in v.attnames: miss = v.atts['missing_value']['value']
    maskMissing=kargs.get('maskMissing',True)
    if not miss is False and maskMissing and v.nctype()!='STRING':
      res=np.ma.masked_where(res==miss,res)

    ## ensure strings have no mask:
    #if v.nctype()=='STRING' and np.ma.isMA(res):
    #  res=np.array(res)


    # about scale and offset:
    if v.nctype()!='STRING':
      scale=1
      offset=0
      if 'scale_factor' in v.attnames: scale=v.atts['scale_factor']['value']
      if 'add_offset'   in v.attnames: offset=v.atts['add_offset']['value']
      if (scale,offset)!=(1,0): res = res*scale + offset

  if close: nc.close()

  if 1 in res.shape and res.ndim>1: res=np.squeeze(res)

  # mask nan
  maskNaN=kargs.get('maskNaN',True)
  if maskNaN and not res.dtype.type==np.string_ and not np.ma.isMA(res) and np.any(np.isnan(res)):
    res=np.ma.masked_where(np.isnan(res),res)

  return res
Example no. 21
  def load(self):
    if not self.logfile:
      self.rout=False
      return False

    f=open(self.logfile)
    L=f.readlines()

    # tend and dt only written at the end of model run, so give some
    # invalid values:
    self.tend = -1
    self.tend_str = 'not ended'
    self.dt   = -1

    for i in range(len(L)-1):
      k=L[i].strip()
      val=L[i+1].strip()
      if k.find('[job/process id]')==0: self.pid      = val
      elif k.find('[queue/local]')==0:  self.job_type = val
#     elif k.find('[date]')==0:         self.date     = val
#     elif k.find('[FA]')==0:           self.FA       = val
      elif k.find('[tstart]')==0:       self.tstart,self.tstart_str  = int(val.split()[0]),sjoin(val.split()[1:])
      elif k.find('[tend]')==0:         self.tend,self.tend_str  = int(val.split()[0]),sjoin(val.split()[1:])
      elif k.find('[dt (min)]')==0:     self.dt=float(val)

    self.rout=opt.nameof('out','rout',date=self.date,FA=self.FA,cf=self.conf)

    return True
Example no. 22
def event_design(event_spec, t, order=2, hrfs=[glover]):
    """
    Create a design matrix for a GLM analysis based
    on an event specification, evaluating
    it a sequence of time values. Each column
    in the design matrix will be convolved with each HRF in hrfs.

    Parameters
    ----------

    event_spec : np.recarray
        A recarray having at least a field named 'time' signifying
        the event time, and all other fields will be treated as factors
        in an ANOVA-type model.

    t : np.ndarray
        An array of np.float values at which to evaluate
        the design. Common examples would be the acquisition
        times of an fMRI image.

    order : int
        The highest order interaction to be considered in
        constructing the contrast matrices.

    hrfs : seq
        A sequence of (symbolic) HRF that will be convolved
        with each event. If empty, glover is used.

    Outputs 
    -------
    
    X : np.ndarray
        The design matrix with X.shape[0] == t.shape[0]. The number
        of columns will depend on the other fields of event_spec.

    contrasts : dict
        Dictionary of contrasts that is expected to be of interest
        from the event specification. For each interaction / effect
        up to a given order will be returned. Also, a contrast
        is generated for each interaction / effect for each HRF
        specified in hrfs.
    
    """

    fields = list(event_spec.dtype.names)
    if 'time' not in fields:
        raise ValueError('expecting a field called "time"')

    fields.pop(fields.index('time'))
    e_factors = [formula.Factor(n, np.unique(event_spec[n])) for n in fields]
    
    e_formula = np.product(e_factors)

    e_contrasts = {}
    if len(e_factors) > 1:
        for i in range(1, order+1):
            for comb in combinations(zip(fields, e_factors), i):
                names = [c[0] for c in comb]
                fs = [c[1].main_effect for c in comb]
                e_contrasts[sjoin(names, ':')] = np.product(fs).design(event_spec)

    e_contrasts['constant'] = formula.I.design(event_spec)

    # Design and contrasts in event space
    # TODO: make it so I don't have to call design twice here
    # to get both the contrasts and the e_X matrix as a recarray

    e_X = e_formula.design(event_spec)
    e_dtype = e_formula.dtype

    # Now construct the design in time space

    t_terms = []
    t_contrasts = {}
    for l, h in enumerate(hrfs):
        t_terms += [events(event_spec['time'], \
            amplitudes=e_X[n], f=h) for i, n in enumerate(e_dtype.names)]
        for n, c in e_contrasts.items():
            t_contrasts["%s_%d" % (n, l)] = formula.Formula([ \
                 events(event_spec['time'], amplitudes=c[nn], f=h) for i, nn in enumerate(c.dtype.names)])
    t_formula = formula.Formula(t_terms)
    
    tval = formula.make_recarray(t, ['t'])
    X_t, c_t = t_formula.design(tval, contrasts=t_contrasts)
    return X_t, c_t
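Here sjoin only builds contrast names such as 'factor1:factor2'. For context, a minimal sketch of how event_design might be called, assuming the surrounding formula/hrf machinery is importable; the event data below are hypothetical:

import numpy as np

# hypothetical event specification: onset times plus one experimental factor
event_spec = np.array([(2.0, 'face'), (6.0, 'house'), (10.0, 'face')],
                      dtype=[('time', np.float64), ('stimulus', 'S5')])
t = np.arange(0.0, 20.0, 2.0)   # hypothetical acquisition times

X, contrasts = event_design(event_spec, t, order=1)
# X has one row per element of t; contrasts maps names like 'constant_0'
# (and, with several factors, 'fac1:fac2_0') to the corresponding contrasts.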
Example no. 23
def _get_os():
    _os = _doItLineNo('uname -a', 0).split()
    _os = sjoin( _os[2:5], ':') + ":" + _os[11]
    return _os
Example no. 24
def _get_cudadev():
    dev = _doItLineNo('cat /proc/driver/nvidia/gpus/0/information ', 0)
    return sjoin(  dev.split()[1:], ' ')
Example no. 25
File: netcdf.py Project: jcmt/okean
def use(filename, varname, interface="auto", **kargs):
    nc, close = __open(filename, interface)

    if varname not in nc.varnames:
        return

    v = nc.vars[varname]
    shape = v.shape()

    if not shape:
        try:
            return v[:][0]  # may be needed for dtype='|S1'
        except:
            return v[:]

    d = v.dims
    dimStr = [":" for i in d.keys()]
    for k in kargs.keys():
        dval = kargs[k]

        if k.isdigit():
            k = d.keys()[int(k)]  # allow use dim indice
        elif k.startswith("SEARCH"):  # allow *dimname or dimname*
            kk = k[len("SEARCH") :]
            for dn in nc.vars[varname].dimnames:
                if dn.find(kk) >= 0:
                    k = dn
                    break
        elif k.endswith("SEARCH"):
            kk = k[: -len("SEARCH")]
            for dn in nc.vars[varname].dimnames:
                if dn.find(kk) >= 0:
                    k = dn
                    break

        if k in d.keys():
            i = d.index(k)
            if isinstance(dval, basestring):
                dimStr[i] = dval
            elif isinstance(dval, int):
                if dval < 0:
                    dval = shape[i] + dval
                if dval > shape[i] - 1:
                    print ":: max allowed " + k + " = " + str(shape[i] - 1)
                    return
                else:
                    dimStr[i] = str(dval)
            elif calc.isiterable(dval):
                exec (k + "_val=dval")
                dimStr[i] = k + "_val"
            else:
                dimStr[i] = str(dval)

    cmd = "res=v[" + sjoin(dimStr, ",") + "]"
    exec (cmd)

    if interface in ("pycdf", "scientific"):
        # about missing value:
        miss = False
        if "_FillValue" in v.attnames:
            miss = v.atts["_FillValue"]["value"]
        elif "missing_value" in v.attnames:
            miss = v.atts["missing_value"]["value"]
        maskMissing = kargs.get("maskMissing", True)
        if not miss is False and maskMissing and v.nctype() != "STRING":
            res = np.ma.masked_where(res == miss, res)

        ## ensure strings have no mask:
        # if v.nctype()=='STRING' and np.ma.isMA(res):
        #  res=np.array(res)

        # about scale and offset:
        if v.nctype() != "STRING":
            scale = 1
            offset = 0
            if "scale_factor" in v.attnames:
                scale = v.atts["scale_factor"]["value"]
            if "add_offset" in v.attnames:
                offset = v.atts["add_offset"]["value"]
            if (scale, offset) != (1, 0):
                res = res * scale + offset

    if close:
        nc.close()

    if 1 in res.shape and res.ndim > 1:
        res = np.squeeze(res)

    # mask nan
    maskNaN = kargs.get("maskNaN", True)
    if maskNaN and not res.dtype.type == np.string_ and not np.ma.isMA(res) and np.any(np.isnan(res)):
        res = np.ma.masked_where(np.isnan(res), res)

    return res