Example No. 1
    def run(self):  
        # initialize windows side   
        tosdb.admin_init(self._addr, AINIT_TIMEOUT)   
        tosdb.vinit(root=self._dll_root)

        # create block
        blk = tosdb.VTOSDB_DataBlock(self._addr, BLOCK_SIZE, date_time=True)
        blk.add_items(*(self._symbols))
        blk.add_topics('last')
        if args.vol:
            blk.add_topics('volume')
     
        # generate filename             
        dprfx = _strftime("%Y%m%d", _localtime())
        isec = int(self._intrvl * 60)  
        for s in self._symbols:
            #
            # create GetOnTimeInterval object for each symbol
            # 
            p = self._out_dir + '/' + dprfx + '_' \
                + s.replace('/','-S-').replace('$','-D-').replace('.','-P-') \
                +'_' + _val_type + '_' + str(self._intrvl) + 'min.tosdb'           
            iobj = _Goti.send_to_file(blk, s, p, _TI.vals[isec], isec/10)
            print(repr(iobj), file=_stderr)
            self._iobjs.append(iobj)

        for i in self._iobjs:
            if i:
                i.join()

        while True:  # keep the daemon process alive
            _sleep(10)
Example No. 2
 def __init__(self, addr, auth, outdir, pidfile, errorfile, interval, itype,
              symbols):
     _Daemon.__init__(self, pidfile, stderr=errorfile)
     self._addr = addr
     self._auth = auth
     self._outdir = _path(outdir)
     self._interval = interval
     self._has_vol = 'V' in itype
     self._is_ohlc = 'OHLC' in itype
     self._symbols = symbols
     self._iobj = None
     # date prefix for filename
     dprfx = _strftime("%Y%m%d", _localtime())
     # generate paths from filenames
     self._paths = {s.upper() : (_path(self._outdir) + '/' + dprfx + '_' \
                                + s.replace('/','-S-').replace('$','-D-').replace('.','-P-') \
                                + '_' + itype + '_' + str(self._interval) + 'sec.tosdb') \
                                  for s in self._symbols}
     # create callback object
     if self._has_vol:
         self._callback = _ohlcv_callbacks._Matcher(
             'ohlc' if self._is_ohlc else 'c', self._write)
     else:
         l = (lambda o: str((o.o, o.h, o.l, o.c))) if self._is_ohlc else (
             lambda o: str(o.c))
         self._callback = _ohlcv_callbacks._Basic(l, self._write)
Example No. 3
def spawn(dllroot,outdir,intrvl,val_type,*symbols):   
    if val_type not in ['OHLCV','OHLC','CV','C']:
        raise ValueError("invalid val_type (OHLCV,OHLC,CV or C)")

    exc_cmd = "from tosdb.intervalize import " + CLS_BASE + val_type + " as _Goti"
    exec(exc_cmd, globals())
    
    tosdb.init(root=dllroot)
    
    # create block
    blk = tosdb.TOSDB_DataBlock(BLOCK_SIZE, date_time=True)
    blk.add_items(*(symbols))
    blk.add_topics('last')
    if 'V' in val_type:
        blk.add_topics('volume')
        
    # generate filename          
    dprfx = _strftime("%Y%m%d", _localtime())
    isec = int(intrvl * 60)  
    iobjs = list()

    for s in symbols:
        #
        # create GetOnTimeInterval object for each symbol
        # 
        p = _path(outdir) + '/' + dprfx + '_' \
            + s.replace('/','-S-').replace('$','-D-').replace('.','-P-') \
            + '_' + val_type + '_' + str(intrvl) + 'min.tosdb'       
        iobj = _Goti.send_to_file(blk, s, p, getattr(_TI,_TI.val_dict[isec]), isec/10)
        print( repr(iobj) )
        iobjs.append( iobj )
    
    return iobjs
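
A minimal call sketch for the spawn() helper above (the DLL root, output directory, and symbols are hypothetical; it assumes the TOS platform is running and tosdb can be initialized as shown in the example):

# hypothetical invocation: 5-minute OHLCV intervals for two symbols
iobjs = spawn('C:/TOSDataBridge/bin', './tosdb-out', 5, 'OHLCV', 'SPY', 'QQQ')
# the returned GetOnTimeInterval objects can be joined later, as in Example No. 1
for iobj in iobjs:
    iobj.join()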
Example No. 4
def spawn(dllroot,outdir,intrvl,val_type,*symbols):
   
    if val_type not in ['OHLCV','OHLC','CV','C']:
         raise ValueError("invalid val_type (OHLCV,OHLC,CV or C)")

    exec( "from tosdb.intervalize import " + CLS_BASE + val_type + " as _Goti",
          globals() )
    
    tosdb.init( root=dllroot )
    
    # create block
    blk = tosdb.TOSDB_DataBlock( BLOCK_SIZE, date_time=True)
    blk.add_items( *(symbols) )
    blk.add_topics( 'last' )
    if 'V' in val_type:
        blk.add_topics( 'volume' )
        
    # generate filename                
    dprfx = _strftime("%Y%m%d", _localtime())
    isec = int(intrvl * 60)    
    iobjs = list()
    for s in symbols:
        #
        # create GetOnTimeInterval object for each symbol
        # 
        p = _path(outdir) + '/' + dprfx + '_' + \
                s.replace('/','-S-').replace('$','-D-').replace('.','-P-') + \
                '_' + val_type + '_' + str(intrvl) + 'min.tosdb'           
        iobj = _Goti.send_to_file( blk, s, p, getattr(_TI,_TI.val_dict[ isec ]),
                                   isec/10)
        print( repr(iobj) )
        iobjs.append( iobj )
    
    return iobjs
Example No. 5
def log_conn(msg, addr, file=_stdout, **info):
    t = _strftime("%m/%d/%Y %H:%M:%S")
    msg = msg[:40].ljust(40)  
    s = "+" if file == _stdout else "-"
    print(s, t, " " + s + " ", msg, str(addr), file=file)
    for k in info:
        print('    ', k + ":", str(info[k]), file=file)
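
A hypothetical call illustrating log_conn() above (the address and keyword fields are invented); passing a stream other than _stdout via file switches the '+' prefix to '-':

log_conn("connection accepted", ('127.0.0.1', 55555), user='alice', version='0.9')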
Example No. 6
    def run(self):
        # initialize windows side
        tosdb.admin_init(self._addr, AINIT_TIMEOUT)
        tosdb.vinit(root=self._dll_root)

        # create block
        blk = tosdb.VTOSDB_DataBlock(self._addr, BLOCK_SIZE, date_time=True)
        blk.add_items(*(self._symbols))
        blk.add_topics('last')
        if args.vol:
            blk.add_topics('volume')

        # generate filename
        dprfx = _strftime("%Y%m%d", _localtime())
        isec = int(self._intrvl * 60)
        for s in self._symbols:
            #
            # create GetOnTimeInterval object for each symbol
            #
            p = self._out_dir + '/' + dprfx + '_' + \
                    s.replace('/','-S-').replace('$','-D-').replace('.','-P-') + \
                    '_' + _val_type + '_' + str(self._intrvl) + 'min.tosdb'
            iobj = _Goti.send_to_file(blk, s, p, _TI.vals[isec], isec / 10)
            print(repr(iobj), file=_stderr)
            self._iobjs.append(iobj)

        for i in self._iobjs:
            if i:
                i.join()

        while True:
            _sleep(10)
Example No. 7
def edit_file(time: tuple, directory=_todo_conf.get_directory()):
    filepath = _path.join(directory, ''.join((_strftime('%Y_%m_%d',
                                                        time), '.md')))
    try:
        create_file(time, directory)
    except FileExistsError:
        pass
    _call(['editor', filepath])
Example No. 8
 def addLog(self):
     argvs = _sys.argv
     command = "%s " % _strftime("%B-%d-%Y %H:%M", _gmtime())
     for ii in range(len(argvs)):
         command = command + " " + argvs[ii]
     # end for
     self.comments.append("   ")
     self.comments.append(command)
Example No. 9
def encrypt(in_file,
            passwd='',
            cipher='seed',
            iter=0,
            out_file=_strftime('enc_%y%m%d_%H%M%S'),
            pbkdf2=False,
            b64=False):

    # validate inputs

    if (not _isfile(in_file)):
        return {"status": "negative", "error": f"file not found: {in_file}"}

    if not (isinstance(iter, int) and iter >= 0):
        return {"status": "negative", "error": "Iter: invalid iteration count"}

    if (cipher not in CIPHERS):
        return {"status": "negative", "error": "Cipher: Invalied Cipher"}

    if (len(out_file) == 0):
        return {"status": "negative", "error": "OutFile: Nofile given"}

    # Proceed

    const_string = f'openssl {cipher} -in {in_file} -out {out_file} -k "{passwd}"'

    if (iter > 0):
        const_string += f' -iter {str(iter)}'

    if (pbkdf2):
        const_string += ' -pbkdf2'

    if (b64):
        const_string += ' -a'

    # exec

    _system(const_string)

    return {
        "status": "positive",
        "cipher": cipher,
        "in_file": in_file,
        "out_file": out_file,
        "iter": iter,
        "passwd": "*" * len(passwd),
        "pbkdf2": pbkdf2,
        "base64": b64
    }
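
A usage sketch for encrypt() above; the input file name is hypothetical, and the call assumes the openssl binary is on PATH and that the default 'seed' cipher is present in CIPHERS:

result = encrypt('notes.txt', passwd='s3cret', iter=1000, pbkdf2=True, b64=True)
if result['status'] == 'positive':
    print('encrypted to', result['out_file'])
else:
    print('failed:', result['error'])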
Example No. 10
    def convert2to1(self, source, idx=1, idz=[1]):
        idy = 2
        if idx == 2:
            idy = 1
        # end if
        nx = source.DimSize[idx - 1]
        ny = source.DimSize[idy - 1]
        nz = len(idz)
        self.new(dimno=1, valno=ny * nz, dimsize=[nx])
        self.Name = source.Name
        self.ShotNo = source.ShotNo
        self.SubNo = source.SubNo
        self.Date = _strftime("%m/%d/%Y %H:%M", _gmtime())
        self.DimName = [source.DimName[idx - 1]]
        self.DimUnit = [source.DimUnit[idx - 1]]
        self.ValName = []
        self.ValUnit = []
        self.comments = source.comments
        if idx == 1:
            xx = source.getDimData(dimid=idx - 1)
            yy = source.getDimData(dimid=idy - 1)
            for ii in range(nz):
                self.data[:, ii + 1::nz] = source.getValData(valid=idz[ii] - 1)
            # end for
        else:
            xx = source.getDimData(dimid=idx).transpose()
            yy = source.getDimData(dimid=idy).transpose()
            for ii in range(nz):
                self.data[:, ii + 1::nz] = source.getValData(valid=idz[ii] -
                                                             1).transpose()
            # end for
        # end if

        self.data[:, 0] = xx[:, 0]
        for j in range(ny):
            for ii in range(nz):
                valname = source.ValName[idz[ii] - 1] + "@%.6E" % yy[
                    0, j] + "(%s)" % source.DimUnit[idy - 1]
                self.ValName.append(valname)
                self.ValUnit.append(source.ValUnit[idz[ii] - 1])
Example No. 11
 def __init__(self, addr, auth, outdir, pidfile, errorfile, interval, itype, symbols):
     _Daemon.__init__(self, pidfile, stderr = errorfile)
     self._addr = addr    
     self._auth = auth
     self._outdir = _path(outdir)
     self._interval = interval
     self._has_vol = 'V' in itype
     self._is_ohlc = 'OHLC' in itype
     self._symbols = symbols   
     self._iobj = None     
     # date prefix for filename          
     dprfx = _strftime("%Y%m%d", _localtime())
     # generate paths from filenames
     self._paths = {s.upper() : (_path(self._outdir) + '/' + dprfx + '_' \
                                + s.replace('/','-S-').replace('$','-D-').replace('.','-P-') \
                                + '_' + itype + '_' + str(self._interval) + 'sec.tosdb') \
                                  for s in self._symbols}
     # create callback object
     if self._has_vol:            
         self._callback = _ohlcv_callbacks._Matcher('ohlc' if self._is_ohlc else 'c', self._write)
     else:
         l = (lambda o: str((o.o, o.h, o.l, o.c))) if self._is_ohlc else (lambda o: str(o.c))
         self._callback = _ohlcv_callbacks._Basic(l, self._write)       
Example No. 12
    def convert1to2(self,
                    source,
                    ydata,
                    yname="",
                    yunit="",
                    zname="",
                    zunit=""):
        self.new(dimno=2, valno=1, dimsize=[source.DimSize[0], source.ValNo])
        self.Name = source.Name
        self.ShotNo = source.ShotNo
        self.SubNo = source.SubNo
        self.Date = _strftime("%m/%d/%Y %H:%M", _gmtime())
        self.DimName.append(source.DimName)
        self.DimName.append(yname)
        self.DimUnit.append(source.DimUnit)
        self.DimUnit.append(yunit)
        self.ValName.append(zname)
        self.ValUnit.append(zunit)
        self.comments = source.comments

        xx, yy = _np.meshgrid(source.data[:, 0], ydata)
        self.data[:, 0] = xx.reshape(_np.prod(self.DimSize), order='F')
        self.data[:, 1] = yy.reshape(_np.prod(self.DimSize), order='F')
        self.data[:, 2] = source.data[:, 1:].reshape(_np.prod(self.DimSize))
Example No. 13
def spawn(outdir, interval, is_ohlc, has_vol, *symbols):
    global _paths

    itype = ("OHLC" if is_ohlc else "C") + ("V" if has_vol else "")    
    # date prefix for filename                    
    dprfx = _strftime("%Y%m%d", _localtime())
    # generate paths from filenames
    _paths = {s.upper() : (_path(outdir) + '/' + dprfx + '_' \
                          + s.replace('/','-S-').replace('$','-D-').replace('.','-P-') \
                          + '_' + itype + '_' + str(interval) + 'sec.tosdb')
                          for s in symbols}  

    # create callback object
    if has_vol:
        callback = _ohlcv_callbacks._Matcher('ohlc' if is_ohlc else 'c', _write)
    else:
        l = (lambda o: str((o.o, o.h, o.l, o.c))) if is_ohlc else (lambda o: str(o.c))
        callback = _ohlcv_callbacks._Basic(l, _write)   
  
    # create block
    blk = tosdb.TOSDB_ThreadSafeDataBlock(BLOCK_SIZE, date_time=True)
    blk.add_items(*(symbols))
    blk.add_topics('last')        
    if has_vol:
        blk.add_topics('volume')  
     
    # create interval object
    IObj = OHLCIntervals if is_ohlc else CIntervals
    iobj = IObj(blk, interval, interval_cb=callback.callback)
    try:
        while iobj.running():
            _sleep(1)
    except:
        iobj.stop()
    finally:
        blk.close()     
Example No. 14
 def strftime(self, frmt):
     return _strftime(frmt,
                      self._tfunc(self._intervals_since_epoch * self._isec))
Example No. 15
 def __str__( self ):
     return _strftime( "%m/%d/%y %H:%M:%S", 
                       _localtime( self._mktime ) ) + " " + str(self.micro)
Example No. 16
def create_file(time: tuple, directory=_todo_conf.get_directory()):
    filepath = _path.join(directory, ''.join((_strftime('%Y_%m_%d',
                                                        time), '.md')))
    with open(filepath, mode='x') as f:
        f.write(''.join(('# Séance du ', _strftime('%Y-%m-%d', time))))
Example No. 17
def commit_all(time=None, directory=_todo_conf.get_directory()):
    if time is not None:
        filepath = ''.join((_strftime('%Y_%m_%d', time), '.md'))
        _call(['git', '-C', directory, 'add', filepath])
    _call(['git', '-C', directory, 'commit', '-a'])
Example No. 18
	def time(self):
		from time import strftime as _strftime
		return _strftime("%H:%M:%S")
Example No. 19
 def strftime(format, t=None):
     return py2_decode(_strftime(py2_encode(format)))
Example No. 20
 def __str__(self):
     ftime = _strftime("%m/%d/%y %H:%M:%S", _localtime(self._mktime))
     return ftime + " " + str(self.micro)
Example No. 21
File: lhd_io.py Project: gmweir/IO
    def writeHeader(self, filename=None):
        self.Date = _strftime("%m/%d/%Y %H:%M", _gmtime())
        for ii in range(self.DimNo):
            if ii == 0:
                dimname = "'%s'" % self.DimName[ii]
                dimsize = "%d" % self.DimSize[ii]
                dimunit = "'%s'" % self.DimUnit[ii]
            else:
                dimname = dimname + ", '%s'" % self.DimName[ii]
                dimsize = dimsize + ", %d" % self.DimSize[ii]
                dimunit = dimunit + ", '%s'" % self.DimUnit[ii]
            # end if
        # end for range dimensions

        for ii in range(self.ValNo):
            if ii == 0:
                valname = "'%s'" % self.ValName[ii]
                valunit = "'%s'" % self.ValUnit[ii]
            else:
                valname = valname + ", '%s'" % self.ValName[ii]
                valunit = valunit + ", '%s'" % self.ValUnit[ii]
            # end if
        # end for in range value number

        if filename is None:
            """printing to stdout"""
            print("# [Parameters]")
            print("# Name = '%s'" % self.Name)
            print("# ShotNo = %d" % self.ShotNo)
            print("# SubNo = %d" % self.SubNo)
            print("# Date = '%s'" % self.Date)
            print("#")
            print("# DimNo = %d" % self.DimNo)
            print("# DimName = %s" % dimname)
            print("# DimSize = %s" % dimsize)
            print("# DimUnit = %s" % dimunit)
            print("#")
            print("# ValNo = %d" % self.ValNo)
            print("# ValName = %s" % valname)
            print("# ValUnit = %s" % valunit)
            print("#")
            print("# [Comments]")
            for line in self.comments:
                print("# %s" % line)
            print("#")
            print("# [Data]")
        else:
            """ printting into a file """
            try:
                with open(filename, 'w') as fp:
                    print("# [Parameters]", file=fp)
                    print("# Name = '%s'" % self.Name, file=fp)
                    print("# ShotNo = %d" % self.ShotNo, file=fp)
                    print("# SubNo = %d" % self.SubNo, file=fp)
                    print("# Date = '%s'" % self.Date, file=fp)
                    print("#", file=fp)
                    print("# DimNo = %d" % self.DimNo, file=fp)
                    print("# DimName = %s" % dimname, file=fp)
                    print("# DimSize = %s" % dimsize, file=fp)
                    print("# DimUnit = %s" % dimunit, file=fp)
                    print("#", file=fp)
                    print("# ValNo = %d" % self.ValNo, file=fp)
                    print("# ValName = %s" % valname, file=fp)
                    print("# ValUnit = %s" % valunit, file=fp)
                    print("#", file=fp)
                    print("# [Comments]", file=fp)
                    for line in self.comments:
                        print("# %s" % line, file=fp)
                    print("# filename = %s (written by egDataFormatIO)" %
                          filename,
                          file=fp)
                    print("#", file=fp)
                    print("# [Data]", file=fp)
                # end with open filename
            except:
                """ python2: when the with_statement fails """
                fp = open(filename, 'w')
                print("# [Parameters]", file=fp)
                print("# Name = '%s'" % self.Name, file=fp)
                print("# ShotNo = %d" % self.ShotNo, file=fp)
                print("# SubNo = %d" % self.SubNo, file=fp)
                print("# Date = '%s'" % self.Date, file=fp)
                print("#", file=fp)
                print("# DimNo = %d" % self.DimNo, file=fp)
                print("# DimName = %s" % dimname, file=fp)
                print("# DimSize = %s" % dimsize, file=fp)
                print("# DimUnit = %s" % dimunit, file=fp)
                print("#", file=fp)
                print("# ValNo = %d" % self.ValNo, file=fp)
                print("# ValName = %s" % valname, file=fp)
                print("# ValUnit = %s" % valunit, file=fp)
                print("#", file=fp)
                print("# [Comments]", file=fp)
                for line in self.comments:
                    print("# %s" % line, file=fp)
                print("# filename = %s (written by egDataFormatIO)" % filename,
                      file=fp)
                print("#", file=fp)
                print("# [Data]", file=fp)
                fp.close()
            finally:
                try:
                    fp.close()
                except:
                    pass
Example No. 22
 def strftime(self, frmt):
     return _strftime(frmt,self._tfunc(self._intervals_since_epoch * self._isec))
Example No. 23
 def strftime(format, t=TEST_NOW):
     return _strftime(format, t)
Example No. 24
def x2dspec(x2dfile, traceloc='max', extrsize='stsci', bksize='stsci', bkoff='stsci', x1dfile=None, fitsout=None,
            overwrite=True, bkmask=0):
    """
    Creates a spectrum from HST STIS (and possibly COS) data using the x2d file
    produced by the default STScI pipeline.

    Parameters
    ----------
    x2dfile : str
        Path of the x2d file.
    traceloc : {int|'max'|'lya'}, optional
        Location of the spectral trace.
        int : the midpoint pixel
        'max' : use the mean y-location of the pixel with highest S/N
        'lya' : place the trace where the flux just redward of the
            Lyman-alpha peak is strongest
    extrsize, bksize, bkoff : {int|'stsci'}, optional
        The height of the signal extraction region, the height of the
        background extraction regions, and the offset above and below the
        spectral trace at which to center the background extraction regions.
        'stsci' : use the value used by STScI in making the x1d (requires
            x1dfile)
        int : user specified value in pixels
    x1dfile : str, optional unless 'stsci' is specified for any other keyword
        Path of the x1d file.
    fitsout : str, optional
        Path for saving a FITS file version of the spectrum.
    overwrite : {True|False}, optional
        Whether to overwrite the existing FITS file.
    bkmask : int, optional
        Data quality flags to mask the background. Background pixels that have
        at least one of these flags will be discarded.

    Returns
    -------
    spectbl : astropy table
        The wavelength, flux, error, and data quality flag values of the extracted
        spectrum.

    Cautions
    --------
    Using a non-stsci extraction size will cause a systematic error because a
    flux correction factor is applied that assumes the STScI extraction
    ribbon was used.

    This still isn't as good as an x1d, mainly because the wavelength dependency
    of the slit losses is not accounted for.
    """

    x2d = _fits.open(x2dfile)

    # get the flux and error from the x2d
    f, e, q = x2d['sci'].data, x2d['err'].data, x2d['dq'].data

    inst = x2d[0].header['instrume']
    if inst != 'STIS':
        raise NotImplementedError("This function cannot handle {} data at "
                                  "present.".format(inst))

    # make sure x1d is available if 'stsci' is specified for anything
    if 'stsci' in [traceloc, extrsize, bksize, bkoff]:
        try:
            x1d = _fits.open(x1dfile)
            xd = x1d[1].data
        except:
            raise ValueError("An open x1d file is needed if 'stsci' is "
                             "specified for any of the keywords.")

    # get the ribbon values
    if extrsize == 'stsci': extrsize = _np.mean(xd['extrsize'])
    if bksize == 'stsci': bksize = _np.mean([xd['bk1size'], xd['bk2size']])
    if bkoff == 'stsci':
        bkoff = _np.mean(_np.abs([xd['bk1offst'], xd['bk2offst']]))

    # select the trace location
    if traceloc == 'max':
        sn = f / e
        sn[q > 0] = 0.0
        sn[e <= 0.0] = 0.0
        maxpixel = _np.nanargmax(sn)
        traceloc = _np.unravel_index(maxpixel, f.shape)[0]
    if traceloc == 'lya':
        xmx = int(_np.nanmedian(_np.argmax(f, 1)))  # cast to int so it can be used in a slice
        redsum = _np.nansum(f[:, xmx+4:xmx+14], 1)
        smoothsum = data_structures._smooth_sum(redsum, extrsize) / float(extrsize)
        traceloc = _np.argmax(smoothsum) + extrsize/2

    # convert everything to integers so we can make slices
    try:
        intrnd = lambda x: int(round(x))
        traceloc, extrsize, bksize, bkoff = map(intrnd, [traceloc, extrsize, bksize, bkoff])
    except ValueError:
        raise ValueError("Invalid input for either traceloc, extrsize, bksize, "
                         "or bkoff. See docstring.")

    # convert intensity to flux
    fluxfac = x2d['sci'].header['diff2pt']
    f, e = f * fluxfac, e * fluxfac

    # get slices for the ribbons
    sigslice = slice(traceloc - extrsize // 2, traceloc + extrsize // 2 + 1)
    bk0slice = slice(traceloc - bkoff - bksize // 2, traceloc - bkoff + bksize // 2 + 1)
    bk1slice = slice(traceloc + bkoff - bksize // 2, traceloc + bkoff + bksize // 2 + 1)
    slices = [sigslice, bk0slice, bk1slice]

    # mask bad values in background regions
    if bkmask:
        badpix = (q & bkmask) > 0
        badpix[sigslice] = False  # but don't modify the signal region
        f[badpix], e[badpix], q[badpix] = 0.0, 0.0, 0
        # make a background area vector to account for masked pixels
        goodpix = ~badpix
        bkareas = [_np.sum(goodpix[slc, :], 0) for slc in slices[1:]]
        bkarea = sum(bkareas)
    else:
        bkarea = bksize * 2

    # sum fluxes in each ribbon
    fsig, fbk0, fbk1 = [_np.sum(f[slc, :], 0) for slc in slices]

    # sum errors in each ribbon
    esig, ebk0, ebk1 = [_np.sqrt(_np.sum(e[slc, :]**2, 0)) for slc in slices]

    # condense dq flags in each ribbon
    bitor = lambda a: reduce(lambda x, y: x | y, a)  # note: Python 3 needs reduce imported from functools
    qsig, qbk0, qbk1 = [bitor(q[slc, :]) for slc in slices]

    # subtract the background
    area_ratio = float(extrsize) / bkarea
    f1d = fsig - area_ratio * (fbk0 + fbk1)
    e1d = _np.sqrt(esig**2 + (area_ratio * ebk0)**2 + (area_ratio * ebk1)**2)

    # make sure no zero errors
    e1d[e1d == 0] = e1d.min()

    # propagate the data quality flags
    q1d = qsig | qbk0 | qbk1

    # construct wavelength array
    wedges = _get_x2d_waveedges(x2d)
    w0, w1 = wedges[:-1], wedges[1:]

    # construct exposure time array
    expt = _np.ones(f.shape[0]) * x2d['sci'].header['exptime']

    #region PUT INTO TABLE
    # make data columns
    colnames = ['w0', 'w1', 'w', 'flux', 'error', 'dq', 'exptime']
    units = ['Angstrom'] * 3 + ['ergs/s/cm2/Angstrom'] * 2 + ['s']
    descriptions = ['left (short,blue) edge of the wavelength bin',
                    'right (long,red) edge of the wavelength bin',
                    'midpoint of the wavelength bin',
                    'average flux over the bin',
                    'error on the flux',
                    'data quality flags',
                    'cumulative exposure time for the bin']
    dataset = [w0, w1, (w0+w1)/2., f1d, e1d, q1d, expt]
    cols = [_tbl.Column(d, n, unit=u, description=dn) for d, n, u, dn in
            zip(dataset, colnames, units, descriptions)]

    # make metadata dictionary
    descriptions = {'rootname': 'STScI identifier for the dataset used to '
                                'create this spectrum.'}
    meta = {'descriptions': descriptions,
            'rootname': x2d[1].header['rootname'],
            'traceloc': traceloc,
            'extrsize': extrsize,
            'bkoff': bkoff,
            'bksize': bksize}

    # put into table
    tbl = _tbl.Table(cols, meta=meta)
    #endregion

    #region PUT INTO FITS
    if fitsout is not None:
        # spectrum hdu
        fmts = ['E'] * 5 + ['I', 'E']
        cols = [_fits.Column(n, fm, u, array=d) for n, fm, u, d in
                zip(colnames, fmts, units, dataset)]
        del meta['descriptions']
        spechdr = _fits.Header(meta.items())
        spechdu = _fits.BinTableHDU.from_columns(cols, header=spechdr,
                                                name='spectrum')

        # make primary header
        prihdr = _fits.Header()
        prihdr['comment'] = ('Spectrum generated from an x2d file produced by '
                             'STScI. The dataset is identified with the header '
                             'keywrod rootname. All pixel locations refer to '
                             'the x2d and are indexed from 0. '
                             'Created with spectralPhoton software '
                             'http://github.com/parkus/spectralPhoton')
        prihdr['date'] = _strftime('%c')
        prihdr['rootname'] = x2d[1].header['rootname']
        prihdu = _fits.PrimaryHDU(header=prihdr)

        hdulist = _fits.HDUList([prihdu, spechdu])
        hdulist.writeto(fitsout, clobber=overwrite)
    #endregion

    return tbl
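
A hedged usage sketch for x2dspec() above; the file names are hypothetical, and because the extraction keywords default to 'stsci', the matching x1d file must be supplied:

tbl = x2dspec('odaa01010_x2d.fits', x1dfile='odaa01010_x1d.fits')
print(tbl['w', 'flux', 'error'][:5])  # first few rows of the extracted spectrum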