def fetch_sensor(platformID: str, name: str,
                 units: str = None, parser: str = None, tags: list = None,
                 startime: int = None, endtime: int = None, truncate: bool = True,
                 db: str = DB, connection: MongoClient = CONN) -> Sensor:
    """
    Fetches a specified Sensor from MongoDB, truncated by startime and endtime

    Args:
        platformID  (str): name of the target collection, typically a platformID
        name        (str): Sensor name,  e.g. 'm_depth' or 'salinity'
        units  (str=None): sensor units, e.g. 'rad' or 'deg' or 'nodim'
        parser (str=None): parser-tag,   e.g. mima or xbd or suflog
        tags  (list=None): sensor's tags. #TODO: currently UNIMPLEMENTED
        startime   (int=None): epoch query start time
        endtime    (int=None): epoch query end time
        truncate (bool=True): True  -> Sensor data range is exactly the requested range
                              False -> Sensor data returned in 24hr chunks, as saved in database
        db                 (str=<default>): name of the target database
        connection (MongoClient=<default>): specifies host and port of target mongodb server

    Returns:
        a Sensor

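    Example:
        # illustrative values; 'unit_123' is a hypothetical platformID
        depth = fetch_sensor('unit_123', 'm_depth',
                             startime=1500000000, endtime=1500086400)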
    """
    collection = connection[db][platformID]

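    # build the Mongo query document; units and parser narrow the match when given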
    doc = {'name': name}
    if units:
        doc['units'] = units
    if parser:
        doc['parser'] = parser

    if startime is not None:
        startday = MSensor.epoch2epochday(startime)
    else:
        startday = 0
        startime = -inf

    if endtime is not None:
        enday = endtime
    else:
        enday = time.time()  # no future data
        endtime = inf

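    # sensor data is stored in whole-day chunks keyed by 'epochday', so this
    # range query pulls every chunk that can overlap the requested time span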
    doc['epochday'] = {'$gte':startday, '$lt':enday}

    cursor = collection.find(doc).sort('epochday', 1)
    msensors = [CLASSON.decode(chunk) for chunk in cursor]
    sensor = MSensor.to_sensor(msensors)

    if truncate:
        sensor.xy = list(filter(lambda xy_tup: startime <= xy_tup[0] < endtime, sensor.xy))

    return sensor


def truncate_filechunk(chunk_index: int, platformID: str) -> "tuple[int, int, str]":
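    """
    Looks up the chunk_index-th most recent XBD_File document for a platform
    (index 0 is the newest; documents are sorted by 'start' descending).

    Returns:
        (start, end, filename) of the selected file chunk
    """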

    chunk_index = abs(chunk_index)

    collection = util.CONN[util.DB][platformID]
    query = {'_class': 'gliderops.dataman.parsers.ascmrg_parser.XBD_File'}
    # Cursor.count() was removed in PyMongo 4; count matches via the collection instead
    print(collection.count_documents(query))
    cursor = collection.find(query).sort('start', -1)
    doc = CLASSON.decode(cursor[chunk_index])

    return doc.start, doc.end, doc.filename


def calc_deadby_date(glider: str, max_amphr: float):
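    """
    Estimates the date a glider's battery reaches max_amphr by linearly
    extrapolating its two most recent m_coulomb_amphr_total day-chunks.

    Returns:
        (the_data, the_date): a dict of x/y plot series (x in epoch milliseconds)
        and the projected 'YYYY-MM-DD' dead-by date; empty values on failure.
    """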
    try:
        collection = CONN[DB][glider]
        cursor = collection.find({'name':'m_coulomb_amphr_total'}).sort('epochday',-1)
        sensor = mongoman.MSensor.to_sensor([CLASSON.decode(cursor[0]),
                                             CLASSON.decode(cursor[1])])
        #sensor = dataman.fetch_sensor(glider, 'm_coulomb_amphr_total', truncate=False, startime=time.time()-3*24*60*60)
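        # first and last samples of the fetched amphr data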
        x0,y0 = sensor.xy[0]
        x1,y1 = sensor.xy[-1]
        amphr_per_day = 24*60*60*(y1-y0)/(x1-x0)
        apd_estimate_span = (x1-x0)/(24*60*60) #days

        m = (y1-y0)/(x1-x0) # rise over run
        b = y1-(m*x1)       # from y=mx+b

        y3 = max_amphr
        x3 = (y3-b)/m       # from y=mx+b
        the_date = time.strftime('%Y-%m-%d',time.gmtime(x3))

        print('AMPHR_PER_DAY =', int(amphr_per_day*100)/100,
              ', span = ', int(apd_estimate_span*100)/100,
              ', deadby_date =', the_date,
              ', latest amphr =', y1)

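        # build x/y series for plotting the projection; x values are scaled to
        # milliseconds, apparently for a millisecond-epoch plotting front end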
        x, y = [1000*x0, 1000*x1], [y0, y1]
        step = max(int((y3-y1)/100), 1)  # ~100 intermediate points along the projection
        for yn in range(int(y1+10), int(y3), step):
            x.append(1000*(yn-b)/m)
            y.append(yn)
        x.append(1000*x3)
        y.append(y3)

        the_data = dict(x=x, y=y)

        return the_data, the_date
    except Exception as e:
        print(glider, 'calc_deadby_date fail', e)
        return dict(x=[],y=[]), ''
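

# Example usage (hypothetical platform name and battery capacity; assumes a
# reachable MongoDB populated with the expected collections):
#
#   sensor = fetch_sensor('unit_123', 'm_depth', startime=1500000000)
#   plot_data, deadby = calc_deadby_date('unit_123', max_amphr=550)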