Code example #1
def create_fits_by_tw(utc_begin, utc_end, product_type):
    auth.log_visit()
    result = {'error': 'No packet found'}
    try:
        start_unix = stix_datetime.utc2unix(utc_begin)
        duration = stix_datetime.utc2unix(utc_end) - start_unix
        if duration > MAX_DURATION:
            return {'error': f'Requested data time range not satisfiable. Max duration: {MAX_DURATION} sec'}
        config = {
            'folder': TEMP_FITS_FOLDER,
            'type': 'time',
            'memory_file': True,
            'spid': PRODUCT_SPID[product_type],
            'conditions': {
                'start_unix_time': start_unix,
                'duration': duration
            }
        }
        info = fits_creator.create_fits(config)
        #print(info)
        if info['success']:
            filename = info['filename']
            mem = info['data']
            return send_file(mem,
                             as_attachment=True,
                             attachment_filename=filename,
                             mimetype='binary/x-fits')
        result['error'] = info['message']
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
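The handler converts both UTC endpoints to Unix time with stix_datetime.utc2unix and rejects windows longer than MAX_DURATION before building the FITS-creation config. Below is a minimal, self-contained sketch of that guard; utc2unix here is a hypothetical stand-in built on the standard library, and the MAX_DURATION value is chosen purely for illustration.

from datetime import datetime, timezone

MAX_DURATION = 3600  # assumed limit in seconds, for illustration only

def utc2unix(utc_string):
    # Hypothetical stand-in for stix_datetime.utc2unix: ISO UTC string -> Unix timestamp
    dt = datetime.fromisoformat(utc_string.replace('Z', '+00:00'))
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.timestamp()

def validate_time_window(utc_begin, utc_end):
    # Same guard pattern as create_fits_by_tw: positive duration, capped at MAX_DURATION
    start_unix = utc2unix(utc_begin)
    duration = utc2unix(utc_end) - start_unix
    if duration <= 0:
        return {'error': 'Begin time or end time invalid'}
    if duration > MAX_DURATION:
        return {'error': f'Max duration: {MAX_DURATION} sec'}
    return {'start_unix_time': start_unix, 'duration': duration}

print(validate_time_window('2021-06-01T00:00:00Z', '2021-06-01T00:30:00Z'))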
Code example #2
File: apis.py (Project: i4Ds/STIX-data-center)
def get_light_curve_data(begin_utc, end_utc, light_time_correction):
    try:
        start_unix = sdt.utc2unix(begin_utc)
        end_unix = sdt.utc2unix(end_utc)
        duration = end_unix - start_unix
        dlt = 0
        owlt = 0
        if start_unix < 0 or duration <= 0:
            result = {'error': 'Begin time or end time invalid'}
        elif duration > 48 * 3600:
            result = {
                'error':
                'The duration was too long. The maximum allowed duration is 48 hours.'
            }
        else:

            if light_time_correction:
                empheris = solo.get_solo_ephemeris(begin_utc,
                                                   end_utc,
                                                   num_steps=5)
                dlt = empheris.get('light_time_diff', [0])[0]
                owlt = empheris.get('owlt', [0])[0]

            packets = STIX_MDB.get_LC_pkt_by_tw(start_unix, duration)
            result = ql_analyzer.LightCurveAnalyzer.parse(packets, dlt=dlt)
            result['DELTA_LIGHT_TIME'] = dlt
            result['SOLO_EARTH_LIGHT_TIME'] = owlt
            result['IS_LIGHT_TIME_CORRECTED'] = light_time_correction
            #result['SUN_DIAMETER_ARSEC'] = sun_diameter
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
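In get_solo_ephemeris further down, owlt appears to be the Earth-Solar Orbiter one-way light time and light_time_diff the Sun-Earth minus Sun-Solar Orbiter light time. The sketch below only illustrates the underlying arithmetic (distance divided by the speed of light); the distances are made up for the example and are not STIX data.

import astropy.units as u
from astropy.constants import c

def one_way_light_time(distance_au):
    # One-way light travel time in seconds for a distance given in au
    return ((distance_au * u.au) / c).to(u.s).value

# Hypothetical distances, for illustration only
owlt = one_way_light_time(0.9)                           # Earth - Solar Orbiter
dlt = one_way_light_time(1.0) - one_way_light_time(0.6)  # (Sun-Earth) - (Sun-SolO)
print(f'owlt={owlt:.1f} s, light_time_diff={dlt:.1f} s')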
Code example #3
File: apis.py (Project: i4Ds/STIX-data-center)
def request_ql_flareloc():
    auth.log_visit()
    try:
        begin_utc = request.values['begin']
        end_utc = request.values['end']
        start_unix = sdt.utc2unix(begin_utc)
        end_unix = sdt.utc2unix(end_utc)
        duration = end_unix - start_unix
        dlt = 0
        if start_unix < 0 or duration <= 0:
            result = {'error': 'Begin time or end time invalid'}
        elif duration > 48 * 3600:
            result = {
                'error':
                'The duration was too long. The maximum allowed duration is 48 hours.'
            }
        else:
            #empheris=solo.get_solo_ephemeris(begin_utc,
            #           end_utc,
            #           num_steps=5)
            rows = STIX_MDB.get_ql_flare_loc_in_timewindow(
                start_unix, duration)
            result = {'flare_locations': list(rows)}
    except Exception as e:
        result = {'error': 'Invalid request'}
    return json_util.dumps(result)
Code example #4
def data_to_fits():
    auth.log_visit()
    # write post data to fits
    data = []
    plot_name = ''
    if request.method == 'POST':
        jsondata = json.loads(request.form['jsonstring'])
        data = jsondata['data']
        plot_name = jsondata['name']
    else:
        abort(404, description="Failed to create fits file")

    p_hdr = fits.Header()
    t_hdr = fits.Header()

    p_hdr['source'] = 'STIX pub023 server'
    p_hdr['name'] = plot_name
    cols = []
    try:
        for i, trace in enumerate(data):

            if i == 0:
                if not is_date(trace['x'][0]):
                    # print(trace['x'])
                    cols.append(
                        fits.Column(name='x',
                                    array=np.array(trace['x']),
                                    format='D'))
                else:
                    time_zero = trace['x'][0]
                    t_hdr['timezero'] = time_zero
                    time_zero_unix = stix_datetime.utc2unix(time_zero)
                    time_array = np.array([
                        stix_datetime.utc2unix(x) - time_zero_unix
                        for x in trace['x']
                    ])
                    cols.append(
                        fits.Column(name='time',
                                    array=time_array,
                                    unit='s',
                                    format='D'))
            name = 'y' + str(i)
            if 'name' in trace:
                name = trace['name']
            cols.append(
                fits.Column(name=name, array=np.array(trace['y']), format='D'))
        hdulist = fits.HDUList()
        hdulist.append(fits.PrimaryHDU(header=p_hdr))
        hdulist.append(
            fits.BinTableHDU.from_columns(cols, header=t_hdr, name='DATA'))
        mem = io.BytesIO()
        hdulist.writeto(mem)
        mem.seek(0)
        return send_file(mem,
                         as_attachment=True,
                         attachment_filename=plot_name + '.fits',
                         mimetype='binary/x-fits')
    except Exception as e:
        abort(404, description="Failed to create fits file")
Code example #5
def request_detector_config(time):
    unix_time = 0
    try:
        unix_time = int(time)
    except Exception as e:
        unix_time = None

    if not unix_time:
        try:
            unix_time = int(stix_datetime.utc2unix(time))
        except Exception:
            pass

    if not unix_time:
        return json_util.dumps({'status': 'FAILED', 'error': 'Invalid timestamp'})

    results = {'status': 'OK', 'data': []}
    packets = []
    # ZIX39019, ZIX39004, ZIX36605: ASIC register write, ASIC latency, and HV telecommands
    packets.extend(
        STIX_MDB.get_last_telecommands('ZIX39019',
                                       unix_time,
                                       32,
                                       id_only=False))  #max 32 tcs
    packets.extend(
        STIX_MDB.get_last_telecommands('ZIX39004', unix_time, 1,
                                       id_only=False))
    packets.extend(
        STIX_MDB.get_last_telecommands('ZIX36605',
                                       unix_time,
                                       32,
                                       id_only=False))
    results['data'] = dconf.get_detector_config(packets)
    return json_util.dumps(results)
Code example #6
def get_calibration_run_elut(utc):
    unix = sdt.utc2unix(utc)
    run = list(
        caldb.find({
            'start_unix_time': {'$lte': unix},
            'analysis_report': {'$exists': True},
            'duration': {'$gt': MIN_CALIBRATION_DURATION}
        }).sort('start_unix_time', -1).limit(1))
    res = {}
    if run:
        res = {
            'slopes': np.round(run[0]['analysis_report']['slope'], 4),
            'offsets': np.round(run[0]['analysis_report']['offset'], 4),
            'slope_errors': np.round(run[0]['analysis_report']['slope_error'], 4),
            'offset_errors': np.round(run[0]['analysis_report']['offset_error'], 4),
            'run_id': run[0]['_id'],
            'duration': run[0]['duration'],
            'start_unix_time': run[0]['start_unix_time'],
            'start_utc': sdt.unix2utc(run[0]['start_unix_time'])
        }
    return res
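The query above selects the most recent calibration run that starts at or before the requested time, has an analysis report, and is long enough. A pure-Python equivalent of that selection over a plain list of run documents (with MIN_CALIBRATION_DURATION set to an arbitrary value for illustration) would be:

MIN_CALIBRATION_DURATION = 3600  # assumed threshold in seconds, for illustration only

def latest_calibration_run(runs, unix):
    # Most recent qualifying run document at or before `unix`, or None
    candidates = [
        r for r in runs
        if r['start_unix_time'] <= unix
        and 'analysis_report' in r
        and r['duration'] > MIN_CALIBRATION_DURATION
    ]
    return max(candidates, key=lambda r: r['start_unix_time'], default=None)

runs = [
    {'start_unix_time': 100, 'duration': 7200, 'analysis_report': {}},
    {'start_unix_time': 200, 'duration': 7200, 'analysis_report': {}},
    {'start_unix_time': 300, 'duration': 60, 'analysis_report': {}},
]
print(latest_calibration_run(runs, 250))  # picks the run starting at 200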
Code example #7
def view_user_data_requests():
    start_unix = 0
    span = 0
    run = -1
    request_id = -1
    flare = -1
    entry_id = -1
    if request.method == 'GET':
        args = request.args.to_dict()
        try:
            start_unix = 0
            start_str = args['start']
            if start_str.isnumeric():
                start_unix = float(start_str)
            else:
                start_unix = stix_datetime.utc2unix(start_str)
        except Exception as e:
            pass
        span = float(args.get('span', 0))
        run = int(args.get('run', -1))
        flare = int(args.get('flare', -1))
        request_id = int(args.get('id', -1))
        entry_id = int(args.get('entryID', -1))

    return render_template(
        'user-data-requests.html',
        start_unix=start_unix,
        span=span,
        request_id=request_id,
        flare=flare,  #flare unique id
        flare_entry_id=entry_id,
        run=run)
Code example #8
def view_lightcurves_for_time_range(start_utc, duration):
    auth.log_visit()
    start_unix = 0
    run = -1
    try:
        start_unix = sdt.utc2unix(start_utc)
    except (KeyError, ValueError):
        pass
    return render_template('plot-lightcurves.html',
                           start_unix=start_unix,
                           duration=duration,
                           run=run)
Code example #9
def get_science_time_bin_statistics():
    start_unix = sdt.utc2unix(request.values['startUTC'])
    end_unix = start_unix + float(request.values['duration'])
    min_time_bin = float(request.values['timeBin'])  #minimum time bin

    result = {}
    try:
        result = STIX_MDB.estimate_sci_timebin_numbers(start_unix, end_unix,
                                                       min_time_bin)
    except Exception as e:
        result['error'] = str(e)
    return json_util.dumps(result)
Code example #10
File: calibration.py (Project: i4Ds/STIX-data-center)
def request_calibration_run_data():
    result = {'status': 'Invalid request', 'data': []}
    try:
        start_utc = request.values['start_utc']
        days = int(request.values['days'])
        start_unix = stix_datetime.utc2unix(start_utc)
        span_seconds = days * 86400
    except (TypeError, ValueError, IndexError):
        return json_util.dumps(result)
    if start_unix > 0 and span_seconds > 0:
        status, data = STIX_MDB.select_calibration_runs_by_tw(
            start_unix, span_seconds)
        result['status'] = status
        result['data'] = data
    return json_util.dumps(result)
Code example #11
File: apis.py (Project: i4Ds/STIX-data-center)
def request_housekeeping_packets():
    from sdcweb.spice import stix_datetime
    result = {}
    SPIDs = [54102]
    try:
        start_utc = request.values['start_utc']
        span_seconds = int(request.values['duration'])
        start_unix = int(stix_datetime.utc2unix(start_utc))
        print(start_utc, span_seconds)
        if start_unix > 0 and span_seconds > 0:
            status, packets = STIX_MDB.select_packets_by_SPIDs(
                SPIDs,
                start_unix,
                span_seconds,
                header_only=False,
                sort_field='header.unix_time',
                duplicates_removed=True)
            result = {'data': packets}
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
Code example #12
def get_onboard_elut(utc):
    unix = sdt.utc2unix(utc)
    elut = {}
    min_time = 5e9
    max_time = 0
    #pkt_ids=[]
    offsets = [0] * 384
    slopes = [0] * 384
    for i in range(384):
        pixel_elut = list(
            scdb.find({
                'pixel_id': i,
                'type': 'elut',
                'execution_unix': {'$lte': unix}
            }).sort('execution_unix', -1).limit(1))
        if pixel_elut:
            offsets[i] = pixel_elut[0]['offset']
            slopes[i] = pixel_elut[0]['slope']
            uptime = pixel_elut[0]['execution_unix']
            if uptime < min_time:
                min_time = uptime
            if uptime > max_time:
                max_time = uptime
        #pkt_ids.append(pixel_elut[0]['packet_id'])
    elut = {
        'slopes': slopes,
        'offsets': offsets,
        'upload_time_range': [sdt.unix2utc(min_time), sdt.unix2utc(max_time)],
        'energy_bin_edges': NOMINAL_EBIN_EDGES,
        #'packet_ids': pkt_ids
    }
    return elut
Code example #13
def create_template(
    flare_ids,
    flare_entry_ids,
    run_ids,
    start_utc,
    duration,
    emax=13,
    left_margin=0,
    right_margin=0,
    tunit=1,
    level=1,
    time_tag=0,
    subject=None,
    purpose=None,
):
    level_name = DATA_LEVEL_NAMES[level]
    if list(
            bsd_form.find({
                'flare_id': flare_ids,
                'request_type': level_name
            }).sort('_id', -1)):
        msg.append(f'data request for Flare {flare_ids} already exists.\n')
    try:
        current_id = bsd_form.find().sort('_id', -1).limit(1)[0]['_id'] + 1
    except IndexError:
        current_id = 0

    if level not in [1, 4]:
        msg.append('Unsupported data level\n')
        return

    if left_margin != 0:
        start_utc = stix_datetime.unix2utc(
            stix_datetime.utc2unix(start_utc) + left_margin)

    if isinstance(flare_ids, list):
        if len(flare_ids) == 1:
            flare_ids = flare_ids[0]

    duration = int(duration - left_margin + right_margin)
    detector_mask_hex = '0xFFFFFFFF' if level == 1 else '0xFFFFFCFF'
    pixel_mask_hex = '0xFFF'
    detector_mask = 0xFFFFFFFF if level == 1 else 0xFFFFFCFF
    pixel_mask = 0xFFF
    emin = 1
    eunit = 1
    num_ebins = (emax - emin + 1) / eunit
    data_volume, data_volume_upper_limit = sci_volume.estimate_volume(
        start_utc, duration, tunit, num_ebins, detector_mask, pixel_mask,
        level)
    if subject is None:
        subject = f"Flare {flare_ids}" if not isinstance(
            flare_ids,
            list) else 'Flares ' + ', '.join([str(f) for f in flare_ids])

    purpose = purpose if purpose is not None else 'Solar Flare'

    form = {
        "data_volume": str(math.floor(data_volume)),
        "data_volume_upper_limit": str(math.floor(data_volume_upper_limit)),
        "execution_date": '',
        "author": author['name'],
        "email": author['email'],
        "subject": subject,
        "purpose": purpose,
        "request_type": level_name,
        "start_utc": str(start_utc),
        "start_unix": stix_datetime.utc2unix(start_utc),
        "end_unix": stix_datetime.utc2unix(start_utc) + duration,
        "duration": str(duration),
        "time_bin": str(tunit),
        "flare_id": flare_ids,
        'flare_entry_ids': flare_entry_ids,
        "detector_mask": detector_mask_hex,
        "creation_time": datetime.utcnow(),
        "creator": 'batch_creator',
        "time_tag": time_tag,
        "pixel_mask": pixel_mask_hex,
        "emin": str(emin),
        "emax": str(emax),
        'hidden': False,
        'run_id': run_ids,
        'status': 0,
        'priority': 1,
        "eunit": str(eunit),
        '_id': current_id,
        "description": f"{level_name} data request for {subject}",
        "volume": str(int(data_volume)),
        "unique_ids": []
    }
    msg.append(f'Inserting request {form["_id"]}, type: {level_name} \n')
    msg.append(str(form))
    bsd_form.insert_one(form)

    if not isinstance(flare_entry_ids, list):
        flare_entry_ids = [flare_entry_ids]

    for flare_id in flare_entry_ids:
        request_info = {'level': level, 'request_id': current_id}
        flare_collection.update_one({'_id': flare_id},
                                    {'$push': {
                                        'data_requests': request_info
                                    }})

    return form
Code example #14
def get_background_request_time_ranges(min_request_time_interval=24 * 3600, start_date=None):
    # Create background data requests.
    # It should be called after the automatic L1 and L4 requests.
    db_request = mdb.get_collection('data_requests')
    db_qllc = mdb.get_collection('quick_look')

    last_bkg_request = list(
        db_request.find({
            'purpose': 'Background',
            'hidden': False
        }).sort('start_unix', -1).limit(1))
    if not last_bkg_request:
        return [], 'Cannot find the last background request'

    last_ql_doc = list(db_qllc.find().sort('start_unix_time', -1).limit(1))
    # Request background data between the last background request and the latest quick-look data
    if start_date is None:
        start = last_bkg_request[0]['start_unix']
    else:
        start = sdt.utc2unix(start_date)

    end = last_ql_doc[0]['stop_unix_time']
    time_range = f'{sdt.unix2utc(start)} {sdt.unix2utc(end)}'
    msg = f'Time range containing no background requests: {time_range}'
    print(msg)

    slots = mdb.find_quiet_sun_periods(
        start, end, min_duration=MAX_TEMP_CYCLE_PERIOD * NUM_TEMP_CYCLE_REQ)
    # Slots shorter than min_duration are excluded
    if not slots:
        msg += f'No quiet-sun period found in time range: {time_range}'
        print(msg)
    last_request_time = start

    request_time_ranges = []
    for s in slots:
        start_unix, duration = s
        if duration < MIN_TEMP_CYCLE_PERIOD * NUM_TEMP_CYCLE_REQ:
            print("quiet time is too short")
            continue

        start_utc = sdt.unix2utc(start_unix)
        print("Start time", start_utc, 'Last request:',
              sdt.unix2utc(last_request_time))
        if start_unix - last_request_time < min_request_time_interval:
            # Don't request again within the minimum request interval
            print("ignore, less than the minimum request interval")
            continue

        status = sts.get_stix_status(s[0], s[1])
        print(status)
        if status['gaps'] < 5 and sum(status['modes'][0:4]) == 0:
            # data gaps below 5 * 64 sec in total and no change of operation modes
            period = get_temperature_cycle_period(start_unix)
            print("temperature cycle", period)
            if period >= MIN_TEMP_CYCLE_PERIOD:
                print("this is valid: ", start_utc)
                request_time_ranges.append(
                    (start_unix, period * NUM_TEMP_CYCLE_REQ))
                last_request_time = start_unix
            else:
                print(start_utc, ' temperature cycle too short')
        else:
            print(start_utc, ' stix not in nominal mode')

    return request_time_ranges, msg
Code example #15
    def get_solo_ephemeris(start_utc,
                           end_utc,
                           num_steps=200):
        '''
        Calculate the Solar Orbiter trajectory using SPICE kernel data.
        The coordinate frame is fixed to SOLO_HEE_NASA.
        Args:
            start_utc:  start UTC string
            end_utc:    end UTC string
            num_steps:  number of data points
        Returns:
            orbit data as a python dictionary
        '''
        observer = 'Earth'
        frame = 'SOLO_HEE_NASA'
        target = 'SOLO'
        orbiter_earth = Trajectory(target)
        orbiter_sun = Trajectory(target)
        earth_hee = Trajectory('Earth')
        #starttime = stix_datetime.utc2datetime(start_utc)
        start_unix = stix_datetime.utc2unix(start_utc)
        end_unix = stix_datetime.utc2unix(end_utc)

        start_unix = max(MIN_UNIX_TIM_LIMIT, start_unix)
        end_unix = max(start_unix, end_unix)
        num_steps = max(int((end_unix - start_unix) / (12 * 3600)), num_steps)
        ut_space = np.linspace(start_unix, end_unix, num_steps)

        times = []
        utc_times = []
        for t in ut_space:
            dt=stix_datetime.unix2datetime(t)
            times.append(dt)
            utc_times.append(dt.strftime("%Y-%m-%dT%H:%M:%SZ"))

        result = {}
        try:
            orbiter_earth.generate_positions(times, 'Earth', frame)
            orbiter_sun.generate_positions(times, 'SUN', frame)
            earth_hee.generate_positions(times, 'SUN', frame)

            orbiter_earth.change_units(u.au)
            orbiter_sun.change_units(u.au)

            solo_dist_to_earth = orbiter_earth.r.value

            sun_open_angle = const.R_sun.to(u.m) / orbiter_sun.r.to(u.m)
            sun_angular_diameter_arcmin = np.degrees(
                np.arctan(sun_open_angle.value)) * 60. * 2

            lt_diff = earth_hee.light_times - orbiter_sun.light_times

            earth_sun_solo_angles = SoloEphemeris.compute_earth_sun_so_angle(
                orbiter_sun)
            elevation = np.degrees(
                np.arctan2(orbiter_sun.z.value, orbiter_sun.r.value))
            #orientations=SoloEphemeris.get_solo_orientations(times)

            result = {
                'ref_frame': frame,
                'observer': observer,
                'aunit': 'deg',
                'lunit': 'au',
                'vunit': 'km/s',
                'tunit': 's',
                'utc': utc_times,
                'x': -orbiter_sun.x.value,
                'y': -orbiter_sun.y.value,
                'z': -orbiter_sun.z.value,
                'sun_solo_r': orbiter_sun.r.value,
                'earth_solo_r': orbiter_earth.r.value,
                'speed': orbiter_sun.speed.value,
                #'orientation': orientations,
                'owlt': orbiter_earth.light_times,
                #'sun_earth_ltime': sun_earth_ltime,
                'light_time_diff': lt_diff,
                'earth_sun_solo_angle': earth_sun_solo_angles,
                'sun_angular_diameter': sun_angular_diameter_arcmin,
                'elevation': elevation,
            }
        except Exception as e:
            result = {'error': str(e)}
        return result
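Inside get_solo_ephemeris the apparent solar diameter is computed from the Sun-spacecraft distance as 2*arctan(R_sun / r), converted to arcminutes. The snippet below is a self-contained check of that formula only; the 1 au input is a sanity test, not STIX data.

import numpy as np
import astropy.units as u
from astropy import constants as const

def sun_angular_diameter_arcmin(distance_au):
    # Apparent solar diameter in arcminutes at the given Sun-observer distance
    sun_open_angle = (const.R_sun.to(u.m) / (distance_au * u.au).to(u.m)).value
    return np.degrees(np.arctan(sun_open_angle)) * 60. * 2

print(sun_angular_diameter_arcmin(1.0))  # ~32 arcmin at 1 au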
Code example #16
from sdcweb.spice import spice_manager

MAX_STEPS=10000

# Mapping from SPICE frame name to (frame, frame kwargs)
spice_astropy_frame_mapping = {
    'IAU_SUN': (suncoords.HeliographicCarrington,
                {'observer': suncoords.HeliographicStonyhurst(
                    0 * u.deg, 0 * u.deg, sunpy.sun.constants.radius)}),
}
SOLAR_ORBITER_ID = -144
SOLAR_ORBITER_SRF_FRAME_ID = -144000
SOLAR_ORBITER_STIX_ILS_FRAME_ID = -144851
SOLAR_ORBITER_STIX_OPT_FRAME_D = -144852

MIN_UNIX_TIM_LIMIT= stix_datetime.utc2unix('2020-02-10T05:00:00Z')

def vsep(v1, v2):
    """
    Find the separation angle in degrees between two 3-dimensional vectors
    whose components are astropy Quantities.
    """
    vector1 = np.array([v1.x.value, v1.y.value, v1.z.value])
    vector2 = np.array([v2.x.value, v2.y.value, v2.z.value])
    unit_vector1 = vector1 / np.linalg.norm(vector1)
    unit_vector2 = vector2 / np.linalg.norm(vector2)
    dot_product = np.dot(unit_vector1, unit_vector2)
    angle = np.arccos(dot_product)  # angle in radians
    return np.degrees(angle)
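vsep() expects two objects whose x, y and z components carry a .value attribute, as the Trajectory coordinate components do. A quick usage sketch with lightweight stand-in vectors follows; it assumes it runs in the same module as vsep above (where numpy is imported as np), and SimpleNamespace is only a mock, not a real trajectory.

from types import SimpleNamespace
import astropy.units as u

def make_vec(x, y, z):
    # Stand-in exposing .x/.y/.z Quantity attributes, mimicking trajectory components
    return SimpleNamespace(x=x * u.au, y=y * u.au, z=z * u.au)

v1 = make_vec(1.0, 0.0, 0.0)
v2 = make_vec(0.0, 1.0, 0.0)
print(vsep(v1, v2))  # 90.0 (degrees)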

Code example #17
def get_science_data_time_resolution_by_utc(start, end):
    start_unix = sdt.utc2unix(start)
    end_unix = sdt.utc2unix(end)
    return get_science_data_time_resolution(start_unix, end_unix)
Code example #18
def create_aia_image_utc(utc):
    unix = stix_datetime.utc2unix(utc)
    return create_aia(unix)
Code example #19
def find_asw_parameter_telecommands():
    start_utc = request.form['startTime']
    end_utc = request.form['endTime']
    names = ['AIXF414A', 'ZIX37018']  #load parameters
    from sdcweb.spice import stix_datetime
    start_unix = stix_datetime.utc2unix(start_utc)
    end_unix = stix_datetime.utc2unix(end_utc)
    query_string = {
        'startUnix': {
            '$gte': start_unix,
            '$lt': end_unix
        },
        'status': {
            '$gt': 0
        },
        'occurrences': {
            '$elemMatch': {
                'name': {
                    '$in': names
                }
            }
        },
    }
    iors = STIX_MDB.get_collection('iors').find(query_string).sort(
        'startUnix', -1)
    param_set_tcs = []
    results = []
    for ior in iors:
        occurrences = ior['occurrences']
        for tc in occurrences:
            if tc['name'] in names:
                param_id = tc['parameters'][0][1]
                parameter = par[int(param_id)]

                value = tc['parameters'][2][1]
                if parameter not in param_set_tcs:
                    param_set_tcs.append(parameter)
                else:
                    continue

                at = tc['actionTime']
                if '-' not in at:
                    at = ior['startTime']
                results.append({
                    'Time': at,
                    'IOR': ior['_id'],
                    'filename': ior['filename'],
                    'name': tc['name'],
                    'ParamID': param_id,
                    'Parameter': parameter,
                    'Value': value
                })
    content = '<table border="1">'
    for i, row in enumerate(results):
        keys = row.keys()
        line = '<tr>'
        if i == 0:
            for key in keys:
                line += f'<td>{key}</td>'
            line += '</tr><tr>'
        for key in keys:
            line += f'<td>{row[key]}</td>'
        line += '</tr>'
        content += line
    content += '</table>'
    return content
Code example #20
def create_occurrences(collection, _ids):
    request_forms = collection.find({
        '_id': {
            '$in': _ids
        },
        'hidden': False
    }).sort([('request_type', -1), ('detector_mask', 1), ('pixel_mask', -1)])
    last_detector_mask = 0
    last_pixel_mask = 0
    total_volume = 0
    total_volume_upper_limit = 0
    last_level = -1

    requests = {'occurrences': []}
    TC_enabled_transfer = enable_data_transfer()
    requests['occurrences'].append(TC_enabled_transfer)
    requests['errors'] = []

    #seq_of_day=STIX_MDB.select_data_request_by_date(start_datetime).count()+1
    asp_index = 0
    bsd_index = 0

    selected_ids = _ids[:]

    for form in request_forms:
        start_utc = form['start_utc']
        start_unix = stix_datetime.utc2unix(start_utc)
        level = data_levels[form['request_type']]
        dt = int(form['duration'])
        unique_ids = []
        #lc_filename=make_lightcurve(form['_id'], start_unix, dt)
        #if lc_filename:
        #    form['lc_filename']=lc_filename

        start_date = stix_datetime.utc2datetime(start_utc)
        start_date_str = start_date.strftime('%y%m%d')
        uid = STIX_MDB.get_user_data_request_next_unique_id(
            start_date, selected_ids)
        selected_ids.remove(form['_id'])
        # remove the current ID so it is not considered in the next iteration

        mask_TCs = []
        detector_mask = parse_int(form['detector_mask'])
        pixel_mask = parse_int(form['pixel_mask'])
        try:
            tbin = float(form.get('time_bin'))
        except Exception:
            tbin = 1
            requests['errors'].append(f"Invalid time bin for {form['_id']}")

        emin = parse_int(form['emin'])
        emax = parse_int(form['emax'])
        eunit = parse_int(form['eunit'])
        if level != 5 and (detector_mask != last_detector_mask
                           or pixel_mask != last_pixel_mask
                           or level != last_level):
            mask_TCs = form_mask_config_telecommands(detector_mask, pixel_mask,
                                                     level, bsd_index)
            requests['occurrences'].extend(mask_TCs)

        duration_limit = (MAX_DURATION_PER_REQUEST
                          if level != 5 else ASP_MAX_DURATION_PER_REQUEST)
        num_TCs = math.ceil(dt / duration_limit)
        #create several TCs for long requests
        if num_TCs < 1:
            num_TCs = 1

        last_end = 0
        while last_end < dt:
            T0 = last_end
            deltaT = dt - T0
            deltaT = min(deltaT, duration_limit)
            last_end = T0 + deltaT

            #get_uid(T0, deltaT, form['request_type'], 0)
            TC = form_bsd_request_sequence(uid, T0 + start_unix, level,
                                           detector_mask, 0, deltaT, tbin,
                                           emin, emax, eunit, pixel_mask)
            unique_ids.append(uid)

            attach_TC_aux_info(TC, form)
            requests['occurrences'].append(TC)
            bsd_index += 1
            total_volume += TC['data_volume']
            total_volume_upper_limit += TC['data_volume_upper_limit']

            uid += 1

        last_pixel_mask = pixel_mask
        last_detector_mask = detector_mask
        last_level = level

        if len(set(unique_ids)) != len(unique_ids):
            requests['errors'].append(
                'Internal error. Some unique IDs are not unique!')
            return requests
        form['unique_ids'] = unique_ids
        form['export_time'] = datetime.now()
        collection.update_one({'_id': form['_id']}, {'$set': form})

    requests['predicted_total_volume'] = total_volume
    requests['total_volume_upper_limit'] = total_volume_upper_limit
    return requests
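The while-loop above splits one long request into several telecommands, each covering at most duration_limit seconds measured from the request start. A stand-alone sketch of just that chunking logic, with hypothetical names, is:

def split_duration(total_seconds, max_chunk):
    # Split a duration into (offset, length) chunks of at most max_chunk seconds,
    # mirroring the last_end/T0/deltaT loop above
    chunks = []
    last_end = 0
    while last_end < total_seconds:
        t0 = last_end
        delta = min(total_seconds - t0, max_chunk)
        chunks.append((t0, delta))
        last_end = t0 + delta
    return chunks

print(split_duration(7000, 3000))  # [(0, 3000), (3000, 3000), (6000, 1000)]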