Example #1
    def inject_references(obj):
        if isinstance(obj, dict):
            if 'file' in obj:
                filename = pjoin(src_path, obj['file'])
                if filename.endswith('.csv'):
                    df = pd.read_csv(filename)
                    obj.pop('file')
                    obj.update(df.to_dict(orient='list'))
                elif filename.endswith('.png'):
                    with open(filename, 'rb') as fid:
                        return base64.b64encode(fid.read())
                elif filename.endswith('.xml'):
                    with open(filename, 'rb') as fid:
                        payload = fid.read()
                    return payload
                elif filename.endswith('.reg'):
                    return Regions.read(filename).serialize(format='ds9')
                elif filename.endswith(('.h5', '.hdf5')):
                    payload = Table.read(filename).to_pandas().to_dict(
                        orient='list')
                    return payload
                else:
                    raise NotImplementedError(
                        f'{filename}: Only CSV, PNG, XML, reg, and HDF5 files '
                        'are currently supported for extending individual objects'
                    )

            for k, v in obj.items():
                obj[k] = inject_references(v)
            return obj
        elif isinstance(obj, str) and obj.startswith('='):
            try:
                return references[obj[1:]]
            except KeyError:
                print(
                    f'\nReference {obj[1:]} not found while posting to {endpoint}; skipping'
                )
                raise
        elif isinstance(obj, list):
            return [inject_references(item) for item in obj]
        else:
            return obj
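A minimal driver sketch for the helper above. Note that inject_references reads src_path, references, and endpoint from its enclosing scope (and expects pjoin, pd, base64, Regions, and Table to already be imported there); the values below are assumptions for illustration only.

from os.path import join as pjoin

src_path = 'data'                  # assumed base directory for 'file' entries
references = {'prev_event': 42}    # assumed id map built from earlier POSTs
endpoint = 'gcn_event'             # assumed; only used in the KeyError message

payload = {
    'xml': {'file': 'GW190425_initial.xml'},  # replaced by the raw file bytes
    'event_id': '=prev_event',                # replaced by references['prev_event']
}
payload = inject_references(payload)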
Example #2
    def load_static_regions_from_file(self, region_file, region_format='ds9', prefix='region',
                                      max_num_regions=20, **kwargs):
        """Load regions defined in the given file.
        See :ref:`regions:regions_io` for supported file formats.

        Parameters
        ----------
        region_file : str
            Path to region file.

        region_format : {'crtf', 'ds9', 'fits'}
            See :meth:`regions.Regions.get_formats`.

        prefix : str
            Prefix for the Subset names generated by loaded regions.
            Names will have the format of ``<prefix>_<i>``, where ``i``
            is the index in the load order.

        max_num_regions : int
            Maximum number of regions to read from the file, starting
            from top of the file, invalid regions included.

        kwargs : dict
            See :meth:`load_static_regions`.

        Returns
        -------
        bad_regions : dict
            See :meth:`load_static_regions`.

        """
        from regions import Regions

        raw_regs = Regions.read(region_file, format=region_format)
        my_regions = {f'{prefix}_{i}': reg for i, reg in
                      enumerate(raw_regs[:max_num_regions])}
        return self.load_static_regions(my_regions, **kwargs)
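A usage sketch, assuming viz is an application object that mixes in this method (e.g. a Jdaviz-style image viewer with data already loaded); the file name is hypothetical.

bad_regions = viz.load_static_regions_from_file(
    'my_regions.reg', region_format='ds9', prefix='ds9', max_num_regions=5)
if bad_regions:
    print(f'{len(bad_regions)} region(s) could not be loaded')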
Example #3
def test_observation_plan_galaxy(
    user, super_admin_token, upload_data_token, view_only_token, public_group
):
    catalog_name = 'test_galaxy_catalog'

    # in case the catalog already exists, delete it.
    status, data = api(
        'DELETE', f'galaxy_catalog/{catalog_name}', token=super_admin_token
    )

    datafile = f'{os.path.dirname(__file__)}/../../../data/GW190814.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    gcnevent_id = data['data']['gcnevent_id']

    # wait for event to load
    for n_times in range(26):
        status, data = api(
            'GET', "gcn_event/2019-08-14T21:10:39", token=super_admin_token
        )
        if data['status'] == 'success':
            break
        time.sleep(2)
    assert n_times < 25

    # wait for the localization to load
    params = {"include2DMap": True}
    for n_times_2 in range(26):
        status, data = api(
            'GET',
            'localization/2019-08-14T21:10:39/name/LALInference.v1.fits.gz',
            token=super_admin_token,
            params=params,
        )

        if data['status'] == 'success':
            data = data["data"]
            assert data["dateobs"] == "2019-08-14T21:10:39"
            assert data["localization_name"] == "LALInference.v1.fits.gz"
            assert np.isclose(np.sum(data["flat_2d"]), 1)
            break
        else:
            time.sleep(2)
    assert n_times_2 < 25
    localization_id = data['id']

    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'api_classname': 'ZTFAPI',
            'api_classname_obsplan': 'ZTFMMAAPI',
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'Optical'

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    datafile = f'{os.path.dirname(__file__)}/../../../data/CLU_mini.hdf5'
    data = {
        'catalog_name': catalog_name,
        'catalog_data': Table.read(datafile).to_pandas().to_dict(orient='list'),
    }

    status, data = api('POST', 'galaxy_catalog', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    params = {'catalog_name': catalog_name}

    nretries = 0
    galaxies_loaded = False
    while nretries < 10:
        status, data = api(
            'GET', 'galaxy_catalog', token=view_only_token, params=params
        )
        assert status == 200
        data = data["data"]["galaxies"]
        if len(data) == 92 and any(
            [
                d['name'] == '6dFgs gJ0001313-055904'
                and d['mstar'] == 336.60756522868667
                for d in data
            ]
        ):
            galaxies_loaded = True
            break
        nretries = nretries + 1
        time.sleep(5)

    assert nretries < 10
    assert galaxies_loaded

    request_data = {
        'group_id': public_group.id,
        'instrument_id': instrument_id,
        'pi': 'Shri Kulkarni',
        'hours_allocated': 200,
        'start_date': '3021-02-27T00:00:00',
        'end_date': '3021-07-20T00:00:00',
        'proposal_id': 'COO-2020A-P01',
    }

    status, data = api('POST', 'allocation', data=request_data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    allocation_id = data['data']['id']

    queue_name = str(uuid.uuid4())
    request_data = {
        'allocation_id': allocation_id,
        'gcnevent_id': gcnevent_id,
        'localization_id': localization_id,
        'payload': {
            'start_date': '2019-04-25 01:01:01',
            'end_date': '2019-04-27 01:01:01',
            'filter_strategy': 'block',
            'schedule_strategy': 'galaxy',
            'galaxy_catalog': catalog_name,
            'schedule_type': 'greedy_slew',
            'exposure_time': 300,
            'filters': 'ztfg',
            'maximum_airmass': 2.0,
            'integrated_probability': 100,
            'minimum_time_difference': 30,
            'queue_name': queue_name,
            'program_id': 'Partnership',
            'subprogram_name': 'GRB',
        },
    }

    status, data = api(
        'POST', 'observation_plan', data=request_data, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    id = data['data']['ids'][0]

    # wait for the observation plan to populate
    nretries = 0
    observation_plan_loaded = False
    while not observation_plan_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'observation_plan/{id}',
                params={"includePlannedObservations": "true"},
                token=super_admin_token,
            )

            assert status == 200
            assert data['status'] == 'success'

            assert data["data"]["gcnevent_id"] == gcnevent_id
            assert data["data"]["allocation_id"] == allocation_id
            assert data["data"]["payload"] == request_data["payload"]

            assert len(data["data"]["observation_plans"]) == 1
            observation_plan = data["data"]["observation_plans"][0]

            assert (
                observation_plan['plan_name'] == request_data["payload"]['queue_name']
            )
            assert observation_plan['validity_window_start'] == request_data["payload"][
                'start_date'
            ].replace(" ", "T")
            assert observation_plan['validity_window_end'] == request_data["payload"][
                'end_date'
            ].replace(" ", "T")

            planned_observations = observation_plan['planned_observations']
            assert len(planned_observations) > 0

            observation_plan_loaded = True

        except AssertionError:
            nretries = nretries + 1
            time.sleep(10)

    assert len(planned_observations) == 29
    assert all(
        [
            obs['filt'] == request_data["payload"]['filters']
            for obs in planned_observations
        ]
    )
    assert all(
        [
            obs['exposure_time'] == int(request_data["payload"]['exposure_time'])
            for obs in planned_observations
        ]
    )
Example #4
def test_observation_plan_tiling(
    user, super_admin_token, upload_data_token, view_only_token, public_group
):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    gcnevent_id = data['data']['gcnevent_id']

    dateobs = "2019-04-25 08:18:05"
    skymap = "bayestar.fits.gz"
    status, data = api(
        'GET',
        f'localization/{dateobs}/name/{skymap}',
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    localization_id = data['data']['id']

    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'api_classname': 'ZTFAPI',
            'api_classname_obsplan': 'ZTFMMAAPI',
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
            'sensitivity_data': {
                'ztfr': {
                    'limiting_magnitude': 20.3,
                    'magsys': 'ab',
                    'exposure_time': 30,
                    'zeropoint': 26.3,
                }
            },
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # wait for the fields to populate
    time.sleep(15)

    request_data = {
        'group_id': public_group.id,
        'instrument_id': instrument_id,
        'pi': 'Shri Kulkarni',
        'hours_allocated': 200,
        'start_date': '3021-02-27T00:00:00',
        'end_date': '3021-07-20T00:00:00',
        'proposal_id': 'COO-2020A-P01',
    }

    status, data = api('POST', 'allocation', data=request_data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    allocation_id = data['data']['id']

    queue_name = str(uuid.uuid4())
    request_data = {
        'allocation_id': allocation_id,
        'gcnevent_id': gcnevent_id,
        'localization_id': localization_id,
        'payload': {
            'start_date': '2019-04-25 01:01:01',
            'end_date': '2019-04-27 01:01:01',
            'filter_strategy': 'block',
            'schedule_strategy': 'tiling',
            'schedule_type': 'greedy_slew',
            'exposure_time': 300,
            'filters': 'ztfr',
            'maximum_airmass': 2.0,
            'integrated_probability': 100,
            'minimum_time_difference': 30,
            'queue_name': queue_name,
            'program_id': 'Partnership',
            'subprogram_name': 'GRB',
        },
    }

    status, data = api(
        'POST', 'observation_plan', data=request_data, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    id = data['data']['ids'][0]

    # wait for the observation plan to finish
    time.sleep(15)

    status, data = api(
        'GET',
        f'observation_plan/{id}',
        params={"includePlannedObservations": "true"},
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    assert data["data"]["gcnevent_id"] == gcnevent_id
    assert data["data"]["allocation_id"] == allocation_id
    assert data["data"]["payload"] == request_data["payload"]

    assert len(data["data"]["observation_plans"]) == 1
    observation_plan = data["data"]["observation_plans"][0]

    assert observation_plan['plan_name'] == request_data["payload"]['queue_name']
    assert observation_plan['validity_window_start'] == request_data["payload"][
        'start_date'
    ].replace(" ", "T")
    assert observation_plan['validity_window_end'] == request_data["payload"][
        'end_date'
    ].replace(" ", "T")

    planned_observations = observation_plan['planned_observations']

    assert len(planned_observations) == 5
    assert all(
        [
            obs['filt'] == request_data["payload"]['filters']
            for obs in planned_observations
        ]
    )
    assert all(
        [
            obs['exposure_time'] == int(request_data["payload"]['exposure_time'])
            for obs in planned_observations
        ]
    )

    status, data = api(
        'GET', f'observation_plan/{id}/simsurvey', token=super_admin_token
    )
    assert status == 200
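Several of these tests pass the instrument footprint as a serialized ds9 string. The read/serialize round trip itself is just the following (a sketch; the file name is hypothetical):

from regions import Regions

# Read a ds9 region file, re-serialize it to a string suitable for posting
# as 'field_region', then parse it back to verify the round trip.
region_str = Regions.read('ZTF_Region.reg', format='ds9').serialize(format='ds9')
regions_again = Regions.parse(region_str, format='ds9')
assert len(regions_again) > 0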
Example #5
def test_gcn_request(driver, user, super_admin_token, public_group):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            "api_classname_obsplan": "ZTFMMAAPI",
            'field_data':
            pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region':
            Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeGeoJSON': True}

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                params=params,
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'NIR'

            print(data['data'])

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    datafile = f'{os.path.dirname(__file__)}/../../../data/sample_observation_data.csv'
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST',
                       'observation',
                       data=data,
                       token=super_admin_token)

    assert status == 200
    assert data['status'] == 'success'

    params = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
        'localizationDateobs': "2019-04-25T08:18:05",
        'localizationName': "bayestar.fits.gz",
        'localizationCumprob': 1.01,
        'returnStatistics': True,
    }

    # wait for the executed observations to populate
    nretries = 0
    observations_loaded = False
    while not observations_loaded and nretries < 5:
        try:
            status, data = api('GET',
                               'observation',
                               params=params,
                               token=super_admin_token)
            assert status == 200
            data = data["data"]
            assert len(data['observations']) == 10
            observations_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    driver.get(f'/become_user/{user.id}')
    driver.get('/gcn_events/2019-04-25T08:18:05')

    driver.wait_for_xpath('//*[text()="190425 08:18:05"]')
    driver.wait_for_xpath('//*[text()="LVC"]')
    driver.wait_for_xpath('//*[text()="BNS"]')

    driver.wait_for_xpath('//*[@id="root_localizationName"]')
    driver.click_xpath('//*[@id="root_localizationName"]')
    driver.wait_for_xpath('//li[contains(text(), "bayestar.fits.gz")]')
    driver.click_xpath('//li[contains(text(), "bayestar.fits.gz")]')
    driver.wait_for_xpath('//*[@id="root_localizationCumprob"]').clear()
    driver.wait_for_xpath('//*[@id="root_localizationCumprob"]').send_keys(
        1.01)

    submit_button_xpath = '//button[@type="submit"]'
    driver.wait_for_xpath(submit_button_xpath)
    driver.click_xpath(submit_button_xpath)

    select_box = driver.find_element_by_id(
        "mui-component-select-followupRequestInstrumentSelect")
    select_box.click()

    driver.click_xpath(
        f'//li[contains(text(), "{telescope_name}")][contains(text(), "{instrument_name}")]',
        scroll_parent=True,
    )

    driver.click_xpath(
        f'//a[contains(@data-testid, "observationGcn_{instrument_id}")]',
        scroll_parent=True,
    )
Example #6
def test_observationplan_request(driver, user, super_admin_token,
                                 public_group):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
            "api_classname_obsplan": "ZTFMMAAPI",
            'field_data':
            pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region':
            Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeGeoJSON': True}

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                token=super_admin_token,
                params=params,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'NIR'

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    status, data = api(
        "POST",
        "allocation",
        data={
            "group_id": public_group.id,
            "instrument_id": instrument_id,
            "hours_allocated": 100,
            "pi": "Ed Hubble",
            '_altdata': '{"access_token": "testtoken"}',
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data["status"] == "success"

    driver.get(f'/become_user/{user.id}')
    driver.get('/gcn_events/2019-04-25T08:18:05')

    driver.wait_for_xpath('//*[text()="190425 08:18:05"]')
    driver.wait_for_xpath('//*[text()="LVC"]')
    driver.wait_for_xpath('//*[text()="BNS"]')

    submit_button_xpath = (
        '//div[@data-testid="observationplan-request-form"]//button[@type="submit"]'
    )
    driver.wait_for_xpath(submit_button_xpath)

    select_box = driver.find_element_by_id(
        "mui-component-select-followupRequestAllocationSelect")
    select_box.click()

    driver.click_xpath(
        f'//li[contains(text(), "{instrument_name}")][contains(text(), "{public_group.name}")]',
        scroll_parent=True,
    )

    # Click somewhere outside to remove focus from instrument select
    driver.click_xpath("//header")

    driver.click_xpath(submit_button_xpath)

    driver.wait_for_xpath(
        f"//div[@data-testid='{instrument_name}-requests-header']", timeout=15)
    driver.click_xpath(
        f"//div[@data-testid='{instrument_name}-requests-header']")
    driver.wait_for_xpath(
        f'//div[contains(@data-testid, "{instrument_name}_observationplanRequestsTable")]//div[contains(., "g,r,i")]',
        timeout=15,
    )
    driver.wait_for_xpath(
        f'''//div[contains(@data-testid, "{instrument_name}_observationplanRequestsTable")]//div[contains(., "complete")]''',
        timeout=15,
    )

    status, data = api("GET", "observation_plan", token=super_admin_token)
    assert status == 200

    observation_plan_request_id = data['data'][-1]['observation_plans'][0][
        'observation_plan_request_id']
    driver.click_xpath(
        f'//a[contains(@data-testid, "gcnRequest_{observation_plan_request_id}")]',
        scroll_parent=True,
    )
    driver.click_xpath(
        f'//button[contains(@data-testid, "treasuremapRequest_{observation_plan_request_id}")]',
        scroll_parent=True,
    )
    driver.click_xpath(
        f'//a[contains(@data-testid, "downloadRequest_{observation_plan_request_id}")]',
        scroll_parent=True,
    )
    driver.click_xpath(
        f'//button[contains(@data-testid, "sendRequest_{observation_plan_request_id}")]',
        scroll_parent=True,
    )
    driver.wait_for_xpath(
        f'''//div[contains(@data-testid, "{instrument_name}_observationplanRequestsTable")]//div[contains(., "submitted to telescope queue")]''',
        timeout=10,
    )
    driver.click_xpath(
        f'//button[contains(@data-testid, "removeRequest_{observation_plan_request_id}")]',
        scroll_parent=True,
    )
    driver.wait_for_xpath(
        f'''//div[contains(@data-testid, "{instrument_name}_observationplanRequestsTable")]//div[contains(., "deleted from telescope queue")]''',
        timeout=10,
    )
Example #7
def test_gcnevents_observations(driver, user, super_admin_token,
                                upload_data_token, view_only_token,
                                ztf_camera):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data':
            pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region':
            Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeGeoJSON': True}

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                params=params,
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'Optical'

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    datafile = f'{os.path.dirname(__file__)}/../../../data/sample_observation_data.csv'
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST',
                       'observation',
                       data=data,
                       token=super_admin_token)

    assert status == 200
    assert data['status'] == 'success'

    # wait for the executed observations to populate
    params = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
    }
    nretries = 0
    observations_loaded = False
    while not observations_loaded and nretries < 5:
        try:
            status, data = api('GET',
                               'observation',
                               params=params,
                               token=super_admin_token)
            assert status == 200
            data = data["data"]
            assert len(data['observations']) == 10
            observations_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    driver.get(f'/become_user/{user.id}')
    driver.get('/gcn_events/2019-04-25T08:18:05')

    driver.wait_for_xpath('//*[text()="190425 08:18:05"]')
    driver.wait_for_xpath('//*[text()="LVC"]')
    driver.wait_for_xpath('//*[text()="BNS"]')

    # test modify sources form
    driver.wait_for_xpath('//*[@id="root_startDate"]').send_keys('04/24/2019')
    driver.wait_for_xpath('//*[@id="root_startDate"]').send_keys(Keys.TAB)
    driver.wait_for_xpath('//*[@id="root_startDate"]').send_keys('01:01')
    driver.wait_for_xpath('//*[@id="root_startDate"]').send_keys('P')
    driver.wait_for_xpath('//*[@id="root_endDate"]').send_keys('04/30/2019')
    driver.wait_for_xpath('//*[@id="root_endDate"]').send_keys(Keys.TAB)
    driver.wait_for_xpath('//*[@id="root_endDate"]').send_keys('01:01')
    driver.wait_for_xpath('//*[@id="root_endDate"]').send_keys('P')
    driver.wait_for_xpath('//*[@id="root_localizationCumprob"]').clear()
    driver.wait_for_xpath('//*[@id="root_localizationCumprob"]').send_keys(
        '1.01')
    driver.wait_for_xpath('//*[@id="root_localizationName"]')
    driver.click_xpath('//*[@id="root_localizationName"]')
    driver.wait_for_xpath('//li[contains(text(), "bayestar.fits.gz")]')
    driver.click_xpath('//li[contains(text(), "bayestar.fits.gz")]')

    submit_button_xpath = (
        '//div[@data-testid="gcnsource-selection-form"]//button[@type="submit"]'
    )
    driver.wait_for_xpath(submit_button_xpath)
    driver.click_xpath(submit_button_xpath)

    # check that the executed observation table appears
    driver.wait_for_xpath('//*[text()="84434604"]')
    driver.wait_for_xpath('//*[text()="ztfr"]')
    driver.wait_for_xpath('//*[text()="1.57415"]')
    driver.wait_for_xpath('//*[text()="20.40705"]')
Example #8
def test_token_user_post_get_instrument(super_admin_token):
    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeGeoJSON': True}

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                params=params,
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'NIR'
            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    params = {'includeGeoJSON': True}

    instrument_id = data['data']['id']
    status, data = api(
        'GET', f'instrument/{instrument_id}', params=params, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['band'] == 'NIR'

    assert len(data['data']['fields']) == 5

    assert any(
        [
            d['field_id'] == 1
            and d['contour']['features'][0]['geometry']['coordinates'][0][0]
            == [110.84791974982103, -87.01522999646508]
            for d in data['data']['fields']
        ]
    )

    params = {'includeGeoJSONSummary': True}

    instrument_id = data['data']['id']
    status, data = api(
        'GET', f'instrument/{instrument_id}', params=params, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['band'] == 'NIR'

    assert len(data['data']['fields']) == 5

    assert any(
        [
            d['field_id'] == 1
            and d['contour_summary']['features'][0]['geometry']['coordinates'][0]
            == [1.0238351746164418, -89.93777511600825]
            for d in data['data']['fields']
        ]
    )
Example #9
    def setup_class(self):
        self.region_file = get_pkg_data_filename(
            'data/ds9.fits.reg', package='regions.io.ds9.tests')
        self.arr = np.ones((1024, 1024))
        self.raw_regions = Regions.read(self.region_file, format='ds9')
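For tests that prefer not to ship a data file, the same reader also accepts in-memory strings via Regions.parse (a small sketch):

from regions import Regions

# Parse a ds9 region string directly instead of reading it from disk.
ds9_str = 'image\ncircle(512,512,64)\n'
regs = Regions.parse(ds9_str, format='ds9')
assert len(regs) == 1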
Example #10
            photflam = float(hst_hdul[1].header["PHOTFLAM"])
        photflam = photflam * u.erg / u.AA / u.s / u.cm**2
        print("PHOTFLAM keyword value: %.2E %s" % (photflam.value, photflam.unit))
        zero_point = float(hst_hdul[1].header["PHOTZPT"])


        image_data = hst_hdul[1].data
        hst_wcs = wcs.WCS(hst_hdul[1].header)
        source_aperture = hst_ut.region_to_aperture(source_reg, hst_wcs)
        mask = image_data < 0
        phot_source = aperture_photometry(image_data, source_aperture, error=np.sqrt(image_data * exp_time) / exp_time, wcs=hst_wcs, mask=mask)
        source_area  = source_aperture.area
        aperture_keyword = "corrected_aperture_sum(%s)" % units

        if args.exclude is not None:
            for exclude_reg in Regions.read(args.exclude, format="ds9"):
                exclude_aperture = hst_ut.region_to_aperture(exclude_reg, hst_wcs)
                phot_exclude = aperture_photometry(image_data, exclude_aperture, wcs=hst_wcs, error=np.sqrt(image_data * exp_time) / exp_time, mask=mask)
                source_area  -= exclude_aperture.area
                phot_source["aperture_sum_err"] = np.sqrt(phot_exclude["aperture_sum_err"] ** 2 + phot_source["aperture_sum_err"] ** 2)
                phot_source["aperture_sum"] -= phot_exclude["aperture_sum"]

        # if a background region was given
        if len(regions) > 1:
            bkg_reg = regions[1]
            bkg_aperture = hst_ut.region_to_aperture(bkg_reg, hst_wcs)
            phot_bkg = aperture_photometry(image_data, bkg_aperture, wcs=hst_wcs, error=np.sqrt(image_data * exp_time) / exp_time, mask=mask)
            phot_source[aperture_keyword] = (phot_source["aperture_sum"] - phot_bkg["aperture_sum"] / bkg_aperture.area * source_area) / args.aperture_correction
            phot_source["corrected_aperture_err"] = sqrt(phot_source["aperture_sum_err"] ** 2 + (phot_bkg["aperture_sum_err"] / bkg_aperture.area * source_area) ** 2) / args.aperture_correction

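The background correction above scales the background aperture sum by the area ratio before subtracting. A self-contained sketch of the same arithmetic with photutils on synthetic data (all numbers hypothetical):

import numpy as np
from photutils.aperture import (CircularAnnulus, CircularAperture,
                                aperture_photometry)

image = np.random.poisson(5.0, (128, 128)).astype(float)
src = CircularAperture((64.0, 64.0), r=5.0)
bkg = CircularAnnulus((64.0, 64.0), r_in=10.0, r_out=15.0)
phot_src = aperture_photometry(image, src)
phot_bkg = aperture_photometry(image, bkg)
# Scale the background sum to the source aperture area, then subtract.
corrected = (phot_src['aperture_sum']
             - phot_bkg['aperture_sum'] / bkg.area * src.area)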
Example #11
def test_upload_observations(driver, super_admin_user, super_admin_token):

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data':
            pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region':
            Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # wait for the fields to populate
    time.sleep(15)

    driver.get(f"/become_user/{super_admin_user.id}")
    driver.get("/observations/")

    filename = "sample_observation_data_upload_malformed.csv"

    attachment_file = driver.wait_for_xpath('//input[@type="file"]')
    attachment_file.send_keys(
        os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'data',
            filename,
        ), )

    driver.wait_for_xpath(f'//*[contains(., "{filename}")]')
    submit_button_xpath = '//button[contains(.,"Submit")]'
    driver.click_xpath(submit_button_xpath, scroll_parent=True)

    filename = "sample_observation_data_upload.csv"

    attachment_file = driver.wait_for_xpath('//input[@type="file"]')
    attachment_file.send_keys(
        os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'data',
            filename,
        ), )

    driver.wait_for_xpath(f'//*[contains(., "{filename}")]')
    submit_button_xpath = '//button[contains(.,"Submit")]'
    driver.click_xpath(submit_button_xpath, scroll_parent=True)

    search_button_xpath = '//button[@data-testid="Search-iconButton"]'
    driver.click_xpath(search_button_xpath, scroll_parent=True)
    search_bar = driver.wait_for_xpath('//input[@aria-label="Search"]')
    search_bar.send_keys('84434604')
    driver.wait_for_xpath('//*[text()="84434604"]', timeout=10)
    search_bar.clear()

    driver.refresh()

    # Click somewhere outside to remove focus from search bar
    header = driver.wait_for_xpath("//header")
    ActionChains(driver).move_to_element(header).click().perform()

    filename = "sample_observation_data_upload_noseeing.csv"

    attachment_file = driver.wait_for_xpath('//input[@type="file"]')
    attachment_file.send_keys(
        os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'data',
            filename,
        ), )

    driver.wait_for_xpath(f'//*[contains(., "{filename}")]')
    submit_button_xpath = '//button[contains(.,"Submit")]'
    driver.click_xpath(submit_button_xpath, scroll_parent=True)

    search_button_xpath = '//button[@data-testid="Search-iconButton"]'
    driver.click_xpath(search_button_xpath, scroll_parent=True)
    search_bar = driver.wait_for_xpath('//input[@aria-label="Search"]')
    search_bar.send_keys('94434604')
    search_bar.clear()
Example #12
def test_default_observation_plan_tiling(user, super_admin_token,
                                         public_group):

    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'api_classname': 'ZTFAPI',
            'api_classname_obsplan': 'ZTFMMAAPI',
            'field_data':
            pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region':
            Regions.read(regionsdatafile).serialize(format='ds9'),
            'sensitivity_data': {
                'ztfr': {
                    'limiting_magnitude': 20.3,
                    'magsys': 'ab',
                    'exposure_time': 30,
                    'zeropoint': 26.3,
                }
            },
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    request_data = {
        'group_id': public_group.id,
        'instrument_id': instrument_id,
        'pi': 'Shri Kulkarni',
        'hours_allocated': 200,
        'start_date': '3021-02-27T00:00:00',
        'end_date': '3021-07-20T00:00:00',
        'proposal_id': 'COO-2020A-P01',
    }

    status, data = api('POST',
                       'allocation',
                       data=request_data,
                       token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    allocation_id = data['data']['id']

    request_data = {
        'allocation_id': allocation_id,
        'payload': {
            'filter_strategy': 'block',
            'schedule_strategy': 'tiling',
            'schedule_type': 'greedy_slew',
            'exposure_time': 300,
            'filters': 'ztfr',
            'maximum_airmass': 2.0,
            'integrated_probability': 100,
            'minimum_time_difference': 30,
            'program_id': 'Partnership',
            'subprogram_name': 'GRB',
        },
    }

    status, data = api('POST',
                       'default_observation_plan',
                       data=request_data,
                       token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    id = data['data']['id']

    status, data = api(
        'GET',
        f'default_observation_plan/{id}',
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data["data"]["allocation_id"] == allocation_id

    status, data = api(
        'DELETE',
        f'default_observation_plan/{id}',
        token=super_admin_token,
    )
    assert status == 200
Example #13
    def __read_regions_file(self, fn, padding_factor, max_size,
                            min_nfacet_per_axis):
        """ Reads a ds9 region file and sets up a grid of sub clusters (aka. facets) for each hull read out of
            the file. Each of the sub clusters are padded to a padding factor as determined by
            DDFacet's EstimateNpix. The number of sub clusters is determined from the maximum
            size of such sub cluster.

            If the fn is None it is assumed that the user intends to split the full sky up into a single
            direction with each sub cluster contributing to this single direction (i.e. regular
            tesselation of the sky into square facets).

            Subclusters are guaranteed to be odd sized and square

            return dictionary of directions, each entry containing a list of sub regions
        """
        clusters = []
        if fn is not None:  # dde case
            shapes = Regions.read(fn)
            if not all([type(reg) is PolygonPixelRegion for reg in shapes]):
                raise RuntimeError(
                    "Currently only supports regions of type 'polygon' with 'physical' (pixel) coordinates as input regions"
                )
            for regi, reg in enumerate(shapes):
                coords = np.array(
                    list(
                        zip(map(int, reg.vertices.x), map(int,
                                                          reg.vertices.y))))
                clusters.append(
                    BoundingConvexHull(coords,
                                       name="DDE_REG{0:d}".format(regi + 1)))
        else:  # die case
            clusters = [
                BoundingBox(0,
                            self.__nx - 1,
                            0,
                            self.__ny - 1,
                            name="die",
                            check_mask_outofbounds=True)
            ]
        log(2).print(
            "\tInitialized bounding boxes for regions. There are {0:d} region(s)"
            .format(len(clusters)))
        # now create axis aligned bounding boxes for each of the regions
        # and further split them to the maximum permitted facet size
        clusters = [
            BoundingBoxFactory.AxisAlignedBoundingBox(
                c, check_mask_outofbounds=False) for c in clusters
        ]

        def __split_regular_region(reg, max_size):
            if max_size < 0:
                raise ValueError("Expected positive value for max_size")
            reg_size_deg = np.max(
                np.array(reg.box_npx) * self.pixel_scale / 3600.0)
            nsplit = max(
                1,
                max(min_nfacet_per_axis,
                    int(np.ceil(reg_size_deg / max_size))))
            return BoundingBoxFactory.SplitBox(reg,
                                               nsubboxes=nsplit,
                                               check_mask_outofbounds=True)

        log(2).print(
            "\tSplitting regions into facetted regions, with maximum unpadded size of {0:.2f} degrees per facet"
            .format(max_size))
        clusters = [
            aasubreg for aareg in map(
                lambda reg: __split_regular_region(reg, max_size), clusters)
            for aasubreg in aareg
        ]
        clusters = list(
            map(
                lambda reg: BoundingBoxFactory.AxisAlignedBoundingBox(
                    reg, square=True, check_mask_outofbounds=False), clusters))

        def __pad_cluster(c, padding_factor):
            npx, _ = c.box_npx  # square facet at this point
            # this returns an odd npix:
            npixunpadded, npixpadded = EstimateNpix(npx,
                                                    Padding=padding_factor)
            return BoundingBoxFactory.PadBox(c,
                                             npixpadded,
                                             npixpadded,
                                             check_mask_outofbounds=False)

        log(2).print(
            "\tPadding all facets by a minimum factor of {0:.2f}x".format(
                padding_factor))
        clusters = list(
            map(lambda c: __pad_cluster(c, padding_factor), clusters))
        log.debug("\tNormalizing regional weights")
        BoundingConvexHull.normalize_masks(clusters)
        log(2).print("\tCaching regional weight maps for future predicts")
        list(map(lambda x: x.mask, clusters))  # cache mask
        dirs = {}
        for c in clusters:
            dirs[c.name] = dirs.get(c.name, []) + [c]
        return dirs
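The vertex-extraction step above relies on PolygonPixelRegion exposing its corners as a PixCoord with x and y arrays; a standalone sketch of the same conversion:

import numpy as np
from regions import PixCoord, PolygonPixelRegion

# Build a polygon in pixel coordinates and recover integer (x, y) pairs,
# mirroring the zip(map(int, ...)) conversion in __read_regions_file.
reg = PolygonPixelRegion(vertices=PixCoord(x=[10.2, 50.7, 30.0],
                                           y=[5.1, 8.9, 40.4]))
coords = np.array(list(zip(map(int, reg.vertices.x),
                           map(int, reg.vertices.y))))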
Example #14
def test_observation_plan_galaxy(
    user, super_admin_token, upload_data_token, view_only_token, public_group
):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    gcnevent_id = data['data']['gcnevent_id']

    dateobs = "2019-04-25 08:18:05"
    skymap = "bayestar.fits.gz"
    status, data = api(
        'GET',
        f'localization/{dateobs}/name/{skymap}',
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    localization_id = data['data']['id']

    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'api_classname': 'ZTFAPI',
            'api_classname_obsplan': 'ZTFMMAAPI',
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'Optical'

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    catalog_name = str(uuid.uuid4())
    datafile = f'{os.path.dirname(__file__)}/../../../data/CLU_mini.hdf5'
    data = {
        'catalog_name': catalog_name,
        'catalog_data': Table.read(datafile).to_pandas().to_dict(orient='list'),
    }

    status, data = api('POST', 'galaxy_catalog', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    params = {'catalog_name': catalog_name}

    nretries = 0
    galaxies_loaded = False
    while not galaxies_loaded and nretries < 5:
        try:
            status, data = api(
                'GET', 'galaxy_catalog', token=view_only_token, params=params
            )
            assert status == 200
            data = data["data"]["sources"]
            assert len(data) == 10
            assert any(
                [
                    d['name'] == '6dFgs gJ0001313-055904'
                    and d['mstar'] == 336.60756522868667
                    for d in data
                ]
            )
            galaxies_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(5)

    request_data = {
        'group_id': public_group.id,
        'instrument_id': instrument_id,
        'pi': 'Shri Kulkarni',
        'hours_allocated': 200,
        'start_date': '3021-02-27T00:00:00',
        'end_date': '3021-07-20T00:00:00',
        'proposal_id': 'COO-2020A-P01',
    }

    status, data = api('POST', 'allocation', data=request_data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    allocation_id = data['data']['id']

    request_data = {
        'allocation_id': allocation_id,
        'gcnevent_id': gcnevent_id,
        'localization_id': localization_id,
        'payload': {
            'start_date': '2019-04-25 01:01:01',
            'end_date': '2019-04-27 01:01:01',
            'filter_strategy': 'block',
            'schedule_strategy': 'galaxy',
            'galaxy_catalog': catalog_name,
            'schedule_type': 'greedy_slew',
            'exposure_time': '300',
            'filters': 'ztfg',
            'maximum_airmass': 2.0,
            'integrated_probability': 100,
            'minimum_time_difference': 30,
            'queue_name': 'ToO_Fake',
            'program_id': 'Partnership',
            'subprogram_name': 'GRB',
        },
    }

    status, data = api(
        'POST', 'observation_plan', data=request_data, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    id = data['data']['id']

    # wait for the observation plan to populate
    nretries = 0
    observation_plan_loaded = False
    while not observation_plan_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'observation_plan/{id}',
                params={"includePlannedObservations": "true"},
                token=super_admin_token,
            )

            assert status == 200
            assert data['status'] == 'success'

            assert data["data"]["gcnevent_id"] == gcnevent_id
            assert data["data"]["allocation_id"] == allocation_id
            assert data["data"]["payload"] == request_data["payload"]

            assert len(data["data"]["observation_plans"]) == 1
            observation_plan = data["data"]["observation_plans"][0]

            assert (
                observation_plan['plan_name'] == request_data["payload"]['queue_name']
            )
            assert observation_plan['validity_window_start'] == request_data["payload"][
                'start_date'
            ].replace(" ", "T")
            assert observation_plan['validity_window_end'] == request_data["payload"][
                'end_date'
            ].replace(" ", "T")

            planned_observations = observation_plan['planned_observations']
            print(planned_observations)
            assert len(planned_observations) > 0

            observation_plan_loaded = True

        except AssertionError:
            nretries = nretries + 1
            time.sleep(10)

    assert len(planned_observations) == 6
    assert all(
        [
            obs['filt'] == request_data["payload"]['filters']
            for obs in planned_observations
        ]
    )
    assert all(
        [
            obs['exposure_time'] == int(request_data["payload"]['exposure_time'])
            for obs in planned_observations
        ]
    )
Example #15
def test_observation_isot(super_admin_token, view_only_token):

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET', f'instrument/{instrument_id}', token=super_admin_token
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'Optical'

            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    datafile = (
        f'{os.path.dirname(__file__)}/../../../data/sample_observation_data_isot.csv'
    )
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST', 'observation', data=data, token=super_admin_token)

    assert status == 200
    assert data['status'] == 'success'

    params = {
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
    }

    # wait for the executed observations to populate
    nretries = 0
    observations_loaded = False
    while not observations_loaded and nretries < 5:
        try:
            status, data = api(
                'GET', 'observation', params=params, token=super_admin_token
            )
            assert status == 200
            data = data["data"]
            assert len(data['observations']) == 10
            observations_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    assert any(
        [
            d['obstime'] == '2019-04-25T08:18:18' and d['observation_id'] == 94434604
            for d in data['observations']
        ]
    )
Example #16
def test_observation(super_admin_token, view_only_token):

    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # wait for the fields to populate
    time.sleep(15)

    datafile = f'{os.path.dirname(__file__)}/../../../data/sample_observation_data.csv'
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST', 'observation', data=data, token=super_admin_token)

    assert status == 200
    assert data['status'] == 'success'

    # wait for the executed observations to populate
    time.sleep(15)

    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
        'localizationDateobs': "2019-04-25T08:18:05",
        'localizationName': "bayestar.fits.gz",
        'localizationCumprob': 1.01,
        'returnStatistics': True,
    }

    status, data = api('GET', 'observation', params=data, token=super_admin_token)
    assert status == 200
    data = data["data"]
    assert len(data['observations']) == 10
    assert np.isclose(data['probability'], 2.927898964006069e-05)
    assert any(
        [
            d['obstime'] == '2019-04-25T08:18:18.002909'
            and d['observation_id'] == 84434604
            for d in data['observations']
        ]
    )

    for d in data['observations']:
        if d['observation_id'] == 84434604:
            observation_id = d['id']
            break

    status, data = api(
        'DELETE', f'observation/{observation_id}', token=super_admin_token
    )
    assert status == 200

    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
        'localizationDateobs': "2019-04-25T08:18:05",
        'localizationName': "bayestar.fits.gz",
        'localizationCumprob': 1.01,
        'returnStatistics': True,
    }

    status, data = api('GET', 'observation', params=data, token=super_admin_token)
    assert status == 200
    data = data["data"]

    assert len(data['observations']) == 9
    assert not any(
        [
            d['obstime'] == '2019-04-25T08:18:18.002909'
            and d['observation_id'] == 84434604
            for d in data['observations']
        ]
    )