def test_download_files():
    """download_files derives file names from Content-Disposition and
    skips URLs whose response carries no such header."""
    def fake_request(method, url, **kwargs):
        # Answer every request with a Content-Disposition naming the
        # last URL path segment.
        resp = Mock()
        resp.headers = {
            'Content-Disposition': 'attachment; '
                                   'filename={}'.format(url.split('/')[-1])}
        return resp

    def fake_download(url, file_name, **kwargs):
        return file_name

    alma = Alma()
    alma._request = Mock(side_effect=fake_request)
    alma._download_file = Mock(side_effect=fake_download)

    downloaded = alma.download_files(['https://location/file1'])
    assert len(downloaded) == 1
    assert downloaded[0].endswith('file1')

    alma._request.reset_mock()
    alma._download_file.reset_mock()
    downloaded = alma.download_files(['https://location/file1',
                                      'https://location/file2'])
    assert len(downloaded) == 2

    # error cases
    alma._request = Mock()
    # no Content-Disposition results in no downloaded file
    alma._request.return_value = Mock(headers={})
    assert not alma.download_files(['https://location/file1'])
def test_sia():
    """query_sia forwards every keyword unchanged to the SIA service."""
    sia_mock = Mock()
    empty = Table.read(os.path.join(DATA_DIR, 'alma-empty.txt'),
                       format='ascii')
    sia_mock.search.return_value = Mock(table=empty)
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._sia = sia_mock
    # The same keyword set must reach the underlying search() call.
    query_kwargs = dict(
        pos='CIRCLE 1 2 1', calib_level=[0, 1], data_rights='Public',
        band=(300, 400), time=545454, maxrec=10, pol=['XX', 'YY'],
        instrument='JAO', collection='ALMA', field_of_view=0.0123130,
        data_type='cube', target_name='J0423-013',
        publisher_did='ADS/JAO.ALMA#2013.1.00546.S', exptime=25)
    result = alma.query_sia(**query_kwargs)
    assert len(result.table) == 0
    assert_called_with(sia_mock.search, **query_kwargs)
def _get_raw_artifacts(member_ous):
    """Return the staged raw-data URLs (ASDM pieces and numbered tar
    parts) for the given member OUS."""
    logger.info("Staging artifacts for member_ous {}".format(member_ous))
    staged = Alma().stage_data(member_ous)
    file_urls = list(set(staged['URL']))
    logger.debug('\n'.join(file_urls))
    # Keep ASDM files plus tar parts named like '001_of_004.tar'.
    return [url for url in file_urls
            if '.asdm.' in url
            or re.match(r'.*[0-9]{3,4}_of_[0-9]{3,4}\.tar$', url)]
def test_tap():
    """query_tap hands the ADQL string straight to the TAP service."""
    tap_mock = Mock()
    empty = Table.read(os.path.join(DATA_DIR, 'alma-empty.txt'),
                       format='ascii')
    tap_mock.search.return_value = Mock(table=empty)
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._tap = tap_mock
    result = alma.query_tap('select * from ivoa.ObsCore')
    assert len(result.table) == 0
    tap_mock.search.assert_called_once_with('select * from ivoa.ObsCore',
                                            language='ADQL')
def test_get_data_info():
    """get_data_info issues one synchronous datalink call per uid."""
    datalink_mock = Mock()
    dl_table = Table.read(data_path('alma-datalink.xml'), format='votable')
    response = Mock(to_table=Mock(return_value=dl_table))
    response.status = ['OK']
    datalink_mock.run_sync.return_value = response
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._datalink = datalink_mock
    result = alma.get_data_info(uids='uid://A001/X12a3/Xe9')
    assert len(result) == 7
    datalink_mock.run_sync.assert_called_once_with('uid://A001/X12a3/Xe9')
def test_galactic_query():
    """
    regression test for 1867
    """
    tap_mock = Mock()
    empty = Table.read(os.path.join(DATA_DIR, 'alma-empty.txt'),
                       format='ascii')
    search_result = Mock()
    search_result.to_table.return_value = empty
    tap_mock.search.return_value = search_result
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._tap = tap_mock
    # A galactic-frame coordinate must be converted to ICRS in the payload.
    galactic_origin = SkyCoord(0 * u.deg, 0 * u.deg, frame='galactic')
    payload = alma.query_region(galactic_origin, radius=1 * u.deg,
                                get_query_payload=True)
    assert payload['ra_dec'] == galactic_origin.icrs.to_string() + ", 1.0"
def get_observation(id):
    """
    Return the observation corresponding to the id.

    It is the ALMA implementation of the get_observation function
    expected by the base proxy alma docker image.

    :param id: id of the observation
    :return: observation corresponding to the id, or None when no such
        observation exists
    """
    member_ouss_id = _to_member_ouss_id(id)
    # alternative ALMA mirror
    # Alma.archive_url = 'http://almascience.eso.org'
    results = Alma().query({'member_ous_id': member_ouss_id}, science=False,
                           cache=False, format='ascii')
    if results:
        return member2observation(member_ouss_id, results)
    logger.debug('No observation found for ID : {}'.format(member_ouss_id))
    return None
# CLI chunk: parse the project id, configure logging, then assemble an
# `overrides` metadata dict from pickled per-artifact metadata plus an
# ALMA archive query for the project.
parser.add_argument("project_id")
parser.add_argument('--verbose', action='store_true')
parser.add_argument('--debug', action='store_true')
args = parser.parse_args()

# Errors only by default; --verbose / --debug raise the verbosity.
log_level = logging.ERROR
if args.verbose:
    log_level = logging.INFO
elif args.debug:
    log_level = logging.DEBUG
logging.basicConfig(level=log_level)

# NOTE(review): the open() handle is never closed — a `with` block would
# be safer.  Assumes `{project_id}_md.pk` exists in the CWD.
md = pickle.load(open("{}_md.pk".format(args.project_id)))
# Project-level metadata comes from the ALMA archive.
db_table = Alma().query(payload={'project_code': args.project_id})
overrides={}
overrides['pi_name'] = db_table['PI name'][0]
overrides['project_title'] = db_table['Project title'][0]
overrides['casa_version'] = "4.7.2"
overrides['casa_run_date'] = "2018-05-17T00:00:00"
overrides['release_date'] = db_table['Release date'][0]
overrides['project_id'] = args.project_id
overrides['keywords'] = db_table['Science keyword'][0]
# Per-artifact overrides (loop body appears truncated in this excerpt).
for artifact in md.keys():
    # NOTE(review): rstrip removes a *character set*, not a suffix —
    # names ending in any of '.acilmpst' lose extra characters; likely
    # str.removesuffix / slicing was intended.  TODO confirm.
    overrides['observation_id'] = artifact.rstrip('.ms.split.cal')
    overrides['target_name'] = md[artifact]['field']
    overrides['ra'] = md[artifact]['ra']
>>> import pip
>>> pip.main(['install', 'keyrings.alt', '--user'])

and try again.
"""
import numpy as np
from astroquery.alma import Alma
import os
import six
import runpy
import tarfile
from taskinit import casalog

# Cache downloads in the current directory; set on both the instance and
# the class since some Alma methods read the class attribute.
alma = Alma()
alma.cache_location = Alma.cache_location = '.'
# NOTE(review): the rest of this statement is redacted/garbled in the
# source ('******'); it presumably prompted for ALMA login credentials.
# The code below is left exactly as found — it will not parse as-is.
username = six.moves.input("Username: "******"""
# NOTE(review): excerpt from inside an enclosing loop — `i`, `array_goals`,
# `result_table`, `search_freq`, `data_suffix`, `dir_data`, `key_download`,
# `list_donwload` and `list_donwload_txt` are defined outside this view.
print(array_goals[i] + ", z=" + str(result_table["Redshift"][0]))
target = Alma.query_object(array_goals[i])
spws = target['Frequency support'].tolist()
uids = target['Member ous id'].tolist()
loop2 = len(spws)
for j in range(loop2):
    print(uids[j])
    # 'Frequency support' entries look like "[84.16..86.04GHz,...]" joined
    # by " U "; pull the low/high edge of each window.
    for k in range(len(spws[j].split(" U "))):
        freq_cover = spws[j].split(" U ")[k].split(",")[0]
        edge_low = float(freq_cover.split("..")[0].replace("[", ""))
        edge_high = float(freq_cover.split("..")[1].replace("GHz", ""))
        # Shift the rest frequency into the observed frame.
        redshift_plus_1 = 1 + result_table["Redshift"][0]
        obs_freq = search_freq / redshift_plus_1
        if edge_low < obs_freq < edge_high:
            print(data_suffix + " is here!")
            uid_url_table = Alma.stage_data(uids[j])
            myAlma = Alma()
            myAlma.cache_location = \
                "/mnt/fhgfs/saito/data_node4/" \
                + dir_data.replace("../", "")
            if key_download == "fits":
                filelist = \
                    myAlma.download_and_extract_files(uid_url_table['URL'])
                list_donwload.append(array_goals[i])
            elif key_download == "txt":
                list_donwload.append(array_goals[i])
# Persist the (deduplicated) list of targets found so far.
# NOTE(review): placement relative to the loops is inferred — the source
# formatting was lost; confirm against the original script.
np.savetxt(dir_data + list_donwload_txt,
           np.array(list(set(list_donwload))), fmt="%s")
# Script: stage and download all data for one PI's W51 project.
from astroquery.alma import Alma

a = Alma()
# Requires proprietary access — log in before querying with public=False.
a.login('keflavich')

# Find my project
result = a.query_object('W51', payload=dict(pi_name='ginsburg'), public=False)

# Split by array: >3 arcsec resolution ~ 7m array, <3 arcsec ~ 12m array.
# NOTE(review): both masks are computed but never used below.
_7m = result['Spatial resolution'] > 3
_12m = result['Spatial resolution'] < 3

# unfortunately, ALMA doesn't support querying for MOUSes
uid_url_table = a.stage_data(result['Member ous id'])
# Download everything (regex '.*'), include raw ASDMs, delete tarballs
# after extraction.
filelist = a.download_and_extract_files(uid_url_table['URL'],
                                        include_asdm=True, regex='.*',
                                        delete=True, verbose=True)
def test_query():
    """Exercise query_region/query_object against a mocked TAP service.

    Checks the generated ADQL text, result sizes, the legacy-column
    translation, and pass-through of extra keyword arguments.
    """
    # Tests the query and return values
    tap_mock = Mock()
    empty_result = Table.read(os.path.join(DATA_DIR, 'alma-empty.txt'),
                              format='ascii')
    mock_result = Mock()
    mock_result.to_table.return_value = empty_result
    tap_mock.search.return_value = mock_result
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._tap = tap_mock
    result = alma.query_region(SkyCoord(1 * u.deg, 2 * u.deg, frame='icrs'),
                               radius=1 * u.deg)
    assert len(result) == 0
    assert 'proposal_id' in result.columns
    tap_mock.search.assert_called_once_with(
        "select * from ivoa.obscore WHERE "
        "(INTERSECTS(CIRCLE('ICRS',1.0,2.0,1.0), s_region) = 1) "
        "AND calib_level>1 AND data_rights='Public'", language='ADQL')

    # one row result
    tap_mock = Mock()
    onerow_result = Table.read(os.path.join(DATA_DIR, 'alma-onerow.txt'),
                               format='ascii')
    mock_result = Mock()
    mock_result.to_table.return_value = onerow_result
    tap_mock.search.return_value = mock_result
    alma = Alma()
    alma._tap = tap_mock
    with patch('astroquery.alma.tapsql.coord.SkyCoord.from_name') as name_mock:
        name_mock.return_value = SkyCoord(1, 2, unit='deg')
        result = alma.query_object('M83', public=False, band_list=[3])
    assert len(result) == 1
    tap_mock.search.assert_called_once_with(
        "select * from ivoa.obscore WHERE "
        "(INTERSECTS(CIRCLE('ICRS',1.0,2.0,0.16666666666666666), s_region) = 1) "
        "AND band_list LIKE '%3%' AND calib_level>1 AND data_rights='Proprietary'",
        band_list=[3], language='ADQL')

    # repeat for legacy columns
    mock_result = Mock()
    tap_mock = Mock()
    mock_result.to_table.return_value = onerow_result
    tap_mock.search.return_value = mock_result
    alma = Alma()
    alma._tap = tap_mock
    with patch('astroquery.alma.tapsql.coord.SkyCoord.from_name') as name_mock:
        name_mock.return_value = SkyCoord(1, 2, unit='deg')
        result_legacy = alma.query_object('M83', public=False,
                                          legacy_columns=True, band_list=[3])
    # BUG FIX: the original re-asserted `len(result) == 1`, which only
    # re-checked the previous section's table; this section's result is
    # `result_legacy`.
    assert len(result_legacy) == 1
    assert 'Project code' in result_legacy.columns
    tap_mock.search.assert_called_once_with(
        "select * from ivoa.obscore WHERE "
        "(INTERSECTS(CIRCLE('ICRS',1.0,2.0,0.16666666666666666), s_region) = 1) "
        "AND band_list LIKE '%3%' AND calib_level>1 AND data_rights='Proprietary'",
        band_list=[3], language='ADQL')

    # every legacy column must mirror its obscore counterpart
    row_legacy = result_legacy[0]
    row = result[0]
    for item in _OBSCORE_TO_ALMARESULT.items():
        if item[0] == 't_min':
            # t_min is MJD in obscore but a formatted date in legacy results
            assert Time(row[item[0]], format='mjd').strftime('%d-%m-%Y') ==\
                row_legacy[item[1]]
        else:
            assert row[item[0]] == row_legacy[item[1]]

    # query with different arguments
    tap_mock = Mock()
    empty_result = Table.read(os.path.join(DATA_DIR, 'alma-empty.txt'),
                              format='ascii')
    mock_result = Mock()
    mock_result.to_table.return_value = empty_result
    tap_mock.search.return_value = mock_result
    alma = Alma()
    alma._get_dataarchive_url = Mock()
    alma._tap = tap_mock
    result = alma.query_region('1 2', radius=1 * u.deg,
                               payload={'frequency': '22'}, public=None,
                               band_list='1 3', science=False,
                               start_date='01-01-2010',
                               polarisation_type='Dual', fov=0.0123130,
                               integration_time=25)
    assert len(result) == 0
    tap_mock.search.assert_called_with(
        "select * from ivoa.obscore WHERE frequency=22.0 AND "
        "(INTERSECTS(CIRCLE('ICRS',1.0,2.0,1.0), s_region) = 1) AND "
        "(band_list LIKE '%1%' OR band_list LIKE '%3%') AND "
        "t_min=55197.0 AND pol_states='/XX/YY/' AND s_fov=0.012313 AND "
        "t_exptime=25", band_list='1 3', fov=0.012313, integration_time=25,
        language='ADQL', polarisation_type='Dual', start_date='01-01-2010')
# Script: query one ALMA project and dump the result table to HTML.
from astroquery.alma import Alma
from astropy.table import Table

# db_table = Alma().query(payload={'project_code': '2016.1.00010.S'})
# 'project' view collapses the per-observation rows to one per project.
db_table = Alma().query(payload={'project_code': '2016.1.00010.S'},
                        result_view='project')
# db_table.write('{}/alma_query.xml'.format(TEST_DATA_DIR),
#                format='votable')
db_table.write('{}/alma_query3.html'.format('./'), format='html',
               overwrite=True)
def saveMetadata(self, search='', save_link_list=True, download=False,
                 date=None):
    """Query the ALMA archive for ``search``, store the project metadata
    (and optionally the per-uid download links) in the local SQLite DB,
    and optionally download the tarballs.

    NOTE(review): Python 2 syntax throughout (``except lite.Error, e``,
    print statements) — this block predates any Python 3 port.
    """
    # date is the Release Date from the project
    self.__create_sqlite_connection()
    with self._connection:
        cur = self._connection.cursor()
        # ----------------------------------
        # | GETTING DATA FROM ALMA ARCHIVE |
        # ----------------------------------
        # NOTE(review): both branches are identical, so `date` has no
        # effect; presumably the first branch was meant to filter by
        # release date.  TODO confirm intended behaviour.
        if date:
            data = Alma.query_object(search)
        else:
            data = Alma.query_object(search)
        print('[Downloader] Number of results:', len(data))
        i = 0
        cols_translations = dict()
        # -----------------------------
        # | GETTING ID FOR INSERTIONS |
        # -----------------------------
        # Next free primary keys: continue from the current maxima.
        cur.execute(
            "SELECT id_projects_data AS id FROM projects_data ORDER BY id_projects_data DESC LIMIT 1;"
        )
        result = cur.fetchone()
        if result:
            next_project_id = result[0] + 1
        else:
            next_project_id = 1
        cur.execute(
            "SELECT id_link FROM links_list ORDER BY id_link DESC LIMIT 1;"
        )
        result = cur.fetchone()
        if result:
            next_link_id = result[0] + 1
        else:
            next_link_id = 1
        # ---------------------------
        # | GETTING RESULT COLNAMES |
        # ---------------------------
        #print('Number of colums:', len(data.columns))
        # Normalise column names: strip '(...)' suffixes, spaces -> '_',
        # lower-case.  (Mapping is built but not used below in this view.)
        for col in data.columns:
            new_colname = re.sub('\(.*\)', '', data[col].name).strip()
            new_colname = new_colname.replace(" ", "_").lower()
            cols_translations[data[col].name] = new_colname
        # ---------------------------------
        # | GETTING ALL PROJECTS METADATA |
        # ---------------------------------
        tmp_col_data = []
        tmp_mousid = set()
        print("[Downloader] Saving all metadata of projects ...")
        for row in data:
            i += 1
            tmp_col_data.append(next_project_id)
            for col in row.columns:
                # Masked values are serialised so they can be restored.
                if isinstance(row[col], np.ma.core.MaskedArray):
                    tmp_col_data.append(np.ma.dumps(row[col]))
                else:
                    tmp_col_data.append(str(row[col]))
            try:
                cur.execute(
                    "INSERT INTO projects_data VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                    tmp_col_data)
                print('(ProjectData) Insertion number [' + str(i) + ']; ID='
                      + str(next_project_id))
                next_project_id += 1
            except lite.Error, e:
                print "Error: %s" % e.args[0]
                raise
            # Reuse the buffer; remember each Member OUS for link staging.
            del tmp_col_data[:]
            tmp_mousid.add(row['Member ous id'])
            # Test with only one project-row:
            #if i == 1:
            #    break
        # -------------------------------
        # | GETTING ALL LINKS FROM UIDS |
        # -------------------------------
        print("[Downloader] Saving all links from projects-uids ...")
        if save_link_list:
            for mous in tmp_mousid:
                uids = np.unique(mous)
                link_list = Alma.stage_data(uids)
                for link in link_list:
                    try:
                        cur.execute(
                            "INSERT INTO links_list VALUES(?, ?, ?, ?)", [
                                next_link_id, link['URL'], link['uid'],
                                link['size']
                            ])
                        print(' (Link) RowDB_ID=' + str(next_link_id))
                        next_link_id += 1
                    except lite.Error, e:
                        print "Error: %s" % e.args[0]
                        raise
                #print(link_list['size'].sum())
        if download:
            print('[Downloader] Downloading tars ...')
            # NOTE(review): `link_list` is only bound when save_link_list
            # ran with at least one MOUS, and then holds only the *last*
            # MOUS's links — download=True alone raises a NameError.
            # TODO confirm intent.
            myAlma = Alma()
            make_sure_path_exists(self.download_directory)
            myAlma.cache_location = self.download_directory
            myAlma.download_files(link_list, cache=True)