Example #1
def test_invalid_name(tmp_path, rnx_sample):
    sample_path = tmp_path / 'sample'
    shutil.copy(rnx_sample, sample_path)
    with pytest.raises(ValueError) as excinfo:
        decompress_on_disk(sample_path)
    msg = excinfo.value.args[0]
    assert msg.endswith('is not a valid RINEX file name')
Example #2
def test_on_disk_invalid_input(tmp_path):
    path = tmp_path / 'sample.crx'
    path.write_bytes(b'blah' * 100)
    with pytest.raises(ValueError) as excinfo:
        decompress_on_disk(path)
    msg = excinfo.value.args[0]
    assert 'not a valid RINEX file' in msg
    assert not get_compressed_path(path).exists()
Example #3
def test_on_disk_truncated_input(tmp_path):
    in_file = 'sample.crx'
    sample_path = tmp_path / in_file
    sample_path.write_bytes(get_data_path(in_file).read_bytes()[:200])
    with pytest.raises(HatanakaException) as excinfo:
        decompress_on_disk(sample_path)
    msg = excinfo.value.args[0]
    assert 'truncated in the middle' in msg
    assert not get_compressed_path(sample_path).exists()
Example #4
def test_on_disk_empty_input(tmp_path, crx_str, rnx_bytes):
    path = tmp_path / 'sample.crx'
    path.write_bytes(b'')
    with pytest.raises(ValueError) as excinfo:
        decompress_on_disk(path)
    assert "empty file" in str(excinfo.value)
    assert not get_decompressed_path(path).exists()
    path.unlink()

    path = tmp_path / 'sample.rnx'
    path.write_bytes(b'')
    with pytest.raises(ValueError) as excinfo:
        compress_on_disk(path)
    assert "file is too short" in str(excinfo.value)
    assert not get_compressed_path(path, is_obs=True).exists()
Example #5
def rnpath(inps):

    # Let us start by parsing out all relevant user configuration inputs.
    yy       = inps['dtstart_yy'] # The starting two-digit year number
    doy      = inps['dtstart_doy'] # The starting day of year in GPST
    cwd      = Path(inps['cwd']) # Current working directory
    iwd      = cwd / 'input'
    name1    = inps['name1'] # 4-letter ID of the first spacecraft
    name2    = inps['name2'] # 4-letter ID of the second spacecraft

    # Now, we get all necessary file paths, in the input folder.
    rnx1file = iwd / (name1 + doy + '0.' + yy + 'O')
    rnx2file = iwd / (name2 + doy + '0.' + yy + 'O')
    crx1file = iwd / (name1 + doy + '0.' + yy + 'D')
    crx2file = iwd / (name2 + doy + '0.' + yy + 'D')

    # Check if there is a need for hatanaka decompression for LEO 1.
    if rnx1file.exists():
        print('Decompressed RINEX obs file observed for ' + name1 + '\n')
    elif crx1file.exists():
        print('Hatanaka compressed file observed for ' + name1)
        hatanaka.decompress_on_disk(crx1file)
        if rnx1file.exists():
            print('Hatanaka decompression successful for LEO1! \n')
        else:
            print('Decompression failed. Did you rename any folders? \n')
    else:
        print('Neither compressed nor decompressed RINEX files were found! \n')

    # Check if there is a need for hatanaka decompression for LEO 2.
    if rnx2file.exists():
        print('Decompressed RINEX obs file observed for ' + name2 + '\n')
    elif crx2file.exists():
        print('Hatanaka compressed file observed for ' + name2)
        hatanaka.decompress_on_disk(crx2file)
        if rnx2file.exists():
            print('Hatanaka decompression successful for LEO2! \n')
        else:
            print('Decompression failed. Did you rename any folders? \n')
    else:
        print('Neither compressed nor decompressed RINEX files were found! \n')

    if rnx1file.exists() and rnx2file.exists():
        return rnx1file, rnx2file
    else:
        print('Error, somehow RINEX observation files still not found!')
        print('Did you rename any folders accidentally? \n')
        return False
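
Since decompress_on_disk returns the path of the file it writes (as the later examples show), the two near-identical per-spacecraft blocks above could be folded into one helper. A minimal sketch under that assumption; the _ensure_rnx name is ours, not part of the original module:

import hatanaka

def _ensure_rnx(rnxfile, crxfile, name):
    # Return the decompressed RINEX obs path, running Hatanaka decompression when
    # only the compressed .D file is present; None when neither file exists.
    if rnxfile.exists():
        return rnxfile
    if crxfile.exists():
        print('Hatanaka compressed file observed for ' + name)
        # decompress_on_disk returns the path of the file it wrote
        return hatanaka.decompress_on_disk(crxfile)
    print('Neither compressed nor decompressed RINEX files were found for ' + name + '\n')
    return None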
Example #6
def ftp_download_files(url_base, folder_path, cacheDir, filenames, compression='', overwrite=False):
  """
  Like download_file, but for multiple files. Keeps a persistent FTP connection
  open to be more efficient.
  """
  folder_path_abs = os.path.join(cacheDir, folder_path)

  ftp = ftp_connect(url_base + folder_path)

  filepaths = []
  for filename in filenames:
    filename_zipped = filename + compression
    filepath = str(hatanaka.get_decompressed_path(os.path.join(folder_path_abs, filename)))
    filepath_zipped = os.path.join(folder_path_abs, filename_zipped)
    print("pulling from", url_base, "to", filepath)

    if not os.path.isfile(filepath) or overwrite:
      if not os.path.exists(folder_path_abs):
        os.makedirs(folder_path_abs)
      try:
        ftp.retrbinary('RETR ' + filename_zipped, open(filepath_zipped, 'wb').write)
      except ftplib.error_perm:
        raise IOError("Could not download file from: " + url_base + folder_path + filename_zipped)
      except socket.timeout:
        raise IOError("Read timed out from: " + url_base + folder_path + filename_zipped)
      filepaths.append(str(hatanaka.decompress_on_disk(filepath_zipped)))
    else:
      filepaths.append(filepath)
  return filepaths
Example #7
def download_and_cache_file(url_base, folder_path, cacheDir, filename, compression='', overwrite=False):
  folder_path_abs = os.path.join(cacheDir, folder_path)
  filename_zipped = filename + compression

  filepath = str(hatanaka.get_decompressed_path(os.path.join(folder_path_abs, filename)))
  filepath_attempt = filepath + '.attempt_time'
  filepath_zipped = os.path.join(folder_path_abs, filename_zipped)

  if os.path.exists(filepath_attempt):
    with open(filepath_attempt, 'rb') as rf:
      last_attempt_time = float(rf.read().decode())
    if time.time() - last_attempt_time < SECS_IN_HR:
      raise IOError(f"Too soon to try {folder_path + filename_zipped} from {url_base}")

  if not os.path.isfile(filepath) or overwrite:
    if not os.path.exists(folder_path_abs):
      os.makedirs(folder_path_abs)

    try:
      data_zipped = download_file(url_base, folder_path, filename_zipped)
    except (IOError, pycurl.error, socket.timeout):
      unix_time = time.time()
      tmp_dir = os.path.join(cacheDir, 'tmp')
      if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
      with tempfile.NamedTemporaryFile(delete=False, dir=tmp_dir) as fout:
        fout.write(str.encode(str(unix_time)))
      os.replace(fout.name, filepath_attempt)
      raise IOError(f"Could not download {folder_path + filename_zipped} from {url_base}")


    with open(filepath_zipped, 'wb') as wf:
      wf.write(data_zipped)

    filepath = str(hatanaka.decompress_on_disk(filepath_zipped))
  return filepath
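
A usage sketch of download_and_cache_file as defined above; the URL, folder, cache directory, and file name are hypothetical placeholders, and a gzip-compressed broadcast navigation file is assumed:

# Hypothetical call: downloads brdc0010.21n.gz on the first run, writes the
# timestamped .attempt_time marker if the download fails, and on later runs
# returns the already decompressed file straight from the cache.
local_path = download_and_cache_file('https://cddis.example/', 'gnss/data/daily/2021/001/',
                                     '/tmp/gnss_cache/', 'brdc0010.21n', compression='.gz')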
Example #8
def test_decompress_on_disk_delete(tmp_path, rnx_bytes):
    # prepare
    in_file = 'sample.crx.gz'
    sample_path = tmp_path / in_file
    shutil.copy(get_data_path(in_file), sample_path)
    # decompress and delete
    out_path = decompress_on_disk(sample_path, delete=True)
    # check
    expected_path = tmp_path / 'sample.rnx'
    assert not sample_path.exists()
    assert out_path == expected_path
    assert expected_path.exists()
    assert clean(decompress(expected_path)) == clean(rnx_bytes)
    # check that already decompressed is not deleted
    out_path = decompress_on_disk(expected_path, delete=True)
    assert out_path == expected_path
    assert out_path.exists()
Example #9
def test_decompress_non_obs(tmp_path, rnx_bytes, input_suffix):
    # prepare
    txt = make_nav(rnx_bytes)
    sample_path = tmp_path / ('sample' + input_suffix + '.gz')
    sample_path.write_bytes(gzip.compress(txt))
    # decompress
    out_path = decompress_on_disk(sample_path)
    # check
    assert out_path.exists()
    assert out_path == tmp_path / ('sample' + input_suffix)
    assert clean(out_path.read_bytes()) == clean(txt)
Example #10
def test_decompress_on_disk(tmp_path, crx_sample, rnx_bytes, input_suffix,
                            expected_suffix):
    # prepare
    sample_path = tmp_path / ('sample' + input_suffix)
    in_file = 'sample' + input_suffix
    shutil.copy(get_data_path(in_file), sample_path)
    # decompress
    out_path = decompress_on_disk(sample_path)
    # check
    assert out_path.exists()
    print(list(tmp_path.glob('*')))
    assert out_path == tmp_path / ('sample' + expected_suffix)
    assert clean(out_path.read_bytes()) == clean(rnx_bytes)
Example #11
def download_corr_files(out_path, base_stn, obs, nav):
    """
    This function downloads the necessary base station and satellite files (broadcast/orbits)
    for correcting raw RINEX files using rtkpost.
    """

    # file root name
    name = obs.split('.obs')[0]

    # first we need the year, doy, and GPS week from the observations file
    f = open(out_path + obs, 'r').read()
    print("Downloading correction data for {}".format(obs))
    lines = f.strip().splitlines()
    for l in lines:
        # read from RINEX header
        if l.endswith('TIME OF FIRST OBS'):
            var = l.split()
            yr = var[0]
            # use datetime and gnsscal to convert the date to doy and gps week
            date = datetime.date(int(yr), int(var[1]), int(var[2]))
            # pad with leading zeros if DOY is < 3 digits
            doy = str(gnsscal.date2doy(date)).zfill(3)
            week = str(gnsscal.date2gpswd(date)[0]) + str(
                gnsscal.date2gpswd(date)[1])

    # create a new folder to hold the correction data and move the rinex files into it
    path = out_path + yr + doy + '_' + name + '/'
    if not os.path.exists(path):
        os.mkdir(path)
        shutil.move(out_path + obs, path + obs)
        shutil.move(out_path + nav, path + nav)

    # download the base station observations, broadcast navigation, and satellite orbital clocks
    # NOTE: we log into and out of the FTP server each time to avoid some hang-ups in downloading
    # also this could allow for replacement of the server to different locations for each file

    # connect to the FTP for getting the base station observations
    f = ftp.FTP('igs.ensg.ign.fr')
    try:
        f.login()
        print('logged into ftp')
    except:
        print('hmm couldn\'t connect to base station ftp. no internet?')
        sys.exit()
    # navigate to the directory
    f.cwd('/pub/igs/data/' + yr + '/' + doy + '/')
    # download the base station using the leading identifier and wildcards
    # note we use the 30 second decimated data (*30S* wildcard below), this is
    # available for most sites
    filematch = base_stn + '*30S*.crx.gz'
    for filename in f.nlst(filematch):
        target_file_name = os.path.join(path, os.path.basename(filename))
        with open(target_file_name, 'wb') as fhandle:
            f.retrbinary('RETR %s' % filename, fhandle.write)
    # quit and close server connection once all matches are downloaded
    f.quit()
    f.close()
    # also decompress the file
    decompressed_path = hatanaka.decompress_on_disk(path + filename,
                                                    delete=True)
    # final filename
    baseSTN = path + filename.split('crx')[0] + yr[2:] + 'o'
    shutil.move(decompressed_path, baseSTN)

    # grab the broadcast navigation data from the same directory
    f = ftp.FTP('igs.ensg.ign.fr')
    try:
        f.login()
        print('logged into ftp')
    except:
        print('hmm couldn\'t connect to base station ftp. no internet?')
        sys.exit()
    # navigate to the directory
    f.cwd('/pub/igs/data/' + yr + '/' + doy + '/')
    # get the filematch
    filename = 'brdc' + doy + '0.' + yr[2:] + 'n.Z'
    target_file_name = os.path.join(path, os.path.basename(filename))
    with open(target_file_name, 'wb') as fhandle:
        f.retrbinary('RETR %s' % filename, fhandle.write)
    # quit and close server connection
    f.quit()
    f.close()
    # decompress the file
    decompressed_path = hatanaka.decompress_on_disk(path + filename,
                                                    delete=True)
    # final filename
    brdc = str(decompressed_path)

    # finally grab the satellite precise orbits from a different directory
    f = ftp.FTP('igs.ensg.ign.fr')
    try:
        f.login()
        print('logged into ftp')
    except:
        print('hmm couldn\'t connect to base station ftp. no internet?')
        sys.exit()
    # navigate to the directory
    f.cwd('/pub/igs/products/' + week[0:4] + '/')
    # we try with the rapid orbits, if they're available
    try:
        filename = 'igr' + week + '.sp3.Z'
        target_file_name = os.path.join(path, os.path.basename(filename))
        with open(target_file_name, 'wb') as fhandle:
            f.retrbinary('RETR %s' % filename, fhandle.write)
    # retry with the ultra-rapid orbits if that didn't work
    except:
        filename = 'igu' + week + '_18.sp3.Z'  # arbitrarily taking the final ultra-rapid file (18:00)
        target_file_name = os.path.join(path, os.path.basename(filename))
        with open(target_file_name, 'wb') as fhandle:
            f.retrbinary('RETR %s' % filename, fhandle.write)
    # decompress the file
    decompressed_path = hatanaka.decompress_on_disk(path + filename,
                                                    delete=True)
    # final filename
    orbits = str(decompressed_path)
    # quit and close server connection for good
    f.quit()
    f.close()

    return yr, doy, week, baseSTN, brdc, orbits
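
Taken together, the examples above exercise a small hatanaka surface: decompress_on_disk and compress_on_disk convert a file on disk and return the path of the result, while get_decompressed_path and get_compressed_path only compute the target name without touching the disk. A minimal end-to-end sketch, with sample.crx.gz standing in for any Hatanaka- and gzip-compressed observation file:

from pathlib import Path

import hatanaka

src = Path('sample.crx.gz')                    # placeholder input file
target = hatanaka.get_decompressed_path(src)   # e.g. sample.rnx; nothing is written yet
if not target.exists():
    # decompress_on_disk writes the plain RINEX file and returns its path
    target = hatanaka.decompress_on_disk(src)
print(target)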