# NOTE(review): scraped fragment -- the enclosing "def" is not visible and
# L1 sits at top level while the rest is indented one level, so this block
# cannot run as-is.  Comments below describe only what the visible code does.
old_subDir = "junk"

  # Sentinel: no listing processed yet.
  last_timestamp = -1

  # Poll forever, re-pointing the FTP helper at the current Julian-day
  # subdirectory whenever the day rolls over.
  while 1:

    sub_directory = getCurrentJulianday()

    if sub_directory != old_subDir:
      ftp_dir = path.join (opt_ftp_dir, sub_directory)
      print ftp_dir


      # Re-create the FTP helper for the new remote directory.
      ftp_info = ftptools.FTP_info(opt_ftp_server, ftp_dir, opt_ftp_user, opt_ftp_passwd, opt_log)

    pmu.auto_register("getting ftp file listing.")
    got_list = 0 
    tries = 0
    # NOTE(review): the "try" below is at the SAME indent as this while
    # loop, so the loop body is only the next two lines and always exits
    # after one pass -- the retry counter never guards the actual fetch.
    while not got_list:
      tries = tries + 1
      got_list = 1
    try:

      # Bound the listing fetch with a 100-second SIGALRM watchdog.
      signal.alarm(100)
      ftp_info.get_listing()
      signal.alarm(0)
    except:
      print "ftp date error: %s, %s" % (sys.exc_type, sys.exc_value)
      got_list = 0
      # NOTE(review): prints an "Exiting" message but does not actually
      # exit or break here.
      if tries > opt_max_num_tries:
        print "Exiting after ",  opt_max_num_tries, " tries"
  # NOTE(review): scraped fragment -- enclosing def not visible.  Builds the
  # wildcard request string ("*.<ext>" or "*<ext>") used to select remote
  # source files, then enters the main download polling loop.
  need_sleep = 0

  dot_string = '.'
  if opt_noDot:
    dot_string = ''
    
  request_string = '*' + dot_string + opt_source_ext

  if opt_debug:
    print "Request_string: %s" % request_string
    
  file_time_threshold = 0
  # Poll forever; sleep between passes except the first.
  while 1:

    pmu.auto_register("To sleep")

    if need_sleep:
      time.sleep(opt_sleep_secs)

    need_sleep = 1

    pmu.auto_register("downloading data")
    # NOTE(review): strftime/gmtime are called unqualified -- presumably
    # "from time import ..." somewhere above; verify against the full file.
    print "+++++++++++++ starting at ", strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())

    #
    # if url specified, download .nc data files from the 
    # given url using wget.
    # otherwise, download data file (not necessarily .nc type)
    # using ftp
    #
  # NOTE(review): scraped fragment -- enclosing def not visible.  Initializes
  # procmap registration (unless single-shot mode) and repeatedly fetches and
  # parses a web index page.  "time"/"asctime"/"gmtime" are called
  # unqualified -- presumably imported with "from time import ..." above.
  #
  if not opt_run_once:
    pmu.auto_init(prog_name, opt_instance, 60)

  #
  # set last_time to two hours before now to prevent unnecessary searching
  #
  last_file_time = time() - 7200
  debug_print('last_file_time = ' + asctime(gmtime(last_file_time)))
  
  while 1:

    url_info = 'http://' + opt_web_server + separator + opt_url_path
    
    if not opt_run_once:
      pmu.auto_register('getting file listing from ' + url_info)

    index_page = fetch_page(url_info)

    print 'made it here.'

    print str(index_page)
    
    file_list = parse_index_page(index_page)

    print str(file_list)
    
    #
    # reverse list -- now in descending order
    #
    filename = ''
# ---- Beispiel #4 ----
# 0
def pull_file(filename):
    pmu.auto_register("Checking for new files")

    #
    # Construct the local filenames.  The temporary filename is used
    # for pulling the file down.  When the retrieval is complete, the
    # file is renamed to its original name so that it appears atomically.
    #

    temp_filename = opt_temp_dir + '/.' + filename
    local_filename = opt_output_dir + '/' + filename

    if opt_debug:
        print "local_filename = " + local_filename
        print "temp_filename = " + temp_filename

    #
    # Check to see if we already have this file.  If we do, then skip
    # it.
    #

    local_gz_filename = local_filename + ".gz"

    #
    # this is the case of the file that is gzipped on the ftp site
    # but unzipped locally
    #
    if (os.path.splitext(local_filename)[1] == ".gz"):
        if os.path.exists(os.path.splitext(local_filename)[0]):
            if opt_debug:
                print
                print "File already exists locally: " + local_filename
                print "Skipping retrieval...."
            return

    if os.path.exists(local_filename) or os.path.exists(local_gz_filename):
        if opt_debug:
            print
            print "File already exists locally: " + local_filename
            print "Skipping retrieval...."
        return

    if opt_debug:
        print "Getting file <" + filename + ">"

    #
    # Wait for the file to be quiescent
    #

    if opt_wait_for_quiescence:
        prev_file_size = 0
        file_size = ftp.size(filename)
        while file_size != prev_file_size:
            if opt_debug:
                print "Waiting for file quiescence..."
            time.sleep(opt_quiescence_secs)
            prev_file_size = file_size
            file_size = ftp.size(filename)

    #
    # Pull the data file to the destination directory.
    #

    temp_file = open(temp_filename, 'wb')

    pmu.force_register("Retrieving file " + filename)

    try:
        ftp.retrbinary('RETR ' + filename, temp_file.write)
    except (socket.error):
        print "Socket error occurred while trying to transfer file: ", filename
        print "Skipping file!"
        return

    temp_file.close()

    if (os.path.splitext(temp_filename)[1] == ".gz"):
        filename = os.path.splitext(filename)[0]
        os.system("gunzip " + temp_filename)
        temp_filename = os.path.splitext(temp_filename)[0]
        local_filename = os.path.splitext(local_filename)[0]

    try:
        shutil.copyfile(temp_filename, local_filename)
    except (OSError):
        print "Error Copying temp file to local file\n"

    if (opt_write_Ldata):
        file_stats = os.stat(local_filename)
        data_file_time = datetime.fromtimestamp(file_stats[8])
        Ldata_command = "LdataWriter -dir " + opt_output_dir + " -ext " + opt_Ldata_ext + " -dtype " + opt_Ldata_dtype + " -ltime " + data_file_time.strftime(
            "%Y%m%d%H%M%S") + " -rpath ./" + filename
        print
        print "LdataWriter command line:"
        print Ldata_command
        print
        os.system(Ldata_command)

    try:
        os.remove(temp_filename)
    except (OSError):
        print "Error removing temp file\n"

    return
    # NOTE(review): everything below sits inside pull_file AFTER its return
    # statement -- unreachable dead code; it looks like a pasted copy of the
    # script's main loop (compare the near-identical 2-space version above).
    #
    if not opt_run_once:
        pmu.auto_init(prog_name, opt_instance, 60)

    #
    # set last_time to two hours before now to prevent unnecessary searching
    #
    last_file_time = time() - 7200
    debug_print('last_file_time = ' + asctime(gmtime(last_file_time)))

    while 1:

        url_info = 'http://' + opt_web_server + separator + opt_url_path

        if not opt_run_once:
            pmu.auto_register('getting file listing from ' + url_info)

        index_page = fetch_page(url_info)

        print 'made it here.'

        print str(index_page)

        file_list = parse_index_page(index_page)

        print str(file_list)

        #
        # reverse list -- now in descending order
        #
        filename = ''
# ---- Beispiel #6 ----
# 0
    # NOTE(review): scraped fragment -- enclosing def not visible; the
    # "while 1:" body is cut off at the bottom edge of this snippet.
    #

    need_sleep = 0

    # Build the wildcard request string ("*.<ext>" or "*<ext>").
    dot_string = '.'
    if opt_noDot:
        dot_string = ''

    request_string = '*' + dot_string + opt_source_ext

    if opt_debug:
        print "Request_string: %s" % request_string

    while 1:

        pmu.auto_register("Waiting for data")

        if need_sleep:
            # Sleep in 2-second slices so procmap registration stays fresh
            # during long waits.
            sleep_secs_left = opt_sleep_secs
            while sleep_secs_left > 0:
                pmu.auto_register("Waiting for data")
                time.sleep(2)
                sleep_secs_left = sleep_secs_left - 2

        need_sleep = 1

        #
        # Open the ftp connection
        #

        ftp = ftplib.FTP(opt_source_host, opt_source_user, opt_source_pwd)
    # NOTE(review): scraped fragment -- enclosing def not visible.  Another
    # copy of the download polling loop; this variant delegates the actual
    # transfer to download_url() (defined elsewhere in the file).
    #

    need_sleep = 0

    # Build the wildcard request string ("*.<ext>" or "*<ext>").
    dot_string = '.'
    if opt_noDot:
        dot_string = ''

    request_string = '*' + dot_string + opt_source_ext

    if opt_debug:
        print "Request_string: %s" % request_string

    while 1:

        pmu.auto_register("To sleep")

        if need_sleep:
            time.sleep(opt_sleep_secs)

        need_sleep = 1

        pmu.auto_register("downloading data")

        #
        # if url specified, download .nc data files from the
        # given url using wget.
        # otherwise, download data file (not necessarily .nc type)
        # using ftp
        #
        download_url()
# ---- Beispiel #8 ----
# 0
    # NOTE(review): scraped fragment -- 4-space duplicate of the listing
    # loop near the top of this file; the enclosing def is not visible.
    old_subDir = "junk"

    # Sentinel: no listing processed yet.
    last_timestamp = -1

    # Poll forever, re-pointing the FTP helper at the current Julian-day
    # subdirectory whenever the day rolls over.
    while 1:

        sub_directory = getCurrentJulianday()

        if sub_directory != old_subDir:
            ftp_dir = path.join(opt_ftp_dir, sub_directory)
            print ftp_dir

            # Re-create the FTP helper for the new remote directory.
            ftp_info = ftptools.FTP_info(opt_ftp_server, ftp_dir, opt_ftp_user,
                                         opt_ftp_passwd, opt_log)

        pmu.auto_register("getting ftp file listing.")
        got_list = 0
        tries = 0
        # NOTE(review): the "try" below is at the SAME indent as this while
        # loop, so the loop always exits after one pass and the retry
        # counter never guards the actual fetch.
        while not got_list:
            tries = tries + 1
            got_list = 1
        try:

            # Bound the listing fetch with a 100-second SIGALRM watchdog.
            signal.alarm(100)
            ftp_info.get_listing()
            signal.alarm(0)
        except:
            print "ftp date error: %s, %s" % (sys.exc_type, sys.exc_value)
            got_list = 0
            # NOTE(review): prints an "Exiting" message but does not
            # actually exit or break here.
            if tries > opt_max_num_tries:
                print "Exiting after ", opt_max_num_tries, " tries"
  # NOTE(review): scraped fragment -- enclosing def not visible; the loop
  # body is truncated at the bottom edge (last_dir_mod_time is never updated
  # in the visible span, so the "new files" branch lives outside this view).
  #
  # Initialize process mapper registration
  #

  pmu.auto_init(prog_name, opt_instance, opt_register_interval)

  #
  # Wait for new files.  When found, push them to the destination.
  #

  # Baseline: mtime of the watched input directory.
  last_dir_mod_time = os.stat(opt_input_dir)[stat.ST_MTIME]
  need_sleep = 0

  while 1:

    pmu.auto_register("Waiting for data")
    if opt_debug:
      print "Waiting for data"

    if need_sleep:
      time.sleep(opt_sleep_secs)

    need_sleep = 1

    #
    # Check for new files
    #

    # Directory mtime changes when entries are added/removed.
    dir_mod_time = os.stat(opt_input_dir)[stat.ST_MTIME]
    if dir_mod_time <= last_dir_mod_time:
      continue
def pull_file(filename):
  pmu.auto_register("Checking for new files")

  #
  # Construct the local filenames.  The temporary filename is used
  # for pulling the file down.  When the retrieval is complete, the
  # file is renamed to its original name so that it appears atomically.
  #

  temp_filename = opt_temp_dir + '/.' + filename
  local_filename = opt_output_dir + '/' + filename

  if opt_debug:
    print "local_filename = " + local_filename
    print "temp_filename = " + temp_filename

  #
  # Check to see if we already have this file.  If we do, then skip
  # it.
  #

  local_gz_filename = local_filename + ".gz"

  #
  # this is the case of the file that is gzipped on the ftp site
  # but unzipped locally
  #
  if(os.path.splitext(local_filename)[1] == ".gz"):
    if os.path.exists( os.path.splitext(local_filename)[0] ):
      if opt_debug:
        print
        print "File already exists locally: " + local_filename
        print "Skipping retrieval...."
      return

  if os.path.exists(local_filename) or os.path.exists(local_gz_filename):
    if opt_debug:
      print
      print "File already exists locally: " + local_filename
      print "Skipping retrieval...."
    return

  if opt_debug:
    print "Getting file <" + filename + ">"

  #
  # Wait for the file to be quiescent
  #

  if opt_wait_for_quiescence:
    prev_file_size = 0
    file_size = ftp.size(filename)
    while file_size != prev_file_size:
      if opt_debug:
        print "Waiting for file quiescence..."
      time.sleep(opt_quiescence_secs)
      prev_file_size = file_size
      file_size = ftp.size(filename)
      
  #
  # Pull the data file to the destination directory.
  #

  temp_file = open(temp_filename, 'wb')

  pmu.force_register("Retrieving file " + filename)

  try:
    ftp.retrbinary('RETR ' + filename, temp_file.write)
  except (socket.error):
    print "Socket error occurred while trying to transfer file: ", filename
    print "Skipping file!"
    return

  temp_file.close()

  if(os.path.splitext(temp_filename)[1] == ".gz"):
    filename = os.path.splitext(filename)[0]
    os.system("gunzip " + temp_filename)
    temp_filename = os.path.splitext(temp_filename)[0]
    local_filename = os.path.splitext(local_filename)[0]

  try:
    shutil.copyfile(temp_filename, local_filename)
  except(OSError):
    print "Error Copying temp file to local file\n"

  if(opt_write_Ldata):
    file_stats = os.stat(local_filename)
    data_file_time = datetime.fromtimestamp(file_stats[8])
    Ldata_command = "LdataWriter -dir " + opt_output_dir + " -ext " + opt_Ldata_ext + " -dtype " + opt_Ldata_dtype + " -ltime " + data_file_time.strftime("%Y%m%d%H%M%S") + " -rpath ./" + filename
    print
    print "LdataWriter command line:"
    print Ldata_command
    print
    os.system(Ldata_command)
  
  try:
    os.remove(temp_filename)
  except(OSError):
    print "Error removing temp file\n"
    
  return