Exemple #1
0
    def test_compose(self, allow_partial):
        """Test the compose method's custom conversion options."""
        key_vals = {"a": "this Is A-Test b_test c test"}

        # Each custom conversion character with its expected output.
        conversion_cases = [
            ("c", "This is a-test b_test c test"),
            ("h", "thisisatestbtestctest"),
            ("H", "THISISATESTBTESTCTEST"),
            ("l", "this is a-test b_test c test"),
            ("R", "thisIsATestbtestctest"),
            ("t", "This Is A-Test B_Test C Test"),
            ("u", "THIS IS A-TEST B_TEST C TEST"),
            # builtin repr
            ("r", "'this Is A-Test b_test c test'"),
        ]
        for conv, expected in conversion_cases:
            assert compose("{a!%s}" % conv, key_vals,
                           allow_partial=allow_partial) == expected

        # no formatting
        new_str = compose("{a}", key_vals, allow_partial=allow_partial)
        assert new_str == "this Is A-Test b_test c test"
        # bad formatter: must raise and leave new_str untouched
        with pytest.raises(ValueError):
            new_str = compose("{a!X}", key_vals, allow_partial=allow_partial)
        assert new_str == "this Is A-Test b_test c test"
Exemple #2
0
def process_message(msg, config):
    """Process the message.

    Composes an output filename from ``config["output_file_pattern"]`` and
    the message data, runs the configured command over the (bunzipped)
    input files, and returns a new "file"-type message describing the
    produced file.  Returns ``None`` implicitly when the pass is shorter
    than the configured minimum length.
    """
    pattern = config["output_file_pattern"]
    # Local paths of the input files, extracted from the collection URIs.
    input_files = [
        urlsplit(item["uri"]).path for item in msg.data["collection"]
    ]

    data = msg.data.copy()
    data["proc_time"] = datetime.utcnow()
    # Optional value aliasing; a missing "aliases" key means no aliasing.
    try:
        aliases = get_aliases(config["aliases"])
    except KeyError:
        aliases = {}
    for key in aliases:
        if key in data:
            data[key] = aliases[key].get(data[key], data[key])

    # NOTE(review): if config were a plain dict, .get() on a missing key
    # returns None and int(None) raises TypeError, not NoOptionError —
    # presumably config is a configparser-like object here; confirm.
    try:
        min_length = int(config.get('min_length'))
    except NoOptionError:
        min_length = 0
    if data["end_time"] - data["start_time"] < timedelta(minutes=min_length):
        logger.info('Pass too short, skipping: %s to %s',
                    str(data["start_time"]), str(data["end_time"]))
        return

    output_file = compose(pattern, data)

    with bunzipped(input_files) as files_to_read:
        keyvals = {
            "input_files": " ".join(files_to_read),
            "output_file": output_file
        }
        cmd_pattern = config["command"]
        cmd = compose(cmd_pattern, keyvals)
        logger.info("Running %s", cmd)

        if "stdout" in config:
            stdout_file = compose(config["stdout"], keyvals)
            with open(stdout_file, "w") as output:
                # stdout goes straight to the file; stderr is streamed to
                # the logger from a helper thread.
                p = Popen(cmd.split(), stderr=PIPE, stdout=output)
                err_reader = threading.Thread(target=reader,
                                              args=(p.stderr, logger.error))
                err_reader.start()
                err_reader.join()
                # NOTE(review): p.wait() is never called, so the exit
                # status of the command is not checked — confirm intended.

        else:
            popen(cmd)

    msg.type = "file"
    # Build the outgoing message: drop the collection, point at the file.
    new_data = msg.data.copy()
    del new_data["collection"]
    new_data["filename"] = os.path.basename(output_file)
    new_data["uri"] = output_file
    if "publish_topic" in config:
        msg.subject = config.get("publish_topic")
    msg2 = Message(msg.subject, "file", new_data)

    return msg2
Exemple #3
0
    def test_compose(self):
        """Test the compose method's custom conversion options."""
        key_vals = {'a': 'this Is A-Test b_test c test'}

        # Each custom conversion character with its expected output.
        conversions = [
            ('c', 'This is a-test b_test c test'),
            ('h', 'thisisatestbtestctest'),
            ('H', 'THISISATESTBTESTCTEST'),
            ('l', 'this is a-test b_test c test'),
            ('R', 'thisIsATestbtestctest'),
            ('t', 'This Is A-Test B_Test C Test'),
            ('u', 'THIS IS A-TEST B_TEST C TEST'),
            # builtin repr
            ('r', "'this Is A-Test b_test c test'"),
        ]
        for conv, expected in conversions:
            self.assertEqual(compose("{a!%s}" % conv, key_vals), expected)

        # no formatting
        new_str = compose("{a}", key_vals)
        self.assertEqual(new_str, 'this Is A-Test b_test c test')
        # bad formatter: must raise and leave new_str untouched
        self.assertRaises(ValueError, compose, "{a!X}", key_vals)
        self.assertEqual(new_str, 'this Is A-Test b_test c test')
Exemple #4
0
def process_message(msg, config):
    """Process the message.

    Composes an output filename from ``config["output_file_pattern"]`` and
    the message data, runs the configured command over the (bunzipped)
    input files, and returns a new "file"-type message describing the
    produced file.  Returns ``None`` implicitly when the pass is shorter
    than the configured minimum length.
    """
    pattern = config["output_file_pattern"]
    # NOTE(review): the raw URIs are used here (not the parsed paths) —
    # presumably bunzipped() accepts URIs; confirm.
    input_files = [item["uri"] for item in msg.data["collection"]]

    data = msg.data.copy()
    data["proc_time"] = datetime.utcnow()
    # Optional value aliasing; a missing "aliases" key means no aliasing.
    try:
        aliases = get_aliases(config["aliases"])
    except KeyError:
        aliases = {}
    for key in aliases:
        if key in data:
            data[key] = aliases[key].get(data[key], data[key])

    # NOTE(review): if config were a plain dict, .get() on a missing key
    # returns None and int(None) raises TypeError, not NoOptionError —
    # presumably config is a configparser-like object here; confirm.
    try:
        min_length = int(config.get('min_length'))
    except NoOptionError:
        min_length = 0
    if data["end_time"] - data["start_time"] < timedelta(minutes=min_length):
        LOG.info('Pass too short, skipping: %s to %s', str(data["start_time"]), str(data["end_time"]))
        return



    output_file = compose(pattern, data)

    with bunzipped(input_files) as files_to_read:
        keyvals = {"input_files": " ".join(files_to_read), "output_file": output_file}
        cmd_pattern = config["command"]
        cmd = compose(cmd_pattern, keyvals)
        LOG.info("Running %s", cmd)

        if "stdout" in config:
            stdout_file = compose(config["stdout"], keyvals)
            with open(stdout_file, "w") as output:
                # stdout goes straight to the file; stderr is streamed to
                # the logger from a helper thread.
                p = Popen(cmd.split(), stderr=PIPE, stdout=output)
                err_reader = threading.Thread(target=reader, args=(p.stderr, LOG.error))
                err_reader.start()
                err_reader.join()

        else:
            popen(cmd)

    msg.type = "file"
    # Build the outgoing message: drop the collection, point at the file.
    new_data = msg.data.copy()
    del new_data["collection"]
    new_data["filename"] = os.path.basename(output_file)
    new_data["uri"] = output_file
    msg2 = Message(msg.subject, "file", new_data)

    return msg2
Exemple #5
0
def move_aapp_log_files(config):
    """Move AAPP processing log files from the AAPP working directory
    into a sub-directory.

    The destination path is composed from the configuration
    (``aapp_log_files_archive_dir`` joined with ``aapp_outdir_format``).

    Returns True on success, False when the destination could not be
    composed or created.
    """
    # Defined up front so the error/info logging at the bottom can never
    # hit an unbound local when an early exception fires.
    destination = ""
    try:
        filelist = glob(
            '%s/*.log' %
            config['aapp_processes'][config.process_name]['working_dir'])

        try:
            # Flatten the per-process section into the top-level config so
            # compose() can see all keys.
            tmp_config = config.config.copy()
            tmp_config.update(
                tmp_config['aapp_processes'][config.process_name])

            _outdir = compose(tmp_config['aapp_outdir_format'], tmp_config)
            destination = os.path.join(
                tmp_config['aapp_log_files_archive_dir'], _outdir)
        except KeyError as err:
            LOG.error(
                "Failed to compose log files dir: {}. Missing key:{}".format(
                    config['aapp_processes'][config.process_name]
                    ['aapp_outdir_format'], err))
            return False
        except Exception as err:
            LOG.error("Failed to compose log files dir: {}. Error:{}".format(
                config['aapp_processes'][config.process_name]
                ['aapp_outdir_format'], err))
            return False

        LOG.debug("move_aapp_log_files destination: " + destination)

        if not os.path.exists(destination):
            try:
                os.makedirs(destination)
            except OSError as err:
                LOG.error("Can't create directory: {} because: {}".format(
                    destination, err))
                return False  # FIXME: Check!
            else:
                LOG.debug("Created new directory for AAPP log files:" +
                          destination)

        for file_name in filelist:
            # Compute dst before the try so the warning below can never
            # reference an unbound name.
            dst = os.path.join(destination, os.path.basename(file_name))
            try:
                shutil.move(file_name, dst)
            except Exception as err:
                # Best-effort: log and continue with the remaining files.
                LOG.warning("Failed to move log file: {} to: {}".format(
                    file_name, dst))
            else:
                LOG.debug("Moved {} to {}".format(file_name, dst))

    except OSError as err:
        # Bug fix: the original passed ``err`` as a lazy-format argument
        # without any placeholder in the message string.
        LOG.error("Moving AAPP log files to %s failed: %s", destination, err)

    LOG.info("AAPP log files saved in to " + destination)

    return True
def publish_level1(publisher, config, msg, filelist, station_name,
                   environment):
    """Send a publish message, one message per file in the filelist.

    Builds the message metadata from ``msg.data`` plus per-file info,
    composes the publish topic from ``publish_sift_format``, and sends
    one encoded Message per file via *publisher*.  Returns False when
    the topic cannot be composed because of an unknown key.
    """
    for file in filelist:
        LOG.debug("Handeling file for sending: {}".format(file))
        msg_to_send = {}
        try:
            msg_to_send = msg.data.copy()
            # 'dataset' entries are replaced by per-file metadata below.
            if 'dataset' in msg_to_send:
                del msg_to_send['dataset']

            msg_to_send['uri'] = "file://{}{}".format(
                config['aapp_processes'][config.process_name]
                ['message_providing_server'], file['file'])

            msg_to_send['filename'] = os.path.basename(file['file'])
            msg_to_send['uid'] = os.path.basename(file['file'])
            # Translate the sensor name if a converter entry exists.
            msg_to_send['sensor'] = config['aapp_static_configuration'][
                'sensor_name_converter'].get(file['sensor'], file['sensor'])
            msg_to_send['orbit_number'] = config['orbit_number']
            msg_to_send['format'] = "AAPP"
            msg_to_send['type'] = 'Binary'
            # e.g. 'l1b' -> '1B'
            msg_to_send['data_processing_level'] = file['level'].upper(
            ).replace("L", "")
            LOG.debug('level in message: ' +
                      str(msg_to_send['data_processing_level']))
            msg_to_send['start_time'] = config['starttime']
            msg_to_send['end_time'] = config['endtime']
            msg_to_send['station'] = station_name
            msg_to_send['env'] = environment
        except KeyError as ke:
            # NOTE(review): unlike the generic handler below, this branch
            # does not 'continue', so a partially built message is still
            # published — confirm this fall-through is intentional.
            LOG.error("KeyError, missing key: {}".format(ke))
        except Exception as err:
            LOG.error(
                "Failed to build publish message with error: {}".format(err))
            continue

        try:
            publish_to = compose(
                config['aapp_processes'][config.process_name]
                ['publish_sift_format'], msg_to_send)
        except KeyError:
            LOG.warning(
                "Unknown Key used in format: {}. Check spelling and/or availability."
                .format(config['aapp_processes'][config.process_name]
                        ['publish_sift_format']))
            LOG.warning("Available keys are:")
            for key in msg_to_send:
                LOG.warning("{} = {}".format(key, msg_to_send[key]))
            LOG.error("Can not publish these data!")
            return False
        except ValueError as ve:
            LOG.error("Value Error: {}".format(ve))
            return

        LOG.debug("Publish to:{}".format(publish_to))
        message = Message(publish_to, "file", msg_to_send).encode()
        LOG.debug("sending: " + str(message))
        publisher.send(message)
Exemple #7
0
 def test_partial_compose_simple(self):
     """Test partial compose with a simple use case."""
     fmt = "{variant:s}/{platform_name}_{start_time:%Y%m%d_%H%M}_{product}.{format}"
     keyvals = {"platform_name": "foo", "format": "bar"}
     # Only the provided keys are substituted; the rest stay as fields.
     result = compose(fmt=fmt, keyvals=keyvals, allow_partial=True)
     expected = "{variant:s}/foo_{start_time:%Y%m%d_%H%M}_{product}.bar"
     assert result == expected
Exemple #8
0
def get_files(delta=30):
    """Get the tlefiles from the last delta days
    """
    now = datetime.utcnow()
    collected = []
    for offset in range(delta):
        day_pattern = compose(REF_PATTERN, {"date": now - timedelta(days=offset)})
        collected += glob.glob(day_pattern)
    return collected
Exemple #9
0
    def test_match(self, fmt, string, expected):
        """Test cases expected to be matched."""

        # Check the value extracted by parse()
        assert parse(fmt, string)['foo'] == expected

        # Round trip: compose then parse must give back the same value
        round_tripped = parse(fmt, compose(fmt, {'foo': expected}))
        assert round_tripped['foo'] == expected
Exemple #10
0
    def _get_bandfilenames(self, **options):
        """Get filename for each band"""
        conf = options[self.platform_name + '-viirs']
        rootdir = conf['rootdir']

        # Only entries named 'section...' describe band file patterns.
        for name in conf:
            if name.startswith('section'):
                pattern = os.path.join(rootdir, conf[name]['filename'])
                for band in conf[name]['bands']:
                    self.bandfilenames[band] = compose(
                        pattern, {'bandname': band})
Exemple #11
0
    def _get_bandfilenames(self, **options):
        """Get filename for each band.

        Reads the '<platform_name>-viirs' entry of *options* and, for
        every 'section*' item in it, composes a filename per band and
        stores it in ``self.bandfilenames`` keyed by band name.
        """
        conf = options[self.platform_name + '-viirs']

        rootdir = conf['rootdir']
        for section in conf:
            # Only entries named 'section...' describe band file patterns.
            if not section.startswith('section'):
                continue
            bandnames = conf[section]['bands']
            for band in bandnames:
                filename = os.path.join(rootdir, conf[section]['filename'])
                self.bandfilenames[band] = compose(filename,
                                                   {'bandname': band})
Exemple #12
0
def _compose_destination(destination, msg):
    """Fill the *destination* pattern from ``msg.data``.

    Logs and re-raises KeyError, ValueError and AttributeError coming
    from the compose call.
    """
    try:
        result = compose(destination, msg.data)
    except KeyError as key_err:
        LOGGER.error("Format identifier is missing from the msg.data: %s",
                     str(key_err))
        raise
    except ValueError as value_err:
        LOGGER.error(
            "Type of format identifier doesn't match the type in m msg.data: %s",
            str(value_err))
        raise
    except AttributeError as attr_err:
        LOGGER.error("msg or msg.data is None: %s", str(attr_err))
        raise
    return result
def rename_file(process_config, values):
    """
    Do the actual renaming and checking.

    Moves the freshly produced AAPP file for the (single) sensor in
    *values* to a destination composed from the configuration
    (``aapp_outdir_base``/``aapp_outdir_format``/``rename_aapp_compose``).

    Returns a dict with 'file', 'sensor' and 'level' keys on success,
    False on any failure.
    """
    sensor = next(iter(values))
    process_file = "process_{}".format(sensor)
    try:
        process_config[process_file]
    except KeyError as err:
        LOG.error("No such key: {}".format(err))
        return False

    # Build a flat dict usable by trollsift compose: the top-level config
    # keys, the per-process section, and this file's data type/level.
    tmp_process_config = process_config.config.copy()
    tmp_process_config.update(
        tmp_process_config['aapp_processes'][process_config.process_name])
    tmp_process_config['data_type'] = values[sensor]['data_type']
    tmp_process_config['data_level'] = values[sensor]['data_level']

    new_name = ""

    if process_config[process_file]:
        if os.path.exists(values[sensor]['aapp_file']):
            try:
                _outdir = compose(tmp_process_config['aapp_outdir_format'],
                                  tmp_process_config)
                # Renamed from ``dir`` to avoid shadowing the builtin.
                out_dir = os.path.join(tmp_process_config['aapp_outdir_base'],
                                       _outdir)
                _new_name = compose(
                    process_config['aapp_processes'][
                        process_config.process_name]['rename_aapp_compose'],
                    tmp_process_config)
                new_name = os.path.join(out_dir, _new_name)
            except KeyError as err:
                LOG.error(
                    "Failed to compose new filename: {}. Missing key:{}".
                    format(
                        process_config['aapp_processes']
                        [process_config.process_name]['rename_aapp_compose'],
                        err))
                return False
            except Exception as err:
                LOG.error(
                    "Failed to compose new filename: {}. Error:{}".format(
                        process_config['aapp_processes']
                        [process_config.process_name]['rename_aapp_compose'],
                        err))
                return False

            try:
                if not os.path.exists(os.path.dirname(new_name)):
                    LOG.debug("Need to create directory: {}".format(
                        os.path.dirname(new_name)))
                    os.makedirs(os.path.dirname(new_name))
            except OSError as oe:
                LOG.error("Could not create directory: {} with {}".format(
                    os.path.dirname(new_name), oe))
                return False

            try:
                shutil.move(values[sensor]['aapp_file'], new_name)
                LOG.debug("Renamed: {} to {}".format(
                    values[sensor]['aapp_file'], new_name))
            except OSError as e:
                # Bug fix: the original referenced the undefined name
                # ``inputfile`` here, raising NameError while handling a
                # failed move.
                LOG.error("Failed to rename {} to {}. {}".format(
                    values[sensor]['aapp_file'], new_name, e))
                LOG.error("Please check previous processing")
                return False
        else:
            LOG.error(
                "Excpected file {} does not exists. Please check previous processing."
                .format(values[sensor]['aapp_file']))
            return False
    else:
        return False

    _tmp = {}
    _tmp['file'] = new_name
    _tmp['sensor'] = sensor
    _tmp['level'] = tmp_process_config['data_level']

    return _tmp
Exemple #14
0
def request_push(msg, destination, login, publisher=None, **kwargs):
    """Request a push for data.

    Iterates over the sources offering the data described by *msg*,
    asks each in turn to push the files to *destination*, and stops at
    the first successful transfer.  Progress and 'ack' messages are
    published on *publisher* when one is given.

    kwargs must contain "req_timeout" and "transfer_req_timeout"; may
    contain "ftp_root".
    """
    huid = add_to_ongoing(msg)
    if huid is None:
        # A request for the same data is already in flight.
        return

    if already_received(msg):
        # Data already here: just acknowledge.
        timeout = float(kwargs["req_timeout"])
        send_ack(msg, timeout)
        return

    for msg in iterate_messages(huid):
        # Compose the local destination from the message metadata; any
        # failure here is logged and re-raised.
        try:
            _destination = compose(destination, msg.data)
        except KeyError as ke:
            LOGGER.error("Format identifier is missing from the msg.data: %s",
                         str(ke))
            raise
        except ValueError as ve:
            LOGGER.error(
                "Type of format identifier doesn't match the type in m msg.data: %s",
                str(ve))
            raise
        except AttributeError as ae:
            LOGGER.error("msg or msg.data is None: %s", str(ae))
            raise
        req, fake_req = create_push_req_message(msg, _destination, login)
        LOGGER.info("Requesting: %s", str(fake_req))
        timeout = float(kwargs["transfer_req_timeout"])
        local_dir = create_local_dir(_destination, kwargs.get('ftp_root', '/'))

        if publisher:
            publisher.send(str(fake_req))
        response, hostname = send_request(msg, req, timeout)

        if response and response.type in ['file', 'collection', 'dataset']:
            LOGGER.debug("Server done sending file")
            add_to_file_cache(msg)
            if publisher:
                # Send an 'ack' message so that possible hot spares know
                # the primary has completed the request
                msg = Message(msg.subject, 'ack', msg.data)
                LOGGER.debug(
                    "Sending a public 'ack' of completed transfer: %s",
                    str(msg))
                publisher.send(str(msg))
            try:
                lmsg = unpack_and_create_local_message(response, local_dir,
                                                       **kwargs)
            except IOError:
                # Unpacking failed: try the next source.
                LOGGER.exception("Couldn't unpack %s", str(response))
                continue
            if publisher:
                lmsg = make_uris(lmsg, _destination, login)
                lmsg.data['origin'] = response.data['request_address']
                lmsg.data.pop('request_address', None)
                lmsg = replace_mda(lmsg, kwargs)
                lmsg.data.pop('destination', None)

                LOGGER.debug("publishing %s", str(lmsg))
                publisher.send(str(lmsg))
            terminate_transfers(huid, float(kwargs["req_timeout"]))
            break
        else:
            LOGGER.error("Failed to get valid response from server %s: %s",
                         str(hostname), str(response))
    else:
        # for/else: the loop exhausted every source without a break.
        LOGGER.warning('Could not get a working source for requesting %s',
                       str(msg))
        terminate_transfers(huid, float(kwargs["req_timeout"]))
def start_zipcollector(registry, message, options, **kwargs):
    """From a posttroll (gatherer) message start the pytroll zip collector.

    For a SEVIRI 'dataset' message whose start-time minute is among the
    requested time slots, packs the dataset files into a gzipped tar
    archive locally and copies it to the destination directory.  The
    (updated) *registry* dict is always returned.
    """
    del kwargs
    outdir_destination = options['destination_output_dir']
    outdir_local = options['local_output_dir']
    requested_tslots = options['requested_timeslots']

    logger.info("")
    logger.info("registry dict: " + str(registry))
    logger.info("\tMessage:")
    logger.info(message)

    if message is None:
        return registry
    elif (message.type != 'dataset'):
        # NOTE(review): the check is for 'dataset' but the log text says
        # "collection" — confirm which of the two is meant.
        logger.warning("Message type is not a collection! Type=%s",
                       str(message.type))
        return registry

    if 'start_time' in message.data:
        start_time = message.data['start_time']
        scene_id = start_time.strftime('%Y%m%d%H%M')
    else:
        logger.error("No start time in message!")
        start_time = None
        return registry

    if 'end_time' in message.data:
        end_time = message.data['end_time']
    else:
        logger.warning("No end time in message!")
        end_time = start_time + timedelta(seconds=60 * 12)  # noqa

    if 'seviri' not in message.data['sensor']:
        logger.debug("Scene is not supported")
        logger.warning("Sensor {0} is not SEVIRI! Continue".format(
            str(message.data['sensor'])))
        return registry
    else:
        # Remember how many files this time slot brought.
        registry[scene_id] = len(message.data['dataset'])

    # Now check that the time slot is among those requested
    logger.debug("Wanted time slots: %s", str(requested_tslots))
    wanted_timeslot = False
    if '%.2d' % start_time.minute in requested_tslots:
        wanted_timeslot = True

    if wanted_timeslot:
        logger.info("Time slot {0} is requested".format(start_time))

        # Example filename:
        # (service=0deg-lvl1)
        # met10___hritglob1708171100.tgz
        satid = PLATFORM_NAME.get(message.data['platform_name'], 'met10')
        filename = compose(options['archive_filename'], {
            'start_time': start_time,
            'satid': satid
        })

        local_filepath = os.path.join(outdir_local, filename)
        dest_filepath = os.path.join(outdir_destination,
                                     filename + '_original')

        # Create the tar archive:
        logger.debug("Create gzipped tar archive: %s", local_filepath)
        status = True
        try:
            with tarfile.open(local_filepath, "w|gz") as archive:
                for item in message.data['dataset']:
                    filepath = urlparse(item['uri']).path
                    archive.add(filepath, arcname=item['uid'])

            copy_file_to_destination(local_filepath, dest_filepath)
            monitor_msg = "File successfully created"
        except Exception as err:
            monitor_msg = "Failed generating tar file: " + str(err)
            status = False

        # Report the outcome through the configured monitoring hook.
        if 'monitoring_hook' in options:
            options['monitoring_hook'](status, monitor_msg)
        else:
            logger.error("Configuration lacking a monitoring_hook entry!")

    else:
        logger.info(
            "Time slot {0} NOT requested. Do nothing".format(start_time))

    return registry
Exemple #16
0
 def test_partial_compose_repeated_vars_with_different_formatting(self):
     """Test partial compose with a fmt with repeated vars with different formatting."""
     fmt = "/foo/{start_time:%Y%m}/bar/{baz}_{start_time:%Y%m%d_%H%M}.{format}"
     # Both start_time fields must survive, each keeping its own spec.
     expected = "/foo/{start_time:%Y%m}/bar/{baz}_{start_time:%Y%m%d_%H%M}.qux"
     result = compose(fmt=fmt, keyvals={"format": "qux"}, allow_partial=True)
     assert result == expected
Exemple #17
0
 def test_partial_compose_with_similarly_named_params(self):
     """Test that partial compose handles well vars with common substrings in name."""
     original_fmt = "{foo}{afooo}{fooo}.{bar}/{baz:%Y}/{baz:%Y%m%d_%H}/{baz:%Y}/{bar:d}"
     # Only the exact key 'afooo' is substituted, not 'foo' or 'fooo'.
     expected = "{foo}qux{fooo}.{bar}/{baz:%Y}/{baz:%Y%m%d_%H}/{baz:%Y}/{bar:d}"
     result = compose(fmt=original_fmt, keyvals={"afooo": "qux"}, allow_partial=True)
     assert result == expected
Exemple #18
0
 def test_partial_compose_is_identity_with_empty_keyvals(self, original_fmt):
     """Test that partial compose leaves the input untouched if no keyvals at all."""
     result = compose(fmt=original_fmt, keyvals={}, allow_partial=True)
     assert result == original_fmt
Exemple #19
0
 def test_that_some_invalid_fmt_can_confuse_partial_compose(self):
     """Test that a fmt with a weird char can confuse partial compose."""
     bad_fmt = "{foo?}_{bar}_{foo}.qux"
     with pytest.raises(ValueError):
         compose(fmt=bad_fmt, keyvals={}, allow_partial=True)
def download_tle(config, timestamp, dir_data_tle):
    """Download TLE data from the configured sources into *dir_data_tle*.

    Tries every URL in the (configured or default) tle_download entries,
    performing the special cookie-based login for space-track.org, and
    appends the downloaded TLE data to a file named from
    ``tle_infile_format`` (or a built-in default pattern).

    Returns the list of TLE file paths written to.
    """
    user = os.getenv("PAR_NAVIGATION_TLE_USER", "xxxxxx")
    passwd = os.getenv("PAR_NAVIGATION_TLE_PASSWD", "xxxxxx")
    url = os.getenv("PAR_NAVIGATION_TLE_URL_DOWNLOAD")
    timeout = 60
    catalogue = "25338,26536,27453,28654,33591,37849,29499,38771,27431,32958,37214,25994,27424"

    tle_infile = ""
    tle_dict = {}
    tle_dict['timestamp'] = timestamp

    tle_cnf = []
    try:
        tle_cnf = config['aapp_processes'][config.process_name]['tle_download']
    except KeyError:
        LOG.info("No tle_download config found. Using the default.")
    finally:
        # NOTE(review): the default entry is appended even when the config
        # provided its own list (and this mutates that list in place) —
        # preserved from the original; confirm it is intentional.
        tle_cnf.append({'url': url, 'user': user, 'passwd': passwd,
                        'timeout': timeout, 'catalogue': catalogue})

    try:
        tle_infile = compose(config['aapp_processes'][config.process_name]['tle_infile_format'], tle_dict)
    except KeyError as ke:
        # Bug fix: membership must be tested on the exception text —
        # ``'x' in KeyError(...)`` raises TypeError.
        if 'tle_infile_format' in str(ke):
            tle_infile = compose('tle_{timestamp:%Y%m%d_%H%M}.txt', tle_dict)
            LOG.warning("Using default TLE file name format: %s", tle_infile)
        else:
            LOG.error("Key error: {}".format(ke))
            LOG.error("Valid keys :")
            for key in tle_dict.keys():
                LOG.error("{}".format(key))
            raise

    tle_file_list = []
    for cnf in tle_cnf:
        for cnf_url in cnf['url'].split():
            LOG.debug("Will try to download TLE from {}.".format(cnf_url))

            if "space-track" in cnf_url:
                # space-track.org needs a cookie-based login before querying.
                returncode = 0
                stdout = ""
                stderr = ""
                # Bug fix: the original assigned ``cmd`` a 1-tuple (the
                # line ended with a comma) and never applied the format
                # arguments; build the full command string instead.
                cmd = ("wget -T {} --post-data=\"identity={}&password={}\" --cookies=on "
                       "--keep-session-cookies --save-cookies=cookies_spacetrack "
                       "\"{}/ajaxauth/login\" -olog").format(
                           cnf['timeout'], cnf['user'], cnf['passwd'], cnf_url)
                try:
                    status, returncode, stdout, stderr = run_shell_command(cmd)
                except Exception:
                    LOG.error("Failed running command: {} with return code: {}".format(cmd, returncode))
                    LOG.error("stdout: {}".format(stdout))
                    LOG.error("stderr: {}".format(stderr))
                else:
                    if returncode != 0:
                        LOG.debug("Running command: {} with return code: {}".format(cmd, returncode))
                        LOG.debug("stdout: {}".format(stdout))
                        LOG.debug("stderr: {}".format(stderr))
                    else:
                        # Same tuple bug fixed for the query command.
                        cmd = ("wget -T {} --keep-session-cookies --load-cookies=cookies_spacetrack "
                               "-O weather.txt \"{}/basicspacedata/query/class/tle_latest/"
                               "ORDINAL/1/NORAD_CAT_ID/{}/orderby/TLE_LINE1\"").format(
                                   cnf['timeout'], cnf_url, cnf['catalogue'])
                        try:
                            status, returncode, stdout, stderr = run_shell_command(cmd)
                        except Exception:
                            LOG.error("Failed running command: {} with return code: {}".format(cmd, returncode))
                            LOG.error("stdout: {}".format(stdout))
                            LOG.error("stderr: {}".format(stderr))
                        else:
                            if returncode != 0:
                                LOG.debug("Running command: {} with return code: {}".format(cmd, returncode))
                                LOG.debug("stdout: {}".format(stdout))
                                LOG.debug("stderr: {}".format(stderr))
                            else:
                                LOG.debug("TLE download ok")
                if os.path.exists("weather.txt"):
                    try:
                        # Append the downloaded data to the TLE infile.
                        with open("weather.txt", 'r') as tle_file:
                            tle_string = tle_file.read()
                        tle_file_out = os.path.join(dir_data_tle, tle_infile)
                        with open(tle_file_out, "a") as tle_file:
                            tle_file.write(tle_string)

                        if tle_file_out not in tle_file_list:
                            tle_file_list.append(tle_file_out)
                    except Exception as ex:
                        LOG.debug("Failed rename tle download file: {}".format(ex))
                        raise
            else:
                # Plain HTTP download (legacy Python 2 urllib2).
                import urllib2
                f = urllib2.urlopen(cnf_url)
                tle_string = f.read()
                f.close()
                tle_file_out = os.path.join(dir_data_tle, tle_infile)
                with open(tle_file_out, "a") as tle_file:
                    tle_file.write(tle_string)
                if tle_file_out not in tle_file_list:
                    tle_file_list.append(tle_file_out)

    return tle_file_list
def do_tleing(config, timestamp, satellite):
    """Get the TLE files, copy them into the AAPP data structure and run
    the AAPP ``tleing.exe`` executable to (re)build the TLE index.

    Depending on configuration this either ingests all TLE files newer
    than the current index, or selects the single TLE file closest in
    time to *timestamp* (within a configured limit), optionally
    downloading TLEs when none are found. Afterwards the per-satellite
    index file is sorted and deduplicated.

    :param config: runner configuration (dict-like, with a ``process_name``
                   attribute selecting the active ``aapp_processes`` section)
    :param timestamp: datetime of the data, used to select the closest TLE file
    :param satellite: satellite name, selects the per-satellite index file
    :return: True if all steps succeeded, False otherwise
    """
    return_status = True

    # This function relies on being in a working directory, so remember
    # where we started in order to change back when done.
    try:
        current_dir = os.getcwd()
    except OSError as ose:
        LOG.error("Failed to get current working dir: {}".format(ose))
        raise

    os.chdir(config['aapp_processes'][config.process_name]['working_dir'])

    # Patterns tried most-specific first to extract a timestamp from a TLE
    # file name, each paired with the parser that converts the regex groups
    # to a datetime.
    tle_match_tests = ((r'.*(\d{4})(\d{2})(\d{2})_?-?T?(\d{2})(\d{2})(\d{2}).*', _do_6_matches),
                       (r'.*(\d{4})(\d{2})(\d{2})_?-?T?(\d{2})(\d{2}).*', _do_5_matches),
                       (r'.*(\d{4})(\d{2})(\d{2})_?-?T?(\d{2}).*', _do_4_matches),
                       (r'.*(\d{4})(\d{2})(\d{2}).*', _do_3_matches),
                       (r'.*(\d{2})(\d{2})(\d{2}).*', _do_3_matchesYY))

    # Name of the AAPP environment file matching the installed AAPP major
    # version; only used in the log messages below. Default to '' so the
    # 'in' tests don't raise TypeError when AAPP_PREFIX is unset.
    aapp_prefix = os.getenv('AAPP_PREFIX', '')
    if '7' in aapp_prefix:
        aapp_env = 'AAPP_ENV7'
    elif '8' in aapp_prefix:
        aapp_env = 'AAPP_ENV8'
    else:
        aapp_env = 'AAPP_ENV'

    if 'dir_navigation' in config['aapp_processes'][config.process_name]:
        LOG.warning("Override the env variable set in {} DIR_NAVIGATION from {} to {}.".format(
            aapp_env,
            os.environ.get('DIR_NAVIGATION'),
            config['aapp_processes'][config.process_name]['dir_navigation']))
        os.environ['DIR_NAVIGATION'] = config['aapp_processes'][config.process_name]['dir_navigation']
        # Need to update the DIR_DATA_TLE dir to be sure this is correct
        # if no tle_indir is given.
        os.environ['DIR_DATA_TLE'] = os.path.join(os.environ['DIR_NAVIGATION'], 'tle_db')

    if 'tle_indir' in config['aapp_processes'][config.process_name]:
        tle_indir = config['aapp_processes'][config.process_name]['tle_indir']
        LOG.warning("Override the env variable set in {} DIR_DATA_TLE from {} to {}.".format(
            aapp_env,
            os.environ.get('DIR_DATA_TLE'), tle_indir))
        os.environ['DIR_DATA_TLE'] = tle_indir

    # TLE home directory. Evaluate the fallback lazily: passing the join as
    # the os.getenv() default evaluated it eagerly and crashed when
    # DIR_NAVIGATION was unset even though DIR_DATA_TLE was set.
    DIR_DATA_TLE = os.getenv('DIR_DATA_TLE')
    if DIR_DATA_TLE is None:
        DIR_DATA_TLE = os.path.join(os.environ['DIR_NAVIGATION'], 'orb_elem')

    # This directory is needed by AAPP tleing. Create it if missing.
    if not os.path.exists(DIR_DATA_TLE):
        LOG.warning("Dir " + DIR_DATA_TLE + " does not exist. Create ")
        try:
            os.makedirs(DIR_DATA_TLE)
        except OSError:
            LOG.error("Failed to create %s. Can not handle TLEs without this", DIR_DATA_TLE)
            raise

    if 'tle_file_to_data_diff_limit_days' in config['aapp_processes'][config.process_name]:
        select_closest_tle_file_to_data = True
        # Maximum allowed time difference between data and TLE, in seconds.
        min_closest_tle_file = int(
            config['aapp_processes'][config.process_name]['tle_file_to_data_diff_limit_days']) * 24 * 60 * 60
    else:
        select_closest_tle_file_to_data = False

    TLE_INDEX = os.path.join(DIR_DATA_TLE, "tle_{}.index".format(satellite))

    tle_search_dir = DIR_DATA_TLE
    tle_file_list = []
    # dict holding the keys needed when composing TLE file/dir names
    tle_dict = {}
    if not select_closest_tle_file_to_data:
        if os.path.exists(TLE_INDEX):
            # Ingest only tle files newer than the last update of the index.
            tle_files = [s for s in glob(os.path.join(DIR_DATA_TLE, 'tle*txt'))
                         if os.path.isfile(os.path.join(DIR_DATA_TLE, s))]
            tle_files.sort(key=lambda s: os.path.getctime(os.path.join(DIR_DATA_TLE, s)))

            tle_index_mtime = os.path.getmtime(TLE_INDEX)
            for s in tle_files:
                if os.path.getmtime(os.path.join(DIR_DATA_TLE, s)) > tle_index_mtime:
                    tle_file_list.append(s)

            if len(tle_file_list) == 0:
                import time
                LOG.warning(("No newer tle files than last update of the index file. " +
                             "Last update of index file is {:d}s. If more than a few days you should check.".format(
                                 int(time.time() - tle_index_mtime))))
            else:
                LOG.info("Will use tle files {}".format(tle_file_list))
        else:
            LOG.warning("index file does not exist. If this is the first run of AAPP tleing.exe it is ok,"
                        " otherwise it is a bit suspicious.")
            try:
                tle_files = [s for s in os.listdir(DIR_DATA_TLE) if os.path.isfile(os.path.join(DIR_DATA_TLE, s))]
                tle_files.sort(key=lambda s: os.path.getctime(os.path.join(DIR_DATA_TLE, s)))
                tle_file_list = tle_files
            except OSError:
                LOG.warning("Found no tle files .... ")

    else:
        # Select the single TLE file closest in time to the data timestamp.
        tle_dict['timestamp'] = timestamp
        try:
            infile = compose(config['aapp_processes'][config.process_name]['tle_infile_format'], tle_dict)
        except KeyError as ke:
            LOG.error("Key error: {}".format(ke))
            LOG.error("Valid keys :")
            for key in tle_dict.keys():
                LOG.error("{}".format(key))
            raise

        LOG.debug("tle file name: {}".format(infile))

        # Check if the expected tle file can be read; if not, search for the
        # file closest in time within the configured limit.
        first_search = True

        # FIXME: In AAPP default get_tle script the directory timestamp is TLE_MONTH=`date +%Y-\%m`
        for tle_search_dir in [compose(os.path.join(DIR_DATA_TLE, "{timestamp:%Y_%m}"), tle_dict), DIR_DATA_TLE]:
            if not os.path.exists(tle_search_dir):
                LOG.debug("tle_search_dir {} does not exists.".format(tle_search_dir))
                continue
            LOG.debug("tle_search_dir {}".format(tle_search_dir))
            try:
                # Opening probes readability of the exact expected file.
                with open(os.path.join(tle_search_dir, infile)):
                    del tle_file_list[:]
                    tle_file_list.append(os.path.join(tle_search_dir, infile))
                    min_closest_tle_file = 0
            except IOError:
                LOG.warning("Could not find tle file: {}. Try find closest ... ".format(infile))
                tle_file_list = glob(os.path.join(tle_search_dir, '*'))
                LOG.debug("tle file list: {}".format(tle_file_list))
                infile_closest = ""

                for tle_file_name in tle_file_list:
                    for regex, test in tle_match_tests:
                        m = re.match(regex, tle_file_name)
                        if m:
                            try:
                                LOG.debug("{} {}".format(tle_file_name, test(m)))
                                delta = timestamp - test(m)
                                if abs(delta.total_seconds()) < min_closest_tle_file:
                                    min_closest_tle_file = abs(delta.total_seconds())
                                    infile_closest = tle_file_name
                                    LOG.debug("Closest tle infile so far: {}".format(infile_closest))
                            except ValueError:
                                # File name matched but did not parse to a valid datetime.
                                pass

                if infile_closest:
                    del tle_file_list[:]
                    tle_file_list.append(infile_closest)
                    break
                else:
                    if not first_search:
                        LOG.error("Could not find tle file close enough to timestamp {} with limit {}".format(
                            timestamp, min_closest_tle_file))
                        LOG.error("Update your TLE files or adjust the limit(Not recomended!).")
                first_search = False
            else:
                break

        # NOTE(review): when no close-enough file is found, tle_file_list
        # still holds ALL files from the last searched dir and they will all
        # be ingested below - verify that this is the intended behaviour.
        if tle_file_list:
            LOG.debug("Use this: {} offset {}s".format(tle_file_list, min_closest_tle_file))

    if not tle_file_list and config['aapp_processes'][config.process_name]['download_tle_files']:
        LOG.warning("Found no tle files. Try to download ... ")
        tle_file_list = download_tle(config, timestamp, DIR_DATA_TLE)

    for tle_file in tle_file_list:
        archive = False

        # SATellite IDentification is mandatory for tleing.exe,
        # so take care of default values.
        os.environ['SATID_FILE'] = os.getenv('SATID_FILE', 'satid.txt')

        # Don't prepend the tle_indir here because that is handled by the
        # tleing script; give the file name relative to DIR_DATA_TLE.
        if DIR_DATA_TLE != tle_search_dir:
            tle_filename = compose(os.path.join("{timestamp:%Y_%m}", os.path.basename(tle_file)), tle_dict)
        else:
            tle_filename = os.path.basename(tle_file)
        status = False
        returncode = 0
        stdout = ""
        stderr = ""
        cmd = "tleing.exe"
        # tleing.exe reads its four arguments from stdin:
        # data dir, tle file name, satellite name, index file.
        stdin = "{}\n{}\n{}\n{}\n".format(DIR_DATA_TLE, tle_filename, satellite, TLE_INDEX)
        LOG.debug('stdin arguments to command: ' + str(stdin))
        try:
            status, returncode, stdout, stderr = run_shell_command(cmd, stdin=stdin)
        except Exception:
            LOG.error("Failed running command: {} with return code: {}".format(cmd, returncode))
            LOG.error("stdout: {}".format(stdout))
            LOG.error("stderr: {}".format(stderr))
            return_status = False
        else:
            if returncode != 0:
                LOG.debug("Running command: {} with return code: {}".format(cmd, returncode))
                LOG.debug("stdout: {}".format(stdout))
                LOG.debug("stderr: {}".format(stderr))
            elif not os.path.exists(TLE_INDEX):
                LOG.error("index file: {} does not exist after tleing. Something is wrong.".format(TLE_INDEX))
                LOG.debug("Running command: {} with return code: {}".format(cmd, returncode))
                LOG.debug("stdout: {}".format(stdout))
                LOG.debug("stderr: {}".format(stderr))
            else:
                LOG.debug("Running command: {} with return code: {}".format(cmd, returncode))
                LOG.debug("stdout: {}".format(stdout))
                LOG.debug("stderr: {}".format(stderr))
                LOG.debug("DIR_DATA_TLE : {}".format(DIR_DATA_TLE))
                LOG.debug("tle_file : {}".format(os.path.basename(tle_file)))
                LOG.debug("satellite : {}".format(satellite))
                LOG.debug("TLE_INDEX : {}".format(TLE_INDEX))

                # When an index file is generated above, one line is added for each tle file.
                # If several tle files contain equal TLEs, each of these TLEs generates one
                # line in the index file. To avoid this, sort the index file keeping only
                # unique lines (skipping the tle filename at the end).

                # The sort options +0b -3b are guessed to mean sort from column 0 to 3,
                # but this is not documented. Could cause problems with future versions of sort.
                # See eg. http://search.cpan.org/~sdague/ppt-0.12/bin/sort
                if os.path.exists(TLE_INDEX):
                    cmd = "sort -u +0b -3b {}".format(TLE_INDEX)
                    try:
                        status, returncode, stdout, stderr = run_shell_command(
                            cmd, stdout_logfile="{}.sort1".format(TLE_INDEX))
                    except Exception:
                        LOG.error("Failed running command: {} with return code: {}".format(cmd, returncode))
                        LOG.error("stdout: {}".format(stdout))
                        LOG.error("stderr: {}".format(stderr))
                        return_status = False
                    else:
                        if returncode == 0 and os.path.exists("{}.sort1".format(TLE_INDEX)):
                            # Drop lines with NaN epochs from the sorted index.
                            cmd = "grep -v NaN {}.sort1".format(TLE_INDEX)
                            try:
                                status, returncode, stdout, stderr = run_shell_command(
                                    cmd, stdout_logfile="{}.sort".format(TLE_INDEX))
                            except Exception:
                                LOG.error("Failed running command: {} with return code: {}".format(cmd, returncode))
                                LOG.error("stdout: {}".format(stdout))
                                LOG.error("stderr: {}".format(stderr))
                                return_status = False
                            else:
                                # Replace the index with the cleaned, sorted version.
                                try:
                                    os.remove(TLE_INDEX)
                                    os.remove("{}.sort1".format(TLE_INDEX))
                                except OSError as e:
                                    LOG.error("Failed to remove unsorted and duplicated index file: {} with {}"
                                              .format(TLE_INDEX, e))
                                else:
                                    try:
                                        os.rename("{}.sort".format(TLE_INDEX), TLE_INDEX)
                                        archive = True
                                    except OSError:
                                        LOG.error("Failed to rename sorted index file to original name.")
                        else:
                            LOG.error("Returncode other than 0: {} or tle index sort file {} does not exist.".format(
                                returncode, "{}.sort1".format(TLE_INDEX)))
                else:
                    LOG.error("tle index file: {} does not exists after tleing before sort. This can not happen."
                              .format(TLE_INDEX))

        # If a new tle was used and an archive dir is given in config, copy TLEs to the archive.
        if archive and ('tle_archive_dir' in config['aapp_processes'][config.process_name]):
            archive_dict = {}
            # NOTE(review): assumes 'tle_indir' is configured whenever
            # 'tle_archive_dir' is - verify against the configuration schema.
            archive_dict['tle_indir'] = config['aapp_processes'][config.process_name]['tle_indir']
            for tle_file_name in tle_file_list:
                for regex, test in tle_match_tests:
                    m = re.match(regex, tle_file_name)
                    if m:
                        # Guard against NameError: tle_archive_dir was unbound
                        # when the ValueError below fired before assignment.
                        tle_archive_dir = None
                        try:
                            archive_dict['timestamp'] = test(m)
                            tle_archive_dir = compose(
                                config['aapp_processes'][config.process_name]['tle_archive_dir'], archive_dict)
                            if not os.path.exists(tle_archive_dir):
                                try:
                                    os.makedirs(tle_archive_dir)
                                except OSError:
                                    LOG.error("Failed to make archive dir: {}".format(tle_archive_dir))
                        except ValueError:
                            LOG.exception('Failed in archive step...')

                        if tle_archive_dir is None:
                            # Could not determine an archive dir; nothing to copy to.
                            continue
                        try:
                            copy(tle_file_name, tle_archive_dir)
                            LOG.debug("Copied {} to {}.".format(tle_file_name, tle_archive_dir))
                            archive = False
                        except IOError as ioe:
                            LOG.error("Failed to copy TLE file: {} to archive: {} because {}".format(
                                tle_file_name, tle_archive_dir, ioe))
                            LOG.error("CWD: {}".format(os.getcwd()))

    # Change back to the directory we started in.
    os.chdir(current_dir)

    return return_status
Exemple #22
0
 def test_default_compose_is_strict(self):
     """By default, compose must raise rather than leave placeholders unfilled."""
     template = "{foo}_{bar}.qux"
     with pytest.raises(KeyError):
         compose(template, {"foo": "foo"})