Example #1
            "ip": None,
            "port": None,
            "sample_rate": None,
            "central_freq": None,
            "sync_time": None,
            "predecimation_factor": None
        }
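        # The None entries above are placeholders; presumably they are filled
        # in once the pipeline is configured with a concrete output stream.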
        dataStore.addDataFormatDefinition("Skarab:1", descr)


if __name__ == "__main__":

    parser = getArgumentParser()
    parser.add_argument('--skarab-ip',
                        dest='skarab_ip',
                        type=str,
                        help='The control ip of the skarab board')
    parser.add_argument('--skarab-port',
                        dest='skarab_port',
                        type=int,
                        default=7147,
                        help='The port number to control the skarab board')

    args = parser.parse_args()
    setup_logger(args)

    pipeline = SkarabPipeline(args.host, args.port, args.skarab_ip,
                              args.skarab_port)

    launchPipelineServer(pipeline, args)
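
A minimal invocation sketch for this example (hypothetical script name and
addresses; --host and --port are assumed to come from getArgumentParser):

    python skarab_pipeline.py --host 127.0.0.1 --port 1235 \
        --skarab-ip 10.0.1.100 --skarab-port 7147
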
Example #2
        yield self.deconfigure()

    @coroutine
    def deconfigure(self):
        """@brief deconfigure the dspsr pipeline."""
        log.info("Deconfiguring EDD backend")
        if self.state == 'running':
            yield self.capture_stop()

        self.state = "deconfiguring"
        if self._subprocessMonitor is not None:
            self._subprocessMonitor.stop()
        for proc in self._subprocesses:
            proc.terminate()

        self.mkrec_cmd = []

        log.debug("Destroying dada buffers")
        for k in self._dada_buffers:
            k['monitor'].stop()
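            # PSRDADA's dada_db utility: -d destroys the ring buffer
            # identified by the hexadecimal shared-memory key passed via -k.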
            cmd = "dada_db -d -k {0}".format(k['key'])
            log.debug("Running command: {0}".format(cmd))
            yield command_watcher(cmd)

        self._dada_buffers = []
        self.state = "idle"


if __name__ == "__main__":
    launchPipelineServer(CriticalPFBPipeline)
            os.remove("/tmp/t2pred.dat")
        if os.path.isfile("./core"):
            os.remove("./core")
        log.info("reset DADA buffer")
        for k in self._dada_buffers_monitor:
            log.debug("Stopping DADA buffer monitor")
            k['monitor'].stop()
        self._dada_buffers_monitor = []
        for key in self._dada_buffers:
            yield self._create_ring_buffer(key, self.numa_number)
        del self._subprocessMonitor
        self._timer = Time.now() - self._timer
        log.info("Took {} s to stop".format(self._timer * 86400))

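    # state_change (as used here) presumably sets the pipeline state to
    # `intermediate` while the coroutine runs, to `target` on success, and
    # to the `error` state if an exception escapes.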
    @state_change(target="idle", intermediate="deconfiguring", error='panic')
    @coroutine
    def deconfigure(self):
        """@brief deconfigure the pipeline."""
        log.debug("Destroying dada buffers")

        for k in self._dada_buffers_monitor:
            cmd = "dada_db -d -k {0}".format(k)
            log.debug("Running command: {0}".format(cmd))
            yield command_watcher(cmd, allow_fail=True)
        self._dada_buffers_monitor = []


if __name__ == "__main__":
    launchPipelineServer(EddPulsarPipeline)
Example #4
            items["number_of_input_samples"].value)
        number_of_saturated_samples = convert48_64(
            items["number_of_saturated_samples"].value)
        naccumulate = convert48_64(items["naccumulate"].value)
        sync_time = convert48_64(items["sync_time"].value)
        timestamp_count = convert48_64(items["timestamp_count"].value)

        # The integration period does not account for sampling efficiency,
        # as heaps may be lost or may not fall into this gate.
        integration_period = (naccumulate * fft_length) / sampling_rate

        # The reference time is in the center of the integration period
        reference_time = (float(sync_time)
                          + float(timestamp_count) / sampling_rate
                          + integration_period / 2.)
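        # Worked example (illustrative numbers only): with naccumulate = 1024,
        # fft_length = 4096 and sampling_rate = 2.6e9 Hz, the integration
        # period is 1024 * 4096 / 2.6e9 ~= 1.6 ms, so the reference time lies
        # ~0.8 ms after sync_time + timestamp_count / sampling_rate.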
        _log.debug("Set timestamp: {}".format(reference_time))

        data = {}
        data['timestamp'] = np.array([reference_time])
        data['integration_time'] = np.array(
            [number_of_input_samples / sampling_rate])
        data['saturated_samples'] = np.array([number_of_saturated_samples])
        _log.debug("Meta data:\n{}".format(json.dums(data, indent=4)))

        data['spectrum'] = items['data'].value
        return section_id, data, self.__attributes


if __name__ == "__main__":
    launchPipelineServer(EDDHDF5WriterPipeline)
Example #5
            yield proc.terminate()

        self.mkrec_cmd = []

        log.debug("Destroying dada buffers")
        for k in self._dada_buffers:
            k['monitor'].stop()
            cmd = "dada_db -d -k {0}".format(k['key'])
            log.debug("Running command: {0}".format(cmd))
            yield command_watcher(cmd)

        self._dada_buffers = []

    @coroutine
    def populate_data_store(self, host, port):
        """@brief Populate the data store"""
        log.debug("Populate data store @ {}:{}".format(host, port))
        dataStore = EDDDataStore(host, port)
        log.debug("Adding output formats to known data formats")

        descr = {"description":"VDIF data stream",
                "ip": None,
                "port": None,
                }
        dataStore.addDataFormatDefinition("VDIF:1", descr)



if __name__ == "__main__":
    launchPipelineServer(VLBIPipeline)
Example #6
    @state_change(target="idle", intermediate="deconfiguring", error='panic')
    @coroutine
    def deconfigure(self):
        """
        @brief deconfigure the gated spectrometer pipeline.
        """
        log.info("Deconfiguring EDD backend")
        if self.previous_state == 'streaming':
            yield self.capture_stop()

        if self._subprocessMonitor is not None:
            yield self._subprocessMonitor.stop()
        for proc in self._subprocesses:
            yield proc.terminate()

        self.mkrec_cmd = []

        log.debug("Destroying dada buffers")
        for k in self._dada_buffers:
            k['monitor'].stop()
            cmd = "dada_db -d -k {0}".format(k['key'])
            log.debug("Running command: {0}".format(cmd))
            yield command_watcher(cmd, allow_fail=True)

        self._dada_buffers = []


if __name__ == "__main__":
    launchPipelineServer(GatedFullStokesSpectrometerPipeline)
Example #7
        # GitPython: Remote.pull() returns a list of FetchInfo objects, and
        # .commit is the commit the updated ref points to after the pull.
        comm = repo.remote().pull()[0].commit
        log.info("Updated to latest commit: {}, {}\n    {}\n\n    {}".format(
            comm.hexsha, comm.authored_datetime.ctime(), comm.author,
            comm.message))


if __name__ == "__main__":
    parser = getArgumentParser()

    parser.add_argument(
        '--edd_ansible_repository',
        dest='edd_ansible_git_repository_folder',
        type=str,
        default=os.path.join(os.getenv("HOME"), "edd_ansible"),
        help='The path to a git repository for the provisioning data')

    parser.add_argument('--edd_ansible_inventory',
                        dest='inventory',
                        type=str,
                        default="effelsberg",
                        help='The inventory to use with the ansible setup')
    args = parser.parse_args()
    setup_logger(args)

    server = EddMasterController(args.host, args.port, args.redis_ip,
                                 args.redis_port,
                                 args.edd_ansible_git_repository_folder,
                                 args.inventory)
    launchPipelineServer(server, args)
Example #8
            # Package is complete: send it or drop it.
            if self.__drop_invalid_packages and not pp.valid:
                log.warning(
                    "Package for reference time {} dropped because it contains NaN)!"
                    .format(packet.reference_time))
                self.invalidPackages += 1
            else:
                self.__fits_interface.put(pp.fw_pkt)
        else:
            # Package not complete yet; (re-)add it to the preparation stash
            self.__packages_in_preparation[packet.reference_time] = pp

            # Cleanup old packages
            tooold_packages = []
            log.debug('Checking {} packages for age restriction'.format(
                len(self.__packages_in_preparation)))
            for p in self.__packages_in_preparation:
                age = self.__now - p
                #log.debug(" Package with timestamp {}: now: {} age: {}".format(p, self.__now, age) )
                if age > self.__max_age:
                    log.warning(
                        "   Age of package {} exceeded maximum age {} - Incomplete package will be dropped."
                        .format(age, self.__max_age))
                    tooold_packages.append(p)
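            # Pop in a second pass: removing entries while iterating over
            # the dict above would raise a RuntimeError in Python 3.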
            for p in tooold_packages:
                self.__packages_in_preparation.pop(p)


if __name__ == "__main__":
    launchPipelineServer(FitsInterfaceServer)
Example #9
    @coroutine
    def deconfigure(self):
        """
        Deconfigure the gated spectrometer pipeline.

        Clears all dada buffers.
        """
        log.info("Deconfiguring EDD backend")
        if self.previous_state == 'streaming':
            yield self.capture_stop()

        if self._subprocessMonitor is not None:
            yield self._subprocessMonitor.stop()
        for proc in self._subprocesses:
            yield proc.terminate()

        self.mkrec_cmd = []

        log.debug("Destroying dada buffers")
        for k in self._dada_buffers:
            k['monitor'].stop()
            cmd = "dada_db -d -k {0}".format(k['key'])
            log.debug("Running command: {0}".format(cmd))
            yield command_watcher(cmd, allow_fail=True)

        self._dada_buffers = []


if __name__ == "__main__":
    launchPipelineServer(GatedSpectrometerPipeline)