Example #1
0
class LP_WindowGenerator(BarrierAppDROP):
    """
    Generates a Hamming-windowed sinc low-pass filter kernel and writes it,
    as raw float64 bytes, to every output.
    """
    component_meta = dlg_component('LPWindowGen',
                                   'Low-pass filter example window generator',
                                   [None], [dlg_batch_output('binary/*', [])],
                                   [dlg_streaming_input('binary/*')])

    # default values
    length = dlg_int_param('length', 256)       # number of filter taps
    cutoff = dlg_int_param('cutoff', 600)       # cutoff frequency
    srate = dlg_int_param('sample_rate', 5000)  # sampling rate
    series = None                               # last generated window (set by run())

    def initialize(self, **kwargs):
        super(LP_WindowGenerator, self).initialize(**kwargs)

    def sinc(self, x_val: np.float64):
        """
        Compute the normalized sinc value sin(pi*x) / (pi*x) for the input float.

        :param x_val: input value
        :return: sinc(x_val), with the singularity at x == 0 handled as 1.0
        """
        if np.isclose(x_val, 0.0):
            return 1.0
        return np.sin(np.pi * x_val) / (np.pi * x_val)

    def gen_win(self):
        """
        Build the Hamming-windowed sinc filter kernel.

        :return: numpy float64 array of length self.length
        """
        alpha = 2 * self.cutoff / self.srate
        win = np.zeros(self.length, dtype=np.float64)
        for i in range(int(self.length)):
            ham = 0.54 - 0.46 * np.cos(
                2 * np.pi * i / int(self.length))  # Hamming coefficient
            hsupp = (i - int(self.length) / 2)     # offset from window centre
            win[i] = ham * alpha * self.sinc(alpha * hsupp)
        return win

    def run(self):
        outs = self.outputs
        if len(outs) < 1:
            raise Exception('At least one output required for %r' % self)
        self.series = self.gen_win()
        # FIX: ndarray.tostring() is deprecated and removed in NumPy 2.0;
        # tobytes() returns the identical byte representation.
        data = self.series.tobytes()
        for o in outs:
            o.len = len(data)
            o.write(data)

    """
    def generate_reproduce_data(self):
        return dict(data_hash=common_hash(self.series))
    """

    def generate_recompute_data(self):
        """Return parameter/status provenance for recompute-level reproducibility."""
        output = dict()
        output['length'] = self.length
        output['cutoff'] = self.cutoff
        output['sample_rate'] = self.srate
        output['status'] = self.status
        output['system'] = system_summary()
        return output
Example #2
0
class DDFacetApp(BashShellApp):
    """
    Runs DDF.py (DDFacet) as a bash command, assembling the command line
    from the component parameters.
    """

    DDF_CMD = 'DDF.py'

    # FIX: attribute was misspelled 'compontent_meta'; the framework (and every
    # other component in this project) uses 'component_meta', so the metadata
    # was never picked up.
    component_meta = dlg_component(
        'DDFacetApp',
        'Faceting for direction-dependent spectral deconvolution',
        [dlg_batch_input('binary/*', [])], [dlg_batch_output('binary/*', [])],
        [dlg_streaming_input('binary/*')])

    data_ms = dlg_string_param('Data-MS', None)
    data_colname = dlg_string_param('Data-ColName', "CORRECTED_DATA")
    # NOTE(review): declared as an int param but the default is a float (0.0) —
    # presumably this should be dlg_float_param; confirm against DDF.py's CLI.
    data_chunkhours = dlg_int_param('Data-ChunkHours', 0.0)

    def initialize(self, **kwargs):
        # Placeholder command; the real command line is assembled in run().
        self.command = 'dummy'

        super(DDFacetApp, self).initialize(**kwargs)

    def run(self):
        # FIX: added the missing space after --Data-MS={1}; previously the two
        # options were concatenated into one malformed argument
        # ("--Data-MS=foo--Data-ColName=...").
        self.command = '{0} ' \
                       '--Data-MS={1} ' \
                       '--Data-ColName={2} ' \
                       '--Data-ChunkHours={3}'.format(self.DDF_CMD,
                                                      self.data_ms,
                                                      self.data_colname,
                                                      self.data_chunkhours)

        self._run_bash(self._inputs, self._outputs)
Example #3
0
File: simple.py  Project: ICRAR/daliuge
class CopyApp(BarrierAppDROP):
    """
    A BarrierAppDrop that copies its inputs into its outputs.
    All inputs are copied into all outputs in the order they were declared in
    the graph.
    """

    component_meta = dlg_component(
        "CopyApp",
        "Copy App.",
        [dlg_batch_input("binary/*", [])],
        [dlg_batch_output("binary/*", [])],
        [dlg_streaming_input("binary/*")],
    )

    # chunk size (bytes) used when streaming drop contents
    _bufsize = dlg_int_param("bufsize", 65536)

    def run(self):
        self.copyAll()

    def copyAll(self):
        """Copy every input drop (recursing into containers) into all outputs."""
        for drop in self.inputs:
            self.copyRecursive(drop)

    def copyRecursive(self, inputDrop):
        """Copy a single drop; containers are descended into child by child."""
        if not isinstance(inputDrop, ContainerDROP):
            # Leaf drop: stream its contents into every declared output.
            for sink in self.outputs:
                droputils.copyDropContents(
                    inputDrop, sink, bufsize=self._bufsize)
            return
        for child in inputDrop.children:
            self.copyRecursive(child)
Example #4
0
File: simple.py  Project: ICRAR/daliuge
class GenericNpyScatterApp(BarrierAppDROP):
    """
    An APP that splits an object that has a len attribute into <num_of_copies> parts and
    returns a numpy array of arrays.
    """

    component_meta = dlg_component(
        "GenericNpyScatterApp",
        "Scatter an array like object into <num_of_copies> parts",
        [dlg_batch_input("binary/*", [])],
        [dlg_batch_output("binary/*", [])],
        [dlg_streaming_input("binary/*")],
    )

    # automatically populated by scatter node
    num_of_copies: int = dlg_int_param("num_of_copies", 1)
    scatter_axes: List[int] = dlg_list_param("scatter_axes", "[0]")

    def run(self):
        """
        Split each input along its configured axis into num_of_copies parts
        and write part j of input i to output i * num_of_copies + j.

        :raises DaliugeException: if the output count or the axes count does
            not match the number of inputs.
        """
        if len(self.inputs) * self.num_of_copies != len(self.outputs):
            raise DaliugeException(
                f"expected {len(self.inputs) * self.num_of_copies} outputs,\
                 got {len(self.outputs)}")
        if len(self.inputs) != len(self.scatter_axes):
            raise DaliugeException(f"expected {len(self.inputs)} axes,\
                 got {len(self.scatter_axes)}, {self.scatter_axes}")

        # FIX: removed a no-op self-assignment (num_of_copies = num_of_copies)
        # and a try/except that merely re-raised IndexError unchanged.
        for in_index, in_drop in enumerate(self.inputs):
            nObj = droputils.load_numpy(in_drop)
            result = np.array_split(nObj,
                                    self.num_of_copies,
                                    axis=self.scatter_axes[in_index])
            for split_index in range(self.num_of_copies):
                out_index = in_index * self.num_of_copies + split_index
                droputils.save_numpy(self.outputs[out_index],
                                     result[split_index])
Example #5
0
File: simple.py  Project: ICRAR/daliuge
class GenericScatterApp(BarrierAppDROP):
    """
    An APP that splits an object that has a len attribute into <numSplit> parts and
    returns a numpy array of arrays, where the first axis is of length <numSplit>.
    """

    component_meta = dlg_component(
        "GenericScatterApp",
        "Scatter an array like object into numSplit parts",
        [dlg_batch_input("binary/*", [])],
        [dlg_batch_output("binary/*", [])],
        [dlg_streaming_input("binary/*")],
    )

    # automatically populated by scatter node
    num_of_copies: int = dlg_int_param("num_of_copies", 1)

    def initialize(self, **kwargs):
        super(GenericScatterApp, self).initialize(**kwargs)

    def run(self):
        """
        Split the first input into num_of_copies parts along axis 0 and write
        each part, pickled, to the corresponding output.
        """
        numSplit = self.num_of_copies
        cont = droputils.allDropContents(self.inputs[0])
        # if the data is of type string it is not pickled, but stored as a binary string.
        try:
            inpArray = pickle.loads(cont)
        except Exception:
            # FIX: narrowed from a bare except, which would also swallow
            # KeyboardInterrupt/SystemExit. Fall back to treating the payload
            # as an encoded string.
            inpArray = cont.decode()
        # FIX: removed two try/except blocks that only re-raised unchanged
        # (around np.array and np.array_split) — they were pure no-ops.
        nObj = np.array(inpArray)
        result = np.array_split(nObj, numSplit)
        for i in range(numSplit):
            o = self.outputs[i]
            d = pickle.dumps(result[i])
            o.len = len(d)
            o.write(d)  # average across inputs
Example #6
0
File: simple.py  Project: ICRAR/daliuge
class RandomArrayApp(BarrierAppDROP):
    """
    A BarrierAppDrop that generates an array of random numbers. It does
    not require any inputs and writes the generated array to all of its
    outputs.

    Keywords:

    integer:  bool [True], generate integer array
    low:      float, lower boundary (will be converted to int for integer arrays)
    high:     float, upper boundary (will be converted to int for integer arrays)
    size:     int, number of array elements
    """

    component_meta = dlg_component(
        "RandomArrayApp",
        "Random Array App.",
        [dlg_batch_input("binary/*", [])],
        [dlg_batch_output("binary/*", [])],
        [dlg_streaming_input("binary/*")],
    )

    # default values
    integer = dlg_bool_param("integer", True)
    low = dlg_float_param("low", 0)
    high = dlg_float_param("high", 100)
    size = dlg_int_param("size", 100)
    # last generated array; only retained when initialize(keep_array=True)
    marray = []

    def initialize(self, keep_array=False, **kwargs):
        super(RandomArrayApp, self).initialize(**kwargs)
        self._keep_array = keep_array

    def run(self):
        # At least one output should have been added
        outs = self.outputs
        if len(outs) < 1:
            raise Exception(
                "At least one output should have been added to %r" % self)
        marray = self.generateRandomArray()
        if self._keep_array:
            self.marray = marray
        for o in outs:
            d = pickle.dumps(marray)
            o.len = len(d)
            o.write(d)

    def generateRandomArray(self):
        """Return a numpy array of self.size random numbers in [low, high)."""
        if self.integer:
            # generate an array of self.size integers with numbers between
            # self.low and self.high
            marray = np.random.randint(int(self.low),
                                       int(self.high),
                                       size=(self.size))
        else:
            # FIX: the previous expression (random() + low) * high produced
            # values in [low*high, (1+low)*high), not the documented
            # [low, high). uniform() matches the class docstring.
            marray = np.random.uniform(self.low, self.high, size=self.size)
        return marray

    def _getArray(self):
        return self.marray
Example #7
0
class LP_SignalGenerator(BarrierAppDROP):
    """
    Generates a multi-frequency sine-wave signal (optionally with added noise)
    and writes it, as raw float64 bytes, to every output.
    """
    component_meta = dlg_component('LPSignalGen',
                                   'Low-pass filter example signal generator',
                                   [None], [dlg_batch_output('binary/*', [])],
                                   [dlg_streaming_input('binary/*')])

    # default values
    length = dlg_int_param('length', 256)   # number of samples
    srate = dlg_int_param('sample rate', 5000)
    freqs = dlg_list_param('frequencies', [440, 800, 1000, 2000])
    # noise spec list: [alpha-inverse, freq, mean, seed, std] — see run()
    noise = dlg_list_param('noise', [])
    series = None                           # last generated signal (set by run())

    def initialize(self, **kwargs):
        super(LP_SignalGenerator, self).initialize(**kwargs)

    def add_noise(self,
                  series: np.array,
                  mean,
                  std,
                  freq,
                  sample_rate,
                  seed,
                  alpha=0.1):
        """
        Adds noise to the provided signal by producing random values of a given frequency
        :param series: The input (and output) numpy array signal series
        :param mean: The average value
        :param std: The standard deviation of the value
        :param freq: The frequency of the noisy signal
        :param sample_rate: The sample rate of the input series
        :param seed: The random seed
        :param alpha: The multiplier
        :return: The input series with noisy values added (modified in place)
        """
        np.random.seed(seed)
        samples = alpha * np.random.normal(mean, std, size=len(series))
        for i in range(len(series)):
            samples[i] += np.sin(2 * np.pi * i * freq / sample_rate)
        np.add(series, samples, out=series)
        return series

    def gen_sig(self):
        """Return the sum of sine waves at self.freqs, sampled at self.srate."""
        series = np.zeros(self.length, dtype=np.float64)
        for freq in self.freqs:
            for i in range(self.length):
                series[i] += np.sin(2 * np.pi * i * freq / self.srate)
        return series

    def run(self):
        outs = self.outputs
        if len(outs) < 1:
            raise Exception('At least one output required for %r' % self)
        self.series = self.gen_sig()
        if len(self.noise) > 0:
            # NOTE(review): this mutates self.noise[0] in place (inverts it to
            # use as alpha), so run() is not idempotent — confirm intended.
            self.noise[0] = 1 / self.noise[0]
            self.series = self.add_noise(self.series, self.noise[2],
                                         self.noise[4], self.noise[1],
                                         self.srate, self.noise[3],
                                         self.noise[0])

        # FIX: ndarray.tostring() is deprecated and removed in NumPy 2.0;
        # tobytes() returns the identical byte representation.
        data = self.series.tobytes()
        for o in outs:
            o.len = len(data)
            o.write(data)

    """
    def generate_reproduce_data(self):
        # This will do for now
        return {'data_hash': common_hash(self.series)}
    """

    def generate_recompute_data(self):
        # This will do for now
        return {
            'length': self.length,
            'sample_rate': self.srate,
            'frequencies': self.freqs,
            'status': self.status,
            'system': system_summary()
        }
Example #8
0
class LP_AddNoise(BarrierAppDROP):
    """
    Adds gaussian noise plus a sine component to a float64 signal read from
    the single input, writing the noisy signal to every output.
    """
    component_meta = dlg_component(
        'LPAddNoise',
        'Adds noise to a signal generated for the low-pass filter example',
        [dlg_batch_input('binary/*', [])], [dlg_batch_output('binary/*', [])],
        [dlg_streaming_input('binary/*')])

    # default values
    mean = dlg_float_param('avg_noise', 0.0)
    std = dlg_float_param('std_deviation', 1.0)
    freq = dlg_int_param('frequency', 1200)
    srate = dlg_int_param('sample_rate', 5000)
    seed = dlg_int_param('random_seed', 42)
    alpha = dlg_float_param('noise_multiplier', 0.1)
    signal = np.empty([1])  # input signal, populated by getInputArrays()

    def initialize(self, **kwargs):
        # FIX: was super(LP_AddNoise).initialize(**kwargs) — without passing
        # self the super object is unbound and the parent initialize was
        # never actually invoked.
        super(LP_AddNoise, self).initialize(**kwargs)

    def add_noise(self):
        """Add seeded gaussian noise plus a sine at self.freq to self.signal in place."""
        np.random.seed(self.seed)
        samples = self.alpha * np.random.normal(
            self.mean, self.std, size=len(self.signal))
        for i in range(len(self.signal)):
            samples[i] += np.sin(2 * np.pi * i * self.freq / self.srate)
        np.add(self.signal, samples, out=self.signal)
        return self.signal

    def getInputArrays(self):
        """
        Read the single input drop into self.signal as a float64 array.

        :raises Exception: if there is not exactly one input.
        """
        ins = self.inputs
        if len(ins) != 1:
            raise Exception('Precisely one input required for %r' % self)

        # FIX: np.fromstring is deprecated, and wrapping its result in
        # frombuffer again was redundant. frombuffer(...).copy() yields the
        # same float64 values in a writable array (add_noise writes in place).
        self.signal = np.frombuffer(droputils.allDropContents(ins[0])).copy()

    def run(self):
        outs = self.outputs
        if len(outs) < 1:
            raise Exception('At least one output required for %r' % self)
        self.getInputArrays()
        sig = self.add_noise()
        data = sig.tobytes()
        for o in outs:
            o.len = len(data)
            o.write(data)

    """
    def generate_reproduce_data(self):
        return {'data_hash', common_hash(self.signal)}
    """

    def generate_recompute_data(self):
        return {
            'mean': self.mean,
            'std': self.std,
            'sample_rate': self.srate,
            'seed': self.seed,
            'alpha': self.alpha,
            'system': system_summary(),
            'status': self.status
        }
Example #9
0
class ProduceConfig(BarrierAppDROP):
    """A BarrierAppDrop that produces multiple config files suitable for the CallLeap BarrierAppDrop"""
    # NOTE(review): attribute name is misspelled ('compontent_meta'); kept as-is
    # here, but it likely should be 'component_meta' — confirm against framework.
    compontent_meta = dlg_component('ProduceConfig', 'Produce Config.',
                                    [dlg_batch_input('binary/*', [])],
                                    [dlg_batch_output('binary/*', [])],
                                    [dlg_streaming_input('binary/*')])

    # read component parameters
    numStations = dlg_int_param('number of stations', 1)
    implementation = dlg_string_param('implementation', 'cpu')
    autoCorrelation = dlg_bool_param('auto correlation', False)
    maxDirections = dlg_int_param('max directions', 1)


    def initialize(self, **kwargs):
        super(ProduceConfig, self).initialize(**kwargs)


    def run(self):
        """
        Read (x, y) directions from the single input, split them across the
        outputs, and write one JSON config per output.

        :raises Exception: if there is not exactly one input.
        """
        # check number of inputs and outputs
        if len(self.inputs) != 1:
            raise Exception("One input is expected by this application")

        # read directions from input 0
        directions = self._readDirections(self.inputs[0])

        # determine number of directions per instance (capped at maxDirections)
        numDirectionsPerInstance = float(len(directions)) / float(len(self.outputs))
        numDirectionsPerInstance = min(numDirectionsPerInstance, self.maxDirections)

        startDirectionIndex = 0
        endDirectionIndex = 0

        # split directions
        for i in range(len(self.outputs)):
            endDirectionIndex = int(math.floor((i+1)*numDirectionsPerInstance))

            # split directions
            partDirections = directions[startDirectionIndex:endDirectionIndex]

            # build config
            configJSON = self._createConfig(self.numStations, partDirections, self.implementation, self.autoCorrelation)

            # stringify config
            config = json.dumps(configJSON)

            # write config to output
            if type(config) is str:
                config = config.encode()
            self.outputs[i].write(config)

            # continue from here in the next iteration
            startDirectionIndex = endDirectionIndex


    def _readDirections(self, inDrop):
        """Parse the input drop as CSV and return a list of [x, y] float pairs."""
        directions = []

        # NOTE: it appears csv.reader() can't use the DROPFile(inDrop) directly,
        #       since DROPFile is not a iterator. Instead, we read the whole
        #       inDrop to a string and pass that to csv.reader()
        with DROPFile(inDrop) as f:
            file_data = f.read()
            if type(file_data) is bytes:
                file_data = file_data.decode('utf-8')
            csvreader = csv.reader(file_data.split('\n'))
            for row in csvreader:
                # skip rows with incorrect number of values
                # FIX: was 'len(row) is not 2' — identity comparison on an int
                # is undefined behavior across interpreters (SyntaxWarning on
                # CPython 3.8+); use value inequality.
                if len(row) != 2:
                    continue

                x = float(row[0])
                y = float(row[1])
                directions.append([x, y])

        return directions


    def _createConfig(self, numStations, directions, implementation, autoCorrelation):
        """Assemble the config dict consumed by the CallLeap application."""
        return {
            'stations': numStations,
            'directions': directions,
            'computeImplementation': implementation,
            'readAutoCorrelations': autoCorrelation
        }