Example #1
 def __init__(self):
     self.pipeline = Pipeline()
     self.history_pipeline = Pipeline()
     for plsc in pipelinestages.getPipelineFeedStageClasses():
         self.pipeline.appendStage(plsc())
     for plsc in pipelinestages.getPipelineHistoryFeedStageClasses():
         self.history_pipeline.appendStage(plsc())
Example #2
 def test_resource_group_get_all_mentioned(self):
     p = Pipeline()
     t = p.new_task()
     t.declare_resource_group(foo={'bed': '{root}.bed', 'bim': '{root}.bim'})
     t.command(f"cat {t.foo.bed}")
     assert(t.foo.bed in t._mentioned)
     assert(t.foo.bim not in t._mentioned)
Example #3
 def test_add_extension_input_resource_file(self):
     input_file1 = '/tmp/data/example1.txt.bgz.foo'
     p = Pipeline()
     in1 = p.read_input(input_file1, extension='.txt.bgz.foo')
     with self.assertRaises(Exception):
         in1.add_extension('.baz')
     assert in1._value.endswith('.txt.bgz.foo')
Example #4
 def test_resource_group_get_all_mentioned_dependent_tasks(self):
     p = Pipeline()
     t = p.new_task()
     t.declare_resource_group(foo={'bed': '{root}.bed', 'bim': '{root}.bim'})
     t.command(f"cat")
     t2 = p.new_task()
     t2.command(f"cat {t.foo}")
Example #5
 def test_iteration(self):
     p1 = Pipe(name='p1')
     p1.pipe_cache = ['ok']
     p2 = Pipe(name='p2')
     p3 = Pipe(name='p3')
     pline = Pipeline(pipes=[p1,p2,p3])
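     # next() should yield the value cached in the first pipe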
     self.assertEqual(pline.next(),'ok')
Example #6
def main_pipeline(filedir):
    # point it somewhere with a bunch of text files
    pipe = Pipeline(getFileNames(filedir), frequencyCount, pruneCommon, writer)
    pipe.run()
    while(not pipe.isDone()):
        pass
    out.close()
Example #7
    def on_display(self):
        """
        Rendering callback.
        """
        self._camera.render()

        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)

        self._scale += 0.1
        pipeline = Pipeline(rotation=[0, self._scale, 0],
                            translation=[0, 0, 6],
                            projection=self._projection)
        pipeline.set_camera(self._camera)

        self._effect.set_wvp(pipeline.get_wvp())
        self._effect.set_directional_light(
            self._dir_light_color, self._dir_light_ambient_intensity)

        position, tex_coord = 0, 1
        glEnableVertexAttribArray(position)
        glEnableVertexAttribArray(tex_coord)

        glBindBuffer(GL_ARRAY_BUFFER, self._vbo)
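        # interleaved layout: 5 floats per vertex (3 position + 2 texcoord),
        # so the stride is 20 bytes and texcoords start at byte offset 12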
        glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 20, ctypes.c_void_p(0))
        glVertexAttribPointer(tex_coord, 2, GL_FLOAT, GL_FALSE, 20, ctypes.c_void_p(12))
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self._ibo)

        self._texture.bind(GL_TEXTURE0)
        glDrawElements(GL_TRIANGLES, 18, GL_UNSIGNED_INT, ctypes.c_void_p(0))
        glDisableVertexAttribArray(position)
        glDisableVertexAttribArray(tex_coord)
        glutSwapBuffers()
Example #8
 def test_when_consumer_yield_none(self):
     pipeline = Pipeline().add(
         PipeBuilder().alias("yield_none").consumer(lambda m: m if m == 0 else None).buffer_size(100).number_of_consumer(2)
     )
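     # messages mapped to None are dropped from the stream, so only 0 survives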
     expected = [0]
     actual = [x for x in pipeline.stream(range(100))]
     self.assertEqual(expected, actual)
Example #9
def bas(target, fluxcal, phasecal, fringefinder, bpcal, catlist, FLAG_INT,
        FLAG_BATCH, FLAG_DEBUG, FLAG_SILENT, FLAG_LOG, converts,
        StageSelection, confpath, conffile, inpath, inext, outpath, outprefix,
        logpath, logfile, comment, timeformat, prompt):

    # Get all those parameters from above ^
    settings = locals()

    # Inform the CASA logger of what's happening.
    casalog.origin("bas")
    casalog.post("Output from this task is not appended to the CASA log.",
                 priority="INFO")

    # Make sure we can see all the files we need.
    obit_path = "/home/rowell/src/obit/python"
    parseltongue_path = "/usr/local/share/parseltongue/python"
    sys.path.append(obit_path)
    sys.path.append(parseltongue_path)

    # Do what we came here to do.
    p = Pipeline()
    p.set(settings)
    p.start()

    # Return the settings.
    s = p.settings
    return s
Example #10
def main(input_file_path, output_file_path):
    pdf_file = open_pdf_file(open(input_file_path, 'rb'))

    image_processor = Pipeline(pipeline_provider,
                               [layout_handler, ayat_handler, soura_handler],
                               pipeline_consumer, pipeline_validator)
    image_processor.follow(retrive_page_as_image(pdf_file))
    print('success')
Example #11
def test_pipeline():
    p = Pipeline()
    assert(len(p) == 0)
    p = Pipeline([ajob])
    assert(len(p) == 1)
    assert_raises(ValueError, Pipeline, [notjob])
    p.append(ajob)
    assert(len(p) == 2)
Example #12
def test_config_dict(pipeline_config):
    pipeline = Pipeline([Filename], [PipelineResult], **pipeline_config)
    config_dict = pipeline.get_config()
    print(config_dict)
    assert('Localizer' in config_dict)
    assert('Decoder' in config_dict)
    assert(config_dict['Localizer']['model_path'] == 'REQUIRED')
    assert(config_dict['Decoder']['model_path'] == 'REQUIRED')
Example #13
 def test_resource_group_get_all_inputs(self):
     p = Pipeline()
     input = p.read_input_group(fasta="foo",
                                idx="bar")
     t = p.new_task()
     t.command(f"cat {input.fasta}")
     assert(input.fasta in t._inputs)
     assert(input.idx in t._inputs)
Example #14
    def test_resource_group_mentioned(self):
        p = Pipeline()
        t = p.new_task()
        t.declare_resource_group(foo={'bed': '{root}.bed'})
        t.command(f'echo "hello" > {t.foo}')

        t2 = p.new_task()
        t2.command(f'echo "hello" >> {t.foo.bed}')
        p.run()
Example #15
    def test_one_pipe(self):
        pipeline = Pipeline().add(
            PipeBuilder().alias("multiplier").consumer(lambda m: m * m).buffer_size(100).number_of_consumer(10)
        )
        expected = [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]

        actual = [x for x in pipeline.stream(range(10))]
        actual.sort()
        self.assertEqual(expected, actual)
Example #16
    def __init__(self, data_folder='Data', models_folder='Models', working_folder='Working_Folder'):
        """
        Constructor.

        :param data_folder: the folder containing the data
        :param models_folder: the folder containing the models
        :param working_folder: the folder to store the results
        """
        Pipeline.__init__(self, data_folder, models_folder, working_folder)
Example #17
    def test_multiple_pipes(self):
        pipeline = Pipeline().add(
            PipeBuilder("aggregator").aggregation_size(2).buffer_size(10)
        ).add(
            PipeBuilder("summation").consumer(lambda aggr: sum(aggr)).number_of_consumer(1).buffer_size(10)
        ).add(
            PipeBuilder("nth_triangular").consumer(lambda n: (n * n + n) / 2).number_of_consumer(1).buffer_size(10)
        )
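        # range(8) -> pairs (0,1),(2,3),(4,5),(6,7) -> sums 1,5,9,13 -> triangular numbers 1,15,45,91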

        expect = [1, 15, 45, 91]
        self.assertEqual(expect, [x for x in pipeline.stream(range(8))])
Example #18
 def test_two_actions(self):
     arguments = {}
     action1 = Mock()
     action1.execute = Mock(return_value='somathin')
     action2 = Mock()
     action2.execute = Mock(return_value='other thing')
     pipeline = Pipeline([ action1, action2 ])
     result = pipeline.execute(arguments)
     action1.execute.assert_called_once_with(arguments)
     action2.execute.assert_called_once_with('somathin')
     self.assertEqual('other thing', result)
Example #19
 def test_threads(self):
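     # sleep(1) and sleep(2) run concurrently with 10 threads (~2s) but serially with 1 thread (3s+)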
     pipe_threads = Pipeline(range(1, 3), threads=10).apply(sleep)
     threads_time = timeit("pipe_threads.run()",
                           number=1,
                           globals={"pipe_threads": pipe_threads})
     self.assertLess(threads_time, 3)
     pipe_no_threads = Pipeline(range(1, 3), threads=1).apply(sleep)
     no_threads_time = timeit("pipe_no_threads.run()",
                              number=1,
                              globals={"pipe_no_threads": pipe_no_threads})
     self.assertGreater(no_threads_time, 3)
Example #20
def test_unnamed_action_in_pipeline():
    """Test that an unnamed action will get the module.task_name name."""
    actions = [
        TaskAction(
            'stuff_increment_source',
            amount='1'
        ),
    ]
    executor = Pipeline(actions)
    result = executor.schedule(1).get()
    assert 'stuff_increment_source' in result.results
Example #21
def get_default_pipeline():
    cypress = CypressStage()
    spirit = SpiritStage(ispca=False,thresh=0.01,ebounds=(0.96,1.1),startm=3)
    kalman = KalmanStage()
    pipeline = Pipeline()
    draw = DrawStage('../etc/tmp/abilene', False)
    pipeline.append_stage(cypress)
    pipeline.append_stage(spirit)
    #pipeline.append_stage(kalman)
    #pipeline.append_stage(draw)
    return pipeline
Example #22
def searchImagery(permissions, token, extent):
    validateAccessRights([os.getenv('IMG_AVAILABILITY')], permissions)
    url = utils.SK_IMAGE_API + '/search'
    pipeline = Pipeline(url, token, prepare_searchReq(extent))
    pipeline.start()
    logger.info("Created Pipeline. Waiting for results...")
    response = pipeline.join()
    if not response or 'results' not in response or len(
            response['results']) == 0:
        raise SpaceKnowError('No imagery found in the response', 500)
    return response['results']
Example #23
def evaluatesCosts(scenes, extent, permissions, token):
    validateAccessRights([os.getenv('KRAKEN_DRY_RUN')], permissions)
    data = createEvaluationRequest(scenes, extent)
    url = utils.SK_KRAKEN_API + '/dry-run'
    pipeline = Pipeline(url, token, data)
    pipeline.start()
    logger.info("Created Pipeline. Waiting for results...")
    analysis = pipeline.join()
    if not analysis or 'allocatedCredits' not in analysis:
        raise SpaceKnowError('Cost analysis is not available', 503)
    return analysis
Example #24
def pipeline_wrapper(dic_exp_conf, dic_agent_conf, dic_traffic_env_conf,
                     dic_path):
    ppl = Pipeline(dic_exp_conf=dic_exp_conf,
                   dic_agent_conf=dic_agent_conf,
                   dic_traffic_env_conf=dic_traffic_env_conf,
                   dic_path=dic_path)
    global multi_process
    ppl.run(multi_process=multi_process)

    print("pipeline_wrapper end")
    return
Example #25
def worker_thread(p, organizations, auto_create_repositories, deployment_map,
                  parameter_store):
    LOGGER.debug("Worker Thread started for %s", p.get('name'))
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        code_account_id = p.get('default_providers',
                                {}).get('source',
                                        {}).get('properties',
                                                {}).get('account_id', {})
        has_custom_repo = p.get('default_providers',
                                {}).get('source',
                                        {}).get('properties',
                                                {}).get('repository', {})
        if auto_create_repositories and code_account_id and str(
                code_account_id).isdigit() and not has_custom_repo:
            repo = Repo(code_account_id, p.get('name'), p.get('description'))
            repo.create_update()

    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            regions = step.get('regions',
                               p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
            paths_tags = []
            for path in step.get('path', []):
                paths_tags.append(path)
            if step.get('tags') is not None:
                paths_tags.append(step.get('tags', {}))
            for path_or_tag in paths_tags:
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path_or_tag, target_structure,
                                         organizations, step, regions)
                pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.generate_waves())

    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    pipeline.generate_input()
    ssm_params = fetch_required_ssm_params(pipeline.input["regions"]
                                           or [DEPLOYMENT_ACCOUNT_REGION])
    deployment_map.update_deployment_parameters(pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    with open(f'cdk_inputs/{pipeline.input["name"]}.json',
              mode='w',
              encoding='utf-8') as outfile:
        data = {}
        data['input'] = pipeline.input
        data['input']['default_scm_branch'] = ssm_params.get(
            'default_scm_branch')
        data['ssm_params'] = ssm_params
        json.dump(data, outfile)
Example #26
def pipeline_wrapper(dic_exp_conf, dic_agent_conf, dic_traffic_env_conf, dic_path):
    ppl = Pipeline(dic_exp_conf=dic_exp_conf, # experiment config
                   dic_agent_conf=dic_agent_conf, # RL agent config
                   dic_traffic_env_conf=dic_traffic_env_conf, # the simulation configuration
                   dic_path=dic_path # where should I save the logs?
                   )
    global multi_process
    ppl.run(multi_process=multi_process)

    # print("pipeline_wrapper end")
    return
Example #27
def test_as_rows(passing_stage, failing_stage, second_passing_stage):
    stages = [passing_stage, failing_stage, second_passing_stage]
    pipeline = Pipeline(stages, trigger="commits")
    runs = pipeline.simulation(now, [commit1], timedelta(minutes=60))
    rows = as_rows(stages, runs)
    assert ",".join(
        map(str, rows[0])
    ) == "Start Time,Changes Included,Build,Build,AcceptanceTest,End Time,Deploy Time"
    assert ",".join(
        map(str, rows[1])
    ) == "2018-04-03 08:00:00,['#0001'],ok,fail,skip (previous failure),2018-04-03 08:20:00,"
Example #28
    def __init__(self,
                 name,
                 fields,
                 universe_file=None,
                 field_rename_map={},
                 build_meta=False,
                 fuzzy_thresh=0.75,
                 field_weights=None,
                 exact=[],
                 udelim='\t'):

        self.name = name

        self.fields = fields
        self.universe_file = universe_file
        self.field_rename_map = field_rename_map
        # if the target has different names for the same fields, specify and rename target fields when matching

        self.LSH = {}

        self.threshold = fuzzy_thresh
        self.field_weights = field_weights
        self.exact = exact

        self.__pipeline__ = Pipeline(name)
        self.__preprocessor__ = PreprocessPiper(self.__pipeline__)

        if (universe_file and os.path.exists(universe_file)):
            self.__pipeline__.connect()
            if (not self.__pipeline__.__client__[Pipeline.__database__][
                    'meta_{}'.format(name)].find_one(
                        {'sha1': sha1_file(universe_file)})):
                try:
                    self.__preprocessor__.upload_universe_file(
                        universe_file, build_meta=build_meta, delim=udelim)
                    self.__pipeline__.__client__[Pipeline.__database__][
                        'meta_{}'.format(name)].insert_one({
                            'sha1':
                            sha1_file(universe_file),
                            'source':
                            universe_file
                        })
                except pymongo.errors.BulkWriteError:
                    # The file may have changed: duplicates raise a BulkWriteError,
                    # but any new records are still uploaded, so ignore the error.
                    pass

        self.filters = self.__getfilters__()

        for exit_status in self.__buildfilters__():
            if (exit_status[1] != 0):
                raise RuntimeError(
                    'When building {obj}, exit code returned non-zero ({code})'
                    .format(obj=exit_status[0], code=exit_status[1]))
Example #29
 def input_fn():
     with tf.device('/cpu:0'), tf.name_scope('input_pipeline'):
         pipeline = Pipeline(filenames,
                             batch_size=batch_size,
                             image_size=image_size,
                             time_step=time_step,
                             repeat=is_training,
                             shuffle=is_training,
                             augmentation=is_training)
         features, labels = pipeline.get_batch()
     return features, labels
Example #30
def on_message(channel, method_frame, header_frame, body):
    global counter
    global conn
    channel.basic_ack(delivery_tag=method_frame.delivery_tag)
    lines = body.decode()
    pln = Pipeline(lines, conn, nlp)
    pln.run()
    counter = counter + 1
    if counter % 50 == 0:
        logging.info("consumer {}: {} messages consumed ".format(
            queue_name, counter))
Example #31
def detect(input_path, output_directory, pipeline_file):
    # if input_path is just a single file, we don't need all the multicore
    # setup.
    if os.path.isfile(input_path):
        pipeline = Pipeline(pipeline_file, os.path.dirname(input_path), output_directory)
        pipeline.execute(input_path)
        
    elif os.path.isdir(input_path):
        multiexecutor = MultiPipelineExecutor()
        multiexecutor.execute(pipeline_file, input_path, output_directory)
    else:
        print("Input is not an image file or directory:", input_path)
Example #32
def test_single_action_in_pipeline():
    """Test a single action scheduled by the executor.
    """
    actions = [
        TaskAction('stuff_increment_source',
            name='increment',
            amount='1'
        )
    ]
    executor = Pipeline(actions)
    result = executor.schedule(1).get()
    assert result.results['increment'] == 2
Example #33
def main():
    """ 
    Parses arguments; initialises logger; initialises camera driver if
    necessary; loads single image from disk if necessary; and runs desired parts
    of pipeline, or loads output from previous execution for printout.
    
    """

    options, args = argparse.run()
    loginit.run(options.verbosity)
    logger = logging.getLogger('main')

    logger.info(' '.join(sys.argv[1:]))

    if options.simulate == 0:
        options.simulate = None
        l = DC1394Library()
    elif options.simulate > 0:
        options.simulate -= 1
    elif options.simtime is None:
        options.simtime = 36000

    global pipeline
    pipeline = Pipeline(options)

    if options.disk:
        logger.info('using poses from disk')
        pipe = Pipeline()
        pipe.options = options
        printer = Printer(pipe=pipe)
        printer.final()
        logger.info('done. exiting')
        sys.exit(0)

    if args:
        try:
            image = cv2.imread('images/'+args[0], cv2.CV_LOAD_IMAGE_GRAYSCALE)
            pipeline.set_image(image)
            logger.info('opening image file %s from disk' % args[0])
        except IOError:
            logger.error('image file not found: %s' % args[0])
            exit(1)
    elif options.simulate is not None:
        logger.info('running in simulation mode')
    else:
        try:
            fwcam = handle_common_options(options, l)
            pipeline.set_fwcam(fwcam)
            logger.info('init. pydc1394 camera object')
            logger.info('camera: %s' % fwcam.model)
            logger.info('mode: %s' % fwcam.mode)
            logger.info('framerate: %d' % fwcam.framerate.val)
        except:
            logger.error('unable to open camera capture')
            exit(1)

    pipeline.run()
Example #34
    def configure(self):
        self.logger.info('Loading configuration: %r' % self.path)
        # push implicit top-level context
        self.pipeline = Pipeline()
        with self.pipeline:
            try:
                execfile(self.path)
            except Exception as ex:
                trace = sys.exc_info()[2]
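                # Python 2 three-expression raise: re-raise as ConfigException with the original traceback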
                raise ConfigException(ex), None, trace

        # pipeline has no final output queue
        self.pipeline.setup(None)
Example #35
 def __init__(self, name):
     self.crawl = Crawl()
     self.analysis = Analysis()
     self.pipe = Pipeline()
     self.options = webdriver.ChromeOptions()
     # set the default download location
     prefs = {
         'profile.default_content_settings.popups': 0,
         'download.default_directory': os.path.abspath('DATA')
     }
     self.options.add_experimental_option('prefs', prefs)
     self.driver = webdriver.Chrome(chrome_options=self.options)
     self.name = str(name.encode('gbk'))[2:-1].replace('\\x', '%').upper()
Example #36
def test_context_and_kwargs_application_in_pipeline():
    """Test that both build context and user-supplied kwargs are applied
    to a series of tasks in a chain.
    """
    actions = [
        TaskAction('increment', num='0'),
        TaskAction('increment', name='increment_again', num='{{ increment }}'),
        TaskAction('increment', name='increment_once_more', num='{{ increment_again }}')
    ]
    source = None
    executor = Pipeline(source, actions)
    result = executor.schedule().get()
    assert result.results['increment_once_more'] == 3
Example #37
    def __init__(
        self,
        data_folder="Data",
        models_folder="Models",
        working_folder="Working_Folder",
        sim_data_folder="param_estim_data",
        sim_plots_folder="param_estim_plots",
    ):
        __doc__ = Pipeline.__init__.__doc__
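        # note: this only assigns a local variable; it does not set this method's docstring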

        Pipeline.__init__(self, data_folder, models_folder, working_folder, sim_data_folder, sim_plots_folder)
        # The folder containing the updated Copasi models
        self.__updated_models_folder = "updated_models"
Example #38
    def test_resource_group_get_all_outputs(self):
        p = Pipeline()
        t1 = p.new_task()
        t1.declare_resource_group(foo={'bed': '{root}.bed', 'bim': '{root}.bim'})
        t1.command(f"cat {t1.foo.bed}")
        t2 = p.new_task()
        t2.command(f"cat {t1.foo.bed}")

        for r in [t1.foo.bed, t1.foo.bim]:
            assert(r in t1._outputs)
            assert(r in t2._inputs)
            assert(r in t1._mentioned)
            assert(r not in t2._mentioned)
Example #39
def main(argv):
    Common.init("/mnt/config/config.yml", docker_container=argv[1])
    
    Common.message(_('Loading pipeline...'))
    pipeline = Pipeline(path="/mnt/pipeline/pipeline.yml", host_path=argv[0])
    
    if len(argv) > 2 and argv[2] == "test":
        pipeline.run_tests()
        
    elif len(argv) > 2 and argv[2]:
        Common.message(_('Unknown argument')+": "+argv[2])
        
    else:
        Web(pipeline, host='0.0.0.0')
Example #40
def add_pics(rep, paths, process, recipe=None):
    """
    Add pictures to repository.
    
    Arguments:
    rep     -- Add pictures to this repository.
    paths   -- Paths of the pictures to be added (check if path exists).
    process -- Boolean flag if added pictures should be processed.
    recipe  -- Recipe to use for picture processing.
    """

    for path in paths:
        if not os.path.exists(path):
            log.warning("File not found: '%s'. Skipping it." % path)

    pics = [Picture(path) for path in paths if os.path.exists(path)]
    rep.index.add(pics)

    if process:
        log.info("Processing pictures.")
        if not recipe:  # set up pipeline with the default recipe
            process_recipe = \
                Recipe.fromString(rep.config['recipes.default'])
        else:
            process_recipe = recipe
        pl = Pipeline('Pipeline1', process_recipe,
                      path=rep.connector.url.path)
        for pic in pics:
            pl.put(pic)
        pl.start()  # start processing threads
        pl.join()   # wait until threads exit

    log.info("Saving index to file.")
    with rep.connector.connected():
        rep.save_index_to_disk()
    return rep
Example #41
    def test_star_apply(self):
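        # star_apply unpacks each element as positional *args (for lists) or keyword **kwargs (for dicts)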
        pipe = Pipeline([10, 20, 30])
        self.assertIsInstance(Pipeline([]).star_apply(func=func), Pipeline)
        with self.assertRaises(ValueError):
            Pipeline([]).star_apply(object())
        with self.assertRaises(TypeError):
            Pipeline([]).star_apply(not_a_param=object())
        with self.assertRaises(TypeError):
            out, err = pipe.star_apply(add_two_params).run()

            def error_func():
                raise err[0]

            error_func()
        out, err = pipe.apply(lambda x: [x, x]).star_apply(
            add_two_params).run()
        self.assertEqual(len(out), 3)
        self.assertEqual(sum(out), 120)
        self.assertEqual(len(err), 0)
        out, err = pipe.apply(lambda x: {
            "a": x,
            "b": x
        }).star_apply(add_two_params).run()
        self.assertEqual(len(out), 3)
        self.assertEqual(sum(out), 120)
        self.assertEqual(len(err), 0)
Example #42
 def testGetAllDataDependencies(self):
     pipeline = Pipeline('project-input.0.txt')
     pipeline.get_all_data_dependencies()
     self.assertEqual(pipeline.data_dep[0], (3,1))
     self.assertEqual(pipeline.data_dep[1], (3,2))
     self.assertEqual(pipeline.data_dep[2], (4,3))
     self.assertEqual(pipeline.data_dep[3], (6,2))
     self.assertEqual(pipeline.data_dep[4], (6,3))
     self.assertEqual(pipeline.data_dep[5], (6,5))
     self.assertEqual(pipeline.data_dep[6], (7,3))
     self.assertEqual(pipeline.data_dep[7], (7,5))
     self.assertEqual(pipeline.data_dep[8], (8,1))
     self.assertEqual(pipeline.data_dep[9], (8,2))
     self.assertEqual(pipeline.data_dep[10], (8,6))
Example #43
def test_named_actions_in_pipeline():
    """Test that named actions store their name in build context.
    """
    dct = {
        "name": "mytask",
        "task": "named_action",
    }
    actions = [TaskAction(dct['task'], name=dct['name'])]

    executor = Pipeline(actions)
    ret = executor.schedule('42').get()

    assert 'mytask' in ret.results.keys()
    assert bool(ret.results['mytask'])
Example #44
def main():
    # Read input file path from command line argument
    if len(sys.argv) != 2:
        print('Please use the application as follows: python main.py <filepath>.wav')
        sys.exit(2)
    fpath = sys.argv[1]

    # If the file path ends with .wav only the given file will be processed
    # Else we assume that the input is a path to all the .wav files to be batch processed
    audio_files = []
    if fpath.endswith('.wav'):
        audio_files.append(fpath)
    else:
        audio_files.extend(glob.glob(fpath + '/*.wav'))

    # TODO: Compare human/model recognition
    # TODO: Maybe save (for instance) spectral images of sound_generation that failed

    # Setup logging
    logging.basicConfig(filename='pitch_perception.log', level=logging.INFO)

    n_channels = int(Config.get_config_option('n_channels'))

    transducer = BrianTransducer(n_channels)
    available_pitch_extractors = {'naive': NaivePitchExtractor, 'spectral': SpectralPitchExtractor(n_channels),
                                  'temporal': TemporalPitchExtractor, 'xcorr': XcorrPitchExtractor(n_channels)}

    pitch_extractor = available_pitch_extractors[Config.get_config_option('pitch_extraction')]

    # Init pipeline
    pipeline = Pipeline(transducer, pitch_extractor, test_mode=False)

    # Collect results
    results = []

    for af in audio_files:
        # Run processing
        pitch = pipeline.process(af)

        log_string = 'File: %s\tPitch: %i' % (af, pitch)
        logging.info(log_string)

        # Output final pitch
        print(log_string)

        results.append((af, pitch))

    # Export results
    CsvExporter.export('results.csv', pitch_extractor.__class__.__name__, results)
Example #45
    def process_image(self):
        '''
        Process the image by thresholding and other techniques to make the cells easy to extract.

        Return value
        ----------
        returns a black and white version of the image
        '''
        # build the preprocessing pipeline
        pipeline = Pipeline([
            Helpers.convert_to_grayscale, lambda image: Helpers.blur(image, 5),
            Helpers.thresholdify, Helpers.ellipse_morph
        ])

        return pipeline.process_pipeline(self.image)
Example #46
def handle_data():
    # Get Lyrics
    lyrics = request.form['lyrics']

    if not lyrics:
        return "No Lyrics Found!"
    # Convert to list if it is not.
    if type(lyrics) == type(""):
        lyrics = [lyrics]

    # Instantiate a pipeline object
    pipeline = Pipeline(lyrics)

    # Get the results.
    return pipeline.vectorize()
Example #47
def downloadMap(mapType, scene, extent, token):
    url = buildURL(utils.SK_KRAKEN_API, 'release', mapType, 'geojson')
    data = json.dumps({'sceneId': scene, 'extent': extent})
    try:
        pipeline = Pipeline(url, token, data)
        pipeline.start()
        spaceKnowLogger.info('Making Request for scene %s' % scene)
        jsonMap = pipeline.join()
        if not jsonMap or 'mapId' not in jsonMap or 'maxZoom' not in jsonMap or \
          'tiles' not in jsonMap:
            raise SpaceKnowError('Received an invalid map for scene %s' % scene,
                                 500)
        return jsonMap
    except SpaceKnowError as e:
        spaceKnowLogger.error('Error %d: %s' % (e.status_code, e.error))
Example #48
 def paintGL(self):
     self.step += 0.1
     self.camera.render()
     projection = ProjParams(self.width, self.height, 1.0, 100.0, 60.0)
     self.pipeline = Pipeline(rotation=[0, 30*self.step, 0],
                              translation=[0, 0, 3],
                              projection=projection)
     self.pipeline.set_camera(self.camera)
     glClear(GL_COLOR_BUFFER_BIT)
     glEnableVertexAttribArray(0)
     world_location = glGetUniformLocation(self.program, "gWorld")
     glUniformMatrix4fv(world_location, 1, GL_TRUE, self.pipeline.get_wvp())
     glDrawElements(GL_TRIANGLES, self.index.shape[0],
                    GL_UNSIGNED_INT, ctypes.c_void_p(0))
     glDisableVertexAttribArray(0)
Example #49
 def test_pipeline_unknown(self):
     os.environ["IN_KIND"] = "MEM"
     os.environ["OUT_KIND"] = "MEM"
     pipeline = Pipeline(args=["unknown"])
     del os.environ["IN_KIND"]
     del os.environ["OUT_KIND"]
     assert pipeline is not None
Example #50
def test_localizer(pipeline_config):
    pipeline = Pipeline([Filename], [Regions, LocalizerPositions], **pipeline_config)

    expected_stages = [ImageReader,
                       LocalizerPreprocessor,
                       Localizer]
    _assert_types(pipeline.pipeline, expected_stages)

    fname = os.path.dirname(__file__) + '/data/Cam_2_20150821161530_884267.jpeg'

    outputs = pipeline([fname])

    assert len(outputs) == 2
    assert Regions in outputs
    assert LocalizerPositions in outputs

    regions = outputs[Regions]
    assert(len(regions) > 0)

    positions = outputs[LocalizerPositions]
    assert(len(regions) == len(positions))

    for pos in positions:
        assert(pos[0] >= 0 and pos[0] < 3000)
        assert(pos[1] >= 0 and pos[1] < 4000)
Example #51
    def get_output_tasks(my):

        process = my.get_value("process")
        parent = my.get_parent()

        # get the pipeline
        pipeline_code = parent.get_value("pipeline_code", no_exception=True)
        if not pipeline_code:
            return []

        pipeline = Pipeline.get_by_code(pipeline_code)
        if not pipeline:
            return []

        processes = pipeline.get_output_processes(process)
        if not processes:
            return []

        tasks = []

        process_names = [x.get_name() for x in processes]

        search = Search("sthpw/task")
        search.add_filters("process", process_names)
        search.add_parent_filter(parent)
        tasks = search.get_sobjects()

        return tasks
Example #52
    def test_resource_group_get_all_outputs(self):
        p = Pipeline()
        t1 = p.new_task()
        t1.declare_resource_group(foo={
            'bed': '{root}.bed',
            'bim': '{root}.bim'
        })
        t1.command(f"cat {t1.foo.bed}")
        t2 = p.new_task()
        t2.command(f"cat {t1.foo.bed}")

        for r in [t1.foo.bed, t1.foo.bim]:
            assert (r in t1._outputs)
            assert (r in t2._inputs)
            assert (r in t1._mentioned)
            assert (r not in t2._mentioned)
Example #53
 def test_parse_normal(self):
     self.assertEqual(
         Pipeline.parse('Hello!'),
         Message(tokens=[
             PlainText(name='Hello!')
         ])
     )
Example #54
def test_generator_processor(tmpdir, bees_image, pipeline_config):
    def image_generator():
        ts = time.time()
        data_source = DataSource.new_message(filename='bees.jpeg')
        for i in range(2):
            img = imread(bees_image)
            yield data_source, img, ts + i

    repo = Repository(str(tmpdir))
    pipeline = Pipeline([Image, Timestamp], [PipelineResult], **pipeline_config)
    gen_processor = GeneratorProcessor(
        pipeline, lambda: BBBinaryRepoSink(repo, camId=2))

    gen_processor(image_generator())
    gen_processor(image_generator())
    fnames = list(repo.iter_fnames())
    assert len(fnames) == 2

    last_ts = 0
    for fname in repo.iter_fnames():
        print("{}: {}".format(fname, os.path.getsize(fname)))
        with open(fname, 'rb') as f:
            fc = FrameContainer.read(f)
        assert fc.dataSources[0].filename == 'bees.jpeg'
        assert last_ts < fc.fromTimestamp
        last_ts = fc.fromTimestamp
Example #55
def test_imagereader(bees_image, pipeline_config):
    pipeline = Pipeline([Filename], [Image, Timestamp, CameraIndex], **pipeline_config)

    expected_stages = [ImageReader]
    _assert_types(pipeline.pipeline, expected_stages)

    outputs = pipeline([bees_image])
    assert(len(outputs) == 3)

    assert Image in outputs
    assert Timestamp in outputs
    assert CameraIndex in outputs

    im = outputs[Image]
    ts = outputs[Timestamp]
    idx = outputs[CameraIndex]
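    # the expected values below are encoded in the filename Cam_2_20150821161530_884267.jpeg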

    tz = pytz.timezone('Europe/Berlin')
    dt = datetime.datetime.fromtimestamp(ts, tz=pytz.utc)
    dt = dt.astimezone(tz)
    assert(im.shape == (3000, 4000))
    assert(dt.year == 2015)
    assert(dt.month == 8)
    assert(dt.day == 21)
    assert(dt.hour == 16)
    assert(dt.minute == 15)
    assert(dt.second == 30)
    assert(dt.microsecond == 884267)
    assert(idx == 2)
Example #56
    def sort_shot_tasks(tasks):
        '''Sort tasks by pipeline. It can be for assets or shots.'''
        # first sort by pipeline

        # get the pipeline of the first task
        sobject = tasks[0].get_parent()
        if not sobject:
            return tasks

        pipeline = Pipeline.get_by_sobject(sobject)
        if not pipeline:
            return tasks


        # assign a number value to each process in the pipeline
        processes = pipeline.get_process_names()
        process_dict = {}
        count = 0
        for process in processes:
            process_dict[process] = count
            count += 1

        def process_compare(x,y):
            x_process = x.get_value("process")
            y_process = y.get_value("process")
            x_value = process_dict.get(x_process)
            y_value = process_dict.get(y_process)
            return cmp(x_value, y_value)

        tasks.sort(process_compare)
        return tasks
Example #57
def test_decoder(pipeline_config):
    pipeline = Pipeline([Filename], [LocalizerPositions, IDs, Radii], **pipeline_config)

    expected_stages = [ImageReader,
                       LocalizerPreprocessor,
                       Localizer,
                       Decoder]
    _assert_types(pipeline.pipeline, expected_stages)

    fname = os.path.dirname(__file__) + '/data/Cam_2_20150821161530_884267.jpeg'

    outputs = pipeline([fname])

    assert len(outputs) == 3
    assert IDs in outputs
    assert LocalizerPositions in outputs
    assert Radii in outputs

    positions = outputs[LocalizerPositions]
    ids = outputs[IDs]
    radii = outputs[Radii]

    assert(len(ids) == len(positions))
    assert(len(ids) == len(radii))

    for pos, id, radius in zip(positions, ids, radii):
        pos = np.round(pos).astype(np.int)
        id = ''.join([str(int(b)) for b in (np.round(id))])
        print('Detection at ({}, {}) \t ID: {} \t Radius: {}'.format(pos[0], pos[1], id, radius))
Example #58
def test_update_deployment_parameters(cls):
    cls.parameter_store = Mock()
    cls.parameter_store.put_parameter.return_value = None

    pipeline = Pipeline({
        "name": "pipeline",
        "params": [{"key": "value"}],
        "targets": [],
        "pipeline_type": "some_type"
    })
    pipeline.template_dictionary = {
        "targets": [[{"name": "some_pipeline", "path": "/fake/path"}]]
    }

    cls.update_deployment_parameters(pipeline)
    assert cls.account_ou_names['some_pipeline'] == '/fake/path'
Example #59
 def test_parse_escaped(self):
     self.assertEqual(
         Pipeline.parse('\\\\\\#\\#Hello\\\\world!\\#\\#'),  # means '\\\#\#Hello\\world!\#\#'
         Message(tokens=[
             PlainText(name='\\##Hello\\world!##')  # means '\##Hello\world!##'
         ])
     )
Example #60
def test_tagSimilarityEncoder(pipeline_config):
    pipeline = Pipeline([Filename], [Descriptors], **pipeline_config)
    fname = os.path.dirname(__file__) + '/data/Cam_2_20150821161530_884267.jpeg'

    outputs = pipeline([fname])
    assert Descriptors in outputs
    assert len(outputs[Descriptors]) > 20