Code example #1
# --- imports and logger (standard Eskapade macro header; the exact import
#     paths are assumed, as they are not shown in this excerpt)
from eskapade import process_manager, ConfigObject, DataStore, Chain, core_ops
from eskapade.core import persistence
from eskapade.logger import Logger, LogLevel

logger = Logger()

# --- minimal analysis information

settings = process_manager.service(ConfigObject)
settings['analysisName'] = 'esk111_load_datastore_from_file'
settings['version'] = 0

ds = process_manager.service(DataStore)
ds['number'] = 1
file_path = persistence.io_path('proc_service_data', 'temp_datastore.pkl')
ds.persist_in_file(file_path)

# --- update the number
ds['number'] = 2

# --- Reload from the pickle file with:
# >>> ds = DataStore.import_from_file(file_path)

# --- now set up the chains and links

ch = Chain('Start')
link = core_ops.ImportDataStore(name='importds', path=file_path)
link.logger.log_level = LogLevel.DEBUG
ch.add(link)

link = core_ops.PrintDs()
link.keys = ['number']
ch.add(link)

logger.debug(
    'Done parsing configuration file esk111_load_datastore_from_file.')
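
# --- A quick interactive check of the persisted file (a minimal sketch,
#     assuming this macro has been run): the pickle still holds number = 1,
#     because the update to 2 above happened after persist_in_file.
# >>> ds_check = DataStore.import_from_file(file_path)
# >>> ds_check['number']
# 1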
Code example #2
{conf_path}
"""
logger.info(msg, data_path=settings['resultsDir'] + '/' + settings['analysisName'] + '/data/v0/',
            conf_path=settings['resultsDir'] + '/' + settings['analysisName'] + '/config/v0/')
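# (the keyword arguments above fill the matching {data_path} / {conf_path}
#  placeholders in the msg template, whose tail is shown above)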

# dummy information used in this macro, added to each chain below.
f = {'hello': 'world', 'v': [3, 1, 4, 1, 5], 'n_favorite': 7}
g = {'a': 1, 'b': 2, 'c': 3}
h = [2, 7]

#########################################################################################
# --- now set up the chains and links based on configuration flags

#########
# chain 1
ch = Chain('chain1')

# the link ToDsDict adds objects to the datastore at link execution.
link = core_ops.ToDsDict(name='intods_1')
link.store_key = 'f'
link.obj = f
ch.add(link)

# print contents of datastore
link = core_ops.PrintDs()
ch.add(link)

#########
# chain 2
ch = Chain('chain2')
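
# --- the excerpt stops here; as a minimal sketch, chain2 could repeat the same
#     pattern with the other dummy objects defined above (link names here are
#     illustrative):
link = core_ops.ToDsDict(name='intods_2')
link.store_key = 'g'
link.obj = g
ch.add(link)

link = core_ops.PrintDs()
ch.add(link)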
Code example #3
def first_word(x):
    """Take first word."""
    return x.split()[0]
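

# to_lower is also used as a line processor below but is not defined in this
# excerpt; a minimal (assumed) definition:
def to_lower(x):
    """Convert a line to lower case."""
    return x.lower()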


#########################################################################################
# --- now set up the chains and links based on configuration flags

# This chain does 'mapping'. (macro B does 'reduction'.)

# --- mapper: chain with event looper
#     this eventlooper link serves as a mapper.
#     in this example the lines are converted to lower case, and the first word is selected.
if settings['do_map']:
    ch = Chain("Mapper")
    looper = core_ops.EventLooper(name='listener')
    looper.skip_line_beginning_with = ['#']
    looper.line_processor_set = [first_word, to_lower]
    if settings['TESTING']:
        looper.filename = f.name
    ch.add(looper)

# --- reducer: chain with event looper
#     this eventlooper link serves as a reducer
#     in this example the lines are grouped together into unique sets.
if settings['do_reduce']:
    ch = Chain("Reducer")
    looper = core_ops.EventLooper(name='grouper')
    # reducer selects all unique lines
    looper.sort = True
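
# --- illustration only (plain Python, not the EventLooper API): the reduction
#     step amounts to collapsing the mapped words into a sorted set of unique
#     entries, e.g.
# >>> sorted(set(['hello', 'world', 'hello']))
# ['hello', 'world']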