コード例 #1
0
def translateCellPopModel(model):
    """Translate a CellPop model into an equivalent hMA model.

    Creates a fresh modeling.Model carrying the same name, then copies
    the structure, reactions, death rules and divisions over in turn.
    """
    translated = modeling.Model(model.name)
    for translate_part in (
        translateStructureCellPopModel,
        translateReactionsCellPopModel,
        translateDeathRulesCellPopModel,
        translateDivisionsCellPopModel,
    ):
        translate_part(model, translated)
    return translated
コード例 #2
0
def get_pretrain_loss(FLAGS, features, is_training):
    """Build the combined pre-training loss for the model.

    Three heads are summed: masked language modeling, sentence-order
    classification (binary), and a shuffle-prediction head.

    Returns:
        (total_loss,
         (mlm_example_loss, sop_example_loss, shuffle_example_loss),
         (mlm_log_probs, sop_log_probs, shuffle_probs))
    """
    tf.logging.info("*** Features ***")
    for feature_name in sorted(features.keys()):
        tf.logging.info("  name = %s, shape = %s" %
                        (feature_name, features[feature_name].shape))

    model_config = modeling.ModelConfig.from_json_file(FLAGS.model_config_file)

    # Backbone encoder over the input sequence.
    model = modeling.Model(model_config, is_training,
                           features["input_ids"],
                           features["input_mask"],
                           features["seq_ids"],
                           features["segment_ids"])

    # Masked-LM head: predict the masked-out token ids.
    mlm_loss, mlm_example_loss, mlm_log_probs = get_masked_lm_loss(
        model_config, model.get_sequence_output(),
        model.get_embedding_table(),
        features["masked_lm_positions"],
        features["masked_lm_ids"],
        features["masked_lm_weights"])

    # Sentence-order head: 2-way classification on the pooled output.
    sop_loss, sop_example_loss, sop_log_probs = get_classification_loss(
        model_config, model.get_pooled_output(),
        features["next_sentence_labels"], 2)

    # Shuffle head: predict the original ordering from shuffle_index.
    shuffle_loss, shuffle_example_loss, shuffle_probs = get_shuffle_loss(
        model_config, model.get_sequence_output(),
        features["shuffle_index"], features["seq_ids"])

    total_loss = mlm_loss + sop_loss + shuffle_loss

    return (total_loss,
            (mlm_example_loss, sop_example_loss, shuffle_example_loss),
            (mlm_log_probs, sop_log_probs, shuffle_probs))
コード例 #3
0
#!/usr/bin/python

import modeling
import simulating


# --- model construction ---

model = modeling.Model("solid_gol_pheno_imperfectDS")

# Agents: a population of cells living in a single shared medium.
model.addAgent(name="Cell", unique=False,
               properties=["CC_length", "is_alive", "death_type"])
model.addAgent(name="Medium", unique=True, properties=["messenger"])

# Spatial resolution: the medium is a 2-D grid of 50 x 50 with 0.5 steps.
model.resolveAsGrid(agent="Medium", two_dim=True,
                    length_x=50.0, length_y=50.0, step_length=0.5)

# Cells are resolved as spheres (feature currently being implemented).
model.resolveAsSphere(agent="Cell", two_dim=True,
                      length_x=50.0, length_y=50.0, max_radius=1.0)

# Continuous dynamics: first-order degradation of the messenger
# (rate parameter defaults to 0, i.e. no degradation unless overridden).
model.addContinuousChange(name="mess_degrad", agent="Medium",
                          property="messenger",
                          parameters={"mess_degrad_rate": 0.},
                          rate_expression="- mess_degrad_rate * messenger")

# Continuous dynamics: each cell produces messenger at a constant rate.
model.addContinuousChange(name="mess_prod", agent_source="Cell",
                          agent="Medium", property="messenger",
                          parameters={"mess_prod_rate_per_cell": 1.0},
                          rate_expression="mess_prod_rate_per_cell")
コード例 #4
0
#!/usr/bin/python

import modeling
import simulating


# --- model construction ---

model = modeling.Model("solid_gol_pheno_ideal")

# Single non-unique agent: a cell carrying radius/position (R, X, Y),
# cell-cycle length, liveness flag, density sensing, and death type.
model.addAgent(
    name="Cell",
    unique=False,
    properties=[
        "R", "X", "Y", "CC_length", "is_alive", "density_sensing",
        "death_type"
    ],
)
# event: cell division
model.addDeterministicEvent(
    name="cell_division",
    agent="Cell",
    kind="creation",
    parameters={
        "CC_avg": 3.0,
        "CC_std": 0.25,
        "R_birth": 0.5**(1. / 3.)
    },
    trigger_expression="R * R * R > 2. * R_birth * R_birth * R_birth",
    realization_function=[
        "new.R = R_birth", "R = new.R",
        "new.CC_length = ran.norm ( CC_avg , CC_std )",
コード例 #5
0
ファイル: liquid_gol.py プロジェクト: fbertaux/CellPop
#!/usr/bin/python

import modeling
import simulating


# --- model construction ---

model = modeling.Model("liquid_gol")

# Agents: a cell population inside a single well-mixed medium.
model.addAgent(name="Cell", unique=False, properties=["age", "CC_length"])
model.addAgent(name="Medium", unique=True, properties=["IP"])

# Event: cell division. A cell divides deterministically once its age
# exceeds its cell-cycle length; both mother and daughter draw a fresh
# cycle length from a normal distribution N(CC_avg, CC_std).
model.addDeterministicEvent(
    name="cell_division",
    agent="Cell",
    kind="creation",
    parameters={"CC_avg": 3.0, "CC_std": 0.25},
    trigger_expression="age > CC_length",
    realization_function=[
        "new.age = age - CC_length",
        "age = new.age",
        "new.CC_length = ran.norm ( CC_avg , CC_std ) ",
        "CC_length = ran.norm ( CC_avg , CC_std ) ",
    ],
)

# event: cell death
model.addStochasticEvent ( name="cell_death" ,
						   agent="Cell" , 
						   kind="destruction" , 
						   parameters={ "IP_threshold":1.0 , "IP_death_rate_slope":0.01 } ,
コード例 #6
0
def main(args):
        """End-to-end experiment driver.

        Loads the train/test datasets, selects relevant settings/sensors,
        builds transformed datasets, fits the modeling pipeline and runs
        prediction — logging everything to Neptune.

        NOTE(review): the large commented-out sections below are exploratory
        analysis kept for reference; they are intentionally not executed.
        """

        # Initialize logging and experiment tracking via Neptune.
        log = ns.Neptune(args)
        
        # Create train and test datasets
        train_dataset = dataset.Dataset(log=log, type='train')
        test_dataset = dataset.Dataset(log=log, type='test')


        # Select the settings and sensors that matter; the excluded sensors
        # were presumably found uninformative in prior analysis — TODO confirm.
        selected_settings = ['setting1', 'setting2']
        selected_sensors = [s for s in train_dataset.sensors_head if s not in ['sensor1', 'sensor5', 'sensor6', 'sensor10', 'sensor16', 'sensor18', 'sensor19']]


        

        # Create train and test analysis
        # train_dataset_analysis = analysis.DatasetAnalysis(log=log, dataset=train_dataset) 
        # test_dataset_analysis = analysis.DatasetAnalysis(log=log, dataset=test_dataset)

        
        # dummy_type_list = [('min', 'mean'), ('min', 'max'), ('mean', 'mean'), ('mean', 'max')]
        # for b, s in dummy_type_list:
        #        dummy_error = train_dataset_analysis.get_dummy_error(based_on=b, stats=s)
        #        log.exp.log_metric(f'dummy {s} error based on {b}', dummy_error)
        #        dummy_percentage_error = train_dataset_analysis.get_dummy_percentage_error(based_on=b, stats=s)
        #        log.exp.log_metric(f'dummy {s} percentage error based on {b}', dummy_percentage_error)
        

        # # Get the array containing the last cycle of each asset
        # train_assets_last_cycles_array = train_dataset.get_assets_last_cycle_array()
        # test_assets_last_cycles_array = test_dataset.get_assets_last_cycle_array()
        # train_dataset.set_assets_last_cycle_statistics()
        # train_dataset_analysis.log_violinchart(train_assets_last_cycles_array, log_category='train-target-charts', plot_name='failure-cycle')
        # train_dataset_analysis.log_boxchart(train_assets_last_cycles_array, log_category='train-target-charts', plot_name='failure-cycle')
        # test_dataset.set_assets_last_cycle_statistics()
        # test_dataset_analysis.log_violinchart(test_assets_last_cycles_array, log_category='test-target-charts', plot_name='failure-cycle')
        # test_dataset_analysis.log_boxchart(test_assets_last_cycles_array, log_category='test-target-charts', plot_name='failure-cycle')


        # for ss in selected_settings:
        #        train_dataset_analysis.log_feature_linechart_for_asset(asset_id=75, feature_name=ss)
        # for ss in selected_sensors:
        #        train_dataset_analysis.log_feature_linechart_for_asset(asset_id=75, feature_name=ss)                
        #        train_dataset_analysis.log_sensor_failure_value_linechart_for_assets(sensor_name=ss)       
        #        train_sensor_failure_values_array = train_dataset.get_sensors_last_value_for_assets(sensor_name=ss)
        #        train_dataset_analysis.log_violinchart(train_sensor_failure_values_array[:, 1], log_category='train-features-charts', plot_name=f'{ss}-failure-assets')
        #        train_dataset_analysis.log_boxchart(train_sensor_failure_values_array[:, 1], log_category='train-features-charts', plot_name=f'{ss}-failure-assets')
        # for ss in selected_settings:
        #        test_dataset_analysis.log_feature_linechart_for_asset(asset_id=75, feature_name=ss)
        # for ss in selected_sensors:
        #        test_dataset_analysis.log_feature_linechart_for_asset(asset_id=75, feature_name=ss)                
        #        test_dataset_analysis.log_sensor_failure_value_linechart_for_assets(sensor_name=ss)       
        #        test_sensor_failure_values_array = test_dataset.get_sensors_last_value_for_assets(sensor_name=ss)
        #        test_dataset_analysis.log_violinchart(test_sensor_failure_values_array[:, 1], log_category='test-features-charts', plot_name=f'{ss}-failure-assets')
        #        test_dataset_analysis.log_boxchart(test_sensor_failure_values_array[:, 1], log_category='test-features-charts', plot_name=f'{ss}-failure-assets')


        


        # Create train and test transformed datasets restricted to the
        # selected settings/sensors.

        train_transformed_dataset = dataset.TransformedDataset(log=log, dataset=train_dataset, selected_settings_sensors_tuple=(selected_settings, selected_sensors))
        # print(f'train_dataset.dataframe has nan: {train_transformed_dataset.dataframe.isnull().values.any()}')
        # print(train_transformed_dataset.dataframe)
        # print(f'\ntrain_transformed_dataset.dataframe shape: {train_transformed_dataset.dataframe.shape}\n')
        # print(train_transformed_dataset.dataframe.dtypes)        
        # print(train_transformed_dataset.dataframe.describe())

        test_transformed_dataset = dataset.TransformedDataset(log=log, dataset=test_dataset, selected_settings_sensors_tuple=(selected_settings, selected_sensors))  
        # print(f'test_dataset.dataframe has nan: {test_transformed_dataset.dataframe.isnull().values.any()}')   
        # print(test_transformed_dataset.dataframe)
        # print(f'\ntest_transformed_dataset.dataframe shape: {test_transformed_dataset.dataframe.shape}\n')
        # print(test_transformed_dataset.dataframe.dtypes)        
        # print(test_transformed_dataset.dataframe.describe())



        

        ## Create train transformed dataset analysis
        # train_transformed_dataset_analysis = analysis.TransformedDatasetAnalysis(log=log, transformed_dataset=train_transformed_dataset)

        # train_transformed_dataset_analysis.log_correlation_matrix(log_category='transformed-dataset-train-charts')    

        # feature_array = train_transformed_dataset.get_feature_array(feature_name='monitoring-cycle')
        # train_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-train-charts', plot_name='monitoring-cycle')
        # train_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-train-charts', plot_name='monitoring-cycle')
        # train_transformed_dataset_analysis.log_scatterchart(feature_name='monitoring-cycle', log_category='transformed-dataset-train-charts')

        # feature_array = train_transformed_dataset.get_feature_array(feature_name='rul')
        # train_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-train-charts', plot_name='rul')
        # train_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-train-charts', plot_name='rul')
        # train_transformed_dataset_analysis.log_scatterchart(feature_name='rul', log_category='transformed-dataset-train-charts')

        # for ss in selected_settings:
        #         feature_array = train_transformed_dataset.get_feature_array(feature_name=ss)
        #         train_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_scatterchart(feature_name=ss, log_category='transformed-dataset-train-charts')

        # for ss in selected_sensors:
        #         feature_array = train_transformed_dataset.get_feature_array(feature_name=ss)
        #         train_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_scatterchart(feature_name=ss, log_category='transformed-dataset-train-charts')

        # for ss in train_transformed_dataset.selected_sensors_time_derivative:        
        #         feature_array = train_transformed_dataset.get_feature_array(feature_name=ss)
        #         train_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-train-charts', plot_name=f'{ss}')
        #         train_transformed_dataset_analysis.log_scatterchart(feature_name=ss, log_category='transformed-dataset-train-charts')

        


        # # Create train transformed dataset analysis
        # test_transformed_dataset_analysis = analysis.TransformedDatasetAnalysis(log=log, transformed_dataset=test_transformed_dataset)
      

        # feature_array = test_transformed_dataset.get_feature_array(feature_name='monitoring-cycle')
        # test_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-test-charts', plot_name='monitoring-cycle')
        # test_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-test-charts', plot_name='monitoring-cycle')

        # for ss in selected_settings:
        #         feature_array = test_transformed_dataset.get_feature_array(feature_name=ss)
        #         test_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')
        #         test_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')

        # for ss in selected_sensors:
        #         feature_array = test_transformed_dataset.get_feature_array(feature_name=ss)
        #         test_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')
        #         test_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')

        # for ss in test_transformed_dataset.selected_sensors_time_derivative:        
        #         feature_array = test_transformed_dataset.get_feature_array(feature_name=ss)
        #         test_transformed_dataset_analysis.log_violinchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')
        #         test_transformed_dataset_analysis.log_boxchart(feature_array, log_category='transformed-dataset-test-charts', plot_name=f'{ss}')



        # Modeling: preprocess -> pipeline -> fitted model (all on train data).
        train_preprocess_pipeline = modeling.DataPreprocessPipeline(log=log, transformed_dataset=train_transformed_dataset)
        train_modeling_pipeline = modeling.ModelingPipeline(log=log, data_preprocess_pipeline=train_preprocess_pipeline)
        train_model = modeling.Model(log=log, modeling_pipeline=train_modeling_pipeline)


        # Predicting: apply the trained model to the test transformed dataset.

        prediction = modeling.PredictionPipeline(log=log, transformed_dataset=test_transformed_dataset, model=train_model, chosen_model_name=args.chosen_model_name, output_filename=args.output_filename)