def ExecuteRefinement_Task(pickled_model_coarse, pickled_parameters, min_size,
                           max_size):
    sample = GenerateSample()
    # overwrite the old model serializer with the unpickled one
    model_serializer_coarse = pickle.loads(pickled_model_coarse)
    model_coarse = KratosMultiphysics.Model()
    model_serializer_coarse.Load("ModelSerialization", model_coarse)
    del model_serializer_coarse
    # overwrite the old parameters serializer with the unpickled one
    serialized_parameters = pickle.loads(pickled_parameters)
    parameters_refinement = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization",
                               parameters_refinement)
    del serialized_parameters
    simulation_coarse = MonteCarloAnalysis(model_coarse, parameters_refinement,
                                           sample)
    simulation_coarse.Run()
    QoI = EvaluateQuantityOfInterest(simulation_coarse)
    # refine
    model_refined = refinement.compute_refinement_from_analysisstage_object(
        simulation_coarse, min_size, max_size)
    # initialize
    simulation = MonteCarloAnalysis(model_refined, parameters_refinement,
                                    sample)
    simulation.Initialize()
    # serialize model and pickle it
    serialized_model = KratosMultiphysics.StreamSerializer()
    serialized_model.Save("ModelSerialization", simulation.model)
    pickled_model_refined = pickle.dumps(serialized_model, 2)  # protocol 2 is required by pybind11
    return QoI, pickled_model_refined
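The serialize-pickle-unpickle-load round trip above recurs throughout these examples. A minimal sketch of the pattern, assuming KratosMultiphysics is available (the tag passed to Save and Load is an arbitrary label, but it must match on both sides):

import pickle
import KratosMultiphysics

model = KratosMultiphysics.Model()
model.CreateModelPart("Main")

# serialize the model, then pickle the serializer itself
serializer = KratosMultiphysics.StreamSerializer()
serializer.Save("ModelSerialization", model)
pickled = pickle.dumps(serializer, 2)  # protocol 2 is required by pybind11

# receiving side: unpickle the serializer and rebuild the model from it
restored_serializer = pickle.loads(pickled)
restored_model = KratosMultiphysics.Model()
restored_serializer.Load("ModelSerialization", restored_model)
assert restored_model.HasModelPart("Main")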
Example #2
    def restore_object(cls, obj):
        """
        recreate objects bases on classes currently avaliable
        """

        if obj == None:
            return obj

        for a in cls.allowed:
            if isinstance(obj, a):
                return obj

        if isinstance(obj, list):
            return [cls.restore_object(x) for x in obj]

        if isinstance(obj, dict):

            if "_type" in obj:
                t = obj["_type"]

                # restore registered classes
                if t in cls.classes and "_content" in obj:
                    model = cls.classes[t]
                    return model.serial_loads(obj)

            # restore pickled classes (str() here and iteritems() below
            # are Python 2 idioms; see Example #5 for the Python 3 version)
            if "_pickle" in obj:
                # object restoration
                ins = pickle.loads(str(obj["_pickle"]))

                return ins
            else:
                # recursive dictionary restore
                return {x: cls.restore_object(y) for x, y in obj.iteritems()}
Example #3
    def normalize_rpc(self, rpc):
        # validate that the payload unpickles to a dict, but return the
        # raw bytes unchanged either way
        try:
            _rpc = pickle.loads(rpc)
            assert isinstance(_rpc, dict), 'RPC request format error.'
            return rpc
        except Exception:
            return rpc
Example #4
def ExecuteExactMonteCarloAnalysis_Task(pickled_model, pickled_parameters):
    # overwrite the old model serializer with the unpickled one
    model_serializer = pickle.loads(pickled_model)
    current_model = KratosMultiphysics.Model()
    model_serializer.Load("ModelSerialization", current_model)
    del model_serializer
    # overwrite the old parameters serializer with the unpickled one
    serialized_parameters = pickle.loads(pickled_parameters)
    current_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", current_parameters)
    del serialized_parameters
    sample = 1.0
    simulation = MonteCarloAnalysis(current_model, current_parameters, sample)
    simulation.Run()
    ExactExpectedValueQoI = 0.25 * EvaluateQuantityOfInterest(simulation)
    return ExactExpectedValueQoI
Example #5
    def restore_object(cls, obj):
        """
        recreate objects bases on classes currently avaliable
        """

        if obj == None:
            return obj

        for a in cls.allowed:
            if isinstance(obj, a):
                return obj

        if isinstance(obj, list):
            return [cls.restore_object(x) for x in obj]

        if isinstance(obj, dict):

            if "_type" in obj:
                t = obj["_type"]

                # restore registered classes
                if t in cls.classes and "_content" in obj:
                    model = cls.classes[t]
                    return model.serial_loads(obj)

            # restore pickled classes
            if "_pickle" in obj:
                # object restoration: the pickle payload is stored as text,
                # so re-encode it to bytes before loading
                ins = pickle.loads(str(obj["_pickle"]).encode("utf-8"))

                return ins
            else:
                # recursive dictionary restore
                return {x: cls.restore_object(y) for x, y in obj.items()}
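restore_object above rebuilds a decoded structure through three escape hatches: types whitelisted in cls.allowed pass through unchanged, dicts tagged "_type"/"_content" go through a registered class's serial_loads, and dicts tagged "_pickle" are unpickled from text. A runnable sketch with a hypothetical host class (Registry and its attribute values are assumptions, not part of the original source):

import pickle

class Registry(object):
    allowed = (int, float, str, bool)  # types passed through unchanged (assumed whitelist)
    classes = {}                       # registered classes, keyed by their "_type" tag

    @classmethod
    def restore_object(cls, obj):
        if obj is None:
            return obj
        for a in cls.allowed:
            if isinstance(obj, a):
                return obj
        if isinstance(obj, list):
            return [cls.restore_object(x) for x in obj]
        if isinstance(obj, dict):
            if "_type" in obj and obj["_type"] in cls.classes and "_content" in obj:
                return cls.classes[obj["_type"]].serial_loads(obj)
            if "_pickle" in obj:
                return pickle.loads(str(obj["_pickle"]).encode("utf-8"))
            return {x: cls.restore_object(y) for x, y in obj.items()}

# protocol-0 pickles are ASCII-only, so they can travel as text (e.g. inside JSON)
blob = pickle.dumps((1, 2, 3), 0).decode("utf-8")
doc = {"n": 3, "values": [1.0, 2.0], "nested": {"_pickle": blob}}
print(Registry.restore_object(doc))
# -> {'n': 3, 'values': [1.0, 2.0], 'nested': (1, 2, 3)}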
Example #6
def ExecuteInstance_Task(pickled_model, pickled_parameters, heat_flux_list, instance):
    # overwrite the old model serializer with the unpickled one
    model_serializer = pickle.loads(pickled_model)
    current_model = KratosMultiphysics.Model()
    model_serializer.Load("ModelSerialization", current_model)
    del model_serializer
    # overwrite the old parameters serializer with the unpickled one
    serialized_parameters = pickle.loads(pickled_parameters)
    current_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", current_parameters)
    del serialized_parameters
    # take the sample corresponding to this instance
    sample = GetValueFromListList(heat_flux_list, instance)
    simulation = SimulationScenario(current_model, current_parameters, sample)
    simulation.Run()
    QoI = simulation.EvaluateQuantityOfInterest()
    return QoI
Example #7
def ExecuteInstanceAux_Task(pickled_model, pickled_project_parameters, current_analysis_stage, current_level):
    # overwrite the old model serializer with the unpickled one
    model_serializer = pickle.loads(pickled_model)
    current_model = KratosMultiphysics.Model()
    model_serializer.Load("ModelSerialization", current_model)
    del model_serializer
    # overwrite the old parameters serializer with the unpickled one
    serialized_project_parameters = pickle.loads(pickled_project_parameters)
    current_project_parameters = KratosMultiphysics.Parameters()
    serialized_project_parameters.Load("ParametersSerialization", current_project_parameters)
    del serialized_project_parameters
    # initialize the MonteCarloResults class
    mc_results_class = MonteCarloResults(current_level)
    sample = generator.GenerateSample()
    simulation = current_analysis_stage(current_model, current_project_parameters, sample)
    simulation.Run()
    QoI = simulation.EvaluateQuantityOfInterest()
    mc_results_class.QoI[current_level].append(QoI)  # save in the list for this level; plain MC uses only level 0
    return mc_results_class
Example #8
def ExecuteInstanceAux_Task(pickled_model, pickled_project_parameters, sample,
                            current_analysis_stage, current_level):
    time_0 = time.time()
    # overwrite the old model serializer with the unpickled one
    serialized_model = pickle.loads(pickled_model)
    current_model = KratosMultiphysics.Model()
    serialized_model.Load("ModelSerialization", current_model)
    del serialized_model
    # overwrite the old parameters serializer with the unpickled one
    serialized_project_parameters = pickle.loads(pickled_project_parameters)
    current_project_parameters = KratosMultiphysics.Parameters()
    serialized_project_parameters.Load("ParametersSerialization",
                                       current_project_parameters)
    del serialized_project_parameters
    time_1 = time.time()
    # initialize the MonteCarloResults class
    mc_results_class = MonteCarloResults(current_level)
    simulation = current_analysis_stage(current_model,
                                        current_project_parameters, sample)
    simulation.Run()
    QoI = simulation.EvaluateQuantityOfInterest()
    time_2 = time.time()
    mc_results_class.QoI[current_level].append(
        QoI)  # save in the list for this level; plain MC uses only level 0
    # post process execution times
    # print("\n","#"*50," EXECUTE INSTANCE TASK TIMES ","#"*50,"\n")
    deserialization_time = time_1 - time_0
    Kratos_run_time = time_2 - time_1
    total_task_time = time_2 - time_0
    # print("[LEVEL] current level:",current_level)
    # print("[TIMER] total task time:", total_task_time)
    # print("[TIMER] Kratos run time:",Kratos_run_time)
    # print("[TIMER] Serializer time:",deserialization_time)
    # print("RATIOs: time of interest / total task time")
    # print("[RATIO] Relative serializer time:",deserialization_time/total_task_time)
    # print("[RATIO] Relative Kratos run time:",Kratos_run_time/total_task_time)
    # print("\n","#"*50," END EXECUTE INSTANCE TASK TIMES ","#"*50,"\n")
    # end post process execution times
    return mc_results_class
Example #9
    def receive(self):
        while True:
            try:
                data, addr = self.socket.recvfrom(1024)
                r = pickle.loads(data)
                for i in r:
                    if i not in self.addrUser:
                        print(addr, "new client")
                        self.addrUser[i] = True
                return r
            except socket.error:
                break
        return {}
Example #10
    def __getitem__(self, url):
        """Load data from disk for this URL."""
        path = self.url_to_path(url)
        if os.path.exists(path):
            with open(path, 'rb') as fp:
                data = fp.read()
                if self.compress:
                    data = zlib.decompress(data)
                result, timestamp = pickle.loads(data)
                print(result, 'exist')
                # check the expiry time of the cached file (using the custom has_expired() helper)
                if self.has_expired(timestamp):
                    raise KeyError(url + ' has expired')
                return result
        else:
            # the URL has no cached file on disk
            raise KeyError(url + ' does not exist')
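Only the read path is shown above; a matching writer would pickle a (result, timestamp) pair and optionally compress it before writing. An assumed sketch of the counterpart method (not from the original source; it presumes the module imports os, zlib, pickle and datetime, and that has_expired() works on datetime timestamps):

    def __setitem__(self, url, result):
        """Save data to disk for this URL (assumed counterpart to __getitem__)."""
        path = self.url_to_path(url)
        folder = os.path.dirname(path)
        if not os.path.exists(folder):
            os.makedirs(folder)
        # store the result together with its creation time for the expiry check
        data = pickle.dumps((result, datetime.datetime.utcnow()))
        if self.compress:
            data = zlib.compress(data)
        with open(path, 'wb') as fp:
            fp.write(data)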
Example #11
    def __init__(self, collection_name, prefix='db_', ext='pkl'):
        """Build pickle file name and load data if exists.

        :param collection_name: Collection name
        :param prefix: File prefix; defaults to 'db_'
        :param ext: File extension; defaults to 'pkl'

        """
        # Build filename
        self.filename = prefix + collection_name + '.' + ext

        # Initialize empty store
        self.store = {}

        # Load file if exists
        if os.path.exists(self.filename):
            with open(self.filename, 'rb') as fp:
                data = fp.read()
                self.store = pickle.loads(data)
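The constructor above only loads; a matching save method (assumed, not shown in the original) would serialize self.store back to the same file:

    def save(self):
        """Write the in-memory store back to the pickle file (assumed counterpart)."""
        with open(self.filename, 'wb') as fp:
            fp.write(pickle.dumps(self.store))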
Example #13
    def test_model_serialization_with_pickling(self):
        current_model = KratosMultiphysics.Model()

        model_part = current_model.CreateModelPart("Main")
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.TEMPERATURE)
        model_part.CreateSubModelPart("Inlets")
        model_part.CreateSubModelPart("Temp")
        model_part.CreateNewNode(1, 0.0, 0.0, 0.0)
        other = current_model.CreateModelPart("Other")
        other.AddNodalSolutionStepVariable(KratosMultiphysics.PRESSURE)
        other.CreateNewNode(1, 0.0, 0.0, 0.0)

        serializer = KratosMultiphysics.StreamSerializer()
        serializer.Save("ModelSerialization", current_model)
        del current_model

        # here we pickle the serializer
        try:
            import cPickle as pickle  # use cPickle on Python 2.7
        except ImportError:
            import pickle

        # pickle the serialized data
        pickled_data = pickle.dumps(
            serializer, 2
        )  # the second argument is the protocol and is NECESSARY (according to pybind11 docs)

        # overwrite the old serializer with the unpickled one
        serializer = pickle.loads(pickled_data)

        loaded_model = KratosMultiphysics.Model()
        serializer.Load("ModelSerialization", loaded_model)

        self.assertTrue(loaded_model["Main"].HasNodalSolutionStepVariable(
            KratosMultiphysics.TEMPERATURE))
        self.assertTrue(loaded_model["Other"].HasNodalSolutionStepVariable(
            KratosMultiphysics.PRESSURE))

        self.assertTrue(loaded_model.HasModelPart("Main.Inlets"))
        self.assertTrue(loaded_model.HasModelPart("Main.Temp"))
        self.assertTrue(1 in loaded_model["Main"].Nodes)
        self.assertTrue(1 in loaded_model["Other"].Nodes)
Example #14
    def unpickle(self, data):
        try:
            return pickle.loads(str(data))  # str() is only correct on Python 2, where str is bytes
        except Exception:
            return None
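str(data) is only correct on Python 2, where str is the bytes type; on Python 3, pickle.loads requires a bytes-like object. An adapted Python 3 version (an assumption, not from the original source) might look like:

    def unpickle(self, data):
        try:
            if isinstance(data, str):
                # text input, e.g. a protocol-0 pickle stored as a string;
                # latin-1 maps code points 0-255 straight back to bytes
                data = data.encode('latin-1')
            return pickle.loads(data)
        except Exception:
            return None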
Example #15
    def _ToObj(objVacio, datos):  # needed so it can be used generically with complex objects
        return pickle.loads(datos)
Example #17
    def compute(database, model, sid, uid, workdir, hostname, username, password):
        """
    Computes the Time Series model. It fetches the necessary files from a
    remote server that were computed by the slycat-agent-compute-timeseries.py
    script.

    :param database:
    :param model:
    :param sid:      session ID
    :param uid:      user ID
    :param workdir:
    :param hostname:
    :param username:
    :param password:
    """
        workdir += "/slycat/pickle"  # route to the slycat directory
        try:
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model["_id"])
            model["model_compute_time"] = datetime.datetime.utcnow().isoformat()
            slycat.web.server.update_model(database, model)

            sid, inputs = get_remote_file(sid, hostname, username, password,
                                          "%s/slycat_timeseries_%s/arrayset_inputs.pickle" % (workdir, uid))
            inputs = pickle.loads(inputs)

            slycat.web.server.put_model_arrayset(database, model, inputs["aid"])
            attributes = inputs["attributes"]
            slycat.web.server.put_model_array(database, model, inputs["aid"], 0, attributes, inputs["dimensions"])

            sid, data = get_remote_file(sid, hostname, username, password,
                                        "%s/slycat_timeseries_%s/inputs_attributes_data.pickle" % (workdir, uid))
            attributes_data = pickle.loads(data)

            # TODO this can become multi processored
            for attribute in range(len(attributes)):
                slycat.web.server.put_model_arrayset_data(database, model, inputs["aid"], "0/%s/..." % attribute,
                                                          [attributes_data[attribute]])

            clusters = json.loads(
                slycat.web.server.get_remote_file(sid, "%s/slycat_timeseries_%s/file_clusters.json" % (workdir, uid)))
            clusters_file = json.JSONDecoder().decode(clusters["file"])
            timeseries_count = json.JSONDecoder().decode(clusters["timeseries_count"])

            slycat.web.server.post_model_file(model["_id"], True, sid,
                                              "%s/slycat_timeseries_%s/file_clusters.out" % (workdir, uid),
                                              clusters["aid"], clusters["parser"])
            # TODO this can become multi processored
            for file_name in clusters_file:
                sid, file_cluster_data = get_remote_file(sid, hostname, username, password,
                                                         "%s/slycat_timeseries_%s/file_cluster_%s.json" % (
                                                             workdir, uid, file_name))
                file_cluster_attr = json.loads(file_cluster_data)
                slycat.web.server.post_model_file(model["_id"], True, sid,
                                                  "%s/slycat_timeseries_%s/file_cluster_%s.out" % (workdir, uid, file_name),
                                                  file_cluster_attr["aid"], file_cluster_attr["parser"])

                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model["_id"])
                slycat.web.server.put_model_arrayset(database, model, "preview-%s" % file_name)

                sid, waveform_dimensions_data = get_remote_file(sid, hostname, username, password,
                                                                "%s/slycat_timeseries_%s/waveform_%s_dimensions.pickle" % (
                                                                    workdir, uid, file_name))
                waveform_dimensions_array = pickle.loads(waveform_dimensions_data)
                sid, waveform_attributes_data = get_remote_file(sid, hostname, username, password,
                                                                "%s/slycat_timeseries_%s/waveform_%s_attributes.pickle" % (
                                                                    workdir, uid, file_name))
                waveform_attributes_array = pickle.loads(waveform_attributes_data)
                sid, waveform_times_data = get_remote_file(sid, hostname, username, password,
                                                           "%s/slycat_timeseries_%s/waveform_%s_times.pickle" % (
                                                               workdir, uid, file_name))
                waveform_times_array = pickle.loads(waveform_times_data)
                sid, waveform_values_data = get_remote_file(sid, hostname, username, password,
                                                            "%s/slycat_timeseries_%s/waveform_%s_values.pickle" % (
                                                                workdir, uid, file_name))
                waveform_values_array = pickle.loads(waveform_values_data)

                cherrypy.log.error("timeseries_count=%s" % timeseries_count)

                # TODO this can become multi processored
                for index in range(int(timeseries_count)):
                    try:
                        slycat.web.server.put_model_array(database, model, "preview-%s" % file_name, index,
                                                          waveform_attributes_array[index],
                                                          waveform_dimensions_array[index])
                        slycat.web.server.put_model_arrayset_data(database, model, "preview-%s" % file_name,
                                                                  "%s/0/...;%s/1/..." % (index, index),
                                                                  [waveform_times_array[index],
                                                                   waveform_values_array[index]])
                    except Exception:
                        cherrypy.log.error("failed on index: %s" % index)
                        pass

        except Exception:
            fail_model(model["_id"], "Timeseries model compute exception: %s" % sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception type: %s" % sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception value: %s" % sys.exc_info()[1])
            cherrypy.log.error("Timeseries model compute exception traceback: %s" % sys.exc_info()[2])
Example #18
# encoding: utf-8

try:
    import cPickle as pickle
except ImportError:
    import pickle
import pprint


data = [{'a': 'A', 'b': 2.0, 'c': 2.5}]
print 'BEFORE Data:',
pprint.pprint(data)

# the default protocol-0 pickle stream contains only ASCII characters
data_string = pickle.dumps(data)
data2 = pickle.loads(data_string)
print 'AFTER pickle: ',
pprint.pprint(data2)

print 'SAME?:', (data is data2)
print 'EQL?:', (data == data2)

Example #19
    def compute(model_id, password):
        """
        Computes the Time Series model. It fetches the necessary files from a
        remote server that were computed by the slycat-agent-compute-timeseries.py
        script.
    
        :param model_id: uid for the model in the database
        :param sid:      session ID
        :param uid:      user ID
        :param workdir:
        :param hostname:
        :param username:
        :param password:
        """
        cherrypy.log.error("in thread")
        # workdir += "/slycat/pickle"  # route to the slycat directory
        database = slycat.web.server.database.couchdb.connect()
        model = database.get("model", model_id)
        slycat.web.server.put_model_parameter(database, model, "computing", True)
        computing = True

        database = slycat.web.server.database.couchdb.connect()
        model = database.get("model", model_id)
        model["model_compute_time"] = datetime.datetime.utcnow().isoformat()
        slycat.web.server.update_model(database, model, state="waiting")
        tries = 10
        while computing:
            tries = tries - 1
            if tries <= 0:
                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model_id)
                slycat.web.server.put_model_parameter(database, model, "computing", False)
                computing = False
                fail_model(model_id, "Exceeded max number of tries to pull data over to the server.")
                cherrypy.log.error("[TIMESERIES] Exceeded max number of tries to pull data.")
                raise Exception("Exceeded max number of tries to pull data over to the server.")
            try:
                uid = slycat.web.server.get_model_parameter(database, model, "pickle_uid")
                workdir_raw = slycat.web.server.get_model_parameter(database, model, "working_directory")
                workdir = workdir_raw + "pickle"
                hostname = slycat.web.server.get_model_parameter(database, model, "hostname")
                username = slycat.web.server.get_model_parameter(database, model, "username")

                # get an active session
                sid = get_sid(hostname, model)

                cherrypy.log.error("sid:%s uid:%s work_dir:%s host:%s user:%s" % (
                    sid, uid, workdir, hostname, username))
                inputs = get_remote_file_server(model["creator"], sid,
                                                "%s/slycat_timeseries_%s/arrayset_inputs.pickle" % (workdir, uid))
                cherrypy.log.error("got inputs")
                inputs = pickle.loads(inputs)

                slycat.web.server.put_model_arrayset(database, model, inputs["aid"])
                attributes = inputs["attributes"]
                slycat.web.server.put_model_array(database, model, inputs["aid"], 0, attributes, inputs["dimensions"])

                data = get_remote_file_server(model["creator"], sid,
                                              "%s/slycat_timeseries_%s/inputs_attributes_data.pickle" % (workdir, uid))
                attributes_data = pickle.loads(data)

                # TODO this can become multi processored
                for attribute in range(len(attributes)):
                    slycat.web.server.put_model_arrayset_data(database, model, inputs["aid"], "0/%s/..." % attribute,
                                                              [attributes_data[attribute]])

                clusters = json.loads(
                    slycat.web.server.get_remote_file_server(model["creator"], sid,
                                                      "%s/slycat_timeseries_%s/file_clusters.json" % (workdir, uid)))
                clusters_file = json.JSONDecoder().decode(clusters["file"])
                timeseries_count = json.JSONDecoder().decode(clusters["timeseries_count"])

                slycat.web.server.post_model_file(model["_id"], True, sid,
                                                  "%s/slycat_timeseries_%s/file_clusters.out" % (workdir, uid),
                                                  clusters["aid"], clusters["parser"], client=model["creator"])
                # TODO this can become multi processored
                cherrypy.log.error("Pulling timeseries computed data")
                for file_name in clusters_file:
                    file_cluster_data = get_remote_file_server(model["creator"], sid,
                                                               "%s/slycat_timeseries_%s/file_cluster_%s.json" % (
                                                                   workdir, uid, file_name))
                    file_cluster_attr = json.loads(file_cluster_data)
                    slycat.web.server.post_model_file(model["_id"], True, sid,
                                                      "%s/slycat_timeseries_%s/file_cluster_%s.out" % (
                                                          workdir, uid, file_name),
                                                      file_cluster_attr["aid"], file_cluster_attr["parser"], client=model["creator"])
                    database = slycat.web.server.database.couchdb.connect()
                    model = database.get("model", model["_id"])
                    slycat.web.server.put_model_arrayset(database, model, "preview-%s" % file_name)

                    waveform_dimensions_data = get_remote_file_server(model["creator"], sid,
                                                                           "%s/slycat_timeseries_%s/waveform_%s_dimensions"
                                                                           ".pickle" % (
                                                                               workdir, uid, file_name))
                    waveform_dimensions_array = pickle.loads(waveform_dimensions_data)
                    waveform_attributes_data = get_remote_file_server(model["creator"], sid,
                                                                           "%s/slycat_timeseries_%s/waveform_%s_attributes"
                                                                           ".pickle" % (
                                                                               workdir, uid, file_name))
                    waveform_attributes_array = pickle.loads(waveform_attributes_data)
                    waveform_times_data = get_remote_file_server(model["creator"], sid,
                                                                      "%s/slycat_timeseries_%s/waveform_%s_times"
                                                                      ".pickle" % (
                                                                          workdir, uid, file_name))
                    waveform_times_array = pickle.loads(waveform_times_data)
                    waveform_values_data = get_remote_file_server(model["creator"], sid,
                                                                       "%s/slycat_timeseries_%s/waveform_%s_values"
                                                                       ".pickle" % (
                                                                           workdir, uid, file_name))
                    waveform_values_array = pickle.loads(waveform_values_data)

                    # cherrypy.log.error("timeseries_count=%s" % timeseries_count)

                    # TODO this can become multi processored
                    for index in range(int(timeseries_count)):
                        try:
                            slycat.web.server.put_model_array(database, model, "preview-%s" % file_name, index,
                                                              waveform_attributes_array[index],
                                                              waveform_dimensions_array[index])
                            slycat.web.server.put_model_arrayset_data(database, model, "preview-%s" % file_name,
                                                                      "%s/0/...;%s/1/..." % (index, index),
                                                                      [waveform_times_array[index],
                                                                       waveform_values_array[index]])
                        except:
                            cherrypy.log.error("failed on index: %s" % index)
                            pass
                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model_id)
                model["computing"] = False
                slycat.web.server.update_model(database, model)
                computing = False
                cherrypy.log.error("finnished Pulling timeseries computed data")
                # TODO add finished to the model state
                # TODO add remove dir command by uncommenting below
                # payload = {
                #     "action": "run_remote_command",
                #     "command": ("rm -rf %s" % workdir_raw)
                # }
            except cherrypy._cperror.HTTPError as e:
                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model_id)
                slycat.web.server.put_model_parameter(database, model, "computing", False)
                cherrypy.log.error("Timeseries model compute exception type: %s" % sys.exc_info()[0])
                cherrypy.log.error("Timeseries model compute exception value: %s" % sys.exc_info()[1])
                cherrypy.log.error("Timeseries model compute exception traceback: %s" % sys.exc_info()[2])
                raise Exception(str(e))
            except Exception as e:
                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model_id)
                slycat.web.server.put_model_parameter(database, model, "computing", False)
                cherrypy.log.error("Timeseries model compute exception type: %s" % sys.exc_info()[0])
                cherrypy.log.error("Timeseries model compute exception value: %s" % sys.exc_info()[1])
                cherrypy.log.error("Timeseries model compute exception traceback: %s" % sys.exc_info()[2])
                # fail_model(model_id, "Timeseries model compute exception: %s" % sys.exc_info()[0])
                raise Exception(str(e))

        database = slycat.web.server.database.couchdb.connect()
        model = database.get("model", model_id)
        slycat.web.server.delete_model_parameter(database, model, "computing")
Example #20
    def compute(database, model, sid, uid, workdir, hostname, username,
                password):
        """
    Computes the Time Series model. It fetches the necessary files from a
    remote server that were computed by the slycat-agent-compute-timeseries.py
    script.

    :param database:
    :param model:
    :param sid:      session ID
    :param uid:      user ID
    :param workdir:
    :param hostname:
    :param username:
    :param password:
    """
        workdir += "/slycat/pickle"  # route to the slycat directory
        try:
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model["_id"])
            model["model_compute_time"] = datetime.datetime.utcnow().isoformat(
            )
            slycat.web.server.update_model(database, model)

            sid, inputs = get_remote_file(
                sid, hostname, username, password,
                "%s/slycat_timeseries_%s/arrayset_inputs.pickle" %
                (workdir, uid))
            inputs = pickle.loads(inputs)

            slycat.web.server.put_model_arrayset(database, model,
                                                 inputs["aid"])
            attributes = inputs["attributes"]
            slycat.web.server.put_model_array(database, model, inputs["aid"],
                                              0, attributes,
                                              inputs["dimensions"])

            sid, data = get_remote_file(
                sid, hostname, username, password,
                "%s/slycat_timeseries_%s/inputs_attributes_data.pickle" %
                (workdir, uid))
            attributes_data = pickle.loads(data)

            # TODO this can become multi processored
            for attribute in range(len(attributes)):
                slycat.web.server.put_model_arrayset_data(
                    database, model, inputs["aid"], "0/%s/..." % attribute,
                    [attributes_data[attribute]])

            clusters = json.loads(
                slycat.web.server.get_remote_file(
                    sid, "%s/slycat_timeseries_%s/file_clusters.json" %
                    (workdir, uid)))
            clusters_file = json.JSONDecoder().decode(clusters["file"])
            timeseries_count = json.JSONDecoder().decode(
                clusters["timeseries_count"])

            slycat.web.server.post_model_file(
                model["_id"], True, sid,
                "%s/slycat_timeseries_%s/file_clusters.out" % (workdir, uid),
                clusters["aid"], clusters["parser"])
            # TODO this can become multi processored
            for file_name in clusters_file:
                sid, file_cluster_data = get_remote_file(
                    sid, hostname, username, password,
                    "%s/slycat_timeseries_%s/file_cluster_%s.json" %
                    (workdir, uid, file_name))
                file_cluster_attr = json.loads(file_cluster_data)
                slycat.web.server.post_model_file(
                    model["_id"], True, sid,
                    "%s/slycat_timeseries_%s/file_cluster_%s.out" %
                    (workdir, uid, file_name), file_cluster_attr["aid"],
                    file_cluster_attr["parser"])

                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model["_id"])
                slycat.web.server.put_model_arrayset(database, model,
                                                     "preview-%s" % file_name)

                sid, waveform_dimensions_data = get_remote_file(
                    sid, hostname, username, password,
                    "%s/slycat_timeseries_%s/waveform_%s_dimensions.pickle" %
                    (workdir, uid, file_name))
                waveform_dimensions_array = pickle.loads(
                    waveform_dimensions_data)
                sid, waveform_attributes_data = get_remote_file(
                    sid, hostname, username, password,
                    "%s/slycat_timeseries_%s/waveform_%s_attributes.pickle" %
                    (workdir, uid, file_name))
                waveform_attributes_array = pickle.loads(
                    waveform_attributes_data)
                sid, waveform_times_data = get_remote_file(
                    sid, hostname, username, password,
                    "%s/slycat_timeseries_%s/waveform_%s_times.pickle" %
                    (workdir, uid, file_name))
                waveform_times_array = pickle.loads(waveform_times_data)
                sid, waveform_values_data = get_remote_file(
                    sid, hostname, username, password,
                    "%s/slycat_timeseries_%s/waveform_%s_values.pickle" %
                    (workdir, uid, file_name))
                waveform_values_array = pickle.loads(waveform_values_data)

                cherrypy.log.error("timeseries_count=%s" % timeseries_count)

                # TODO this can become multi processored
                for index in range(int(timeseries_count)):
                    try:
                        slycat.web.server.put_model_array(
                            database, model, "preview-%s" % file_name, index,
                            waveform_attributes_array[index],
                            waveform_dimensions_array[index])
                        slycat.web.server.put_model_arrayset_data(
                            database, model, "preview-%s" % file_name,
                            "%s/0/...;%s/1/..." % (index, index), [
                                waveform_times_array[index],
                                waveform_values_array[index]
                            ])
                    except Exception:
                        cherrypy.log.error("failed on index: %s" % index)
                        pass

        except Exception:
            fail_model(
                model["_id"],
                "Timeseries model compute exception: %s" % sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception type: %s" %
                               sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception value: %s" %
                               sys.exc_info()[1])
            cherrypy.log.error(
                "Timeseries model compute exception traceback: %s" %
                sys.exc_info()[2])
Example #21
def ExecuteMultilevelMonteCarloAnalisys_Task(
        current_MLMC_level, pickled_coarse_model, pickled_coarse_parameters,
        size_meshes, pickled_settings_metric_refinement,
        pickled_settings_remesh_refinement, sample, current_level,
        mlmc_results_class):
    # unpickle model and build Kratos Model object
    serialized_model = pickle.loads(pickled_coarse_model)
    current_model = KratosMultiphysics.Model()
    serialized_model.Load("ModelSerialization", current_model)
    del serialized_model
    # unpickle parameters and build Kratos Parameters object
    serialized_parameters = pickle.loads(pickled_coarse_parameters)
    current_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", current_parameters)
    del serialized_parameters
    # start time
    start_MLMC_time = time.time()
    # if current_level > 0, refine adaptively based on the solution of the previous level
    if current_level > 0:
        # unpickle metric and remesh refinement parameters and build Kratos Parameters objects
        settings_metric_refinement_serializer = pickle.loads(
            pickled_settings_metric_refinement)
        settings_remesh_refinement_serializer = pickle.loads(
            pickled_settings_remesh_refinement)
        current_settings_metric_refinement = KratosMultiphysics.Parameters()
        current_settings_remesh_refinement = KratosMultiphysics.Parameters()
        settings_metric_refinement_serializer.Load(
            "MetricRefinementParametersSerialization",
            current_settings_metric_refinement)
        settings_remesh_refinement_serializer.Load(
            "RemeshRefinementParametersSerialization",
            current_settings_remesh_refinement)
        del (settings_metric_refinement_serializer,
             settings_remesh_refinement_serializer)
        # refine the model Kratos object
        refined_model, refined_parameters = refinement.compute_refinement_hessian_metric(
            current_model, current_parameters, size_meshes[current_level],
            size_meshes[current_level - 1], current_settings_metric_refinement,
            current_settings_remesh_refinement)
        # initialize the model Kratos object
        simulation = MultilevelMonteCarloAnalysis(refined_model,
                                                  refined_parameters, sample)
        simulation.Initialize()
        # update the model Kratos object
        current_model = simulation.model
        current_parameters = simulation.project_parameters
        del simulation
    simulation = MultilevelMonteCarloAnalysis(current_model,
                                              current_parameters, sample)
    simulation.Run()
    QoI = EvaluateQuantityOfInterest(simulation)
    # save model and parameters as StreamSerializer Kratos objects
    serialized_finer_model = KratosMultiphysics.StreamSerializer()
    serialized_finer_model.Save("ModelSerialization", simulation.model)
    serialized_finer_parameters = KratosMultiphysics.StreamSerializer()
    serialized_finer_parameters.Save("ParametersSerialization",
                                     simulation.project_parameters)
    # pickle model and parameters
    pickled_finer_model = pickle.dumps(
        serialized_finer_model, 2
    )  # the second argument is the protocol and is NECESSARY (according to pybind11 docs)
    pickled_finer_parameters = pickle.dumps(
        serialized_finer_parameters, 2
    )  # the second argument is the protocol and is NECESSARY (according to pybind11 docs)
    del simulation
    end_MLMC_time = time.time()
    # register the results of the current level in the MultilevelMonteCarloResults class;
    # appending to the list of the corresponding level keeps the results ordered by level
    mlmc_results_class.time_ML[current_level].append(
        end_MLMC_time - start_MLMC_time)
    mlmc_results_class.QoI[current_level].append(QoI)
    return mlmc_results_class, pickled_finer_model, pickled_finer_parameters
Example #22
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# -----------------------------------------------------
#  FileName :    pickle_string.py
#  Author :      linuxme@
#  Project :     monitor_maintain
#  Date :        2013-09-01 10:32
#  Description :
# -----------------------------------------------------

try:
    import cPickle as pickle
except ImportError:
    import pickle
import pprint

data1 = [{'a': 'A', 'b': 2, 'c': 3.0}]
print 'BEFORE: ',
pprint.pprint(data1)

data1_string = pickle.dumps(data1)

data2 = pickle.loads(data1_string)
print 'AFTER : ',
pprint.pprint(data2)

print 'SAME? :', (data1 is data2)
print 'EQUAL?:', (data1 == data2)
Example #23
    def compute(model_id, stop_event, calling_client):
        """
        Computes the Time Series model. It fetches the necessary files from a
        remote server that were computed by the slycat-agent-compute-timeseries.py
        script.
    
        :param model_id: uid for the model in the database
        :param sid:      session ID
        :param uid:      user ID
        :param workdir:
        :param hostname:
        :param username:
        """
        try:
            total_file_delta_time = []
            #cherrypy.log.error("in thread")
            # workdir += "/slycat/pickle"  # route to the slycat directory
            start_time = time.time()
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model_id)
            model["model_compute_time"] = datetime.datetime.utcnow().isoformat()
            with slycat.web.server.get_model_lock(model["_id"]):
                database.save(model)
            slycat.web.server.update_model(database, model, state="waiting", message="starting data pull Timeseries")
            model = database.get("model", model_id)
            uid = slycat.web.server.get_model_parameter(database, model, "pickle_uid")
            workdir_raw = slycat.web.server.get_model_parameter(database, model, "working_directory")
            workdir = workdir_raw + "pickle"
            hostname = slycat.web.server.get_model_parameter(database, model, "hostname")
            username = slycat.web.server.get_model_parameter(database, model, "username")

            # get an active session
            sid = get_sid(hostname, model)
            # load inputs
            slycat.web.server.update_model(database, model, progress=50, message="loading inputs")
            use_tar = True
            # keep this blank unless we need it
            pickle_path = ''
            input_tar = None
            try:
                myfiles_tar_gz = get_remote_file_server(hostname, model,
                                                "%s/slycat_timeseries_%s/slycat-timeseries.tar.gz" % (workdir, uid),
                                                total_file_delta_time,
                                                calling_client)
                myfiles_tar_gz = io.BytesIO(myfiles_tar_gz)
                input_tar = tarfile.open(fileobj=myfiles_tar_gz, mode="r:gz")
            except Exception:
                # looks like the file is too large; just grab one file at a time
                use_tar = False
                pickle_path = "%s/slycat_timeseries_%s/" % (workdir, uid)
            inputs = helpGetFile("%sarrayset_inputs.pickle" % (pickle_path),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
            inputs = pickle.loads(inputs)
            slycat.web.server.put_model_arrayset(database, model, inputs["aid"])
            # load attributes
            slycat.web.server.update_model(database, model, progress=55, message="loading attributes")
            attributes = inputs["attributes"]
            slycat.web.server.put_model_array(database, model, inputs["aid"], 0, attributes, inputs["dimensions"])
            # load attribute data
            data = helpGetFile("%sinputs_attributes_data.pickle" % (pickle_path),
                                use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
            attributes_data = pickle.loads(data)

            # push attribute arraysets
            # TODO this can become multi processored
            for attribute in range(len(attributes)):
                model = database.get("model", model["_id"])
                slycat.web.server.put_model_arrayset_data(database, model, inputs["aid"], "0/%s/..." % attribute,
                                                          [attributes_data[attribute]])
            # load clusters data
            slycat.web.server.update_model(database, model, progress=60, message="loading cluster data")
            clusters = helpGetFile("%sfile_clusters.json" % (pickle_path),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
            clusters = json.loads(clusters)
            clusters_file = json.JSONDecoder().decode(clusters["file"])
            timeseries_count = json.JSONDecoder().decode(clusters["timeseries_count"])
            slycat.web.server.post_model_file(model["_id"], True, sid,
                                              "%s/slycat_timeseries_%s/file_clusters.out" % (workdir, uid),
                                              clusters["aid"], clusters["parser"], client=calling_client)
            # TODO this can become multi processored
            cherrypy.log.error("Pulling timeseries computed data")
            slycat.web.server.update_model(database, model, progress=65, message="Pulling timeseries computed data for %s cluster files" % len(clusters_file))
            progress = 65
            progress_part = 30/len(clusters_file)
            for file_name in clusters_file:
                progress = progress + progress_part
                slycat.web.server.update_model(database, model, progress=progress, message="loading %s cluster file" % file_name)
                file_cluster_data = helpGetFile("%sfile_cluster_%s.json" % (pickle_path, file_name),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
                file_cluster_attr = json.loads(file_cluster_data)
                slycat.web.server.post_model_file(model["_id"], True, sid,
                                                  "%s/slycat_timeseries_%s/file_cluster_%s.out" % (
                                                      workdir, uid, file_name),
                                                  file_cluster_attr["aid"], file_cluster_attr["parser"], client=calling_client)
                database = slycat.web.server.database.couchdb.connect()
                model = database.get("model", model["_id"])
                slycat.web.server.put_model_arrayset(database, model, "preview-%s" % file_name)

                waveform_dimensions_data = helpGetFile("%swaveform_%s_dimensions.pickle" % (pickle_path, file_name),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
                waveform_dimensions_array = pickle.loads(waveform_dimensions_data)

                waveform_attributes_data = helpGetFile("%swaveform_%s_attributes.pickle" % (pickle_path, file_name),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
                waveform_attributes_array = pickle.loads(waveform_attributes_data)

                waveform_times_data = helpGetFile("%swaveform_%s_times.pickle" % (pickle_path, file_name),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
                waveform_times_array = pickle.loads(waveform_times_data)

                waveform_values_data = helpGetFile("%swaveform_%s_values.pickle" % (pickle_path, file_name),
                                    use_tar, hostname, model, total_file_delta_time,calling_client, input_tar)
                waveform_values_array = pickle.loads(waveform_values_data)

                for index in range(int(timeseries_count)):
                    try:
                        model = database.get("model", model["_id"])
                        slycat.web.server.put_model_array(database, model, "preview-%s" % file_name, index,
                                                          waveform_attributes_array[index],
                                                          waveform_dimensions_array[index])
                        model = database.get("model", model["_id"])
                        slycat.web.server.put_model_arrayset_data(database, model, "preview-%s" % file_name,
                                                                  "%s/0/...;%s/1/..." % (index, index),
                                                                  [waveform_times_array[index],
                                                                    waveform_values_array[index]])
                    except Exception:
                        cherrypy.log.error("failed on index: %s" % index)
                        pass
            if input_tar:
                input_tar.close()
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model_id)
            slycat.web.server.update_model(database, model, message="finished loading all data")
            slycat.web.server.put_model_parameter(database, model, "computing", False)
            cherrypy.log.error("finished Pulling timeseries computed data")
            finish_time = time.time()
            file_stats = {
              "min": min(total_file_delta_time),
              "max": max(total_file_delta_time),
              "mean": statistics.mean(total_file_delta_time),
              "median": statistics.median(total_file_delta_time),
              "number_of_files_pulled":len(total_file_delta_time),
              "total_time_Pulling_data": sum(total_file_delta_time),
              "total_time": (finish_time - start_time)
            }
            cherrypy.log.error("File Stats %s" % str(file_stats))
            total_file_delta_time = []
            finish(model["_id"])
            stop_event.set()
            # TODO add finished to the model state
            # TODO add remove dir command by uncommenting below
            # payload = {
            #     "action": "run_remote_command",
            #     "command": ("rm -rf %s" % workdir_raw)
            # }
        except cherrypy._cperror.HTTPError as e:
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model_id)
            slycat.web.server.put_model_parameter(database, model, "computing", False)
            cherrypy.log.error(traceback.format_exc())
            cherrypy.log.error("Timeseries cperror model compute exception type: %s" % sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception value: %s" % sys.exc_info()[1])
            cherrypy.log.error("Timeseries model compute exception traceback: %s" % sys.exc_info()[2])
            stop_event.set()
        except Exception:
            database = slycat.web.server.database.couchdb.connect()
            model = database.get("model", model_id)
            slycat.web.server.put_model_parameter(database, model, "computing", False)
            cherrypy.log.error(traceback.format_exc())
            cherrypy.log.error("Timeseries model compute exception type: %s" % sys.exc_info()[0])
            cherrypy.log.error("Timeseries model compute exception value: %s" % sys.exc_info()[1])
            cherrypy.log.error("Timeseries model compute exception traceback: %s" % sys.exc_info()[2])
            stop_event.set()
Example #24
  def compute(database, model, sid, uid, workdir, hostname, username, password):
    database = slycat.web.server.database.couchdb.connect()
    model = database.get("model", model["_id"])
    model["model_compute_time"] = datetime.datetime.utcnow().isoformat()
    slycat.web.server.update_model(database, model)

    sid, inputs = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/arrayset_inputs.pickle" % (workdir, uid))
    inputs = pickle.loads(inputs)

    slycat.web.server.put_model_arrayset(database, model, inputs["aid"])
    attributes = inputs["attributes"]
    slycat.web.server.put_model_array(database, model, inputs["aid"], 0, attributes, inputs["dimensions"])

    for attribute in range(len(attributes)):
      sid, data = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/inputs_attributes_data_%s.pickle" % (workdir, uid, attribute))
      data = pickle.loads(data)
      slycat.web.server.put_model_arrayset_data(database, model, inputs["aid"], "0/%s/..." % attribute, [data])

    clusters = json.loads(slycat.web.server.get_remote_file(sid, "%s/slycat_timeseries_%s/file_clusters.json" % (workdir, uid)))
    clusters_file = json.JSONDecoder().decode(clusters["file"])

    slycat.web.server.post_model_file(model["_id"], True, sid, "%s/slycat_timeseries_%s/file_clusters.out" % (workdir, uid), clusters["aid"], clusters["parser"])

    for f in clusters_file:
      cherrypy.log.error("Processing file cluster %s" % f)
      sid, file_cluster_data = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/file_cluster_%s.json" % (workdir, uid, f))
      cherrypy.log.error("Got remote file cluster %s JSON file" % f)
      file_cluster_attr = json.loads(file_cluster_data)
      cherrypy.log.error("Loaded cluster data JSON file")
      slycat.web.server.post_model_file(model["_id"], True, sid, "%s/slycat_timeseries_%s/file_cluster_%s.out" % (workdir, uid, f), file_cluster_attr["aid"], file_cluster_attr["parser"])
      cherrypy.log.error("Posted cluster %s OUT file to model" % f)

      sid, waveforms = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/waveforms_%s.pickle" % (workdir, uid, f))
      cherrypy.log.error("Got remote file waveforms_%s.pickle" % f)
      try:
        waveforms = pickle.loads(waveforms)
      except Exception as e:
        cherrypy.log.error("Loading waveforms exception caught: %s" % e)
      cherrypy.log.error("Loaded waveforms from pickle file")

      database = slycat.web.server.database.couchdb.connect()
      model = database.get("model", model["_id"])
      cherrypy.log.error("Putting model arrayset for cluster %s" % f)
      slycat.web.server.put_model_arrayset(database, model, "preview-%s" % f)
      cherrypy.log.error("Starting to enumerate waveforms for %s" % f)
      for index, waveform in enumerate(waveforms):
        cherrypy.log.error("Processing waveform %s - %s" % (f, index))
        sid, waveform_dimensions = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/waveform_%s_%s_dimensions.pickle" % (workdir, uid, f, index))
        cherrypy.log.error("Got remote pickle dimensions file %s - %s" % (f, index))
        waveform_dimensions = pickle.loads(waveform_dimensions)
        cherrypy.log.error("Loaded pickle dimensions file %s - %s" % (f, index))
        sid, waveform_attributes = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/waveform_%s_%s_attributes.pickle" % (workdir, uid, f, index))
        cherrypy.log.error("Got remote pickle attributes file %s - %s" % (f, index))
        waveform_attributes = pickle.loads(waveform_attributes)
        cherrypy.log.error("Loaded pickle attributes file %s - %s" % (f, index))
        slycat.web.server.put_model_array(database, model, "preview-%s" % f, index, waveform_attributes, waveform_dimensions)
        cherrypy.log.error("Put model array for preview-%s" % f)

        sid, waveform_times = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/waveform_%s_%s_times.pickle" % (workdir, uid, f, index))
        cherrypy.log.error("Got remote pickle times file %s - %s" % (f, index))
        waveform_times = pickle.loads(waveform_times)
        cherrypy.log.error("Loaded pickle times file %s - %s" % (f, index))
        sid, waveform_values = get_remote_file(sid, hostname, username, password, "%s/slycat_timeseries_%s/waveform_%s_%s_values.pickle" % (workdir, uid, f, index))
        cherrypy.log.error("Got remote pickle values file %s - %s" % (f, index))
        waveform_values = pickle.loads(waveform_values)
        cherrypy.log.error("Loaded pickle values file %s - %s" % (f, index))
        slycat.web.server.put_model_arrayset_data(database, model, "preview-%s" % f, "%s/0/...;%s/1/..." % (index, index), [waveform_times, waveform_values])
        cherrypy.log.error("Put model arrayset data for preview-%s" % f)
Example #25
    def items(self):
        return [(pickle.loads(app_rpc), app)
                for app_rpc, app in self.applications
                if app_rpc != 'transport']
Example #26
def ExecuteMultilevelMonteCarloAnalisys_Task(
        finest_level, pickled_coarse_model, pickled_coarse_parameters,
        size_meshes, pickled_settings_metric_refinement,
        pickled_settings_remesh_refinement):
    # unpickle model and build Kratos Model object
    model_serializer = pickle.loads(pickled_coarse_model)
    current_model = KratosMultiphysics.Model()
    model_serializer.Load("ModelSerialization", current_model)
    del model_serializer
    # unpickle parameters and build Kratos Parameters object
    serialized_parameters = pickle.loads(pickled_coarse_parameters)
    current_parameters = KratosMultiphysics.Parameters()
    serialized_parameters.Load("ParametersSerialization", current_parameters)
    del serialized_parameters
    # unpickle metric and remesh refinement parameters and build Kratos Parameters objects
    settings_metric_refinement_serializer = pickle.loads(
        pickled_settings_metric_refinement)
    settings_remesh_refinement_serializer = pickle.loads(
        pickled_settings_remesh_refinement)
    current_settings_metric_refinement = KratosMultiphysics.Parameters()
    current_settings_remesh_refinement = KratosMultiphysics.Parameters()
    settings_metric_refinement_serializer.Load(
        "MetricRefinementParametersSerialization",
        current_settings_metric_refinement)
    settings_remesh_refinement_serializer.Load(
        "RemeshRefinementParametersSerialization",
        current_settings_remesh_refinement)
    del (settings_metric_refinement_serializer,
         settings_remesh_refinement_serializer)
    # generate the sample
    sample = GenerateSample()
    # initialize the MultilevelMonteCarloResults class and prepare the results
    mlmc_results_class = mlmc.MultilevelMonteCarloResults()
    QoI = []
    start_MLMC_time = time.time()
    end_MLMC_time = []
    if finest_level == 0:
        simulation = MultilevelMonteCarloAnalysis(current_model,
                                                  current_parameters, sample)
        simulation.Run()
        QoI.append(EvaluateQuantityOfInterest(simulation))
        del simulation
        end_MLMC_time.append(time.time())
    else:
        for lev in range(finest_level + 1):
            simulation = MultilevelMonteCarloAnalysis(current_model,
                                                      current_parameters,
                                                      sample)
            simulation.Run()
            QoI.append(EvaluateQuantityOfInterest(simulation))
            end_MLMC_time.append(time.time())
            # if level < finest level, refine exploiting the solution just computed
            if lev < finest_level:
                # refine the model Kratos object
                model_refined = refinement.compute_refinement_hessian_metric(
                    simulation, size_meshes[lev + 1], size_meshes[lev],
                    current_settings_metric_refinement,
                    current_settings_remesh_refinement)
                # initialize the model Kratos object
                simulation = MultilevelMonteCarloAnalysis(
                    model_refined, current_parameters, sample)
                simulation.Initialize()
                # update the model Kratos object
                current_model = simulation.model
            del simulation
    # prepare the results of the simulation in the MultilevelMonteCarloResults class
    mlmc_results_class.finer_level = finest_level
    for lev in range(finest_level + 1):
        mlmc_results_class.time_ML.append(end_MLMC_time[lev] - start_MLMC_time)
        mlmc_results_class.QoI.append(QoI[lev])
    return mlmc_results_class
Example #27
    def query(key):
        result = self.col.find_one()
        dump = result[key]
        return cPickle.loads(dump)
Example #28
Inverse operation:
store['obj2']


5.9 pickle: Python object serialization ======
Serialization is the process of converting an object's hierarchy into a byte stream.

 5.9.1 Serializing Python objects with cPickle
 import cPickle as pickle
 The dumps() function:
 pickled_data = pickle.dumps(data)

 Once the data is serialized, writing it to a file or sending it over a socket, pipe, etc. is straightforward.

 Deserialization: the loads() function
 nframe = pickle.loads(pickled_data)


 ---- 5.9.2 Object serialization with pandas ----- (key point) ====
 There is no need to import the cPickle module; all the operations happen implicitly.
 frame.to_pickle('frame.pkl')
 Read the data back:
 pd.read_pickle('frame.pkl')
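
A runnable version of the pandas round trip sketched above (the column data is made up for illustration):

import pandas as pd

frame = pd.DataFrame({'a': [1, 2, 3], 'b': [4.0, 5.0, 6.0]})
frame.to_pickle('frame.pkl')            # serialize the DataFrame to disk
restored = pd.read_pickle('frame.pkl')  # deserialize it back
print(restored.equals(frame))           # True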





-------- 5.10 Connecting to databases ------
from sqlalchemy import create_engine
Example #29
try:
    import cPickle as pickle
except ImportError:
    import pickle

d = dict(name='Bob', age=20, score=88)  # example payload (assumed here; `d` was undefined in the snippet)
s = pickle.dumps(d)

f = open('dump.txt', 'wb')
pickle.dump(d, f)
f.close()

f = open('dump.txt', 'rb')
h = pickle.load(f)
f.close()

pickle.loads(pickle.dumps(d))

import json
w = json.dumps(d)
i = json.loads(w)

import json


class Student(object):
    def __init__(self, name, age, score):
        self.name = name
        self.age = age
        self.score = score

Example #30
#!/usr/bin/python

import urllib2, sys
try:
	import cPickle as pickle
except ImportError as e:
	import pickle

url = 'http://www.pythonchallenge.com/pc/def/peak.html'

if __name__ ==  "__main__":

	banner_url = 'http://www.pythonchallenge.com/pc/def/banner.p'
	try:
		raw_data = urllib2.urlopen(banner_url).read()
		data = pickle.loads(raw_data)
	except urllib2.URLError as e:
		print e.reason
		raise

	for row in data:
		for (char, freq) in row:
			sys.stdout.write(char * freq)
		print