def process_tuple(self, tup):
    buf = a2b_base64(tup.values[0])
    msg = Message()
    deserialize(msg, buf)
    self.log('received message: {}'.format(msg))
    self.ack(tup)
def get_hosts_from_zk(zk_client):
    """Read all registered hosts in /hosts and return a list of Host objects."""
    try:
        hosts_list = []
        if not zk_client.exists(HOSTS_PREFIX):
            log.debug("%s doesn't exist" % (HOSTS_PREFIX,))
            return []
        hosts = zk_client.get_children(HOSTS_PREFIX)
        for host in hosts:
            try:
                path = "%s/%s" % (HOSTS_PREFIX, host)
                (value, stat) = zk_client.get(path)
                config = HostConfig()
                deserialize(config, value)
                _host = Host(config.agent_id, config.address.host,
                             config.address.port)
                hosts_list.append(_host)
            except NoNodeError:
                log.debug("host %s not found" % (host,))
                continue
        return hosts_list
    except Exception as e:
        log.exception(e)
def get_thrift_profile(self, query_id, timeout=10, interval=1):
    """Returns thrift profile of the specified query ID, if available"""
    page_name = "query_profile_encoded?query_id=%s" % (query_id)
    try:
        response = self.open_debug_webpage(page_name, timeout=timeout, interval=interval)
        tbuf = response.read()
    except Exception as e:
        LOG.info("Thrift profile for query %s not yet available: %s", query_id, str(e))
        return None
    else:
        tree = TRuntimeProfileTree()
        try:
            deserialize(tree, zlib.decompress(base64.b64decode(tbuf)),
                        protocol_factory=TCompactProtocol.TCompactProtocolFactory())
            tree.validate()
            return tree
        except Exception as e:
            LOG.info("Exception while deserializing query profile of %s: %s",
                     query_id, str(e))
            # We should assert that the response code is not 200 once
            # IMPALA-6332: Impala webserver should return HTTP error code for missing query
            # profiles, is fixed.
            if str(e) == 'Incorrect padding':
                assert "Could not obtain runtime profile" in tbuf, tbuf
            return None
def decode_profile_line(line):
    space_separated = line.split(" ")
    if len(space_separated) == 3:
        ts = int(space_separated[0])
        print datetime.datetime.fromtimestamp(ts / 1000.0).isoformat(), space_separated[1]
        base64_encoded = space_separated[2]
    elif len(space_separated) == 1:
        base64_encoded = space_separated[0]
    else:
        raise Exception("Unexpected line: " + line)
    possibly_compressed = base64.b64decode(base64_encoded)
    # Handle both compressed and uncompressed Thrift profiles
    try:
        thrift = zlib.decompress(possibly_compressed)
    except zlib.error:
        thrift = possibly_compressed
    tree = TRuntimeProfileTree()
    deserialize(tree, thrift,
                protocol_factory=TCompactProtocol.TCompactProtocolFactory())
    tree.validate()
    return tree
def _bench_thrift(loops=1000):
    """Measure using a thrift-generated library N times.

    The target is a simple addressbook.  We measure the following:

    * create an addressbook with 1 person in it
    * serialize it
    * deserialize it into a new addressbook

    For each iteration we repeat this 100 times.
    """
    # proto_factory = TBinaryProtocolFactory()
    proto_factory = TBinaryProtocolAcceleratedFactory()

    elapsed = 0
    times = []
    for _ in range(loops):
        # This is a macro benchmark for a Python implementation
        # so "elapsed" covers more than just how long the Addressbook ops take.
        t0 = pyperf.perf_counter()
        for _ in range(100):
            # First, create the addressbook.
            ab = make_addressbook()
            # Then, round-trip through serialization.
            encoded = serialize(ab, proto_factory)
            ab2 = ttypes.AddressBook()
            deserialize(ab2, encoded, proto_factory)
        t1 = pyperf.perf_counter()
        elapsed += t1 - t0
        times.append(t0)
    times.append(pyperf.perf_counter())
    return elapsed, times
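For context, a minimal driver sketch for the benchmark above; the loop count, the __main__ guard, and the printed summary are illustrative assumptions, not part of the original harness:

# Hypothetical driver: run the benchmark with a small loop count and
# report the accumulated wall-clock time. Assumes _bench_thrift and the
# generated addressbook ttypes are importable.
if __name__ == "__main__":
    total, timestamps = _bench_thrift(loops=10)
    print("total elapsed: %.6f s (%d timestamps recorded)" % (total, len(timestamps)))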
def get_thrift_profile(self, query_id, timeout=10, interval=1):
    """Returns thrift profile of the specified query ID, if available"""
    page_name = "query_profile_encoded?query_id=%s" % (query_id)
    try:
        response = self.open_debug_webpage(page_name, timeout=timeout, interval=interval)
        tbuf = response.text
    except Exception as e:
        LOG.info("Thrift profile for query %s not yet available: %s", query_id, str(e))
        return None
    else:
        tree = TRuntimeProfileTree()
        try:
            deserialize(tree, zlib.decompress(base64.b64decode(tbuf)),
                        protocol_factory=TCompactProtocol.TCompactProtocolFactory())
            tree.validate()
            return tree
        except Exception as e:
            LOG.info("Exception while deserializing query profile of %s: %s",
                     query_id, str(e))
            # We should assert that the response code is not 200 once
            # IMPALA-6332: Impala webserver should return HTTP error code for missing query
            # profiles, is fixed.
            if str(e) == 'Incorrect padding':
                assert "Could not obtain runtime profile" in tbuf, tbuf
            return None
def decode(n, proto_factory=TBinaryProtocolFactory()):
    ab = ttypes.AddressBook()
    ab_encoded = serialize(make_addressbook())
    start = time.time()
    for i in range(n):
        deserialize(ab, ab_encoded, proto_factory)
    end = time.time()
    print("decode\t-> {}".format(end - start))
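A serialization-only counterpart can be timed the same way. The encode() helper below is a sketch under the same assumptions (make_addressbook, serialize, time, and TBinaryProtocolFactory are available); it is not part of the original benchmark:

def encode(n, proto_factory=TBinaryProtocolFactory()):
    # Hypothetical counterpart to decode(): measure serialization only.
    ab = make_addressbook()
    start = time.time()
    for i in range(n):
        serialize(ab, proto_factory)
    end = time.time()
    print("encode\t-> {}".format(end - start))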
def parse_address(value):
    """Deserialize the given thrift-encoded string into an address tuple.

    :type value: str
    :rtype: tuple
    """
    address = ServerAddress()
    deserialize(address, value)
    return address.host, address.port
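The reverse direction is a one-liner with thrift's serialize(). The helper below is hypothetical; it assumes ServerAddress is a thrift-generated struct whose host and port fields (read above) can be set via keyword arguments:

def format_address(host, port):
    # Hypothetical inverse of parse_address(): encode a (host, port) pair
    # as a thrift-serialized ServerAddress string.
    return serialize(ServerAddress(host=host, port=port))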
def verify(self, serialized, factory):
    self.assertEqual(serialized, serialize(self.message, factory))

    self.assertEqual(
        "hello thrift",
        deserialize(Message(), serialized, factory).body,
    )
    self.assertEqual(42, deserialize(Message(), serialized, factory).num)

    self.assertRaises(EOFError, deserialize, Message(), b'', factory)
def get_hierarchy_from_zk(zk_client):
    """Read /hosts and /roles and try to construct a hierarchy."""
    try:
        res = get_service_leader(zk_client, ROOT_SCHEDULER_SERVICE)
        if not res:
            log.debug("Couldn't find a root scheduler leader!")
            res = (None, None)
        if not zk_client.exists(ROLES_PREFIX):
            log.error("%s doesn't exist" % (ROLES_PREFIX,))
            return
        root_host = Host(ROOT_SCHEDULER_ID, res[0], res[1])
        root_sch = Scheduler(ROOT_SCHEDULER_ID, ROOT_SCHEDULER_TYPE, root_host)
        if not zk_client.exists(ROLES_PREFIX):
            log.debug("%s doesn't exist" % (ROLES_PREFIX,))
            return
        # TODO(Maithem): cross reference with /hosts
        scheduler_hosts = zk_client.get_children(ROLES_PREFIX)
        for sch_host in scheduler_hosts:
            try:
                path = "%s/%s" % (ROLES_PREFIX, sch_host)
                (value, stat) = zk_client.get(path)
                role = Roles()
                deserialize(role, value)
                if (not role or not role.schedulers or
                        len(role.schedulers) != 1):
                    log.debug("Incorrect role for scheduler host %s" % (sch_host,))
                    continue
                leaf_role = role.schedulers[0]
                leaf = Scheduler(leaf_role.id, LEAF_SCHEDULER_TYPE, None, root_sch)
                for childHost in (leaf_role.host_children or []):
                    host = Host(childHost.id, childHost.address,
                                childHost.port, leaf)
                    if childHost.id == sch_host:
                        leaf.owner = host
                    leaf.add_child(host)
                root_sch.add_child(leaf)
            except NoNodeError:
                log.debug("Scheduler host %s not found" % (sch_host,))
                continue
        return root_sch
    except Exception as e:
        log.exception(e)
def testSerializeThenDeserialize(self):
    obj = Xtruct2(i32_thing=1,
                  struct_thing=Xtruct(string_thing="foo"))

    s1 = serialize(obj)
    for i in range(10):
        self.assertEquals(s1, serialize(obj))
        objcopy = Xtruct2()
        deserialize(objcopy, serialize(obj))
        self.assertEquals(obj, objcopy)

    obj = Xtruct(string_thing="bar")
    objcopy = Xtruct()
    deserialize(objcopy, serialize(obj))
    self.assertEquals(obj, objcopy)

    # test booleans
    obj = Bools(im_true=True, im_false=False)
    objcopy = Bools()
    deserialize(objcopy, serialize(obj))
    self.assertEquals(obj, objcopy)

    # test enums
    for num, name in Numberz._VALUES_TO_NAMES.items():
        obj = Bonk(message='enum Numberz value %d is string %s' % (num, name), type=num)
        objcopy = Bonk()
        deserialize(objcopy, serialize(obj))
        self.assertEquals(obj, objcopy)
def testSerializeThenDeserialize(self):
    obj = Xtruct2(i32_thing=1,
                  struct_thing=Xtruct(string_thing="foo"))

    s1 = serialize(obj)
    for i in range(10):
        self.assertEquals(s1, serialize(obj))
        objcopy = Xtruct2()
        deserialize(objcopy, serialize(obj))
        self.assertEquals(obj, objcopy)

    obj = Xtruct(string_thing="bar")
    objcopy = Xtruct()
    deserialize(objcopy, serialize(obj))
    self.assertEquals(obj, objcopy)
def test_register_host(self):
    """Register against the chairman and verify it is persisted in zk."""
    host_prefix = "/hosts"  # Const in java impl.

    # Register two hosts with the chairman.
    host = []
    h = self.get_register_host_request()
    host.append(h.config)

    retries = 0
    while (retries < 10):
        rc = self.chairman_client.register_host(h)
        if (rc.result == RegisterHostResultCode.NOT_IN_MAJORITY):
            # Possible because the chairman is yet to connect to zk
            retries += 1
            time.sleep(1)
            continue
        break
    self.assertEqual(rc.result, RegisterHostResultCode.OK)

    h = self.get_register_host_request()
    host.append(h.config)
    rc = self.chairman_client.register_host(h)
    self.assertEqual(rc.result, RegisterHostResultCode.OK)

    # Validate the state persisted in zk.
    client = self._get_nonchroot_client()
    client.start()
    self.assertTrue(client.exists(host_prefix))
    read_hosts = client.get_children(host_prefix)
    for h in read_hosts:
        path = host_prefix + "/" + h
        (value, stat) = client.get(path)
        host_config = HostConfig()
        deserialize(host_config, value)
        self.assertTrue(host_config in host)
    client.stop()
def deserialize_content(content, flag=False):
    """Deserialize ware content read from the Cassandra database.

    Deserializes the content and converts the resulting object to a dict.

    :param content: the content to deserialize
    :return: a dict with the data
    """
    if flag:
        content_dict = instance_to_dict(deserialize(WareContentDto(), content))
    else:
        content_dict = content
    service_log.put("Deserialized content: %s" % str(content_dict))
    return content_dict
def testSerializeThenDeserialize(self):
    obj = Xtruct2(i32_thing=1,
                  struct_thing=Xtruct(string_thing="foo"))

    s1 = serialize(obj)
    for i in range(10):
        self.assertEquals(s1, serialize(obj))
        objcopy = Xtruct2()
        deserialize(objcopy, serialize(obj))
        self.assertEquals(obj, objcopy)

    obj = Xtruct(string_thing="bar")
    objcopy = Xtruct()
    deserialize(objcopy, serialize(obj))
    self.assertEquals(obj, objcopy)

    # test booleans
    obj = Bools(im_true=True, im_false=False)
    objcopy = Bools()
    deserialize(objcopy, serialize(obj))
    self.assertEquals(obj, objcopy)

    # test enums
    def _enumerate_enum(enum_class):
        if hasattr(enum_class, '_VALUES_TO_NAMES'):
            # old-style enums
            for num, name in enum_class._VALUES_TO_NAMES.items():
                yield (num, name)
        else:
            # assume Python 3.4+ IntEnum-based
            from enum import IntEnum
            self.assertTrue(issubclass(enum_class, IntEnum))
            for num in enum_class:
                yield (num.value, num.name)

    for num, name in _enumerate_enum(Numberz):
        obj = Bonk(message='enum Numberz value %d is string %s' % (num, name), type=num)
        objcopy = Bonk()
        deserialize(objcopy, serialize(obj))
        self.assertEquals(obj, objcopy)
if len(sys.argv) == 1 or sys.argv[1] == "-":
    input_data = sys.stdin
elif len(sys.argv) == 2:
    input_data = file(sys.argv[1])
else:
    print >> sys.stderr, "Usage: %s [file]" % (sys.argv[0],)
    sys.exit(1)

for line in input_data:
    space_separated = line.split(" ")
    if len(space_separated) == 3:
        ts = int(space_separated[0])
        print datetime.datetime.fromtimestamp(ts / 1000.0).isoformat(), space_separated[1],
        base64_encoded = space_separated[2]
    elif len(space_separated) == 1:
        base64_encoded = space_separated[0]
    else:
        raise Exception("Unexpected line: " + line)
    possibly_compressed = base64.b64decode(base64_encoded)
    # Handle both compressed and uncompressed Thrift profiles
    try:
        thrift = zlib.decompress(possibly_compressed)
    except zlib.error:
        thrift = possibly_compressed
    tree = TRuntimeProfileTree()
    deserialize(tree, thrift,
                protocol_factory=TCompactProtocol.TCompactProtocolFactory())
    tree.validate()
    print tree
def decoder_string(self, data):
    return deserialize(test_ttypes.Str(), data).data
def extract_node_data(client, path, thrift_type):
    (value, stat) = client.get(path)
    t_type_instance = thrift_type()
    deserialize(t_type_instance, value)
    return t_type_instance
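A usage sketch for the generic helper above, reusing the HOSTS_PREFIX layout and HostConfig type from the ZooKeeper examples in this collection; the concrete child node name is hypothetical:

# Hypothetical call: read one registered host's config from ZooKeeper.
host_config = extract_node_data(zk_client, HOSTS_PREFIX + "/host-1", HostConfig)
print(host_config.agent_id, host_config.address.host, host_config.address.port)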
def main():
    #sys.setrecursionlimit(100000)
    interresults = [None]
    try:
        #--------------------------------------------------------------------------
        # parse input and deserialize the (thrift) optimization problem
        #--------------------------------------------------------------------------
        parser = argparse.ArgumentParser()
        parser.add_argument("optfile", help="filename of input optimization file")
        parser.add_argument("resultfile", help="filename of optimization results")
        args = parser.parse_args()
        assert isinstance(args.resultfile, str)
        resultFile = args.resultfile
        dir_path = os.path.dirname(os.path.realpath(resultFile))
        interresults[0] = os.path.join(dir_path, "interresults.txt")
        f_optfile = open(args.optfile, "rb")
        blob_opt = f_optfile.read()
        print("read "+str(len(blob_opt))+" bytes from "+args.optfile)
        f_optfile.close()
        '''
        struct OptProblem {
            1: required string mathModelSbmlFile;
            2: required int numberOfOptimizationRuns;
            3: required ParameterDescriptionList parameterDescriptionList;
            4: required ReferenceVariableList referenceVariableList;
            5: required string experimentalDataCSV;
            6: required CopasiOptimizationMethod optimizationMethod;
        }
        '''
        vcellOptProblem = VCELLOPT.OptProblem()
        protocol_factory = TBinaryProtocol.TBinaryProtocolFactory
        deserialize(vcellOptProblem, blob_opt, protocol_factory=protocol_factory())
        print("done with deserialization")

        #-----------------------------------------------------------------------------
        # add CDataModel
        #
        # assert(COPASI.CCopasiRootContainer.getRoot() != None)
        # create a datamodel
        dataModel = COPASI.CCopasiRootContainer.addDatamodel()
        assert(isinstance(dataModel, COPASI.CCopasiDataModel))
        assert(COPASI.CCopasiRootContainer.getDatamodelList().size() == 1)
        try:
            #sbmlFile = "C:\\COPASI-4.19.140-Source\\copasi\\bindings\\python\\examples\\exampleDeni.xml"
            sbmlString = vcellOptProblem.mathModelSbmlContents
            dataModel.importSBMLFromString(str(sbmlString))
            print("data model loaded")
        except:
            e_info = sys.exc_info()
            traceback.print_exception(e_info[0], e_info[1], e_info[2], file=sys.stdout)
            sys.stderr.write("exception: error importing sbml file: "+str(e_info[0])+": "+str(e_info[1])+"\n")
            sys.stderr.flush()
            return -1

        model = dataModel.getModel()
        assert(isinstance(model, COPASI.CModel))
        model.compileIfNecessary()
        printModel(model)
        mathContainer = model.getMathContainer()
        assert(isinstance(mathContainer, COPASI.CMathContainer))
        # dataModel.saveModel("optModel.cps", True)

        #---------------------------------------------------------------------------
        # add CFitTask
        #---------------------------------------------------------------------------
        fitTask = dataModel.addTask(COPASI.CTaskEnum.parameterFitting)
        assert(isinstance(fitTask, COPASI.CFitTask))
        '''
        enum OptimizationMethodType {
            EvolutionaryProgram,
            SRES,
            GeneticAlgorithm,
            GeneticAlgorithmSR,
            HookeJeeves,
            LevenbergMarquardt,
            NelderMead,
            ParticleSwarm,
            RandomSearch,
            SimulatedAnnealing,
            SteepestDescent,
            Praxis,
            TruncatedNewton
        }
        '''
        methodTypeDict = dict()
        methodTypeDict[VCELLOPT.OptimizationMethodType.EvolutionaryProgram] = COPASI.CTaskEnum.EvolutionaryProgram
        methodTypeDict[VCELLOPT.OptimizationMethodType.SRES] = COPASI.CTaskEnum.SRES
        methodTypeDict[VCELLOPT.OptimizationMethodType.GeneticAlgorithm] = COPASI.CTaskEnum.GeneticAlgorithm
        methodTypeDict[VCELLOPT.OptimizationMethodType.GeneticAlgorithmSR] = COPASI.CTaskEnum.GeneticAlgorithmSR
        methodTypeDict[VCELLOPT.OptimizationMethodType.HookeJeeves] = COPASI.CTaskEnum.HookeJeeves
        methodTypeDict[VCELLOPT.OptimizationMethodType.LevenbergMarquardt] = COPASI.CTaskEnum.LevenbergMarquardt
        methodTypeDict[VCELLOPT.OptimizationMethodType.NelderMead] = COPASI.CTaskEnum.NelderMead
        methodTypeDict[VCELLOPT.OptimizationMethodType.ParticleSwarm] = COPASI.CTaskEnum.ParticleSwarm
        methodTypeDict[VCELLOPT.OptimizationMethodType.RandomSearch] = COPASI.CTaskEnum.RandomSearch
        methodTypeDict[VCELLOPT.OptimizationMethodType.SimulatedAnnealing] = COPASI.CTaskEnum.SimulatedAnnealing
        methodTypeDict[VCELLOPT.OptimizationMethodType.SteepestDescent] = COPASI.CTaskEnum.SteepestDescent
        methodTypeDict[VCELLOPT.OptimizationMethodType.Praxis] = COPASI.CTaskEnum.Praxis
        methodTypeDict[VCELLOPT.OptimizationMethodType.TruncatedNewton] = COPASI.CTaskEnum.TruncatedNewton

        #
        # set CFitMethod
        #
        copasiFitMethodType = methodTypeDict[vcellOptProblem.optimizationMethod.optimizationMethodType]
        if (copasiFitMethodType not in fitTask.getValidMethods()):
            print "fit method not allowed"
            return 1
        fitTask.setMethodType(copasiFitMethodType)
        fitMethod = fitTask.getMethod()
        assert(isinstance(fitMethod, COPASI.COptMethod))
        '''
        enum OptimizationParameterType {
            Number_of_Generations,
            Number_of_Iterations,
            Population_Size,
            Random_Number_Generator,
            Seed,
            IterationLimit,
            Tolerance,
            Rho,
            Scale,
            Swarm_Size,
            Std_Deviation,
            Start_Temperature,
            Cooling_Factor,
            Pf
        }
        '''
        methodParamDict = dict()
        methodParamDict[VCELLOPT.OptimizationParameterType.Number_of_Generations] = "Number of Generations"
        methodParamDict[VCELLOPT.OptimizationParameterType.Number_of_Iterations] = "Number of Iterations"
        methodParamDict[VCELLOPT.OptimizationParameterType.Population_Size] = "Population Size"
        methodParamDict[VCELLOPT.OptimizationParameterType.Random_Number_Generator] = "Random Number Generator"
        methodParamDict[VCELLOPT.OptimizationParameterType.Seed] = "Seed"
        methodParamDict[VCELLOPT.OptimizationParameterType.IterationLimit] = "Iteration Limit"
        methodParamDict[VCELLOPT.OptimizationParameterType.Tolerance] = "Tolerance"
        methodParamDict[VCELLOPT.OptimizationParameterType.Rho] = "Rho"
        methodParamDict[VCELLOPT.OptimizationParameterType.Scale] = "Scale"
        methodParamDict[VCELLOPT.OptimizationParameterType.Swarm_Size] = "Swarm Size"
        methodParamDict[VCELLOPT.OptimizationParameterType.Std_Deviation] = "Std Deviation"
        methodParamDict[VCELLOPT.OptimizationParameterType.Start_Temperature] = "Start Temperature"
        methodParamDict[VCELLOPT.OptimizationParameterType.Cooling_Factor] = "Cooling Factor"
        methodParamDict[VCELLOPT.OptimizationParameterType.Pf] = "Pf"

        #
        # set FitMethod parameters
        #
        '''
        <CopasiOptimizationMethod name="Evolutionary Programming">
            <CopasiOptimizationParameter name="Number of Generations" value="200.0" dataType="int"/>
            <CopasiOptimizationParameter name="Population Size" value="20.0" dataType="int"/>
            <CopasiOptimizationParameter name="Random Number Generator" value="1.0" dataType="int"/>
            <CopasiOptimizationParameter name="Seed" value="0.0" dataType="int"/>
        </CopasiOptimizationMethod>

        <Method name="Evolutionary Programming" type="EvolutionaryProgram">
            <Parameter name="Number of Generations" type="unsignedInteger" value="444"/>
            <Parameter name="Population Size" type="unsignedInteger" value="77"/>
            <Parameter name="Random Number Generator" type="unsignedInteger" value="5"/>
            <Parameter name="Seed" type="unsignedInteger" value="44"/>
        </Method>
        '''
        vcellOptParamList = vcellOptProblem.optimizationMethod.optimizationParameterList
        assert(isinstance(vcellOptParamList, list))
        for vcellOptParam in vcellOptParamList:
            assert(isinstance(vcellOptParam, VCELLOPT.CopasiOptimizationParameter))
            print methodParamDict[vcellOptParam.paramType]
            fitMethod.removeParameter(methodParamDict[vcellOptParam.paramType])
            if (vcellOptParam.dataType == VCELLOPT.OptimizationParameterDataType.INT):
                fitMethod.addParameter(methodParamDict[vcellOptParam.paramType], COPASI.CCopasiParameter.INT)
                fitParameter = fitMethod.getParameter(methodParamDict[vcellOptParam.paramType])
                assert (isinstance(fitParameter, COPASI.CCopasiParameter))
                fitParameter.setIntValue(int(vcellOptParam.value))
            else:
                fitMethod.addParameter(methodParamDict[vcellOptParam.paramType], COPASI.CCopasiParameter.DOUBLE)
                fitParameter = fitMethod.getParameter(methodParamDict[vcellOptParam.paramType])
                assert (isinstance(fitParameter, COPASI.CCopasiParameter))
                fitParameter.setDblValue(vcellOptParam.value)

        #
        # get FitProblem
        #
        fitProblem = fitTask.getProblem()
        assert(isinstance(fitProblem, COPASI.COptProblem))    # works for all COPASI builds >= 140
        #assert(isinstance(fitProblem, COPASI.CFitProblem))   # not CFitProblem in COPASI build 140
        #fitProblem.setRandomizeStartValues(True)
        experimentSet = fitProblem.getParameter("Experiment Set")
        assert(isinstance(experimentSet, COPASI.CExperimentSet))
        assert(experimentSet.getExperimentCount() == 0)

        # first experiment
        experiment = COPASI.CExperiment(dataModel)
        assert(isinstance(experiment, COPASI.CExperiment))
        experiment.setIsRowOriented(True)

        # Use the TemporaryFile context manager for easy clean-up
        tmpExportCSVFile = tempfile.NamedTemporaryFile(delete=False)
        varNameList = list(v.varName for v in vcellOptProblem.referenceVariableList)
        csvString = ", ".join(map(str, varNameList)) + "\n"
        for row in vcellOptProblem.experimentalDataSet.rows:
            csvString += ", ".join(map(str, row.data)) + "\n"
        num_lines = len(vcellOptProblem.experimentalDataSet.rows) + 1
        tmpExportCSVFile.write(csvString)
        tmpExportCSVFile.close()

        experiment.setFileName(str(tmpExportCSVFile.name))
        experiment.setFirstRow(1)
        experiment.setKeyValue("Experiment_1")
        experiment.setLastRow(num_lines)
        experiment.setHeaderRow(1)
        experiment.setSeparator(",")
        experiment.setExperimentType(COPASI.CTaskEnum.timeCourse)
        experiment.setNormalizeWeightsPerExperiment(True)
        vcellReferenceVariableList = vcellOptProblem.referenceVariableList
        assert(isinstance(vcellReferenceVariableList, list))
        num_ref_variables = len(vcellReferenceVariableList)
        experiment.setNumColumns(num_ref_variables)  # one independent (time), all the other dependent

        # experiment object map
        objectMap = experiment.getObjectMap()
        assert (isinstance(objectMap, COPASI.CExperimentObjectMap))
        result = objectMap.setNumCols(num_ref_variables)
        assert result == True

        # map time column to model time
        '''
        <variable type="independent" name="t"/>
        '''
        result = objectMap.setRole(0, COPASI.CExperiment.time)
        assert (result == True)
        assert objectMap.getRole(0) == COPASI.CExperiment.time
        timeReference = model.getValueReference()
        #timeReference = model.getObject(COPASI.CCopasiObjectName("Reference=Time"))
        assert(timeReference != None)
        assert(isinstance(timeReference, COPASI.CCopasiObject))
        objectMap.setObjectCN(0, timeReference.getCN().getString())
        # getObjectCN returns a string whereas getCN returns a CCopasiObjectName
        assert(objectMap.getObjectCN(0) == timeReference.getCN().getString())

        # map rest of data columns as dependent variables
        '''
        <variable type="dependent" name="C_cyt"/>
        <variable type="dependent" name="RanC_cyt"/>
        '''
        for refIndex in range(1, num_ref_variables):  # skip first columnn (time)
            refVar = vcellReferenceVariableList[refIndex]
            assert(isinstance(refVar, VCELLOPT.ReferenceVariable))
            modelValue = getModelValue(model, str(refVar.varName))
            assert(isinstance(modelValue, COPASI.CModelValue))
            objectMap.setRole(refIndex, COPASI.CExperiment.dependent)
            modelValueReference = modelValue.getObject(COPASI.CCopasiObjectName("Reference=Value"))
            assert(isinstance(modelValueReference, COPASI.CCopasiObject))
            print "modelValue CN is "+str(modelValue.getCN())+", modelValueReference CN is "+str(modelValueReference.getCN())
            objectMap.setObjectCN(refIndex, modelValueReference.getCN().getString())

        experimentSet.addExperiment(experiment)       # addExperiment makes a copy
        assert experimentSet.getExperimentCount() == 1
        experiment = experimentSet.getExperiment(0)   # need to get the correct instance
        assert(isinstance(experiment, COPASI.CExperiment))

        #---------------------------------------------------------------------------------------
        # define CFitItems
        #---------------------------------------------------------------------------------------
        '''
        <parameterDescription>
            <parameter name="Kf" low="0.1" high="10.0" init="5.0" scale="5.0"/>
            <parameter name="Kr" low="100.0" high="10000.0" init="500.0" scale="500.0"/>
        </parameterDescription>
        '''
        assert(fitProblem.getOptItemSize() == 0)
        vcellParameterDescriptionList = vcellOptProblem.parameterDescriptionList
        for vcellParam in vcellParameterDescriptionList:
            assert(isinstance(vcellParam, VCELLOPT.ParameterDescription))
            paramModelValue = getModelValue(model, str(vcellParam.name))
            assert(isinstance(paramModelValue, COPASI.CModelValue))
            paramModelValueRef = paramModelValue.getInitialValueReference()
            assert(isinstance(paramModelValueRef, COPASI.CCopasiObject))
            fitItem = COPASI.CFitItem(dataModel)
            assert(isinstance(fitItem, COPASI.CFitItem))
            fitItem.setObjectCN(paramModelValueRef.getCN())
            fitItem.setStartValue(vcellParam.initialValue)
            fitItem.setLowerBound(COPASI.CCopasiObjectName(str(vcellParam.minValue)))
            fitItem.setUpperBound(COPASI.CCopasiObjectName(str(vcellParam.maxValue)))
            # todo: what about scale?
            # add the fit item to the correct parameter group
            optimizationItemGroup = fitProblem.getParameter("OptimizationItemList")
            assert(isinstance(optimizationItemGroup, COPASI.CCopasiParameterGroup))
            optimizationItemGroup.addParameter(fitItem)
            # addParameter makes a copy of the fit item, so we have to get it back
            #fitItem = optimizationItemGroup.getParameter(0)
            #assert(isinstance(fitItem,COPASI.CFitItem))

        model.compileIfNecessary()
        #print optimizationItemGroup.printToString()   # debug anyway, not present in COPASI build 140

        # --------------------------------------------------------------------------------------
        # Run the optimization (N times)
        # --------------------------------------------------------------------------------------
        leastError = 1e8
        paramNames = []
        paramValues = []
        numObjFuncEvals = 0
        numParamsToFit = fitProblem.getOptItemSize()

        # Create intermediate results file
        with open(interresults[0], 'a') as foutput:
            foutput.write(str(vcellOptProblem.numberOfOptimizationRuns)+"\n")
            foutput.close()

        for i in range(0, vcellOptProblem.numberOfOptimizationRuns):
            result = True
            try:
                print ("This can take some time...")
                initialize = (i == 0)
                result = fitTask.processWithOutputFlags(initialize, COPASI.CCopasiTask.NO_OUTPUT)  # NO_OUTPUT
            except:
                print "Unexpected error:", sys.exc_info()[0]
                return 1
            if result == False:
                sys.stderr.write("An error occured while running the Parameter estimation.\n")
                # dataModel.saveModel('test_failed.cps', True)
                sys.stderr.write("fitTask warning: '" + str(fitTask.getProcessWarning()) + "'")
                sys.stderr.write("fitTask error: '" + str(fitTask.getProcessError()) + "'")
                # check if there are additional error messages
                if COPASI.CCopasiMessage.size() > 0:
                    # print the messages in chronological order
                    sys.stderr.write(COPASI.CCopasiMessage.getAllMessageText(True))
                return 1
            currentFuncValue = fitProblem.getSolutionValue()
            print "currFuncValue = " + str(currentFuncValue)
            if (currentFuncValue < leastError) or (i == 0):
                # current run has the smallest error so far
                bestObjectiveFunction = currentFuncValue
                numObjFuncEvals = fitProblem.getFunctionEvaluations()

                paramNames = []
                paramValues = []
                for j in range(0, numParamsToFit):
                    optItem = fitProblem.getOptItemList()[j]
                    paramName = optItem.getObject().getCN().getRemainder().getRemainder().getElementName(0)
                    paramNames.append(paramName)
                    paramValue = fitProblem.getSolutionVariables().get(j)
                    paramValues.append(paramValue)
                    print "param " + paramName + " --> " + str(paramValue)
                assert isinstance(currentFuncValue, float)
                leastError = currentFuncValue
            with open(interresults[0], 'a') as foutput:
                foutput.write(str(i)+" "+str(leastError)+" "+str(numObjFuncEvals)+"\n")
                foutput.close()

        #result = dataModel.saveModel('test_succeeded.cps', True)
        #assert(result==True)

        optRun = VCELLOPT.OptRun()
        optRun.optProblem = vcellOptProblem
        optRun.statusMessage = "complete"
        optRun.status = VCELLOPT.OptRunStatus.Complete
        optResultSet = VCELLOPT.OptResultSet()
        optResultSet.numFunctionEvaluations = numObjFuncEvals
        optResultSet.objectiveFunction = leastError
        optResultSet.optParameterValues = []
        paramValueDict = dict(zip(paramNames, paramValues))
        for paramName in paramNames:
            optParameterValue = VCELLOPT.OptParameterValue(paramName, paramValueDict[paramName])
            optResultSet.optParameterValues.append(optParameterValue)
        optRun.optResultSet = optResultSet

        protocol_factory = TBinaryProtocol.TBinaryProtocolFactory
        optRunBlob = serialize(vcellOptProblem, protocol_factory=protocol_factory())
        transportOut = TTransport.TMemoryBuffer()
        protocolOut = TBinaryProtocol.TBinaryProtocol(transportOut)
        optRun.write(protocolOut)
        with open(resultFile, 'wb') as foutput:
            foutput.write(transportOut.getvalue())
            foutput.close()
        with open(interresults[0], 'a') as foutput:
            foutput.write("done")
            foutput.close()
        # writeOptSolverResultSet(resultFile, leastError, numObjFuncEvals, paramNames, paramValues)

    except:
        e_info = sys.exc_info()
        traceback.print_exception(e_info[0], e_info[1], e_info[2], file=sys.stdout)
        sys.stderr.write("exception: "+str(e_info[0])+": "+str(e_info[1])+"\n")
        sys.stderr.flush()
        if interresults[0] != None:
            with open(interresults[0], 'a') as foutput:
                foutput.write("exception: "+str(e_info[0])+": "+str(e_info[1]))
                foutput.close()
        return -1
    else:
        return 0
    finally:
        os.unlink(tmpExportCSVFile.name)
def decoder_boolean(self, data):
    return deserialize(test_ttypes.Bool(), data).data
def _func(self, cache, cache_type, TType, key):
    value = cache.get_value(key)
    if value and cache_type != self.ProcessCache and TType:
        value = deserialize(TType(), value)
    _exit = True if value != None else False
    return (value, _exit)
# print("parsing "+str(sys.argv[i])) # if sys.argv[i] == "-simreffile": # simreffile = sys.argv[i+1] # i = i + 1 # i = i + 1 #if (args.resourcedif): # ex.resourcedir = args.resourcedif if (simreffile): print("opening simreffile " + simreffile) f_simref = open(simreffile, "rb") blob_simref = f_simref.read() print("read " + str(len(blob_simref)) + " bytes from " + simreffile) f_simref.close() simref = SimulationDataSetRef() protocol_factory = TBinaryProtocol.TBinaryProtocolFactory # deserialize(visMesh, blob_vismesh, protocol_factory = protocol_factory()) print("starting deserialization") deserialize(simref, blob_simref, protocol_factory=protocol_factory()) print("done with deserialization") ex.initialTimer = QtCore.QTimer(ex) ex.initialTimer.setSingleShot(True) def load(): ex.loadSim(simref) ex.initialTimer.singleShot(50, load) ex.show()
def deserialize_msg(msg):
    kafka_message = KafkaMessage()
    deserialize(kafka_message, msg.value)
    return kafka_message
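A matching producer-side helper would be the mirror image; this sketch assumes the same KafkaMessage type and the default binary Thrift protocol used by deserialize() elsewhere in these examples:

def serialize_msg(kafka_message):
    # Hypothetical inverse of deserialize_msg(): encode a KafkaMessage
    # back into the bytes form expected as a Kafka message value.
    return serialize(kafka_message)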
def decoder_simple_list(self, data):
    return deserialize(
        test_ttypes.SimpleList(),
        data,
    ).ints
def decoder_bytes(self, data):
    return deserialize(test_ttypes.Bin(), data).data
def fromBytes(self, bytes):
    deserialize(self.message, bytes)
def decoder_integer(self, data):
    return deserialize(test_ttypes.Int(), data).data
def decoder_float(self, data):
    return deserialize(test_ttypes.Float(), data).data
def loads(self, data, **kw):
    return deserialize(kw.get('thrift_base_type'), data)
def decoder_struct_map(self, data):
    return deserialize(
        struct_map_ttypes.StructMap(),
        data,
    ).__dict__
ws.send(packer)
retData = ws.recv()
print retData

# Send the first message
b = Action()
b.userId = 12321
b.gameId = 11111
b.clientId = "H5_2.0_weixin.weixin.0-hall20418.weixin.dasfs"
bodyBin = serialize(b)
bodyDict = {
    "cmd": "helloUT",
    "subproto": [
        "param/thrift/user.Action"
    ],
    "param": bodyBin
}
body = msgpack.packb(bodyDict, use_bin_type=True)
ws.send(body)
print time.strftime('%Y-%m-%d %H:%M:%S')
body = ws.recv()
body = body[4:]
msgDict = msgpack.unpackb(body)
print msgDict
binStr = msgDict.get("result")
protoObj = Action()
x = deserialize(protoObj, binStr)
print x
print x.userId, x.gameId, x.clientId, x.resCmd
ws.close()
def decoder_struct_10(self, data):
    return deserialize(
        test_ttypes.Struct10(),
        data,
    ).__dict__
def decoder_points_list(self, data):
    return deserialize(
        test_ttypes.PointsList(),
        data,
    ).points
def main():
    #sys.setrecursionlimit(100000)
    try:
        parser = argparse.ArgumentParser()
        list_of_meshtypes = ["chombovolume", "chombomembrane", "finitevolume",
                             "movingboundary", "comsolvolume"]
        parser.add_argument("meshtype",
                            help="type of visMesh processing required and index file generated",
                            choices=list_of_meshtypes)
        parser.add_argument("domainname", help="domain name for output mesh")
        parser.add_argument("vismeshfile",
                            help="filename of input visMesh to be processed "
                                 "(thrift serialization via TBinaryProtocol)")
        parser.add_argument("vtkfile",
                            help="filename of output vtk mesh (VTK XML unstructured grid)")
        parser.add_argument("indexfile",
                            help="filename of output ChomboIndexData or FiniteVolumeIndexData "
                                 "(thrift serialization via TBinaryProtocol)")
        args = parser.parse_args()

        f_vismesh = open(args.vismeshfile, "rb")
        blob_vismesh = f_vismesh.read()
        print("read " + str(len(blob_vismesh)) + " bytes from " + args.vismeshfile)
        f_vismesh.close()
        visMesh = VisMesh()
        protocol_factory = TBinaryProtocol.TBinaryProtocolFactory
        # deserialize(visMesh, blob_vismesh, protocol_factory = protocol_factory())
        print("starting deserialization")
        deserialize(visMesh, blob_vismesh, protocol_factory=protocol_factory())
        print("done with deserialization")

        if args.meshtype == "chombovolume":
            writeChomboVolumeVtkGridAndIndexData(visMesh, args.domainname,
                                                 args.vtkfile, args.indexfile)
        elif args.meshtype == "chombomembrane":
            writeChomboMembraneVtkGridAndIndexData(visMesh, args.domainname,
                                                   args.vtkfile, args.indexfile)
        elif args.meshtype == "finitevolume":
            writeFiniteVolumeSmoothedVtkGridAndIndexData(visMesh, args.domainname,
                                                         args.vtkfile, args.indexfile)
        elif args.meshtype == "movingboundary":
            writeMovingBoundaryVolumeVtkGridAndIndexData(visMesh, args.domainname,
                                                         args.vtkfile, args.indexfile)
        elif args.meshtype == "comsolvolume":
            writeComsolVolumeVtkGridAndIndexData(visMesh, args.domainname,
                                                 args.vtkfile, args.indexfile)
        else:
            raise Exception("meshtype " + str(args.meshtype) + " not supported")
    except:
        e_info = sys.exc_info()
        traceback.print_exception(e_info[0], e_info[1], e_info[2], file=sys.stdout)
        sys.stderr.write("exception: " + str(e_info[0]) + ": " + str(e_info[1]) + "\n")
        sys.stderr.flush()
        sys.exit(-1)
    else:
        sys.exit(0)
def decoder_array(self, data):
    return deserialize(test_ttypes.Array(), data).data
def decoder_map(self, data):
    return deserialize(test_ttypes.Map(), data).data