def test_Transport_MCNP():
    """Test the default object creation for MCNP input."""
    # Load the user's Coeus input file.
    inputs = UserInputs(coeusInputPath=INPUTFNAME)
    inputs.read_inputs()

    # Build the transport object from the MCNP example deck.
    trans = Transport(PATH + inputs.transInput)

    # Basic attributes.
    assert_equal(trans.transPath, PATH + "test_mcnp.inp")
    assert_equal(trans.code, "mcnp6")

    # Sampled variables parsed from the deck.
    expected_samp = {
        'h1': ' 1:5> decimal',
        'h2': ' 1:5> decimal',
        'h3': ' 1:5> decimal',
        'h4': ' 1:5> decimal',
        'h5': ' 1:5> decimal',
        'h6': ' 1:5> decimal',
        'd1': ' 1:3> integer',
        'r1': ' 0.5:3> decimal',
        'mat1': ' 1,2,3,4,5,6,7,8,9,10,11,12> material',
    }
    assert_equal(trans.sampVars, expected_samp)

    # Correlated variables parsed from the deck.
    expected_corr = {
        'dens1':
        ' -2.7,-7.8,-6.5,-8.9,-7.3,-2.7,-16.6,-19.3,-18.7,-11.3,-1.16500e-09,101> mat1',
        'nu1': ' 1,2,3,4,5,6,7,8,9,10,11,12> mat1',
    }
    assert_equal(trans.corrVars, expected_corr)

    # First and last characters of the stored input text.
    assert_equal(
        trans.transInput[0:78],
        "C ****************************************************************************"
    )
    assert_equal(trans.transInput[-12:], "2.010000e+01")
def __init__(self, c_instance):
    """Create every sub-component of the control surface and register it.

    Components are constructed and registered in a fixed order; the
    registration list is later used to dispatch updates to each unit.
    """
    self.__c_instance = c_instance
    self.__components = []

    # Construct all units in the same order they are registered.
    self.__main_display = MainDisplay(self)
    self.__main_display_controller = MainDisplayController(
        self, self.__main_display)
    self.__time_display = TimeDisplay(self)
    self.__software_controller = SoftwareController(self)
    self.__transport = Transport(self)
    self.__channel_strips = [
        ChannelStrip(self, i) for i in range(NUM_CHANNEL_STRIPS)
    ]
    self.__master_strip = MasterChannelStrip(self)
    self.__channel_strip_controller = ChannelStripController(
        self, self.__channel_strips, self.__master_strip,
        self.__main_display_controller)

    # Register everything, preserving the original ordering.
    self.__components.extend([
        self.__main_display,
        self.__main_display_controller,
        self.__time_display,
        self.__software_controller,
        self.__transport,
    ])
    self.__components.extend(self.__channel_strips)
    self.__components.extend([
        self.__master_strip,
        self.__channel_strip_controller,
    ])

    # Modifier-key state flags.
    self.__shift_is_pressed = False
    self.__option_is_pressed = False
    self.__ctrl_is_pressed = False
    self.__alt_is_pressed = False

    # Hardware/firmware bookkeeping.
    self.is_pro_version = False
    self._received_firmware_version = False
    self._refresh_state_next_time = 0
def __init__(self, c_instance):
    """Initialise the NanoKontrol control surface and its hardware units."""
    # NOTE(review): attribute names appear in already name-mangled form
    # (self._NanoKontrol__x), which suggests this code was decompiled from
    # a class named NanoKontrol. They are kept verbatim: rewriting them as
    # self.__x would only be equivalent if the enclosing class is in fact
    # named NanoKontrol — confirm before changing.
    self._NanoKontrol__c_instance = c_instance
    # Track the currently selected track and its selected device.
    self._NanoKontrol__current_track = self.song().view.selected_track
    self._NanoKontrol__current_device = self._NanoKontrol__current_track.view.selected_device
    # React to tracks being added/removed in the Live set.
    self.song().add_tracks_listener(self._NanoKontrol__tracks_changed)
    # Hardware sub-units: transport buttons, encoders, sliders and pads.
    self._NanoKontrol__transport_unit = Transport(self)
    self._NanoKontrol__encoder_unit = Encoders(self, True)
    self._NanoKontrol__slider_unit = SliderSection(self)
    self._NanoKontrol__pad_unit = Pads(self)
def __init__(self, id = 0, name = "test face", ip_address = None, node = None):
    """Create a Face attached to a node.

    id - this is the id of the face in the node
    name - this is the name of the face
    ip_address - passed through to the Transport that backs this face
    node - the node this face belongs to

    The face's Transport is the networking interface of this face.
    """
    # NOTE(review): parameter `id` shadows the builtin; left unchanged to
    # preserve the public signature.
    self.__id = id
    self.__name = name
    self.__node = node
    # The transport links this face to the network layer.
    self.__transport = Transport(ip_address = ip_address, face = self)
def connect(self):
    """Open the robot connection and start a daemon keep-alive thread.

    Side effects: sets self.robot, opens the network connection, and
    spawns a background thread running self.keep_connection.
    """
    # Create the connection object.
    self.robot = Transport(HOST, PORT)
    # Register the robot feedback callbacks (speed and pose).
    self.robot.subscribe_speed(print_speed)
    self.robot.subscribe_pose(print_pose)
    # Open the connection.
    self.robot.connect()
    # daemon=True in the constructor replaces the deprecated
    # Thread.setDaemon(True) call (removed-in-3.13 API).
    keep = threading.Thread(target=self.keep_connection,
                            name='connection',
                            daemon=True)
    keep.start()
def handle(self):
    """Per-connection handler; must override the server's handle().

    Starts a Transport worker for the client and polls its status,
    shutting everything down once the connection is no longer healthy.
    """
    addr = self.client_address
    client = self.request
    log.i('%s connected' % str(addr))
    transport = Transport(addr, client)
    # Setting .daemon replaces the deprecated setDaemon(True) call.
    transport.daemon = True
    transport.start()
    # Poll the transport until it reports an unhealthy connection,
    # then shut it down and close the client socket.
    while True:
        if not transport.check_status():
            transport.shutdown()
            client.close()
            log.w('%s disconnected' % str(addr))
            break
        sleep(self.__CHECK_CONNECTION_INTERVAL)
def __init__(self, transport=None):
    """Initialise the MMP handler and start its reader thread.

    :param transport: transport to use; when None a default Transport()
                      is created (a warning is logged).
    """
    self.alive = True
    self.transport = transport
    # Default message flags byte, used if None is specified in method calls
    self.flags = 0
    # `is None` instead of `== None` (correct identity check, PEP 8).
    if transport is None:
        # use default transport
        # logging.warning replaces the deprecated logging.warn alias.
        logging.warning("MMP is using default transport")
        self.transport = Transport()
    # keep track of how many errors have occured
    self.errors_rx = 0
    self.errors_timeout = 0
    self.errors_log = 0
    # keep track of how many messages and log strings have been received
    self.num_log = 0
    self.num_msg = 0
    # init reader thread (runs self.readerThread in the background)
    self.reader = threading.Thread(target=self.readerThread)
    self.reader.start()
def __init__(self):
    """Determine the host address from argv, or prompt interactively.

    NOTE(review): self.s (a Transport) is only created in the interactive
    branch; when the host comes from argv no transport is set up here —
    presumably done elsewhere, verify against callers.
    """
    if len(sys.argv) >= 2:
        # Host supplied on the command line.
        self.host = sys.argv[1]
    else:
        # No argument given: ask the user and build a default transport.
        self.host = input("Enter host ip address: ")
        self.s = Transport()
"""Tests transport framework""" from REST import REST from Transport import Transport TRANSPORT = Transport(debug=True) # HTTP Code tests def test_defined_response(): """Test HTTP expected response""" assert TRANSPORT.check_http_response(200, 200) is True def test_bad_response(): """Tests unexpected response is returns as False""" assert TRANSPORT.check_http_response(999) is False def test_good_http_responses(): """Checks good responses are True""" demo_api = REST(url='postman-echo.com') for code in (200, 201, 204): assert TRANSPORT.check_http_response( demo_api.get("/status/%i" % code).status_code) is True def test_bad_http_responses(): """Checks good responses are False""" demo_api = REST(url='postman-echo.com') for code in (400, 401, 403, 404, 422): assert TRANSPORT.check_http_response(
#coding=utf-8 ''' demo ''' from __future__ import print_function from Transport import Transport from Config import HOST, PORT def print_speed(speed): print(u'[机器人速度] x:', speed[0], ' y:', speed[1], ' w:', speed[2]) def print_pose(pose): print(u'[机器人位置] x:', pose[0], ' y:', pose[1], ' yaw:', pose[2]) if __name__ == "__main__": # 创建连接 robot = Transport(HOST, PORT) # 设置机器人信息反馈回调函数 robot.subscribe_speed(print_speed) robot.subscribe_pose(print_pose) # 启动连接 robot.connect() # 保持连接 robot.keep_alive(5)
def mostcalcs(self):
    '''
    Get total latent and/or sensible and/or wastewater heat fluxes for domestic & industrial
    Assuming spatial units are already consistent and in shapefiles of identical projection
    :param qfConfig:
    :param qfParams:
    :param qfDataSources:
    :param disaggregated:
    :param outputFolder:
    :param logFolder:
    :return:
    '''
    # Get partitioning and heat of combustion values
    partitions = Partitions(self.config, self.parameters)
    # Proportion of possible fluxes (latent, wastewater, sensible based on
    # what user selected) to include in results
    props = partitions.fluxProp
    startDates = self.config.dt_start
    endDates = self.config.dt_end
    # Set up UTC time bins @ 30 min intervals, one date_range per
    # start/end pair, concatenated into a single index.
    # NOTE(review): pd.datetime is a deprecated alias removed in recent
    # pandas versions — this only runs against older pandas; confirm the
    # pinned dependency before upgrading.
    for i in range(0, len(startDates), 1):
        bins = pd.date_range(
            pd.datetime.strptime(
                startDates[i].strftime('%Y-%m-%d %H:%M'),
                '%Y-%m-%d %H:%M') + timedelta(seconds=1800),
            pd.datetime.strptime(
                endDates[i].strftime('%Y-%m-%d %H:%M'),
                '%Y-%m-%d %H:%M'),
            tz='UTC',
            freq='30Min')
        if i == 0:
            timeBins = bins
        else:
            timeBins = timeBins.append(bins)
    # Make some aliases for the output layer for brevity
    outShp = self.ds.outputAreas_spat['shapefile']
    outFeatIds = self.ds.outputAreas_spat['featureIds']
    outEpsg = self.ds.outputAreas_spat['epsgCode']
    # Populate temporal disaggregation objects
    # Building energy daily loadings (temporal disaggregation from Annual to daily)
    dailyE = DailyEnergyLoading(
        self.parameters.city,
        useUKholidays=self.parameters.useUKholidays)
    # Each component has 1..n data sources. Add each one, looping over them
    [dailyE.addLoadings(den['profileFile']) for den in self.ds.dailyEnergy]
    # Building energy diurnal cycles (temporal disaggregation from daily to
    # half-hourly). These are provided in local time (London)
    diurnalE = EnergyProfiles(
        self.parameters.city,
        use_uk_holidays=self.parameters.useUKholidays,
        customHolidays=self.parameters.customHolidays)
    [diurnalE.addDomElec(de['profileFile']) for de in self.ds.diurnDomElec]
    [diurnalE.addDomGas(dg['profileFile']) for dg in self.ds.diurnDomGas]
    [diurnalE.addEconomy7(e7['profileFile']) for e7 in self.ds.diurnEco7]
    [diurnalE.addIndElec(ie['profileFile']) for ie in self.ds.diurnIndElec]
    [diurnalE.addIndGas(ig['profileFile']) for ig in self.ds.diurnIndGas]
    # Diurnal traffic patterns
    diurnalT = TransportProfiles(
        self.parameters.city,
        use_uk_holidays=self.parameters.useUKholidays,
        customHolidays=self.parameters.customHolidays)
    [
        diurnalT.addProfiles(tr['profileFile'])
        for tr in self.ds.diurnalTraffic
    ]
    # Workday metabolism profile
    hap = HumanActivityProfiles(
        self.parameters.city,
        use_uk_holidays=self.parameters.useUKholidays,
        customHolidays=self.parameters.customHolidays)
    [hap.addProfiles(ha['profileFile']) for ha in self.ds.diurnMetab]
    # Spatial population data (residential and workplace)
    pop = Population()
    pop.setOutputShapefile(outShp, outEpsg, outFeatIds)
    [
        pop.injectResPop(rp['file'], makeUTC(rp['startDate']),
                         rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['resPop']
    ]
    [
        pop.injectWorkPop(wp['file'], makeUTC(wp['startDate']),
                          wp['attribute'], wp['EPSG'])
        for wp in self.processedDataList['workPop']
    ]
    # Spatial building energy-use data (already disaggregated upstream)
    bldgEnergy = EnergyUseData()
    bldgEnergy.setOutputShapefile(outShp, outEpsg, outFeatIds)
    [
        bldgEnergy.injectDomesticElec(rp['file'], makeUTC(rp['startDate']),
                                      rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['domElec']
    ]
    [
        bldgEnergy.injectDomesticGas(rp['file'], makeUTC(rp['startDate']),
                                     rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['domGas']
    ]
    [
        bldgEnergy.injectEconomy7Elec(rp['file'], makeUTC(rp['startDate']),
                                      rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['domEco7']
    ]
    [
        bldgEnergy.injectIndustrialElec(rp['file'], makeUTC(rp['startDate']),
                                        rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['indElec']
    ]
    [
        bldgEnergy.injectIndustrialGas(rp['file'], makeUTC(rp['startDate']),
                                       rp['attribute'], rp['EPSG'])
        for rp in self.processedDataList['indGas']
    ]
    # Transport: fuel consumption profile plus per-area fuel-use layers
    fc = FuelConsumption(self.ds.fuelConsumption[0]['profileFile'])
    trans = Transport(fc, self.parameters)
    trans.setOutputShapefile(outShp, outEpsg, outFeatIds)
    [
        trans.injectFuelConsumption(rp['file'], makeUTC(rp['startDate']),
                                    rp['EPSG'])
        for rp in self.processedDataList['transport']
    ]
    ds = None
    # Get daily factors
    df = DailyFact(self.parameters.useUKholidays)  # Use UK holidays
    # Get area of each output feature, along with its identifier
    areas = (bldgEnergy.domGas.getOutputFeatureAreas())
    # Run the QF model for each 30-minute time bin and write each result
    for tb in timeBins:
        WH = QF(areas.index.tolist(), tb.to_pydatetime(), 1800, bldgEnergy,
                diurnalE, dailyE, pop, trans, diurnalT, hap, df, props,
                self.parameters.heatOfCombustion)
        # Write out the full time step to a file
        WH.to_csv(os.path.join(self.modelOutputPath,
                               tb.strftime(self.dateStructure)),
                  index_label='featureId')
    # Write log files to disk for traceability
    bldgEnergy.logger.writeFile(
        os.path.join(self.logPath, 'EnergyUseSpatial.txt'))
    pop.logger.writeFile(
        os.path.join(self.logPath, 'PopulationSpatial.txt'))
    trans.logger.writeFile(
        os.path.join(self.logPath, 'transportSpatial.txt'))
    hap.logger.writeFile(
        os.path.join(self.logPath, 'humanActivityProfiles.txt'))
    diurnalT.logger.writeFile(
        os.path.join(self.logPath, 'diurnalTransport.txt'))
    dailyE.logger.writeFile(os.path.join(self.logPath, 'dailyEnergy.txt'))
def create_instance(c_instance):
    # Factory entry point: build the control-surface object for the host.
    return Transport(c_instance)
def disaggregate(qfDataSources, qfParams, outputFolder):
    '''
    Function that performs all spatial disaggregation of GreaterQF inputs,
    and writes to output files. Returns dict of information that amounts to
    a new data sources file.
    :param qfDataSources: GreaterQF Data sources object (contains locations and metadata regarding sharefiles)
    :param qfParams: GreaterQF config object (contains assumptions where these are needed)
    :param outputFolder: string path to folder in which to store disaggregated shapefiles
    :return: dict of {dataName:{shapefileName, startDate, field(s)ToUse}
    '''
    returnDict = {}
    outShp = qfDataSources.outputAreas_spat['shapefile']
    outFeatIds = qfDataSources.outputAreas_spat['featureIds']
    outEpsg = qfDataSources.outputAreas_spat['epsgCode']

    # Population data (may be the same as that used for disaggregation, but
    # still needs specifying explicitly). These get used to disaggregate
    # energy, so population must be completely populated first.
    pop = Population()
    pop.setOutputShapefile(outShp, outEpsg, outFeatIds)

    returnDict['resPop'] = []
    returnDict['workPop'] = []
    returnDict['indElec'] = []
    returnDict['indGas'] = []
    returnDict['domElec'] = []
    returnDict['domGas'] = []
    returnDict['domEco7'] = []
    returnDict['transport'] = []

    # Raw residential population data: disaggregate and save
    for rp in qfDataSources.resPop_spat:
        pop.setResPop(rp['shapefile'],
                      startTime=makeUTC(rp['startDate']),
                      attributeToUse=rp['attribToUse'],
                      inputFieldId=rp['featureIds'],
                      weight_by=None,
                      epsgCode=rp['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'resPop_starting' + rp['startDate'].strftime('%Y-%m-%d') + '.shp')
        (ds, attrib) = pop.getResPopLayer(makeUTC(rp['startDate']))
        saveLayerToFile(ds, outFile, pop.getOutputLayer().crs(),
                        'Res pop scaled')
        returnDict['resPop'].append({
            'file': outFile,
            'EPSG': rp['epsgCode'],
            'startDate': rp['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Raw workplace population data: disaggregate and save
    for wp in qfDataSources.workPop_spat:
        pop.setWorkPop(wp['shapefile'],
                       startTime=makeUTC(wp['startDate']),
                       attributeToUse=wp['attribToUse'],
                       inputFieldId=wp['featureIds'],
                       weight_by=None,
                       epsgCode=wp['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'WorkPop_starting' + wp['startDate'].strftime('%Y-%m-%d') + '.shp')
        (ds, attrib) = pop.getWorkPopLayer(makeUTC(wp['startDate']))
        saveLayerToFile(ds, outFile, pop.getOutputLayer().crs(),
                        'Work pop scaled')
        returnDict['workPop'].append({
            'file': outFile,
            'EPSG': wp['epsgCode'],
            'startDate': wp['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Set up building energy use data: total energy use for each area.
    bldgEnergy = EnergyUseData()

    # Industrial electricity, downscaled by workplace population
    for ie in qfDataSources.indElec_spat:
        # Note: Population data is used for the output features. This means
        # that population changing over time has to be incorporated.
        (workpop, workpopattrib) = pop.getWorkPopLayer(
            makeUTC(ie['startDate']))
        bldgEnergy.setOutputShapefile(
            workpop,
            workpop.dataProvider().crs().authid().split(':')[1], outFeatIds)
        bldgEnergy.setIndustrialElec(ie['shapefile'],
                                     startTime=makeUTC(ie['startDate']),
                                     attributeToUse=ie['attribToUse'],
                                     inputFieldId=ie['featureIds'],
                                     weight_by=workpopattrib,
                                     epsgCode=ie['epsgCode'])
        (ds, attrib) = bldgEnergy.getIndustrialElecLayer(
            makeUTC(ie['startDate']))
        outFile = os.path.join(
            outputFolder,
            'IndElec_starting' + ie['startDate'].strftime('%Y-%m-%d') + '.shp')
        saveLayerToFile(ds, outFile, bldgEnergy.getOutputLayer().crs(),
                        'ind elec gas downscaled')
        returnDict['indElec'].append({
            'file': outFile,
            'EPSG': ie['epsgCode'],
            'startDate': ie['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Industrial gas, downscaled by workplace population
    for ig in qfDataSources.indGas_spat:
        (output, workpopattrib) = pop.getWorkPopLayer(
            makeUTC(ig['startDate']))  # Disaggregate by workplace pop
        bldgEnergy.setOutputShapefile(
            output,
            output.dataProvider().crs().authid().split(':')[1], outFeatIds)
        bldgEnergy.setIndustrialGas(ig['shapefile'],
                                    startTime=makeUTC(ig['startDate']),
                                    attributeToUse=ig['attribToUse'],
                                    inputFieldId=ig['featureIds'],
                                    weight_by=workpopattrib,
                                    epsgCode=ig['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'IndGas_starting' + ig['startDate'].strftime('%Y-%m-%d') + '.shp')
        # BUGFIX: was makeUTC(ie['startDate']) — a stale reference to the
        # previous loop's variable; must use this loop's ig.
        (ds, attrib) = bldgEnergy.getIndustrialGasLayer(
            makeUTC(ig['startDate']))
        saveLayerToFile(ds, outFile, bldgEnergy.getOutputLayer().crs(),
                        'ind gas downscaled')
        returnDict['indGas'].append({
            'file': outFile,
            'EPSG': ig['epsgCode'],
            'startDate': ig['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Domestic gas, downscaled by residential population
    for dg in qfDataSources.domGas_spat:
        (output, respopattrib) = pop.getResPopLayer(
            makeUTC(dg['startDate']))  # Disaggregate by residential pop
        bldgEnergy.setOutputShapefile(
            output,
            output.dataProvider().crs().authid().split(':')[1], outFeatIds)
        bldgEnergy.setDomesticGas(dg['shapefile'],
                                  startTime=makeUTC(dg['startDate']),
                                  attributeToUse=dg['attribToUse'],
                                  inputFieldId=dg['featureIds'],
                                  weight_by=respopattrib,
                                  epsgCode=dg['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'DomGas_starting' + dg['startDate'].strftime('%Y-%m-%d') + '.shp')
        # BUGFIX: was makeUTC(ie['startDate']) — stale loop variable; use dg.
        (ds, attrib) = bldgEnergy.getDomesticGasLayer(
            makeUTC(dg['startDate']))
        saveLayerToFile(ds, outFile, bldgEnergy.getOutputLayer().crs(),
                        'dom gas downscaled')
        returnDict['domGas'].append({
            'file': outFile,
            'EPSG': dg['epsgCode'],
            'startDate': dg['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Domestic elec, downscaled by residential population
    for de in qfDataSources.domElec_spat:
        # BUGFIX: was makeUTC(dg['startDate']) — stale reference to the
        # domestic-gas loop variable; use this loop's de.
        (output, respopattrib) = pop.getResPopLayer(
            makeUTC(de['startDate']))  # Disaggregate by residential pop
        if isinstance(respopattrib, list):
            respopattrib = respopattrib[0]
        bldgEnergy.setOutputShapefile(
            output,
            output.dataProvider().crs().authid().split(':')[1], outFeatIds)
        bldgEnergy.setDomesticElec(de['shapefile'],
                                   startTime=makeUTC(de['startDate']),
                                   attributeToUse=de['attribToUse'],
                                   inputFieldId=de['featureIds'],
                                   weight_by=respopattrib,
                                   epsgCode=de['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'DomElec_starting' + de['startDate'].strftime('%Y-%m-%d') + '.shp')
        # BUGFIX: was makeUTC(ie['startDate']) — stale loop variable; use de.
        (ds, attrib) = bldgEnergy.getDomesticElecLayer(
            makeUTC(de['startDate']))
        saveLayerToFile(ds, outFile, bldgEnergy.getOutputLayer().crs(),
                        'dom elec downscaled')
        returnDict['domElec'].append({
            'file': outFile,
            'EPSG': de['epsgCode'],
            'startDate': de['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Domestic elec economy 7, downscaled by residential population
    for e7 in qfDataSources.eco7_spat:
        (output, respopattrib) = pop.getResPopLayer(
            makeUTC(e7['startDate']))  # Disaggregate by residential pop
        if isinstance(respopattrib, list):
            respopattrib = respopattrib[0]
        bldgEnergy.setOutputShapefile(
            output,
            output.dataProvider().crs().authid().split(':')[1], outFeatIds)
        bldgEnergy.setEconomy7Elec(e7['shapefile'],
                                   startTime=makeUTC(e7['startDate']),
                                   attributeToUse=e7['attribToUse'],
                                   inputFieldId=e7['featureIds'],
                                   weight_by=respopattrib,
                                   epsgCode=e7['epsgCode'])
        outFile = os.path.join(
            outputFolder,
            'Eco7_starting' + e7['startDate'].strftime('%Y-%m-%d') + '.shp')
        # BUGFIX: was makeUTC(ie['startDate']) — stale loop variable; use e7.
        (ds, attrib) = bldgEnergy.getEconomy7ElecLayer(
            makeUTC(e7['startDate']))
        saveLayerToFile(ds, outFile, bldgEnergy.getOutputLayer().crs(),
                        'Economy 7 downscaled')
        returnDict['domEco7'].append({
            'file': outFile,
            'EPSG': e7['epsgCode'],
            'startDate': e7['startDate'],
            'attribute': attrib,
            'featureIds': outFeatIds
        })

    # Set up transport fuel consumption in each output area
    fc = FuelConsumption(qfDataSources.fuelConsumption[0]['profileFile'])
    t = Transport(fc, qfParams)
    t.setOutputShapefile(outShp, outEpsg, outFeatIds)
    for tr in qfDataSources.transport_spat:
        sf = t.addTransportData(shapefile=tr['shapefile'],
                                startTime=makeUTC(tr['startDate']),
                                epsgCode=tr['epsgCode'],
                                roadTypeField=tr['class_field'],
                                roadTypeNames=tr['road_types'],
                                speedConversionFactor=tr['speed_multiplier'],
                                inputIdField=tr['featureIds'],
                                totalAADTField=tr['AADT_total'],
                                vAADTFields=tr['AADT_fields'],
                                speedDataField=tr['speed_field'])
        outFile = os.path.join(
            outputFolder, 'DailyFuelUse_starting' +
            tr['startDate'].strftime('%Y-%m-%d') + '.shp')
        # NOTE(review): CRS taken from bldgEnergy's output layer, which was
        # configured in the energy loops above — confirm this is intended
        # when no energy datasets are present.
        saveLayerToFile(sf, outFile, bldgEnergy.getOutputLayer().crs(),
                        'Fuel use daily')
        returnDict['transport'].append({
            'file': outFile,
            'EPSG': tr['epsgCode'],
            'startDate': tr['startDate'],
            'featureIds': outFeatIds
        })

    # Pickle the dictionary as a manifest file
    with open(os.path.join(outputFolder, 'MANIFEST'), 'wb') as outpickle:
        pickle.dump(returnDict, outpickle)

    # Return the output folder containing all this stuff
    return outputFolder