def check_recording(self, values):
    """Service start/stop recording requests and log *values* while recording.

    Ensures the sensor data directory exists, opens a new store directory
    with an ``out.csv`` target when a start request arrives, closes the
    target and announces the new record on a stop request, and appends one
    comma-joined row of *values* per call while recording is active.
    """
    if not os.path.exists(self.sensor_data_dir):
        os.makedirs(self.sensor_data_dir)
    # Ignore a start request if a recording is already in progress.
    if self.listener.get_start_recording_request() and not self.recording:
        self.listener.reset_start_recording_request()
        self.recording = True
        print('start recording.......')
        self.next_store_dir = next_file_name(prefix='store', suffix='',
                                             path=self.sensor_data_dir)
        path, self.new_store_name = os.path.split(self.next_store_dir)
        if not os.path.exists(self.next_store_dir):
            os.makedirs(self.next_store_dir)
        self.target = open(self.next_store_dir + '/out.csv', 'w')
    if self.listener.get_stop_recording_request():
        self.listener.reset_stop_recording_request()
        # BUG FIX: only close/announce when actually recording; otherwise
        # self.target may not exist yet and .close() raises AttributeError.
        if self.recording:
            self.recording = False
            self.target.close()
            self.announce_new_record()
    if self.recording:
        print('recording....')
        print(self.sensors_dir + self.name)
        self.target.write(','.join(values) + '\n')
def check_recording(self, values):
    """Poll the listener for recording requests and persist *values*.

    On a start request a fresh store directory is allocated and its
    ``out.csv`` opened for writing; on a stop request the file is closed
    and the new record announced. While recording, every call writes one
    CSV row built from *values*.
    """
    if not os.path.exists(self.sensor_data_dir):
        os.makedirs(self.sensor_data_dir)

    # Begin a new session only when none is in progress.
    if self.listener.get_start_recording_request() and not self.recording:
        self.listener.reset_start_recording_request()
        self.recording = True
        print('start recording.......')
        self.next_store_dir = next_file_name(
            prefix='store', suffix='', path=self.sensor_data_dir)
        _, self.new_store_name = os.path.split(self.next_store_dir)
        if not os.path.exists(self.next_store_dir):
            os.makedirs(self.next_store_dir)
        self.target = open(self.next_store_dir + '/out.csv', 'w')

    # Tear the session down when a stop request is pending.
    if self.listener.get_stop_recording_request():
        self.listener.reset_stop_recording_request()
        self.recording = False
        self.target.close()
        self.announce_new_record()

    if self.recording:
        print('recording....')
        print(self.sensors_dir + self.name)
        self.target.write(','.join(values) + '\n')
def run_swarm_ng(working_dir, sensors_dir, sensor, store, swarmconfig):
    """Run a NuPIC swarm over the given sensor store and persist its output.

    Allocates the next ``swarm<N>`` directory under the store's ``swarms/``
    directory, points the stream source at the generated egg file, prepares
    the swarm input, runs the permutations runner, and dumps both the
    resulting model params and the swarm config as ``.pkl`` and ``.json``.

    Returns the base name of the swarm directory that was created.
    """
    store_dir = sensors_dir + sensor + '/stores/' + store + '/'
    # BUG FIX: store_dir already ends with '/'; '/swarms/' produced '//'.
    swarms_dir = store_dir + 'swarms/'
    swarm_dir = next_file_name(prefix='swarm', suffix='', path=swarms_dir)
    path, swarm = os.path.split(swarm_dir)
    swarm_dir = swarm_dir + '/'
    egg_file = swarm_dir + "eggs.csv"
    store_file = store_dir + 'out.csv'
    swarmconfig['streamDef']['streams'][0]['source'] = 'file://' + egg_file
    print(swarmconfig['streamDef']['streams'][0]['source'])
    if not os.path.exists(swarms_dir):
        os.makedirs(swarms_dir)
    if not os.path.exists(swarm_dir):
        os.makedirs(swarm_dir)
    fieldNames = [f['fieldName'] for f in swarmconfig['includedFields']]
    fieldTypes = [f['fieldType'] for f in swarmconfig['includedFields']]
    prepare_swarm(field_names=fieldNames, field_types=fieldTypes,
                  egg_file=egg_file, store_file=store_file)
    nupic_config = permutations_runner.runWithConfig(
        swarmconfig,
        {'maxWorkers': 6, 'overwrite': True},
        outDir=swarm_dir,
        outputLabel='',
        permWorkDir=swarm_dir,
        verbosity=1)

    def _dump(obj, base_name):
        # Persist *obj* twice: binary pickle for reloading, JSON for humans.
        # BUG FIX: HIGHEST_PROTOCOL is a binary protocol, so the pickle
        # file must be opened in 'wb' (text 'w' corrupts it on Windows).
        # 'with' guarantees the handles close even if a dump raises.
        with open(swarm_dir + base_name + '.pkl', 'wb') as pkl_file:
            pickle.dump(obj, pkl_file, pickle.HIGHEST_PROTOCOL)
        with open(swarm_dir + base_name + '.json', 'w') as json_file:
            json.dump(obj, json_file)

    _dump(nupic_config, 'model_params')
    _dump(swarmconfig, 'swarm_config')
    return swarm
def run_swarm_ng(working_dir, sensors_dir, sensor, store, swarmconfig):
    """Execute a swarm for *store* of *sensor* and save its artifacts.

    Creates the next available swarm directory, rewrites the stream source
    to the generated egg file, runs the permutations runner, then writes
    the resulting model params and the swarm config out as both pickle
    and JSON files. Returns the new swarm directory's base name.
    """
    store_dir = '%s%s/stores/%s/' % (sensors_dir, sensor, store)
    swarms_dir = store_dir + '/swarms/'
    swarm_dir = next_file_name(prefix='swarm', suffix='', path=swarms_dir)
    _, swarm = os.path.split(swarm_dir)
    swarm_dir += '/'

    egg_file = swarm_dir + "eggs.csv"
    store_file = store_dir + 'out.csv'
    source_uri = 'file://' + egg_file
    swarmconfig['streamDef']['streams'][0]['source'] = source_uri
    print(source_uri)

    for directory in (swarms_dir, swarm_dir):
        if not os.path.exists(directory):
            os.makedirs(directory)

    included = swarmconfig['includedFields']
    prepare_swarm(field_names=[fld['fieldName'] for fld in included],
                  field_types=[fld['fieldType'] for fld in included],
                  egg_file=egg_file, store_file=store_file)

    nupic_config = permutations_runner.runWithConfig(
        swarmconfig, {'maxWorkers': 6, 'overwrite': True},
        outDir=swarm_dir, outputLabel='', permWorkDir=swarm_dir, verbosity=1)

    # Persist both artifacts, each in pickle and JSON form.
    for base_name, payload in (('model_params', nupic_config),
                               ('swarm_config', swarmconfig)):
        handle = open(swarm_dir + base_name + '.pkl', 'w')
        pickle.dump(payload, handle, pickle.HIGHEST_PROTOCOL)
        handle.close()
        handle = open(swarm_dir + base_name + '.json', 'w')
        json.dump(payload, handle)
        handle.close()

    return swarm