def add_subject_permanent_trial_record_store(self, subject_id):
    """Ensure the per-subject session-record directory exists.

    :param subject_id: subject identifier; used as the directory name
        under BCoreData/SubjectData/SessionRecords.

    The original checked os.path.exists() and then called os.mkdir(),
    which is racy (TOCTOU) and crashes if another process creates the
    directory in between; os.makedirs(exist_ok=True) is atomic with
    respect to that race and also creates any missing parents.
    """
    record_dir = os.path.join(get_base_path(), 'BCoreData', 'SubjectData',
                              'SessionRecords', subject_id)
    os.makedirs(record_dir, exist_ok=True)
def save_server(self):
    """Persist this server object to BCoreData/ServerData/db.BServer.

    Any existing db.BServer is first copied into backupDBs (with a
    timestamped name) and deleted, then a deep copy of self — with its
    StationConnections stripped — is pickled to db.BServer.
    """
    srcDir = os.path.join(get_base_path(), 'BCoreData', 'ServerData')
    desDir = os.path.join(get_base_path(), 'BCoreData', 'ServerData', 'backupDBs')
    # directories are created as a set; a missing server_data_path means
    # none of the tree exists yet
    if not os.path.isdir(self.server_data_path):
        # assume that these are never made alone...
        self._setup_paths()
    if os.path.isfile(os.path.join(srcDir, 'db.BServer')):  # old db exists
        print(('Old db.Bserver found. moving to backup'))
        # NOTE(review): this constructs a *fresh* BServerLocal, so
        # old.creation_time is "now", not the creation time of the db
        # being backed up — the "standardLoad" comment suggests a load
        # was intended here. Confirm before relying on backup names.
        old = BServerLocal()  # standardLoad to old
        desName = 'db_' + get_time_stamp(old.creation_time) + '.BServer'
        shutil.copyfile(
            os.path.join(srcDir, 'db.BServer'),  # source
            os.path.join(desDir, desName)  # destination
        )
        print(
            "BSERVER:BSERVERLOCAL:SAVE_SERVER:Moved to backup... deleting old copy"
        )
        os.remove(os.path.join(srcDir, 'db.BServer'))
    # there might be some attributes that need to be deleted
    # delete them here before continuing
    print("BSERVER:BSERVERLOCAL:SAVE_SERVER:Cleaning and pickling object")
    # deep-copy so live station connections are never pickled
    cleanedBServer = copy.deepcopy(self)
    cleanedBServer.StationConnections = {}
    with open(os.path.join(srcDir, 'db.BServer'), 'wb') as f:
        pickle.dump(cleanedBServer, f)
def load_backup(self):
    """
    Use this only if you specifically require the deletion of current
    db.BServer and replacement with an older backup. Only the latest
    back up is used; the restored backup is then removed from backupDBs.
    """
    desDir = os.path.join(get_base_path(), 'BCoreData', 'ServerData')
    srcDir = os.path.join(get_base_path(), 'BCoreData', 'ServerData',
                          'backupDBs')
    # delete the original database
    os.remove(os.path.join(desDir, 'db.BServer'))
    # find the latest file in the backupDBs.
    # BUGFIX: os.listdir returns bare filenames; the original passed them
    # straight to os.path.getctime, which resolves against the *current
    # working directory* and fails (or lies). Join with srcDir first.
    newestBkup = max(
        (os.path.join(srcDir, name) for name in os.listdir(srcDir)),
        key=os.path.getctime)
    shutil.copyfile(
        newestBkup,  # source
        os.path.join(desDir, 'db.BServer')  # destination
    )
    # delete the newest backup (it has been promoted to the live db)
    os.remove(newestBkup)
def __init__(self):
    """Initialize a fresh local BServer with empty station/subject state."""
    self.server_id = 0
    self.server_ip = 'http://localhost'
    self.server_data_path = os.path.join(
        get_base_path(), 'BCoreData', 'ServerData')
    # record when this server object came into existence
    self.creation_time = time.time()
    # no stations, subjects or assignments yet
    self.stations, self.subjects = [], []
    self.assignments = {}
    print(
        "BSERVER:BSERVERLOCAL:__INIT__:Initialized new BServerLocal object"
    )
def load_from_dict(self, data):
    """Populate this station from a serialized dict and return self.

    Identity fields come from *data*; network identity (MAC, IP, port)
    is refreshed from the local machine instead.
    """
    self.station_version = Ver(data['station_version'])
    self.creation_time = datetime.datetime.strptime(
        data['creation_time'], DATETIME_TO_STR)
    # copy the plain identity fields straight across
    for field in ('station_id', 'station_name', 'station_location'):
        setattr(self, field, data[field])
    self.station_path = os.path.join(
        get_base_path(), 'BCoreData', 'StationData', str(self.station_id))
    # these reflect the current host, not the serialized record
    self.mac_address = get_mac_address()
    self.ip_address = get_ip_addr()
    self.port = get_port()
    return self
def load_server():
    """Load the server object from the standard db.BServer location.

    :returns: the deserialized server object.
    :raises RuntimeError: if db.BServer does not exist.
    """
    # use standard location for path,
    # make sure to never modify server here:
    dbLoc = os.path.join(get_base_path(), 'BCoreData', 'ServerData',
                         'db.BServer')
    if os.path.isfile(dbLoc):
        with open(dbLoc, 'rb') as f:
            # BUGFIX: db.BServer is written with pickle.dump (see
            # save_server and the path-taking load_server); json.load on
            # that binary file would always fail.
            server = pickle.load(f)
        print('BServer loaded')
    else:
        raise RuntimeError(
            'db.Server not found. Ensure it exists before calling loadServer'
        )
    return server
def load_server(path=None):
    """Unpickle and return a server database.

    :param path: explicit path to the db file; when falsy, the standard
        BCoreData/ServerData/db.BServer location is used.
    :returns: the unpickled server object.
    :raises RuntimeError: if no database file exists at the resolved path.
    """
    # if path not provided, use standard location for path,
    # make sure to never modify server here:
    dbLoc = path or os.path.join(
        get_base_path(), 'BCoreData', 'ServerData', 'db.BServer')
    if not os.path.isfile(dbLoc):
        raise RuntimeError('db.Server not found. Ensure it exists before \ calling loadServer')
    with open(dbLoc, 'rb') as f:
        server = pickle.load(f)
    print("BSERVER:BSERVERLOCAL:LOAD_SERVER:Loading server")
    return server
def _setup_paths(server):
    """Create the full BCoreData directory tree used by the server.

    :param server: unused; kept for interface compatibility with callers.

    The original chained unconditional os.mkdir calls, which raise
    FileExistsError on any re-run; each directory is now created only
    when missing, making the function idempotent (consistent with
    BServerLocal._setup_paths).
    """
    base = get_base_path()
    # parents are listed before children so plain mkdir always succeeds
    tree = [
        ('BCoreData',),
        ('BCoreData', 'ServerData'),
        ('BCoreData', 'StationData'),
        ('BCoreData', 'SubjectData'),
        ('BCoreData', 'ChangeParams'),
        # 'replacedDBs' lives in 'ServerData'
        ('BCoreData', 'ServerData', 'backupDBs'),
        # session and compiled records live in 'SubjectData'
        ('BCoreData', 'SubjectData', 'SessionRecords'),
        ('BCoreData', 'SubjectData', 'CompiledTrialRecords'),
    ]
    for parts in tree:
        path = os.path.join(base, *parts)
        if not os.path.isdir(path):
            os.mkdir(path)
def get_standard_server_path():
    """Return the canonical path of the server database file.

    BUGFIX: the filename was 'dB.BServer', but every reader/writer in
    this codebase (save_server, load_server, load_backup) uses
    'db.BServer' — on a case-sensitive filesystem the old value could
    never match the real file.
    """
    return os.path.join(get_base_path(), 'BCoreData', 'ServerData',
                        'db.BServer')
def create_base_compiled_record_file(self, subject_id):
    """Create the seed compiled-records pickle for a subject, if absent.

    :param subject_id: subject identifier; used both to detect an
        existing compiled file and to name the new one.

    If any file in CompiledTrialRecords already contains subject_id in
    its name, nothing is done. Otherwise a base record dict is pickled
    to '<subject_id>.1-0.compiled_records'. The original built the dict
    with ~30 copy-pasted "cR[k] = []; cR[k].append(...)" pairs; the
    field list is now data-driven. Keys, initial values and insertion
    order are unchanged.
    """
    compiled_folder_path = os.path.join(get_base_path(), 'BCoreData',
                                        'SubjectData',
                                        'CompiledTrialRecords')
    compiled_file_for_subject = [
        f for f in os.listdir(compiled_folder_path) if subject_id in f
    ]
    if compiled_file_for_subject:
        return
    # session/trial counters start at 0; everything else starts as [None]
    cR = {"session_number": [0], "trial_number": [0]}
    none_fields = (
        # Available in Station.do_trials()
        "station_id", "station_name", "station_version_number",
        "num_ports_in_station", "trial_start_time", "trial_stop_time",
        # Available in Subject.do_trial()
        "subject_id", "subject_version_number", "protocol_name",
        "protocol_version_number", "current_step", "current_step_name",
        "num_steps", "criterion_met",
        # Available in TrainingStep.do_trial()
        "trial_manager_name", "session_manager_name", "criterion_name",
        "reinforcement_manager_name", "trial_manager_class",
        "session_manager_class", "criterion_class",
        "reinforcement_manager_class", "trial_manager_version_number",
        "session_manager_version_number", "criterion_version_number",
        "reinforcement_manager_version_number", "graduate",
        # Available in TrialManager.do_trial()
        "errored_out", "manual_quit", "correct",
    )
    for field in none_fields:
        cR[field] = [None]
    cR['LUT'] = []
    cR['compiled_details'] = {}
    cR_file_name = '{0}.1-0.compiled_records'.format(subject_id)
    with open(os.path.join(compiled_folder_path, cR_file_name), 'wb') as f:
        pickle.dump(cR, f, pickle.HIGHEST_PROTOCOL)
def _setup_paths(self, force_delete=False):
    """Create the server's BCoreData directory tree if missing.

    :param force_delete: when True, the entire BCoreData tree is removed
        first, so a pristine tree is rebuilt from scratch.
    """
    if force_delete:
        import shutil
        shutil.rmtree(os.path.join(get_base_path(), 'BCoreData'))
    # parents precede children, so a guarded mkdir per entry suffices
    required = [
        ('BCoreData',),
        ('BCoreData', 'ServerData'),
        ('BCoreData', 'ServerData', 'backupDBs'),
        ('BCoreData', 'SubjectData'),
        ('BCoreData', 'SubjectData', 'SessionRecords'),
        ('BCoreData', 'SubjectData', 'CompiledTrialRecords'),
    ]
    for parts in required:
        target = os.path.join(get_base_path(), *parts)
        if not os.path.exists(target):
            os.mkdir(target)