def _save_series_id(self):
    """Save the series id to the json file that tracks the last series run
    by this user on a per-system basis.

    The file lives at ``<working_dir>/users/<login>.json`` and maps system
    name -> series id. Access is serialized with a sibling ``.lock`` file.
    """

    sys_vars = system_variables.get_vars(True)
    sys_name = sys_vars['sys_name']

    json_file = self.pav_cfg.working_dir/'users'
    json_file /= '{}.json'.format(utils.get_login())

    lockfile_path = json_file.with_suffix('.lock')

    with LockFile(lockfile_path):
        data = {}
        # Load any existing per-system series data. A missing file or an
        # empty/corrupt one simply means we start from an empty mapping.
        try:
            with json_file.open('r') as json_series_file:
                try:
                    data = json.load(json_series_file)
                except json.decoder.JSONDecodeError:
                    # File was empty, therefore json couldn't be loaded.
                    pass
        except FileNotFoundError:
            # File hadn't been created yet.
            pass

        # Record this series for the current system and write the file
        # back out exactly once (the original duplicated this write in
        # both the success and file-not-found paths).
        data[sys_name] = self.sid
        with PermissionsManager(json_file, self.pav_cfg['shared_group'],
                                self.pav_cfg['umask']), \
                json_file.open('w') as json_series_file:
            json_series_file.write(json.dumps(data))
def _do_lock_concurrency(self, pav_cfg, test):
    """Acquire the concurrency lock for this scheduler, if necessary.

    :param pav_cfg: The pavilion configuration.
    :param pavilion.pav_config.test.TestRun test: The pavilion test
        to lock concurrency for.
    """

    # Concurrent tests need no serialization; bail out immediately.
    if test.config[self.name]['concurrent'] in ('false', 'False'):
        return None

    # Most schedulers shouldn't have to do this.
    lock_path = pav_cfg.working_dir / '{s.name}_sched.lock'.format(s=self)

    concurrency_lock = LockFile(
        lock_path,
        group=pav_cfg.shared_group,
        # Expire after 24 hours.
        expires_after=60 * 60 * 24,
    )

    test.status.set(
        STATES.SCHEDULED,
        "Test is non-concurrent, and waiting on the "
        "concurrency lock for scheduler {s.name}.".format(s=self))

    concurrency_lock.lock()

    return concurrency_lock
def __init__(self, file_name, max_bytes=0, backup_count=0,
             lock_timeout=10, encoding=None):
    """Initialize the Locking File Handler. This will attempt to open the
    file and use the lockfile, just to check permissions.

    :param Union(str,Path) file_name: The path to the log file.
    :param int max_bytes: The limit of how much data can go in a single
        log file before rolling over. Zero denotes no limit.
    :param int backup_count: How many backups (logfile.1, etc) to keep.
    :param int lock_timeout: Wait this long before declaring a lock
        deadlock, and giving up.
    :param str encoding: The file encoding to use for the log file.
    """

    self.file_name = Path(file_name)
    self.max_bytes = max_bytes
    self.backup_count = backup_count
    self.mode = 'a'
    self.encoding = encoding
    self.lock_timeout = lock_timeout

    # The lock file sits beside the log file, named '<logname>.lock'.
    lock_path = self.file_name.with_name(self.file_name.name + '.lock')
    self.lock_file = LockFile(lock_path, timeout=self.lock_timeout)

    super().__init__()

    # Fail fast: acquire the lock and open the log file once so any
    # permission problems surface at construction time.
    with self.lock_file, \
            self.file_name.open(self.mode, encoding=self.encoding):
        pass