# Wrapper around pyplot.legend: translates a simple 'position' keyword into
# the loc/bbox_to_anchor pair needed to place the legend just outside the axes.
def __legend_wrapped(*args, position=None, position_offset=0.02, **kwargs):
    if position == 'right':
        t = 1.0 + position_offset
        __orig_legend(*args, loc='center left', bbox_to_anchor=(t, 0.5), **kwargs)
    elif position == 'left':
        t = -position_offset
        __orig_legend(*args, loc='center right', bbox_to_anchor=(t, 0.5), **kwargs)
    elif position == 'above':
        t = 1.0 + position_offset
        __orig_legend(*args, loc='lower center', bbox_to_anchor=(0.5, t), **kwargs)
    elif position == 'below':
        t = -position_offset
        __orig_legend(*args, loc='upper center', bbox_to_anchor=(0.5, t), **kwargs)
    else:
        if position is not None:
            warning(f"pyplot.legend got an unknown position '{position}'. Try 'right' or 'above'.")
        __orig_legend(*args, **kwargs)
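# A minimal usage sketch, assuming this wrapper is installed over pyplot.legend
# elsewhere in the module (i.e. __orig_legend = plt.legend; plt.legend = __legend_wrapped):
#
#   import matplotlib.pyplot as plt
#   plt.plot([0, 1], [0, 1], label='calibration')
#   plt.legend(position='right')   # legend sits just outside the right spine
#   plt.show()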
def handle_connection(self):
    # Get query information: the client sends a pickled (args, kwargs) pair.
    data = self.conn.recv(4096)
    args, kwargs = pickle.loads(data)
    self.status('Querying database.')
    kwargs['returns'] = 'cache_path'
    try:
        cache_path = query_db(*args, **kwargs)
        self.status('Preparing result.')
        with open(cache_path, 'rb') as f:
            data = f.read()
        self.status('Compressing...')
        orig_size = len(data)
        before = time.time()
        data = compress(data, 1)
        after = time.time()
        dt = int(after - before)
        self.status(f'Compressed to {len(data)*100//orig_size}% in {fmt_time(dt)}')
        self.status('Sending result')
        self.send_message(m_type='preamble', size=len(data))
        self.conn.sendall(data)
    except Exception as e:
        # If anything goes wrong, send the exception back to the client.
        warning(f'An error occurred: {e}')
        self.send_message(m_type='exception', exception=str(e))
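# handle_connection streams len(data) bytes after a size-carrying preamble, so
# the receiving side must loop on recv until the full payload has arrived. A
# minimal, protocol-agnostic helper for that step (the name is hypothetical;
# how the preamble itself is framed depends on send_message, not shown here):
def recv_exactly(sock, size, chunk_size=65536):
    chunks = []
    remaining = size
    while remaining > 0:
        chunk = sock.recv(min(remaining, chunk_size))
        if not chunk:
            raise ConnectionError('connection closed mid-transfer')
        chunks.append(chunk)
        remaining -= len(chunk)
    return b''.join(chunks)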
def read(self) -> dict:
    with open(self.path) as indexf:
        fc = indexf.read()
    try:
        index = json.loads(fc)
    except json.JSONDecodeError:
        # Surface the offending file contents before re-raising.
        warning(f'Error encountered while parsing index "{fc}".')
        raise
    return index
def remove(self, key: Union[str, list]):  # Union imported from typing
    if isinstance(key, list):
        # Recurse over a list of keys, removing each in turn.
        for k in key:
            self.remove(k)
    else:
        path = self.index[key].path
        os.remove(path)
        hsh = self.get_hashed_name(key, returns='hash')
        warning(f'Deleted object {hsh[:3]}...{hsh[-3:]} from cache.')
        self.index.remove(key)
def async_get(args_and_kwargs):
    # Worker entry point: unpack the shared status queue plus the GuessLog arguments.
    q, args, kwargs = args_and_kwargs
    if q:
        set_q(q)
    set_worker()
    try:
        rv = GuessLog(*args, **kwargs)
    except GenericRheoprocException as e:
        if kwargs.get('ignore_exceptions'):
            warning('ignored exception')
            warning(e)
            return None
        else:
            raise
    return rv
def filt(t, v, freq=10, order=10, bt='low'):
    '''apply a frequency filter (Butterworth) to a timeseries signal'''
    dt = np.average(np.diff(t))
    sample_freq = 1.0 / dt
    # Normalise the cutoff frequency (or band edges) by the Nyquist frequency.
    if isinstance(freq, list):
        w = [f / (sample_freq * 0.5) for f in freq]
    else:
        w = freq / (sample_freq * 0.5)
    if np.max(w) >= 1.0:
        warning('filter freq out of range: not applying filter')
        return v
    b, a = signal.butter(order, w, btype=bt)
    output = signal.filtfilt(b, a, v)
    return output
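# A brief usage sketch: low-pass filtering a noisy 5 Hz sine sampled at
# roughly 1 kHz (the values here are illustrative only).
#
#   import numpy as np
#   t = np.linspace(0.0, 1.0, 1000)
#   v = np.sin(2.0 * np.pi * 5.0 * t) + 0.1 * np.random.randn(t.size)
#   smoothed = filt(t, v, freq=10, order=4, bt='low')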
def remove_standard_wobble(pos, lc, motor, method, **kwargs):
    if method == 'any':
        # Try each removal method in turn, falling back to the raw signal.
        for method in ['filter', 'subtract']:
            try:
                return remove_standard_wobble(pos, lc, motor, method, exceptions=True)
            except (SQLiteError, WobbleError):
                pass
        warning('No filtering method available: ignoring.')
        return lc
    elif method == 'subtract':
        return subtract_standard_wobble(pos, lc, motor, **kwargs)
    elif method == 'filter':
        return filter_standard_wobble(pos, lc, motor, **kwargs)
    elif method == 'dont':
        return lc
    else:
        warning(f"Unrecognised wobble removal method '{method}'; ignoring.")
        return lc
def subtract_standard_wobble(pos, lc, motor, exceptions=False):
    try:
        waveform = load_wobble_waveform(motor)
    except Exception as e:
        warning(f'Could not load waveform: {e}')
        if exceptions:
            raise
        else:
            return lc
    standard_pos, standard_lc = waveform
    standard_pos, standard_lc = stretch(pos, standard_pos, standard_lc)
    # Align the standard waveform in phase with the normalised signal, subtract
    # it, then restore the original mean and amplitude.
    nlc = norm(lc)
    phase = match_phase(pos, nlc, standard_pos, standard_lc)
    standard_lc = np.interp(pos, np.subtract(standard_pos, phase), standard_lc)
    subtracted_normalised = np.subtract(nlc, standard_lc)
    subtracted = unnorm(subtracted_normalised, np.mean(lc), np.max(lc) - np.min(lc))
    timestamp('standard wobble subtracted.')
    return subtracted
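# norm and unnorm are not shown in this listing. A plausible minimal pair,
# inferred only from the call sites above (normalise to zero mean and unit
# range; restore a given mean and range), might look like:
def norm(x):
    x = np.asarray(x, dtype=np.float64)
    return (x - np.mean(x)) / (np.max(x) - np.min(x))

def unnorm(x, mean, rng):
    return np.multiply(x, rng) + mean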
def filter_standard_wobble(pos, lc, motor, exceptions=False):
    try:
        frequencies = load_wobble_frequencies(motor)
    except Exception as e:
        warning(f'Could not load frequencies: {e}')
        if exceptions:
            raise
        else:
            return lc
    # Notch out each wobble frequency with a narrow band-stop filter.
    sample_freq = 1.0 / np.nanmean(np.diff(pos))
    filtered = lc
    w = 0.2
    for freq in frequencies:
        band = [freq - w, freq + w]
        try:
            sos = signal.butter(3, band, btype='bandstop', output='sos', fs=sample_freq)
            filtered = signal.sosfiltfilt(sos, filtered)
        except Exception as e:
            raise WobbleError('Error when filtering frequencies') from e
    timestamp('standard wobble filtered.')
    return filtered
timestamp(f'processing {len(results)} logs over {processes} processes.')
data_dir = '/'.join(database.split('/')[:-1])

if processes > 1:
    mp.set_start_method('fork', force=True)
    m = mp.Manager()
    q = m.Queue()
    # Drain worker status messages on a background thread.
    printer_thread = threading.Thread(target=printer, args=(q, pb), daemon=True)
    printer_thread.start()
else:
    q = None

list_of_args_kwargs = [(q, (dict(res), data_dir), kwargs) for res in results]

if processes == 1:
    warning('Only using one core: this could take a while.')
    for a_kw in list_of_args_kwargs:
        r = async_get(a_kw)
        pb.update()
        if r:
            processed_results[r.ID] = r
else:
    with mp.Pool(processes=processes) as pool:
        for r in pool.imap_unordered(async_get, list_of_args_kwargs):
            pb.update()
            if r:
                processed_results[r.ID] = r

if processes > 1:
    # Sentinel value 0 tells the printer thread to stop.
    q.put(0)
    printer_thread.join()
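# The printer function is not shown here. A minimal sketch consistent with the
# driver above (block on the queue, print messages, exit on the 0 sentinel);
# it assumes a tqdm-like progress bar with a .write() method:
def printer(q, pb):
    while True:
        msg = q.get()
        if msg == 0:
            break
        pb.write(str(msg))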
class CacheSingleton:

    def __init__(self, *args, **kwargs):
        self.check_paths()
        self.index = CacheIndex(*args, **kwargs)
        self.clean()
        timestamp(f"Cache at '{self.path}' initialised.")

    def clean(self):
        # Drop any cache entries whose dependencies or expiry mark them invalid.
        marked_for_removal = list()
        for key, obj_data in self.index.items():
            if obj_data.is_invalid():
                marked_for_removal.append(key)
        self.remove(marked_for_removal)
        n = len(marked_for_removal)
        if n:
            s = 's' if n > 1 else ''
            warning(f'Removed {n} invalid object{s} from cache.')

    def check_paths(self):
        if not os.path.isdir(self.path):
            os.makedirs(self.path)
        if not os.path.isfile(f'{self.path}/index.json'):
            with open(f'{self.path}/index.json', 'w') as indexf:
                json.dump(dict(), indexf)

    def get_hashed_name(self, name: str, returns='both'):
        hsh = hashlib.sha1(name.encode()).hexdigest()
        name = f'{self.path}/{hsh}.pickle'
        if returns == 'both':
            return name, hsh
        elif returns == 'hash':
            return hsh
        elif returns == 'name':
            return name
        raise ValueError(f"Unknown returns value '{returns}'. Valid values are 'hash', 'name', and 'both' (the default).")

    def get_path_in_cache_of(self, key):
        return self.index[key].path

    def save_object(self, key, obj, depends_on=None, expires=None,
                    expires_in_seconds=None, expires_in_days=None):
        name, hsh = self.get_hashed_name(key)
        # Reuse the existing index entry if there is one; otherwise create a new one.
        if key not in self.index:
            obj_data = CachedObjectData(path=name)
        else:
            obj_data = self.index[key]
        obj_data.path = name
        if depends_on:
            if isinstance(depends_on, str):
                depends_on = [depends_on]
            obj_data.depends_on = depends_on
        if expires is not None:
            obj_data.expires = expires
        elif expires_in_seconds is not None:
            obj_data.expires = time.time() + expires_in_seconds
        elif expires_in_days is not None:
            obj_data.expires = time.time() + (expires_in_days * 60. * 60. * 24.)
        self.index[key] = obj_data.as_dict()
        timestamp(f'Saving object {hsh[:3]}...{hsh[-3:]} to cache.')
        with open(name, 'wb') as pf:
            pickle.dump(obj, pf, protocol=4)

    def load_object(self, key):
        if key not in self.index:
            warning('Key not in index.')
            return None
        if '--fresh' in sys.argv:
            warning('Clearing cached version of requested object.')
            self.remove(key)
            return None
        obj_data = self.index[key]
        if reason := obj_data.is_invalid():
            warning(reason)
            self.remove(key)
            return None
        try:
            with open(obj_data.path, 'rb') as pf:
                o = pickle.load(pf)
        except Exception as e:
            warning(f'Error loading cached file: {e}')
            self.remove(key)
            return None
        return o
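# A brief usage sketch (CACHE and expensive_computation are illustrative
# names; the cache is assumed to be instantiated once per process):
#
#   CACHE = CacheSingleton()
#   result = CACHE.load_object('experiment-42')
#   if result is None:
#       result = expensive_computation()
#       CACHE.save_object('experiment-42', result, expires_in_days=7)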
def warning(self, *args, **kwargs):
    if not self.very_quiet:
        warning(f'[{self.ID}]', *args, **kwargs)
strain.append(np.average(dt) * strainrate[0])
for dti, gdi in zip(dt, strainrate[1:]):
    strain.append(dti * gdi + strain[-1])
# strain is in C/Gs - unitless!

position = list()
position.append(np.average(dt) * speed[0])  # speed in rot/s
for dti, spi in zip(dt, speed[1:]):
    position.append(dti * spi + position[-1])
# position in rotations

try:
    loadcell = remove_standard_wobble(position, loadcell, self.motor, standard_wobble_method)
except GenericRheoprocException:
    warning(f'Error removing wobbling in log {self.ID}')
    raise

# LC value seems to be hovering around 2**31, halfway up a 32-bit integer.
# Somewhere I've made a mistake converting an unsigned int.
load_torque = apply_calibration(loadcell, speed, self.override_calibration, self.date)
stress = ns.divide(load_torque, 2.0 * np.pi * RIN * RIN * (0.001 * self.fill_depth))
viscosity = ns.divide(stress, strainrate)
expected_viscosity = get_material_viscosity(
    self.material, np.array(temperature, dtype=np.float64))

self.timestamp('Processing complete')
data = {