Example #1
 def load_object_or_run_script(self, key, script_path):
     o = self.load_object(key)
     if o is not None:
         return o
     timestamp(f'Object not in cache: running script \'{script_path}\'.')
     run_other_script(script_path)
     o = self.load_object(key)
     if o is None:
         raise ScriptCacheError(f'Expected script "{script_path}" to create cached object "{key}", but it was not created.')
     return o
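
For context, a call might look like the following sketch; the cache key and script path are invented placeholders, and Cache() (instantiated elsewhere in these examples) is assumed to be the class this method belongs to.

cache = Cache()
# hypothetical key and script path, for illustration only
waveform = cache.load_object_or_run_script('wobble-waveform-MOTOR_A', './scripts/measure_wobble.py')
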
Example #2
def save_wobble_waveform(waveform, motor='ANY', date=None, database='../data/.database.db'):
    if date is None:
        today = datetime.today()
        date = float(today.strftime('%Y%m%d.%H%M%S'))

    waveform = [[float(b) for b in a] for a in waveform]  # coerce to plain floats so the blob pickles cleanly
    assert len(waveform) == 2 and len(waveform[0]) == len(waveform[1]), 'waveform must be two equal-length rows (positions, values)'
    timestamp(f'Saving wobble waveform ({len(waveform[0])} points) to database, date={date}')
    waveform_blob = pickle.dumps(waveform)
    insert_with_blob(
        'WOBBLE',
        (date, motor, 'waveform', waveform_blob),
        ('DATE', 'MOTOR', 'TYPE', 'DATA'),
        database)
Example #3
def save_wobble_frequencies(frequencies, motor='ANY', date=None, database='../data/.database.db'):
    if date is None:
        today = datetime.today()
        date = float(today.strftime('%Y%m%d.%H%M%S'))

    frequencies = [f for f in frequencies if f == f]  # f != f only for NaN, so this drops NaNs
    frequencies = sorted(set(frequencies))  # de-duplicate and sort ascending
    timestamp(f'Saving wobble frequencies ({frequencies}) to database, date={date}')
    frequencies_blob = pickle.dumps(tuple(frequencies))
    insert_with_blob(
        'WOBBLE',
        (date, motor, 'frequencies', frequencies_blob),
        ('DATE', 'MOTOR', 'TYPE', 'DATA'),
        database)
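
A minimal usage sketch for the two savers above; the motor name and numbers are invented, and the waveform is the expected pair of equal-length rows (positions, load-cell values).

positions = [0.0, 0.1, 0.2, 0.3]
loadcell = [1.02, 0.98, 1.01, 0.99]
save_wobble_waveform([positions, loadcell], motor='MOTOR_A')               # 'MOTOR_A' is a placeholder
save_wobble_frequencies([0.5, 1.0, float('nan'), 1.0], motor='MOTOR_A')    # NaNs and duplicates are dropped before saving
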
Example #4
 def run(self):
     with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
         s.bind(('', PORT))
         s.listen()
         timestamp(f'Listening on {PORT}')
         self.running = True
         while self.running:
             try:
                 self.conn, self.addr = s.accept()
                 self.handle_connection()
             except KeyboardInterrupt:
                 break
             except Exception as e:
                 timestamp(f'Error while handling connection: {e}')
             finally:
                 # always release the client socket, even if handling raised
                 if self.conn is not None:
                     self.conn.close()
                     self.conn = None
Example #5
def get_plot_name(subplot_name=None, ext='.pdf'):
    n = inspect.stack()[1].filename
    d = os.path.dirname(n)
    loc = '.' if not d else os.path.relpath(d, '.')
    n = os.path.splitext(os.path.basename(n))[0]  # calling script's name without its extension

    if subplot_name:
        name = f'../img/{loc}/{n}-{subplot_name}{ext}'
    else:
        name = f'../img/{loc}/{n}{ext}'

    loc = os.path.dirname(name)
    if not os.path.isdir(loc):
        timestamp(f'Creating previously absent directory "{loc}"')
        os.makedirs(loc)  # create intermediate directories too; os.mkdir fails when '../img' itself is missing
    timestamp(f'Plotting "{name}"')

    return name
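
A usage sketch, assuming a matplotlib figure fig and a calling script named viscosity.py (both invented for illustration):

fig.savefig(get_plot_name())             # e.g. ../img/<script dir>/viscosity.pdf
fig.savefig(get_plot_name('flowcurve'))  # e.g. ../img/<script dir>/viscosity-flowcurve.pdf
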
Example #6
def subtract_standard_wobble(pos, lc, motor, exceptions=False):
    try:
        waveform = load_wobble_waveform(motor)
    except Exception as e:
        warning(f'Could not load waveform: {e}')
        if exceptions:
            raise
        else:
            return lc

    standard_pos, standard_lc = waveform
    standard_pos, standard_lc = stretch(pos, standard_pos, standard_lc)
    nlc = norm(lc)
    # align the standard waveform in phase with the normalised measurement, resample it onto pos, then subtract in normalised space
    phase = match_phase(pos, nlc, standard_pos, standard_lc)
    standard_lc = np.interp(pos, np.subtract(standard_pos, phase), standard_lc)
    subtracted_normalised = np.subtract(nlc, standard_lc)
    subtracted = unnorm(subtracted_normalised, np.mean(lc), np.max(lc) - np.min(lc))
    timestamp('Standard wobble subtracted.')
    return subtracted
Example #7
def filter_standard_wobble(pos, lc, motor, exceptions=False):
    try:
        frequencies = load_wobble_frequencies(motor)
    except Exception as e:
        warning(f'Could not load frequencies: {e}')
        if exceptions:
            raise
        else:
            return lc

    maxfreq = 1/np.nanmean(np.diff(pos))  # sampling rate implied by the position spacing; passed to butter as fs
    filtered = lc
    w = 0.2  # half-width of each stop band
    for freq in frequencies:
        band = [freq-w, freq+w]
        try:
            sos = signal.butter(3, band, btype='bandstop', output='sos', fs=maxfreq)
            filtered = signal.sosfiltfilt(sos, filtered)
        except Exception as e:
            raise WobbleError('Error when filtering frequencies') from e
    timestamp('Standard wobble filtered.')
    return filtered
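
Both wobble-correction helpers above take the same position and load-cell arrays; a hypothetical call chain (the motor name is a placeholder) might be:

lc_corrected = subtract_standard_wobble(pos, lc, motor='MOTOR_A')
lc_corrected = filter_standard_wobble(pos, lc_corrected, motor='MOTOR_A', exceptions=True)  # raise instead of returning the input unchanged
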
Example #8
    def save_object(self, key, obj, depends_on=None, expires=None, expires_in_seconds=None, expires_in_days=None):
        name, hsh = self.get_hashed_name(key)

        if key not in self.index:
            obj_data = CachedObjectData(path=name)

            if depends_on:
                if isinstance(depends_on, str):
                    depends_on = [depends_on]
                obj_data.depends_on = depends_on

            if expires is not None:
                obj_data.expires = expires
            elif expires_in_seconds is not None:
                obj_data.expires = time.time() + expires_in_seconds
            elif expires_in_days is not None:
                obj_data.expires = time.time() + (expires_in_days * 60. * 60. * 24.)

            self.index[key] = obj_data.as_dict()

        timestamp(f'Saving object {hsh[:3]}...{hsh[-3:]} to cache.')
        with open(name, 'wb') as pf:
            pickle.dump(obj, pf, protocol=4)
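
A sketch of how the expiry and dependency options above might be used; the key, the results object and the seven-day expiry are illustrative only.

cache = Cache()
cache.save_object('processed-run-42', results,
                  depends_on='../data/.database.db',  # a single path is wrapped into a list by save_object
                  expires_in_days=7)
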
Example #9
def get_from_server(server_addr, *args, timeout=10, **kwargs):
    data = (args, kwargs)
    data_encoded = pickle.dumps(data)

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(timeout)
        s.connect((server_addr, PORT))
        timestamp(f'Querying rheoproc server at {server_addr}:{PORT}')
        s.sendall(data_encoded)

        BUFFLEN = 4096
        size = -1
        while True:
            msg = read_message(s)
            if msg['type'] == 'exception':
                raise Exception(msg['exception'])
            elif msg['type'] == 'status':
                timestamp('remote:', msg['status'])
            elif msg['type'] == 'preamble':
                size = msg['size']
                break

        # scale the byte count into a human-readable unit for the log message
        unit = 'B'
        div = 1
        size_b = int(size)
        if size > 1024:
            size /= 1024
            div *= 1024
            unit = 'kB'
        if size > 1024:
            size /= 1024
            div *= 1024
            unit = 'MB'
        if size > 1024:
            size /= 1024
            div *= 1024
            unit = 'GB'

        timestamp(f'Downloading {size:.1f} {unit}')

        data = bytearray()
        ds = DownloadSpeedo()
        pb = ProgressBar(size_b + 1, info_func=ds.info)
        i = 0
        while part := s.recv(BUFFLEN):
            data.extend(part)
            i += 1
            if i > 1000:
                i = 0
                npos = len(data)
                if npos != pb.pos:
                    pb.update(npos)
        pb.update(pb.length)
Example #10
def query_db(query, *args, database='../data/.database.db', server=None, returns='data', **kwargs):

    args = (query, *args)
    kwargs['database'] = database
    kwargs['returns'] = returns

    if not query.startswith("SELECT * FROM"):
        raise QueryError(f'SQL query to database must be in the form "SELECT * FROM <TABLE> [WHERE ...];"\n Troublesome query: {query}')

    global table
    table = query.replace(';', '').split(' ')[3]

    if table not in ACCEPTED_TABLES:
        raise QueryError(f'SQL queries can only be used to access data-containing tables in the database: \n Troublesome table: {table}')

    cache_key = f'QUERY: {query}, KWARGS: {kwargs}'
    cache = Cache()
    obj = cache.load_object(cache_key)
    if obj is not None:
        try:
            n = len(obj) # very unlikely, but may raise TypeError if obj is not Iterable.
            timestamp(f'Loaded {n} logs from cache.')
        except TypeError:
            timestamp(f'Loaded {type(obj)} from cache.')
        if returns == 'cache_path':
            return cache.get_path_in_cache_of(cache_key)
        else:
            return obj

    processed_results = get_from_local(*args, **kwargs) if not server else get_from_server(server, *args, **kwargs)

    timestamp('Caching')
    depends_on = [log.path for log in processed_results]
    depends_on.append(database)
    cache.save_object(cache_key, processed_results, depends_on)

    if returns == 'data':
        return processed_results
    elif returns == 'cache_path':
        return cache.get_path_in_cache_of(cache_key)
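
Usage sketch for query_db; the table name and server address are placeholders, and only tables listed in ACCEPTED_TABLES are accepted.

logs = query_db('SELECT * FROM LOGS WHERE DATE > 20200101;', server='10.0.0.5')
cache_path = query_db('SELECT * FROM LOGS;', returns='cache_path')
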
Example #11
 def status(self, status_msg):
     timestamp(status_msg)
     self.send_message(m_type='status', status=status_msg)
Example #12
 def __init__(self):
     self.running = False
     timestamp('Procserver started')
     self.conn = None
     self.addr = None
Example #13
    types = list({GuessLogType(row, table) for row in results})

    rv = list()

    pb = ProgressBar(len(results))

    processed_results = dict()
    order = [r['ID'] for r in results]

    processes = get_n_processes()
    if '--max-proc' in sys.argv:
        max_processes = int(sys.argv[sys.argv.index('--max-proc')+1])
        processes = min(processes, max_processes)  # only cap when --max-proc is given; it is undefined otherwise

    timestamp(f'processing {len(results)} logs over {processes} processes.')

    data_dir = '/'.join(database.split('/')[:-1])

    if processes > 1:
        mp.set_start_method('fork', True)
        m = mp.Manager()
        q = m.Queue()
        printer_thread = threading.Thread(target=printer, args=(q,pb), daemon=True)
        printer_thread.start()
    else:
        q = None
    list_of_args_kwargs = [(q, (dict(res), data_dir), kwargs) for res in results]
    if processes == 1:
        warning('Only using one core: this could take a while.')
        for a_kw in list_of_args_kwargs:
Example #14
            size /= 1024
            div *= 1024
            unit = 'GB'

        timestamp(f'Downloading {size:.1f} {unit}')

        data = bytearray()
        ds = DownloadSpeedo()
        pb = ProgressBar(size_b + 1, info_func=ds.info)
        i = 0
        while part := s.recv(BUFFLEN):
            data.extend(part)
            i += 1
            if i > 1000:
                i = 0
                npos = len(data)
                if npos != pb.pos:
                    pb.update(npos)
        pb.update(pb.length)

    try:
        timestamp('Decompressing data')
        data = decompress(data)
    except Exception as e:
        timestamp(f'Error while decompressing: {e}')
    data = pickle.loads(data)
    if isinstance(data, str):
        raise Exception(data)
    return data
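
query_db hands its positional and keyword arguments straight to get_from_server, so a direct call looks like a local query with the server address prepended; the address and table name below are placeholders.

logs = get_from_server('10.0.0.5', 'SELECT * FROM LOGS;', database='../data/.database.db', returns='data')
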
Example #15
 def __init__(self, *args, **kwargs):
     self.check_paths()
     self.index = CacheIndex(*args, **kwargs)
     self.clean()
     timestamp(f'Cache at \'{self.path}\' initialised.')
Example #16
import atexit


def closing_message():
    from rheoproc.error import timestamp
    timestamp('done!')


atexit.register(closing_message)

del atexit

del args_check
del version_check
del modules_check
del sys

import rheoproc.plot as plot
import rheoproc.query as query
import rheoproc.nansafemath as nansafemath
import rheoproc.util as util
import rheoproc.fft as fft

from rheoproc.plot import plot_init, get_plot_name, MultiPagePlot, pyplot
from rheoproc.data import get_data
from rheoproc.query import get_log, get_logs, get_group, query_db
from rheoproc.error import timestamp, warning
from rheoproc.version import version

timestamp(f'rheoproc {version} initialised')
Example #17
def closing_message():
    from rheoproc.error import timestamp
    timestamp('done!')
Example #18
 def timestamp(self, *args, **kwargs):
     if not self.quiet:
         timestamp(f'[{self.ID}]', *args, **kwargs)