Example 1
 def _gen(self):
     mylist = []
     mylen = 0
     while True:
         if not self.dirty and self.refresh is REFRESH.EVERY_LOOP:
             self.dirty = self.dirty_check(mylist)
         if self.dirty:
             # breakpoint()
             cn = mylist[0].__class__.__name__ if mylist else ""
             info(f"refreshing dirty {cn} generator")
             mylist = self.snapshot_list()
             mylen = len(mylist)
             if mylen == 0: err('source empty')
             self.dirty = False
         i = -1
         if self.shuffle is SHUFFLE.EVERY_LOOP: random.shuffle(mylist)
         while i < (mylen - 1):
             if not self.dirty and self.refresh is REFRESH.EVERY_ITER:
                 self.dirty = self.dirty_check(mylist)
                 # will also force a shuffle even if SHUFFLE.EVERY_LOOP.
                 # therefore, a source that's constantly dirty when REFRESH is EVERY_ITER
                 # always de facto causes SHUFFLE.EVERY_ITER
                 if self.dirty: break
             if self.shuffle is SHUFFLE.EVERY_ITER: random.shuffle(mylist)
             if mylen == 0: return  # should never happen; PEP 479 turns raise StopIteration inside a generator into RuntimeError
             i += 1
             yield mylist[i]
             if self.dirty: break  # might be set externally
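
The generator above leans on mlib internals (snapshot_list, dirty_check, the REFRESH and SHUFFLE enums). A minimal self-contained sketch of the same dirty-aware reshuffling loop, with hypothetical stand-ins for those pieces:

import random
from typing import Iterator, List

class CyclingSource:
    """Cycles forever over a snapshot of `items`, reshuffling each pass
    and re-snapshotting whenever `dirty` is set."""

    def __init__(self, items: List[str]):
        self.items = items
        self.dirty = True  # force an initial snapshot

    def _gen(self) -> Iterator[str]:
        snapshot: List[str] = []
        while True:
            if self.dirty:
                snapshot = list(self.items)  # stands in for snapshot_list()
                self.dirty = False
                if not snapshot:
                    return  # `return`, never `raise StopIteration`, ends a generator (PEP 479)
            random.shuffle(snapshot)  # SHUFFLE.EVERY_LOOP behaviour
            for item in snapshot:
                yield item
                if self.dirty:
                    break  # set externally mid-pass: restart with a fresh snapshot

src = CyclingSource(['a', 'b', 'c'])
gen = src._gen()
print([next(gen) for _ in range(5)])  # cycles through reshuffled passes of a, b, c
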
Example 2
File: obj.py Project: mgroth0/mlib
    def __init_subclass__(cls, **kwargs):
        from mlib.boot.crunch import SUPER_RUNNING

        import inspect
        from mlib.boot.mlog import info
        info(f'__init_subclass__: {cls}')

        # need to hack ShellProject because I guess it currently has no abstract methods? and isn't currently being detected as abstract
        if cls.AUTO_RUN and not inspect.isabstract(
                cls
        ) and cls.__name__ != 'ShellProject':  # cls not in cls._ALREADY_SUPER_RUNNING
            # cls._ALREADY_SUPER_RUNNING.append(cls)
            SUPER_RUNNING.start()
            cls()._super_run()
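
SUPER_RUNNING and _super_run are mlib-specific; the underlying pattern, auto-running every concrete subclass at definition time, can be sketched on its own (AutoRun and Hello are hypothetical names):

import inspect
from abc import ABC, abstractmethod

class AutoRun(ABC):
    AUTO_RUN = True

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # only concrete subclasses run; recent CPython special-cases
        # inspect.isabstract so it works even here, before ABCMeta has
        # finished populating __abstractmethods__
        if cls.AUTO_RUN and not inspect.isabstract(cls):
            cls()._super_run()

    @abstractmethod
    def _super_run(self): ...

class Hello(AutoRun):
    def _super_run(self):
        print(f'{type(self).__name__} auto-ran')

# the class statement above already printed: Hello auto-ran
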
Example 3
File: err.py Project: mgroth0/mlib
def pub_print_warn():
    from mlib.boot.mlog import warnings, log, LOG_LEVEL, LogLevel, info
    import mlib.boot.mlog
    if not mlib.boot.mlog.QUIET and LOG_LEVEL.value >= LogLevel.WARN.value: log(f'{len(warnings)=}')
    if len(warnings) > 0:
        log('WARNINGS:')

        warning_map = {}
        for w in warnings:
            if w in warning_map:
                warning_map[w] += 1
            else:
                warning_map[w] = 1
        # listitems is an mlib helper; it behaves like list(warning_map.items())
        for k, v in listitems(warning_map):
            log(f'\t{k} ({v} occurrences)')
    else:
        info('NO WARNINGS!')
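
The manual warning_map loop is a hand-rolled collections.Counter; a self-contained sketch of the same report, with print standing in for mlib's log/info:

from collections import Counter

def print_warning_report(warnings):
    if not warnings:
        print('NO WARNINGS!')
        return
    print('WARNINGS:')
    # Counter deduplicates while counting, exactly like the warning_map loop
    for w, n in Counter(warnings).items():
        print(f'\t{w} ({n} occurrences)')

print_warning_report(['disk almost full', 'slow response', 'disk almost full'])
# WARNINGS:
#     disk almost full (2 occurrences)
#     slow response (1 occurrences)
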
Example 4
 def makeAllPlots(cls,
                  figDats,
                  overwrite,
                  force=False,
                  debug=False,
                  silent=True):
     will_maybe_do_more = []
     for fd in figDats:
         viss = fd.dataFile.loado(silent=True).fixInfs().viss
         makes = [v for v in viss if force or v.make]
         if makes:
             info('showing...')
             if not silent:
                 progress(f'exporting {fd.dataFile} ...')
             will_maybe_do_more += [
                 cls.export_fd(makes, fd, overwrite, debug=debug)
             ]
     return will_maybe_do_more
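
figDats, viss, and export_fd are mlib objects; the method's shape, filter each figure's visualisations to those flagged for (re)making and collect the deferred export handles, reduces to this sketch (plain dicts as stand-ins):

def collect_exports(fig_dats, force=False):
    deferred = []
    for fd in fig_dats:
        makes = [v for v in fd['viss'] if force or v['make']]
        if makes:
            deferred.append((fd['name'], [v['id'] for v in makes]))
    return deferred

fig_dats = [
    {'name': 'fig1', 'viss': [{'id': 'a', 'make': True}, {'id': 'b', 'make': False}]},
    {'name': 'fig2', 'viss': [{'id': 'c', 'make': False}]},
]
print(collect_exports(fig_dats))              # [('fig1', ['a'])]
print(collect_exports(fig_dats, force=True))  # every vis in every figure
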
Example 5
def bad_tatome_code():
    info('starting theirs')
    for small in ['', '_small']:
        main(
            obj({
                # 'inputFile': '_figs/salience_filter/input.png',
                # 'intensityOutput': '_figs/salience_filter/tatome/intensity.png',
                'inputFile': f'_figs/salience_filter/input{small}.png',
                'intensityOutput':
                f'_figs/salience_filter/tatome/intensity{small}.png',
                'fileList': None,
                'gaborOutput':
                f'_figs/salience_filter/tatome/gabor{small}.png',
                'rgOutput': f'_figs/salience_filter/tatome/rg{small}.png',
                'byOutput': f'_figs/salience_filter/tatome/by{small}.png',
                'cOutput': f'_figs/salience_filter/tatome/c{small}.png',
                'saliencyOutput':
                f'_figs/salience_filter/tatome/saliency{small}.png',
                'markMaxima': None
            }))
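
Every output path above follows the same f-string pattern, so the config could be derived from the channel names instead of spelled out; a sketch (build_config is illustrative, not part of the original API):

def build_config(small=''):
    base = '_figs/salience_filter'
    channels = ['intensity', 'gabor', 'rg', 'by', 'c', 'saliency']
    cfg = {f'{name}Output': f'{base}/tatome/{name}{small}.png' for name in channels}
    cfg.update({
        'inputFile': f'{base}/input{small}.png',
        'fileList': None,
        'markMaxima': None,
    })
    return cfg

for small in ['', '_small']:
    print(build_config(small)['saliencyOutput'])
# _figs/salience_filter/tatome/saliency.png
# _figs/salience_filter/tatome/saliency_small.png
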
Example 6
File: host.py Project: mgroth0/mlib
 def ssh(self, command=None, logfile_read=None) -> SSHExpectProcess:
     p = self.ssh_login(logfile_read=logfile_read)
     # print('finished login')
     info('setting prompt')
     p.setprompt()
     # breakpoint()
     info('cding to home')
     p.sendatprompt(f'cd {self.home}')
     if command is not None:
         info(f'sending command: "{command}" at prompt...')
         p.sendatprompt(command)
         info('sent command')
     return p
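
SSHExpectProcess, setprompt, and sendatprompt are mlib wrappers, most likely over pexpect; the equivalent login / set-prompt / send-at-prompt dance with stock pexpect.pxssh looks roughly like this (host and user are placeholders):

from pexpect import pxssh

s = pxssh.pxssh()
s.login('server.example.com', 'someuser')  # assumes key-based auth; pass password=... otherwise
s.sendline('cd ~')  # analogous to sendatprompt(f'cd {self.home}')
s.prompt()          # wait for the unique prompt pxssh set at login
s.sendline('ls -la')
s.prompt()
print(s.before.decode())  # everything the command printed
s.logout()
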
Example 7
USE_IMPORT_LOG_HOOK = False

from pathlib import Path
from time import time
Path('data').mkdir(exist_ok=True)
with open('data/_tic.txt', 'w') as f:
    f.write(str(int(time())))
import mlib.boot.mlog
if USE_IMPORT_LOG_HOOK:
    mlib.boot.mlog.register_import_log_hook()
# import numpy
# numpy.seterr(all='raise')

import mlib.err
mlib.err.register_exception_and_warning_handlers()

from mlib.boot.mlog import log, info


__all__ = ['log']  # __all__ entries must be attribute names (strings), not the objects themselves

info('finished booting')
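
The _tic.txt written at the top of this boot module records the import-time epoch second, which makes a later elapsed-time check trivial; a companion sketch:

from pathlib import Path
from time import time

def seconds_since_boot(tic_file='data/_tic.txt'):
    # _tic.txt holds the integer epoch second written at import time above
    return int(time()) - int(Path(tic_file).read_text())

print(f'booted {seconds_since_boot()}s ago')
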
Example 8
File: host.py Project: mgroth0/mlib
    def run(self,
            python_file_name,
            SW: Optional,  # effectively Optional[OpenMindBashScript]; re-annotated below once imported
            srun: Optional[SRun],
            EXTRA_LOGGING=False,
            use_singularity=False,
            non_singularity_run_args=(),
            rsync_also=()):
        p: Optional[SSHExpectProcess] = None  # assigned by the login thread below

        def login():
            nonlocal p
            with PipelineSection("logging in to open mind", log=True):
                p = self.ssh()

        login_thread = Thread(target=login)
        login_thread.start()

        @run_in_thread
        def status_sync():
            login_thread.join()
            status_p = self.ssh()
            status_p.sendatprompt('mkdir status')
            status_p.sendatprompt('cd status')
            LOCAL_STATUS_FOLDER = File(
                "/Users/matt/Desktop/registered/todo/flow/status")
            LOCAL_FREECOMP_STATUS_FOLDER = LOCAL_STATUS_FOLDER['freecomp']
            while p.alive():
                sleep(1)
                the_ls = status_p.return_from_line("ls")
                now = str(time())
                if False:  # DEBUG/DEV
                    formatls = now + '\n' + the_ls
                    LOCAL_FREECOMP_STATUS_FOLDER['ls.status'].write(formatls)
                LOCAL_FREECOMP_STATUS_FOLDER['ls.status'].deleteIfExists(
                    silent=True)
                for f in (ff.strip() for ff in the_ls.split('\n')):
                    thecat = status_p.return_from_line(f"cat {f}")
                    LOCAL_FREECOMP_STATUS_FOLDER[f].write(now + thecat)

        USE_RSYNC = True
        if USE_RSYNC:  # NOT OPTIONAL NOW BC PORTAL FILE

            def rsync_fun():
                with PipelineSection("running rsync", log=True):
                    from mlib.open_mind import OM_MOUNT
                    # '-progress' # causes error?
                    rsync1 = [
                        'rsync',
                        '-vru',

                        # '-progress',

                        # https://galaxysd.github.io/20160302/Fastest-Way-Rsync
                        # supposedly speeds things up? ... Wow yes, from 10 to 8 seconds! Might be a coincidence but I'm happy!
                        '-e',
                        'ssh -T -o Compression=no -x',
                        # sshd_config requires special setup

                        # this is necessary, or else python files I delete locally won't be deleted on OM
                        '--delete',
                        # of course this will mean I will need a separate cache directory or some other exclusion option. That's ok.
                        '''--filter=:- .gitignore''',
                        '--exclude=.git/',
                        '--exclude=*.pyc',
                        '.',
                        '../mlib/mlib',
                        '../mlib/mdb.py'
                    ] + list(rsync_also) + [
                        f'[email protected]:{OM_MOUNT}/user/mjgroth/dnn'
                    ]
                    # rsync2 = ['rsync', '-vru', '''--filter=:- .gitignore''', '--include=mlib/', '../mlib',
                    #           '[email protected]:/om5/user/mjgroth/mlib']

                    rsync_p = SSHExpectProcess(rsync1)
                    rsync_p.login(longpass=True)

                    # rsync_p.interact()
                    rsync_p.all_output()

                    # rsync_p = SSHExpectProcess(rsync2)
                    # rsync_p.login(longpass=True)
                    # rsync_p.interact()

            rsync_thread = Thread(target=rsync_fun)
            rsync_thread.start()
        else:
            err('nope. not ready for this with new parallel system')
            from _dnn_lib import git_push_both
            with PipelineSection("git push"):
                git_push_both()
            from mlib.open_mind import OpenMindProject
            self: OpenMindProject
            self.git_pull_dnn_and_mlib(p)
        from mlib.open_mind import OpenMindBashScript  # should really make a super class
        from mlib.open_mind import OpenMindProject
        SW: Optional[OpenMindBashScript]
        # p = self.ssh()
        login_thread.join()
        if EXTRA_LOGGING:
            p.log_to_stdout()
        if SW is not None:
            for m in SW.modules:  # load required environment modules before running
                p.sendatprompt(f'module load {m}')
        if self.host.must_use_slurm():
            assert srun is not None
        if srun is not None:
            with section(f"requesting slurm session: {srun}"):
                import mlib.err
                stopper = p.log_to_stdout(
                    # fun=kill_on_finish_str,
                    # just_fun=True,
                    o=p,
                    print_not_log=True,
                    stop_on=[
                        ContainerBashScript.FINISH_STR, mlib.err.ERROR_EXIT_STR
                    ])
                # log(f"requesting slurm session: {srun}")
                p.sendatprompt(srun.command())
                index = p.expect([
                    'srun: error',
                    # 'has been allocated'  # doesn't always happen?
                    "tasks started",  # need -v flag
                    "has been allocated resources"
                ])
                stopper.stop()
                if index == 0:
                    err('SRUN ERROR')
                progress('slurm allowed job to start')  # logpie
                p.setprompt()

        rsync_thread.join()

        if isinstance(self, OpenMindProject):
            if OpenMindProject.RUN_IN_VAGRANT:
                self._take_into_vagrant(p)
        info('bashing')
        # p.sendatprompt('cd ')

        with section("OM: running python script"):
            if use_singularity:
                p.sendatprompt(
                    f'bash {SW.name}'
                    # f'./dnn ' + SW.
                )  # why was I using sudo??? ohhhh I might have been using sudo in order to have write access to files? yes!! I was using sudo because that is the only way files are writable
            else:
                # PYTHONPATH=../mlib
                # p.p.logfile_send = None
                progress('waiting for prompt to send py command')
                p.prompt()
                progress('got prompt, sending py command')
                p.sendline(
                    f'if MPLBACKEND=Agg /om2/user/mjgroth/miniconda3/envs/dnn39/bin/python ./{python_file_name} '
                    + ' '.join(non_singularity_run_args) +
                    '; then echo \"OK\"; else exit; fi')
                # p.interact()
                progress('python command sent')
            self.finish_process(p, SW)
        import mlib.boot.crunch
        mlib.boot.crunch.take_om_logs(self)
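
The rsync invocation above can be exercised outside the SSH wrapper; a minimal subprocess sketch with the same flags (the destination is a placeholder, since the original user@host was redacted by the source page):

import subprocess

def fast_rsync(src='.', dest='user@host:/remote/dnn', extra=()):
    cmd = [
        'rsync', '-vru',
        # plain ssh transport with compression off: the tweak the comment
        # above credits with cutting sync time from ~10s to ~8s
        '-e', 'ssh -T -o Compression=no -x',
        '--delete',                # mirror local deletions on the remote
        '--filter=:- .gitignore',  # respect .gitignore per directory
        '--exclude=.git/',
        '--exclude=*.pyc',
        src, *extra, dest,
    ]
    return subprocess.run(cmd, check=True)

# fast_rsync(extra=('../mlib/mlib', '../mlib/mdb.py'))
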
Example 9
 def __init__(self, *command, silent=False):
     self.command_as_str = AbstractShell.command_str(*command)
     self.command_as_list = AbstractShell.command_list(*command)
     if not silent:
         info(f'$: {self.command_as_str}')
     self.p = self._start()
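
AbstractShell.command_str / command_list and _start are mlib internals; a self-contained analogue of this constructor, assuming _start wraps subprocess:

import shlex
import subprocess

class Shell:
    def __init__(self, *command, silent=False):
        # accept either Shell('ls', '-la') or Shell('ls -la')
        parts = shlex.split(command[0]) if len(command) == 1 else [str(c) for c in command]
        self.command_as_list = parts
        self.command_as_str = shlex.join(parts)  # shlex.join needs Python 3.8+
        if not silent:
            print(f'$: {self.command_as_str}')
        self.p = subprocess.Popen(self.command_as_list, stdout=subprocess.PIPE)

sh = Shell('echo', 'hello')
print(sh.p.communicate()[0].decode())  # hello
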