Example #1
def load():
    """Standard load sequence."""
    global vmdmake, data, sns, calc, tempdir
    plotname = 'protein_rmsd'
    try:
        import vmdmake
    except ImportError:
        vmdmake_spot = os.path.join('calcs', 'vmdmake')
        if os.path.isdir(vmdmake_spot):
            raise Exception('could not import vmdmake but %s exists' %
                            vmdmake_spot)
        else:
            bash('git clone http://github.com/bradleyrp/amx-vmd vmdmake',
                 cwd='calcs')
    if 'data' not in globals(): data, calc = plotload(plotname)
    drop_dn = 'vmdmake_videos'
    #---store the snapshots in the post_plot_spot according to the tag
    tempdir = os.path.join(work.paths['post_plot_spot'], drop_dn)
    if not os.path.isdir(tempdir): os.mkdir(tempdir)
    status(
        'snapshots are dropping to %s (delete them if you want to re-make them)'
        % tempdir,
        tag='note')
    do_video, do_smooth = True, True
    sns = work.sns()
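
Note on the `bash` helper: every example on this page shells out through a `bash` function (some snippets import it via `from config import bash`), but its implementation never appears here. The following is a minimal sketch inferred from the call sites — a command string, optional `cwd` and `log` arguments, and a `catch` mode that returns a dict with 'stdout' (see the `conda info --json` call in a later example). All internals are assumptions, not the factory's actual code.

import subprocess

def bash(command, log=None, cwd=None, catch=False):
    """Sketch of the bash helper used throughout these examples (inferred from usage)."""
    kwargs = dict(cwd=cwd, shell=True, executable='/bin/bash')
    if catch or log:
        proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, **kwargs)
        stdout, _ = proc.communicate()
        if log:
            # logs are written relative to the caller, not the subprocess cwd
            with open(log, 'w') as fp: fp.write(stdout.decode())
    else:
        proc = subprocess.Popen(command, **kwargs)
        proc.communicate()
        stdout = b''
    if proc.returncode != 0:
        raise Exception('bash call failed: %s' % command)
    if catch: return {'stdout': stdout.decode()}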
Example #2
    def setup_virtualenv(self, sandbox=False):
        """
		Create a virtualenvironment.
		"""
        def virtualenv_fail(name, extra=None):
            """
			When virtualenv requires a system package we tell users that anaconda may be an option.
			"""
            message = 'failed to create a virtual environment: missing %s. ' % name
            if extra: message += extra
            raise Exception(message)

        if is_terminal_command('virtualenv'): virtualenv_fail('virtualenv')
        #---! automatically start redis? prevent dump.rdb?
        #---preliminary checks
        #---! default redis conf?
        #---! this needs to be somewhere else ??? the whole point of this is to avoid that.
        if False:
            if is_terminal_command('redis-cli'):
                virtualenv_fail(
                    'redis-cli',
                    extra='if you have redis installed, ' +
                    'you can run "sudo /usr/sbin/redis-server --daemonize yes". ' +
                    'if your redis is local (via e.g. anaconda) you can omit "sudo"')
        #---you can sandbox or not
        venv_opts = "--no-site-packages " if sandbox else "--system-site-packages "
        #---note that virtualenv can re-run on the env folder without issue
        bash('virtualenv %senv' % venv_opts, log='logs/log-virtualenv')
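
The `is_terminal_command` check is likewise external to these snippets. Judging from how a truthy result triggers `virtualenv_fail`, it appears to return a failure code when the command is missing (mirroring the return code of `which`); a sketch under that assumption:

import shutil

def is_terminal_command(name):
    """Sketch: falsy (0) when the command exists on the PATH, truthy (1) when missing."""
    # shutil.which requires python 3; the original likely shelled out to `which`
    return 0 if shutil.which(name) else 1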
Example #3
    def __init__(self):
        """
        Create a factory environment from instructions in the config, and set up or refresh if necessary.
        """
        #---always check that docker is running
        #---! can we also get some useful information from this check?
        bash('docker ps')
Example #4
    def setup_anaconda_refresh(self):
        """
		Refresh the virtualenvironment.
		"""
        if self.use_python2:
            #! note that anaconda may have deprecated use of env/envs/py2/bin/activate
            self.loader_commands['env_activate'] = 'env/bin/activate py2'
            self.source_cmd = 'source env/bin/activate py2'
        #---we consult a conda YAML file and a PIP text list to install packages
        #---default values are built into the class above but they can be overridden
        config = read_config()
        reqs_conda = config.get('reqs_conda', self.reqs_conda)
        reqs_pip = config.get('reqs_pip', self.reqs_pip)
        if not isinstance(reqs_conda, list): reqs_conda = [reqs_conda]
        if not isinstance(reqs_pip, list): reqs_pip = [reqs_pip]
        #---install from the conda requirements list followed by pip (for packages not available on conda)
        for fn in reqs_conda:
            print('[STATUS] installing packages via conda from %s' % fn)
            #---we tell conda to ignore local user site-packages because version errors
            bash(self.source_cmd + ' && conda env update --file %s' % fn,
                 log='logs/log-anaconda-conda-%s' % os.path.basename(fn))
        for fn in reqs_pip:
            print('[STATUS] installing packages via pip from %s' % fn)
            bash(self.source_cmd + ' && pip install -r %s' % fn,
                 log='logs/log-anaconda-pip-%s' % os.path.basename(fn))
Example #5
def prepare():
    """
	Run once to prepare for videos.
	"""
    global vmdmake, tempdir
    try:
        import vmdmake
    except ImportError:
        vmdmake_spot = os.path.join('calcs', 'vmdmake')
        if os.path.isdir(vmdmake_spot):
            raise Exception('could not import vmdmake but %s exists' %
                            vmdmake_spot)
        else:
            bash('git clone http://github.com/bradleyrp/amx-vmd vmdmake',
                 cwd='calcs')
    if 'data' not in globals():
        global data, calc
        data, calc = plotload(plotname)
    drop_dn = 'vmdmake_videos'
    #---store the snapshots in the post_plot_spot according to the tag
    tempdir = os.path.join(work.paths['post_plot_spot'], drop_dn)
    if not os.path.isdir(tempdir): os.mkdir(tempdir)
    status(
        'snapshots are dropping to temporary folders in %s. it is your responsibility to delete them.'
        % tempdir +
        ' after the video is created we will not remake it unless you delete it.',
        tag='note')
    do_video, do_smooth = True, True
Example #6
def show_running_factories():
	"""
	Show all factory processes.
	Note that jupyter notebooks cannot be easily killed manually so use the full path and try `pkill -f`.
	Otherwise this function can help you clean up redundant factories by username.
	"""
	cmd = 'ps xao pid,user,command | egrep "([c]luster|[m]anage.py|[m]od_wsgi-express|[j]upyter)"'
	try: bash(cmd)
	except Exception: print('[NOTE] no processes found using bash command: `%s`'%cmd)
Example #7
def daemon_ender(fn,cleanup=True):
	"""
	Read a lock file and end the job with a particular message
	"""
	try: bash('bash %s'%fn)
	except Exception as e: 
		print('[WARNING] failed to shutdown lock file %s with exception:\n%s'%(fn,e))
	if cleanup: 
		print('[STATUS] daemon successfully shutdown via %s'%fn)
		os.remove(fn)
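
For context, a minimal sketch of how such a lock file might be produced before `daemon_ender` consumes it; the `pid.lock` name and the kill command are illustrative assumptions, not part of the factory source:

import os

def write_lock_sketch(pid, fn='pid.lock'):
    # the lock file is itself a shell script; daemon_ender runs it via `bash <fn>`
    with open(fn, 'w') as fp:
        fp.write('#!/bin/bash\nkill %d\n' % pid)

# later, to stop the daemon and remove the lock file:
# daemon_ender('pid.lock', cleanup=True)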
Example #8
def render_from_json():
    """
	Read video_catalog.json and make videos from it if they do not already exist.
	Video viles are written back to video_catalog.json.
	"""
    #---import settings due to problems with
    with open('video_requests.json') as fp:
        film_cuts = json.loads(fp.read())
    vids = dict([(sn, {}) for sn in sns])
    for cut_name, cut_spec in film_cuts.items():
        #---loop over simulations
        for sn in sns:
            #---settings
            slice_path = calc['extras'][sn]['slice_path']
            gro, xtc = [
                os.path.join(work.postdir, '%s.%s' % (slice_path, j))
                for j in ['gro', 'xtc']
            ]
            tpr = work.raw.get_last(sn, subtype='tpr')
            view = vmdmake.VMDWrap(site=tempdir,
                                   gro=gro,
                                   xtc=xtc,
                                   tpr=tpr,
                                   frames='',
                                   xres=4000,
                                   yres=4000,
                                   **cut_spec.get('kwargs', {}))
            view.do('load_dynamic', 'standard', *cut_spec.get('does', []))
            for sel in cut_spec.get('selections', []):
                view.select(**sel)
            view.do('reset', 'xview')
            view.command('scale by 1.2')
            view.video()
            view.command('animate goto last')
            view.command('')
            view['snapshot_filename'] = 'snap.%s.%s' % (cut_name, sn)
            view.do('snapshot')
            #---no repeats
            #---! currently the video suffix is hard-coded
            vid_fn = os.path.join('vid.%s.%s.%s' % (cut_name, sn, 'mp4'))
            is_complete = os.path.isfile(os.path.join(tempdir, vid_fn))
            if not is_complete:
                view.show(quit=True)
                view.render(name='vid.%s.%s' % (cut_name, sn))
            vid_fn_mp4 = os.path.join(tempdir, vid_fn)
            #---intervene to convert this to webm
            vid_fn = re.sub(r'\.mp4$', '.webm', vid_fn_mp4)
            if not is_complete:
                bash('ffmpeg -i %s -b:v 0 -crf 20 %s' % (vid_fn_mp4, vid_fn))
            #---save the video file and cut details
            #---! should you resave more cut information?
            vids[sn][cut_name] = vid_fn
    #---files go back out to the interactive python
    with open('video_catalog.json', 'w') as fp:
        json.dump(vids, fp)
Example #9
def clone_vmdmake():
    """Clone a copy of VMDMAKE."""
    from config import bash
    vmdmake_dn = 'calcs/codes/vmdmake'
    vmdmake_source = 'https://github.com/bradleyrp/amx-vmd'
    try:
        from codes import vmdmake
    except ImportError:
        if os.path.isdir(vmdmake_dn):
            raise Exception('cannot import vmdmake from codes but %s exists' %
                            vmdmake_dn)
        bash('git clone %s %s' % (vmdmake_source, vmdmake_dn))
        from codes import vmdmake
    globals()['vmdmake'] = vmdmake
Example #10
def clone_calcs(source):
	"""
	Clone a calculations repository.
	"""
	config = read_config()
	if 'calculations_repo' in config and not os.path.isdir('calcs/.git'):
		raise Exception('config has a calculations repo registered but we cannot find calcs/.git')
	elif 'calculations_repo' not in config and os.path.isdir('calcs/.git'):
		raise Exception('found calcs/.git but no calculations_repo in the config')
	elif 'calculations_repo' in config and os.path.isdir('calcs/.git'):
		raise Exception('you already have a calculations repo at calcs')
	#---clone and register
	bash('git clone %s calcs'%source)
	config['calculations_repo'] = source
	write_config(config)
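
The `read_config` and `write_config` helpers are not shown on this page. Example #20 below reads an omnicalc config.py with `eval` and writes it back with `pprint`, so a compatible sketch looks like the following; the `config.py` filename and dict-literal format are assumptions consistent with that usage:

import os
import pprint

def read_config(fn='config.py'):
    """Sketch: the config is a single python dict literal stored in config.py."""
    if not os.path.isfile(fn): raise Exception('cannot find %s' % fn)
    with open(fn) as fp: return eval(fp.read())

def write_config(config, fn='config.py'):
    """Sketch: write the dict back in the style used in Example #20."""
    with open(fn, 'w') as fp:
        fp.write('#!/usr/bin/env python -B\n' + str(pprint.pformat(config, width=110)))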
Example #11
    def setup_virtualenv_refresh(self):
        """
		Refresh the virtualenvironment.
		"""
        for fn in self.reqs:
            print('[STATUS] installing packages via pip from %s' % fn)
            bash(self.source_cmd + ' && pip install -r %s' % fn,
                 log='logs/log-virtualenv-pip-%s' % os.path.basename(fn))
        #---custom handling for required upgrades
        #---! would be useful to make this systematic
        required_upgrades = [
            'Sphinx>=1.4.4', 'numpydoc', 'sphinx-better-theme',
            'beautifulsoup4'
        ]
        #---! here and above we should source the environment first, but this is chicken-egg
        bash(self.source_cmd + ' && pip install -U %s' %
             ' '.join(["'%s'" % i for i in required_upgrades]),
             log='logs/log-virtualenv-pip')
Example #12
    def setup_anaconda(self):
        """
		Set up anaconda.
		"""
        anaconda_location = self.config.get('anaconda_location', None)
        if not anaconda_location:
            raise Exception(
                'download anaconda and run `make set anaconda_location <path>`'
            )
        install_fn = abspath(anaconda_location)
        if not (os.path.isfile(install_fn) or
                (os.path.islink(install_fn) and not os.path.isdir(install_fn))):
            raise Exception(
                'cannot find %s. make sure you have a copy of anaconda there, '
                % install_fn +
                'or run `make set anaconda_location=~/path/to/Miniconda3-latest-<architecture>.sh` '
                'to use a different path.')
        bash('bash %s -b -p %s/env' % (install_fn, os.getcwd()),
             log='logs/log-anaconda-env')
        if self.use_python2:
            #---we have to source the root anaconda here. later the source_cmd will refer to "py2"
            bash(
                ' && '.join([
                    'source env/bin/activate',
                    #---! very annoying anaconda errors so we update here on installation
                    'conda update -y conda',
                    'conda create python=2 -y -n py2'
                ]),
                log='logs/log-anaconda-env-py2')
            #---we use the conda environment handler to avoid using the user site-packages in ~/.local
            env_etc = 'env/envs/py2/etc'
            env_etc_conda = 'env/envs/py2/etc/conda'
            for dn in [env_etc, env_etc_conda]:
                if not os.path.isdir(dn): os.mkdir(dn)
            for dn in ['activate.d', 'deactivate.d']:
                dn_sub = os.path.join(env_etc_conda, dn)
                if not os.path.isdir(dn_sub): os.mkdir(dn_sub)
            with open(os.path.join(env_etc_conda, 'activate.d', 'env_vars.sh'),
                      'w') as fp:
                fp.write('#!/bin/sh\nexport PYTHONNOUSERSITE=True\n')
            with open(
                    os.path.join(env_etc_conda, 'deactivate.d', 'env_vars.sh'),
                    'w') as fp:
                fp.write('#!/bin/sh\nunset PYTHONNOUSERSITE\n')
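
A quick way to confirm the activate hook works, as a sketch (assuming the env/envs/py2 layout created above and the `bash` helper): activate the py2 environment and assert that PYTHONNOUSERSITE is exported.

# hypothetical check: the activate.d hook should export PYTHONNOUSERSITE
bash('source env/bin/activate py2 && '
     'python -c "import os; assert os.environ.get(\'PYTHONNOUSERSITE\')"')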
Example #13
def package_django_module(source,projname):
	"""
	Packages and installs a django module.
	Note that this is necessary for the initial connection, even if you use the development code.
	"""
	dev_dn = os.path.join(source,projname)
	pack_dn = os.path.join('pack',projname)
	if not os.path.isdir(dev_dn): raise Exception('cannot find %s'%dev_dn)
	if os.path.isdir(pack_dn): raise Exception('%s already exists'%pack_dn)
	#---copy the generic python packager
	shutil.copytree('mill/packer',pack_dn)
	#---copy the development code into the same directory
	#---! make this pythonic
	bash('cp -a %s %s'%(dev_dn,os.path.join(pack_dn,'')))
	find_and_replace(os.path.join('pack',projname,'setup.py'),
		('^#---SETTINGS','packname,packages = "%s",["%s"]'%(projname,projname)))
	find_and_replace(os.path.join('pack',projname,'MANIFEST.in'),
		('APPNAME',projname))
	#---prepare the package
	bash('python %s sdist'%os.path.join('pack',projname,'setup.py'))
	#---uninstall the package
	#try: bash('echo -e "y\n" | pip uninstall %s &> logs/log-pip-$projname'%projname)
	#except: pass
	#---install the package
	bash('pip install -U pack/%s/dist/%s-0.1.tar.gz'%(projname,projname),log='logs/log-pip-%s'%projname)
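
The `find_and_replace` helper is also external to this page. A minimal sketch consistent with the calls above — a filename followed by (pattern, replacement) pairs, with the '^#---SETTINGS' usage implying multiline regex anchoring — where the internals are assumptions:

import re

def find_and_replace(fn, *substitutions):
    """Sketch: apply (regex, replacement) pairs to a file in place."""
    with open(fn) as fp: text = fp.read()
    for pattern, replacement in substitutions:
        text = re.sub(pattern, replacement, text, flags=re.M)
    with open(fn, 'w') as fp: fp.write(text)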
Example #14
def confirm_env():
    """
	Make sure we are in the right conda environment.
	Obviously this will not work if you use the virtualenv.
	Factory setup adds activate_env pointer to the environment activator to config.py which is run
	every time you run a factory command. This is a helpful feature for one-off factory make commands because
	then the user doesn't have to worry about environments. It's annoying for development, so we source
	the environment ourselves, `make unset activate_env` and this check makes sure we don't make mistakes.
	"""
    hint = 'try `source env/bin/activate py2` or `make set activate_env="env/bin/activate py2"`'
    try:
        check = bash('conda info --json', catch=True)
    except Exception:
        raise Exception(
            'you are definitely in the wrong environment because I cannot find `conda`. '
            + hint)
    conda_info = json.loads(check['stdout'])
    if os.path.relpath(conda_info['default_prefix'],
                       conda_info['conda_prefix']) != 'envs/py2':
        raise Exception('you are not in the right environment! ' + hint)
Example #15
def setup_virtualenv(sandbox=False):
    """
	MAIN SETUP ROUTINE
	"""
    config = read_config()
    if is_terminal_command('virtualenv'): virtualenv_fail('virtualenv')

    #---! automatically start redis? prevent dump.rdb?

    #---preliminary checks
    #---! default redis conf?
    if is_terminal_command('redis-cli'):
        virtualenv_fail(
            'redis-cli',
            extra='if you have redis installed, ' +
            'you can run "sudo /usr/sbin/redis-server --daemonize yes". ' +
            'if your redis is local (via e.g. anaconda) you can omit "sudo"')

    venv_opts = "--no-site-packages " if sandbox else "--system-site-packages "
    #---note that virtualenv can re-run on the env folder without issue
    start_time = time.time()
    bash('virtualenv %senv' % venv_opts, log='logs/log-virtualenv')
    print('[STATUS] installing packages via pip...')
    bash(
        'source env/bin/activate && pip install -r mill/requirements_virtualenv.txt',
        log='logs/log-virtualenv-pip')
    print('[NOTE] setup took %.1f minutes' %
          ((time.time() - start_time) / 60.))
    required_upgrades = [
        'Sphinx>=1.4.4', 'numpydoc', 'sphinx-better-theme', 'beautifulsoup4'
    ]
    bash('source env/bin/activate && pip install -U %s' %
         ' '.join(["'%s'" % i for i in required_upgrades]),
         log='logs/log-virtualenv-pip')
    #---record success
    config['setup_stamp'] = time.strftime('%Y%m%d%H%M%S')
    write_config(config)
Example #16
def render_from_json(request_fn='video_requests.json',
                     catalog_fn='video_catalog.json'):
    """
	Read video_catalog.json and make videos from it if they do not already exist.
	Video viles are written back to video_catalog.json.
	"""
    sns = work.sns()
    #---import settings due to problems with
    if not os.path.isfile(request_fn):
        raise Exception('cannot find %s' % request_fn)
    with open(request_fn) as fp:
        film_cuts = json.loads(fp.read())
    vids = dict([(sn, {}) for sn in sns])
    for cut_name, cut_spec_loop in film_cuts.items():
        #---loop over simulations
        for sn in sns:
            #---make a copy because we modify some arguments
            cut_spec = copy.deepcopy(cut_spec_loop)
            #---settings
            slice_path = calc['extras'][sn]['slice_path']
            gro, xtc = [
                os.path.join(work.postdir, '%s.%s' % (slice_path, j))
                for j in ['gro', 'xtc']
            ]
            #! legacy plot mode. remove this once omnicalc development branch is completed.
            #! ... it is fairly inefficient to parse the entire source just to get a single TPR
            #! ... so consider using meta to specify it explicitly, or some other method?
            try:
                tpr = work.raw.get_last(sn, subtype='tpr')
            except Exception:
                work.parse_sources()
                tpr = work.source.get_last(sn, subtype='tpr')
            #! limit the number of frames if it is excessive. overridden by step in kwargs in cut_spec
            nframes_max = cut_spec.get('nframes_max', None)
            if nframes_max:
                nframes = (
                    (calc['extras'][sn]['end'] - calc['extras'][sn]['start']) /
                    calc['extras'][sn]['skip'])
                # get the largest integer step size that will keep the number of frames below the max
                step = int(np.ceil(float(nframes) / nframes_max))
                if step < 1: raise Exception('step size must be at least 1')
            else: step = 1
            view = vmdmake.VMDWrap(site=tempdir,
                                   gro=gro,
                                   xtc=xtc,
                                   tpr=tpr,
                                   frames='',
                                   res=cut_spec.pop('resolution',
                                                    (1000, 1000)),
                                   step=step,
                                   **cut_spec.get('kwargs', {}))
            view.do('load_dynamic', 'standard', *cut_spec.get('does', []))
            for sel in cut_spec.get('selections', []):
                #! elaborate color handling here
                color_specific = sel.get('color_specific', None)
                color_specific_this = None
                if type(color_specific) == dict and set(
                        color_specific.keys()) == {'eval'}:
                    color_specific_this = eval(color_specific['eval'])
                    sel['color_specific'] = True
                elif type(color_specific) in str_types:
                    color_specific_this = str(color_specific)
                    sel['color_specific'] = True
                if color_specific_this is not None:
                    view.set_color_cursor(color_specific_this)
                view.select(**sel)
            view.do('reset', 'xview')
            view.command('scale by %s' % cut_spec.get('zoom', 1.2))
            if not cut_spec.get('debug', False): view.video()
            view.command('animate goto last')
            # render in higher detail
            view.command(
                'display resize %d %d' %
                tuple(cut_spec.pop('resolution_snapshot', (4000, 4000))))
            view['snapshot_filename'] = 'snap.%s.%s' % (cut_name, sn)
            view.do('snapshot')
            #---no repeats
            #---! currently the video suffix is hard-coded
            vid_fn = os.path.join('vid.%s.%s.%s' % (cut_name, sn, 'mp4'))
            is_complete = os.path.isfile(os.path.join(tempdir, vid_fn))
            if not is_complete and not cut_spec.get('debug', False):
                view.show(quit=True)
                view.render(name='vid.%s.%s' % (cut_name, sn))
            elif cut_spec.get('debug', False):
                status('debug mode runs view.show to make a snapshot')
                view.show(quit=True)
            else:
                status('video is already complete at %s' %
                       os.path.join(tempdir, vid_fn))
            if not cut_spec.get('debug', False):
                vid_fn_mp4 = os.path.join(tempdir, vid_fn)
                #---intervene to convert this to webm
                vid_fn = re.sub(r'\.mp4$', '.webm', vid_fn_mp4)
                if not is_complete:
                    bash('ffmpeg -y -i %s -b:v 0 -crf 20 %s' %
                         (vid_fn_mp4, vid_fn))
                #---save the video file and cut details
                #---! should you resave more cut information?
                vids[sn][cut_name] = vid_fn
    #---files go back out to the interactive python
    with open(catalog_fn, 'w') as fp:
        json.dump(vids, fp)
Example #17
def plot():
    """Plot from the command line. See the film_cuts below."""
    import json
    from config import bash
    try:
        import vmdmake
    except ImportError:
        #---clone vmdmake codes if they are absent
        vmdmake_spot = 'vmdmake'
        if os.path.isdir(vmdmake_spot):
            raise Exception('could not import vmdmake but %s exists' %
                            vmdmake_spot)
        else:
            bash('git clone http://github.com/bradleyrp/amx-vmd vmdmake')
    #---clone the martini repo for the bonder code
    if not os.path.isdir('amx-martini'):
        bash('git clone http://github.com/bradleyrp/amx-martini')
    #---use a calculation to get the trajectory files, set by the martini_video_interactive entry in plots
    if 'data' not in globals(): data, calc = plotload(plotname)

    #---block: video requests
    drop_dn = 'vmdmake_videos'
    do_smooth = True
    lipid_material = ['goodsell', 'glass1', 'edgyglass', 'diffuse'][-1]
    film_cuts = {
        'bilayer.side': {
            'debug': False,
            'zoom': 1.8,
            'does': ['bonder'],
            'nframes_max': 300,
            'kwargs': {
                'cgbondspath':
                os.path.join(os.getcwd(), 'amx-martini/bin/cg_bonds.tcl')},
            'selections': [{
                'lipids_r%d' % rnum: 'noh and resname %s' % resname,
                'style': 'Licorice 0.3 12.0 12.0',
                'smooth': do_smooth,
                lipid_material: True,
                'color_specific': {
                    'eval': 'colorize(work.meta[sn],resname="%s",'
                        'named=True,overrides={"CHL1":"white"})' % resname}}
                for rnum, resname in enumerate(
                    work.vars['selectors']['resnames_lipid'] + ['CHL1'])] + [
                dict(subject='protein and noh',
                     style='Licorice 0.6 12.0 12.0',
                     smooth=do_smooth,
                     goodsell=True),
                dict(subject_cartoon='protein and noh',
                     style='cartoon',
                     diffuse=True,
                     smooth=do_smooth,
                     goodsell=True,
                     color_specific='black')],
        },
    }

    #---block: make videos
    #---store the snapshots in the post_plot_spot according to the tag
    tempdir = os.path.join(work.paths['post_plot_spot'], drop_dn)
    if not os.path.isdir(tempdir): os.mkdir(tempdir)
    status(
        'snapshots are dropping to %s (delete them if you want to re-make them)'
        % tempdir,
        tag='note')
    sns = work.sns()
    #---! Jupyter has a bug currently forbidding Popen so we have to do this all day
    #---! mod from interactive
    with open('video_requests.json', 'w') as fp:
        json.dump(film_cuts, fp)
    render_from_json()
__doc__ = """
Automatic videomaker adapted for protein demo.
This script and its companion were adapted from the protein version.
"""

#---block: imports
if not is_live or not os.path.basename(os.getcwd())=='calcs':
	raise Exception('you must run this from jupyter in the factory')
import json
from config import bash
try: import vmdmake
except ImportError:
	#---clone vmdmake codes if they are absent
	vmdmake_spot = os.path.join('vmdmake')
	if os.path.isdir(vmdmake_spot): raise Exception('could not import vmdmake but %s exists'%vmdmake_spot)
	else: bash('git clone http://github.com/bradleyrp/amx-vmd vmdmake')
#---clone the martini repo for the bonder code
if not os.path.isdir('amx-martini'):
	bash('git clone http://github.com/bradleyrp/amx-martini')
#---use a calculation to get the trajectory files, set by the martini_video_interactive entry in plots
if 'data' not in globals(): data,calc = plotload(plotname)

#---block: video requests for MARTINI bilayer simulations
drop_dn = 'vmdmake_videos'
do_smooth = True
film_cuts = {
	'bilayer.side':{'does':['bonder'],'kwargs':{'cgbondspath':
		os.path.join(os.getcwd(),'amx-martini/bin/cg_bonds.tcl')},
		'selections':[dict(
			lipids=' or '.join(['resname %s'%i for i in work.vars['selectors']['resnames_lipid']]),
			style='Licorice 1.000000 12.000000 12.000000',smooth=do_smooth,goodsell=True)]},}
Example #19
def prepare_server():
	"""
	Confirm that we are ready to serve.
	"""
	#---mod_wsgi is not available for conda on python 2
	bash('LDLIBS=-lm pip install -U --no-cache-dir mod_wsgi')
Example #20
def connect_single(connection_name,**specs):
	"""
	The big kahuna. Revamped recently.
	"""
	config = read_config()
	#---skip a connection if enabled is false
	if not specs.get('enable',True): return
	mkdir_or_report('data')
	mkdir_or_report('site')
	#---the site is equivalent to a django project
	#---the site draws on either prepackaged apps in the pack folder or the in-development versions in dev
	#---since the site has no additional data except that specified in connect.yaml, we can always remake it
	if os.path.isdir('site/'+connection_name):
		print("[STATUS] removing the site for \"%s\" to remake it"%connection_name)
		shutil.rmtree('site/'+connection_name)
	#---regex PROJECT_NAME to the connection names in the paths sub-dictionary	
	#---note that "PROJECT_NAME" is therefore protected and always refers to the 
	#---...top-level key in connect.yaml
	#---! note that you cannot use PROJECT_NAME in spots currently
	for key,val in specs.items():
		if type(val)==str: specs[key] = re.sub('PROJECT_NAME',connection_name,val)
		elif type(val)==list:
			for ii,i in enumerate(val): val[ii] = re.sub('PROJECT_NAME',connection_name,i)
	#---paths defaults
	specs['plot_spot'] = specs.get('plot_spot',os.path.join('data',connection_name,'plot')) 
	specs['post_spot'] = specs.get('post_spot',os.path.join('data',connection_name,'post')) 
	specs['simulations_spot'] = specs.get('simulations_spot',os.path.join('data',connection_name,'sims'))
	specs['coords_spot'] = specs.get('coords_spot',os.path.join('data',connection_name,'coords'))
	#---intervene here to replace PROJECT_NAME in the string values of each spot
	for spotname,spot_details in specs.get('spots',{}).items():
		for key,val in spot_details.items():
			if type(val) in str_types:
				specs['spots'][spotname][key] = re.sub('PROJECT_NAME',connection_name,val)
			#---we also expand paths for route_to_data
			specs['spots'][spotname]['route_to_data'] = os.path.expanduser(os.path.abspath(
				specs['spots'][spotname]['route_to_data']))

	#---cluster namer is set in a separate file
	cluster_namer = {}
	with open('mill/cluster_spec.py') as fp: exec(fp.read(),cluster_namer) 
	for key in [i for i in cluster_namer if i not in cluster_namer['keepsakes']]: del cluster_namer[key]

	###---DJANGO SETTINGS

	#---first define folders and (possibly) http git repos
	settings_custom = {
		'SIMSPOT':abspath(specs['simulations_spot']),
		#---! hard-coded. get it from config.py??
		'AUTOMACS':'http://github.com/biophyscode/automacs',
		'PLOT':abspath(specs['plot_spot']),
		'POST':abspath(specs['post_spot']),
		'COORDS':abspath(specs['coords_spot']),
		#---omnicalc locations are fixed
		'CALC':abspath(os.path.join('calc',connection_name)),
		'FACTORY':os.getcwd(),
		#---! get this from config.py
		'CLUSTER':'cluster'}
	#---all paths are absolute unless they have a colon in them, in which case it is ssh or http
	#---we attach filesystem separators as well so that e.g. settings.SPOT can be added to relative paths
	settings_custom = dict([(key,os.path.join(os.path.abspath(val),'') if ':' not in val else val)
		for key,val in settings_custom.items()])
	settings_custom['CLUSTER_NAMER'] = cluster_namer
	#---if the user does not supply a gromacs_config.py the default happens
	#---option to specify gromacs config file for automacs
	if 'gromacs_config' in specs: 
		gromacs_config_fn = specs['gromacs_config']
		if not os.path.isfile(gromacs_config_fn):
			raise Exception('cannot find gromacs_config file at %s'%gromacs_config_fn)
		settings_custom['GROMACS_CONFIG'] = os.path.join(os.getcwd(),gromacs_config_fn)
	else: settings_custom['GROMACS_CONFIG'] = False
	#---additional custom settings which are not paths
	#---if there is a public dictionary and we receive the "public" flag from make we serve public site
	if specs.get('public',None):
		site_port = specs['public'].get('port',8000)
		#---the notebook IP for django must be the public hostname, however in the get_public_ports function
		#---...we have an internal notebook_hostname for users who have a router
		if 'hostname' not in specs['public']:
			raise Exception('for public deployment you must add the hostname to the connection')
		#---the hostnames are a list passed to ALLOWED_HOSTS starting with localhost
		if type(specs['public']['hostname']) in str_types: hostnames = [specs['public']['hostname']]
		elif type(specs['public']['hostname'])==list: hostnames = specs['public']['hostname']
		else: raise Exception('cannot parse hostname')
		hostnames.append('localhost')
		settings_custom['extra_allowed_hosts'] = list(set(hostnames))
		#---the first hostname is the primary one
		settings_custom['NOTEBOOK_IP'] = hostnames[0]
		settings_custom['NOTEBOOK_PORT'] = specs['public'].get('notebook_port',site_port+1)
	#---serve locally
	else:
		#---note that notebook ports are always one higher than the site port
		site_port = specs.get('port',8000)
		settings_custom['NOTEBOOK_IP'] = 'localhost'
		settings_custom['NOTEBOOK_PORT'] = specs.get('port_notebook',site_port+1)
		settings_custom['extra_allowed_hosts'] = []
	#---name this project
	settings_custom['NAME'] = connection_name

	###---END DJANGO SETTINGS

	#---make local directories if they are absent or do nothing if the user points to existing data
	root_data_dir = 'data/'+connection_name
	#---always make data/PROJECT_NAME for the default simulation_spot therein
	mkdir_or_report(root_data_dir)
	for key in ['post_spot','plot_spot','simulations_spot']: 
		mkdir_or_report(abspath(specs[key]))
	#---we always include a "sources" folder in the new simulation spot for storing input files
	mkdir_or_report(abspath(specs.get('coords_spot',os.path.join('data',connection_name,'coords'))))

	#---check if database exists and if so, don't make superuser
	make_superuser = not os.path.isfile(specs['database'])

	#---get automacs,omnicalc from a central place if it is empty
	automacs_upstream = specs.get('automacs',config.get('automacs',None))
	msg = 'You can tell the factory where to get omnicalc/automacs by running e.g. '+\
		'`make set automacs=http://github.com/someone/automacs`.' 
	if not automacs_upstream: 
		raise Exception('need automacs in config.py for factory or the connection. '+msg)
	#---! automacs_upstream is not being used?
	settings_custom['AUTOMACS'] = automacs_upstream
	automacs_branch = config.get('automacs_branch',None)
	if automacs_branch != None: settings_custom['AUTOMACS_BRANCH'] = automacs_branch
	omnicalc_upstream = specs.get('omnicalc',config.get('omnicalc',None))
	if not omnicalc_upstream: 
		raise Exception('need omnicalc in config.py for factory or the connection. '+msg)

	#---note that previous version of factory prepended a source command in front of every call
	#---...however the factory handles this for us now
	#---django is accessed via packages imported in settings.py which is why we have to package them
	#---...this saves us from making N copies of the development code

	#---! YOU NEED TO MAKE THE DEVELOPMENT POSSIBLE SOMEWHERE HEREABOUTS

	#---! hard-coding the location of the sources
	django_source = 'interface'
	#---! switching to new development codes...calculator not available yet
	for app in ['simulator','calculator']: 
		if os.path.isdir('pack/%s'%app): shutil.rmtree('pack/%s'%app)
		#---always repackage!
		package_django_module(source=django_source,projname=app)
	
	#---one new django project per connection
	bash('django-admin startproject %s'%connection_name,
		log='logs/log-%s-startproject'%connection_name,cwd='site/')

	#---if the user specifies a database location we override it here
	if specs.get('database',None):
		database_path_change = "\nDATABASES['default']['NAME'] = '%s'"%(
			os.path.abspath(specs['database']))
	else: database_path_change = ''

	#---all settings are handled by appending to the django-generated default
	#---we also add changes to django-default paths
	with open(os.path.join('site',connection_name,connection_name,'settings.py'),'a') as fp:
		fp.write(project_settings_addendum+database_path_change)
		#---only use the development code if the flag is set and we are not running public
		if specs.get('development',True) and not specs.get('public',False):
			fp.write('\n#---use the development copy of the code\n'+
				'import sys;sys.path.insert(0,os.path.join(os.getcwd(),"%s"))'%django_source) 
		#---one more thing: custom settings specify static paths for local or public serve
		#if specs.get('public',None):
		#	fp.write("\nSTATICFILES_DIRS = [os.path.join(BASE_DIR,'static')]")
		#else:
		fp.write("\nSTATICFILES_DIRS = [os.path.join('%s','interface','static')]"%
			os.path.abspath(os.getcwd()))

	#---write custom settings
	#---some settings are literals
	custom_literals = ['CLUSTER_NAMER']
	with open(os.path.join('site',connection_name,connection_name,'custom_settings.py'),'w') as fp:
		#---! proper way to write python constants?
		fp.write('#---custom settings are auto-generated from mill.factory.connect_single\n')
		for key,val in settings_custom.items():
			#---! is there a pythonic way to write a dictionary to a script of immutables
			if ((type(val) in str_types and re.match('^(False|True)$',val)) or key in custom_literals
				or type(val) in [bool,list,tuple]):
				out = '%s = %s\n'%(key,val)
			else: out = '%s = "%s"\n'%(key,val)
			fp.write(out)

	#---development uses live copy of static files in interface/static
	if not specs.get('public',None):
		#---link the static files to the development codes (could use copytree)
		os.symlink(os.path.join(os.getcwd(),django_source,'static'),
			os.path.join('site',connection_name,'static'))
	#---production collects all static files
	else: 
		os.mkdir(os.path.join(os.getcwd(),'site',connection_name,'static_root'))
		bash('python manage.py collectstatic',cwd='site/%s'%connection_name)

	#---write project-level URLs
	with open(os.path.join('site',connection_name,connection_name,'urls.py'),'w') as fp:
		fp.write(project_urls)

	#---clone omnicalc if necessary
	omnicalc_previous = os.path.isdir('calc/%s'%connection_name)
	if not omnicalc_previous:
		bash('git clone %s calc/%s'%(omnicalc_upstream,connection_name),
			 log='logs/log-%s-git-omni'%connection_name)
		#---if this is fresh we run `make setup` because that provides a minimal config.py
		bash('make setup',cwd=specs['calc'])
	else: print('[NOTE] found calc/%s'%connection_name)

	#---initial migration for all new projects to start the database
	#---...!!!!!!!!!!!!!!
	print('[NOTE] migrating ...')
	bash('python site/%s/manage.py makemigrations'%connection_name,
		log='logs/log-%s-migrate'%connection_name)
	bash('python site/%s/manage.py migrate --run-syncdb'%connection_name,
		log='logs/log-%s-migrate'%connection_name)
	print('[NOTE] migrating ... done')
	if make_superuser:
		print("[STATUS] making superuser")
		su_script = "from django.contrib.auth.models import User; "+\
			"User.objects.create_superuser('admin','','admin');print;quit();"
		p = subprocess.Popen('python ./site/%s/manage.py shell'%(connection_name),		
			stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=open(os.devnull,'w'),
			shell=True,executable='/bin/bash')
		catch = p.communicate(input=su_script if sys.version_info<(3,0) else su_script.encode())[0]
	print("[STATUS] new project \"%s\" is stored at ./data/%s"%(connection_name,connection_name))
	print("[STATUS] replace with a symlink if you wish to store the data elsewhere")

	#---now that site is ready we can write credentials
	if specs.get('public',None):
		#---write key,value pairs as Basic Auth user/passwords
		creds = specs['public'].get('credentials',{})
		if creds: 
			with open(os.path.join('site',connection_name,connection_name,'wsgi_auth.py'),'w') as fp:
				fp.write(code_check_passwd%str([(k,v) for k,v in creds.items()]))

	#---set up the calculations directory in omnicalc
	#---check if the repo pointer in the connection is a valid path
	new_calcs_repo = not (os.path.isdir(abspath(specs['repo'])) and (
		os.path.isdir(abspath(specs['repo'])+'/.git') or os.path.isfile(abspath(specs['repo'])+'/HEAD')))
	downstream_git_fn = os.path.join('calc',connection_name,'calcs','.git')
	#---if the repo key gives a web address and we already cloned it, then we do nothing and suggest a pull
	if ':' in specs['repo'] and os.path.isdir(downstream_git_fn):
		print('[NOTE] the calcs repo (%s) appears to be remote and %s exists. '%(
			specs['repo'],downstream_git_fn)+'you should pull the code manually to update it')
	#---check that a calcs repo from the internet exists
	elif new_calcs_repo and re.match('^http',specs['repo']):
		#---see if the repo is a URL. code 200 means it exists
		if sys.version_info<(3,0): from urllib2 import urlopen
		else: from urllib.request import urlopen
		code = urlopen(specs['repo']).code
		if code!=200: raise Exception('repo appears to be http but it does not exist')
		else: bash('make clone_calcs source="%s"'%specs['repo'],cwd=specs['calc'])
	#---check that the repo has a colon in the path, implying a remote ssh connection is necessary
	elif new_calcs_repo and ':' in specs['repo']:
		print('[WARNING] assuming that the calcs repository is on a remote machine: %s'%specs['repo'])
		bash('make clone_calcs source="%s"'%specs['repo'],cwd=specs['calc'])
	#---if the calcs repo exists locally, we just clone it
	elif not new_calcs_repo and os.path.isdir(downstream_git_fn): 
		print('[NOTE] git appears to exist at %s already and connection does not specify '%
			os.path.join(abspath(specs['repo']),'.git')+
			'an upstream calcs repo so we are continuing without action')
	elif not new_calcs_repo and not os.path.isfile(downstream_git_fn): 
		bash('make clone_calcs source="%s"'%specs['repo'],cwd=specs['calc'])
	#---make a fresh calcs repo because the meta file points to nowhere
	else:
		os.mkdir(specs['repo'])
		bash('git init',cwd=specs['repo'])
		#---after making a blank repo we put a placeholder in the config
		bash('make set calculations_repo="no_upstream"',cwd=specs['calc'])
		#---also generate a blank metadata so that the interface works
		bash('make blank_meta make_template=False',cwd=specs['calc'])
		msg = ('When connecting to project %s, the "repo" flag in your connection file points to nowhere. '
			'We made a blank git repository at %s. You should develop your calculations there, push that '
			'repo somewhere safe, and distribute it to all your friends, who can use the "repo" flag to '
			'point to it when they start their factories.')
		print('\n'.join(['[NOTE] %s'%i for i in textwrap.wrap(
			msg%(connection_name,specs['repo']),width=60)]))

	#---pass a list of meta_filters through (can be list of strings which are paths or globs)
	calc_meta_filters = specs.get('calc_meta_filters',None)
	if calc_meta_filters:
		bash('make unset meta_filter',cwd=specs['calc'])
		for filt in calc_meta_filters:
			#---note that meta_filter is turned into a list in config.py in omnicalc
			bash('make set meta_filter="%s"'%filt,cwd=specs['calc'])

	#---configure omnicalc 
	#---note that make set commands can change the configuration without a problem
	bash('make set post_data_spot=%s'%settings_custom['POST'],cwd=specs['calc'])
	bash('make set post_plot_spot=%s'%settings_custom['PLOT'],cwd=specs['calc'])
	#---! needs to interpret simulation_spot, add spots functionality
	#---! previously ran register_calculation.py here -- may be worth recapping in this version?
	#---! prepare vhost file here when it's ready
	#---??? IS THIS IT ???
	#---write spots to config
	if 'spots' in specs:
		config_fn = os.path.join(specs['calc'],'config.py')
		with open(config_fn) as fp: config_omni = eval(fp.read())
		config_omni['spots'] = specs['spots']
		import pprint
		#---write the config
		with open(config_fn,'w') as fp: 
			fp.write('#!/usr/bin/env python -B\n'+str(pprint.pformat(config_omni,width=110)))
	#---add the environment to omnicalc. this allows the publicly-served omnicalc to find the environment 
	#---...when root is running it. it also means users do not have to remember to source the environment 
	#---...when they are doing calculations "manually" from their project's omnicalc folder. note that there
	#---...is a slowdown if you are used to always sourcing the environment yourself, but you could just as 
	#---...easily remove the flag from the config.py to recover the original behavior
	if 'activate_env' in config:
		env_path = "%s %s"%(os.path.join(os.path.abspath(config['activate_env'].split()[0])),
			config['activate_env'].split()[1])
		bash('make set activate_env="%s"'%env_path,cwd=specs['calc'])
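
To make the expected input concrete, here is a minimal, hypothetical connection spec covering only keys that `connect_single` reads above; every value is illustrative rather than taken from a real connect.yaml:

# hypothetical connection spec; keys mirror those consumed by connect_single
specs = dict(
    enable=True,
    port=8000,                     # local serve; the notebook port defaults to port+1
    database='data/demo/db.factory.sqlite3',
    calc='calc/demo',              # omnicalc location used as cwd for several bash calls
    repo='http://github.com/someone/demo-calcs',  # calcs repo: local path, http, or ssh
    development=True,              # use the live interface code instead of packaged apps
)
connect_single('demo', **specs)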
Example #21
        mvecs[:, fr],
        nprots_list,
        handle=handle,
        outdir=tmpdir,
        fi=fi,
        fn=handle + '.fr.%04d' % fi,
        pbc_expand=1.0,
        smooth=1.,
        panelspecs=panelspecs,
        square=square_layout,
        extrema=extrema,
        fs=fs,
        titles=titles,
        cmap_mpl_name=cmap_mpl_name,
    ) for fi, fr in enumerate(frameset))

# print the film. you could add codec options here. the filter slows down the movie.
ffmpegcmd = ['ffmpeg','-i',tmpdir+'/'+handle+'.fr.%04d.png','-b:v','0','-crf','20']+\
 (['-filter:v','setpts=2.0*PTS'] if False else [])+[outdir+'/'+handle+'.mp4']
print(' '.join(ffmpegcmd))
# ffmpeg is very particular
status('calling ffmpeg via: %s' % ' '.join(ffmpegcmd), tag='bash')
#! subprocess dislikes the list of arguments here for some reason
try:
    bash(' '.join(ffmpegcmd), cwd=tmpdir)
except Exception as e:
    raise Exception(
        'failed with exception %s.' % e +
        '\nyou may need to adjust the ffmpeg call and render manually.')
status('video rendered to ' + outdir + '/' + handle + '.mp4')
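
Since the comment above notes that subprocess dislikes the argument list, here is a hedged sketch of calling ffmpeg through subprocess with an argv list instead of a joined shell string; it assumes every element of ffmpegcmd is a single clean token (an element with a stray leading space becomes part of a filename, which is one way such calls fail mysteriously):

import subprocess

# sketch: run the same command as an argv list, capturing stderr for diagnostics
proc = subprocess.Popen(ffmpegcmd, cwd=tmpdir,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
    raise Exception('ffmpeg failed:\n%s' % err.decode())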
Example #22
                                   fs=fs,
                                   titles=titles,
                                   cmap_mpl_name=cmap_mpl_name)
    sys.exit(1)
joblib.Parallel(n_jobs=ppn, verbose=10)(joblib.delayed(
    print_birdseye_snapshot_render, joblib.pool.has_shareable_memory)(
        surfs[:, fr],
        protein_pts[fr],
        mvecs[:, fr],
        nprots_list,
        handle=handle,
        outdir=tmpdir,
        fi=fi,
        fn=handle + '.fr.%04d' % fi,
        pbc_expand=1.0,
        smooth=1.,
        panelspecs=panelspecs,
        extrema=extrema,
        fs=fs,
        titles=titles,
        cmap_mpl_name=cmap_mpl_name,
    ) for fi, fr in enumerate(frameset))

#---print the film
ffmpegcmd = ['ffmpeg','-i',tmpdir+'/'+handle+'.fr.%04d.png','-vcodec','mpeg2video','-q:v','0']+\
 (['-filter:v','setpts=2.0*PTS'] if False else [])+[outdir+'/'+handle+'.mpeg']
print(' '.join(ffmpegcmd))
#---! bash expects a command string, so join the argument list before calling
bash(' '.join(ffmpegcmd), cwd=work.paths['post_plot_spot'])
status('video rendered to ' + outdir + '/' + handle + '.mpeg')
                nmol = len(m2i)
                #---note that depending on the PBC links we get a variable number of
                points_inside = np.array([
                    lipid_mesh['%d.%d.points' % (top_mono, fr)][:nmol]
                    for fr in range(nframes)
                ])
                windows = np.array([
                    np.arange(j, j + smooth_window)
                    for j in np.arange(0, nframes - smooth_window)
                ])
                points_inside_smooth = np.array(
                    [points_inside[w].mean(axis=0) for w in windows])
            #---render in parallel
            basic_compute_loop(
                compute_function=render_hydrogen_bonding_pattern,
                looper=[
                    dict(fr=fr, frameno=frameno)
                    for frameno, fr in enumerate(valid_frames)
                ],
                run_parallel=True,
                debug=False)
            #---render when complete
            try:
                # https://superuser.com/questions/1005315/interpolation-with-ffmpeg
                cmd = 'ffmpeg -i "snap.%05d.v1.png" ' + 'mov.hydrogen_bonding_pattern.%s' % sn + '.mp4'
                bash(cmd, cwd=out_dn)
            except Exception:
                status('failed to render the video. try "%s" in %s' %
                       (cmd, out_dn))
        del lipid_mesh
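
`basic_compute_loop` is not defined on this page either. A minimal sketch consistent with how it is called above — a compute function plus a looper of kwargs dicts — using joblib as the surrounding examples already do; the internals are assumptions:

import joblib

def basic_compute_loop(compute_function, looper, run_parallel=True, debug=False):
    """Sketch: map compute_function over a list of kwargs dicts, optionally in parallel."""
    if debug:
        # run a single item serially so failures are easy to inspect
        return [compute_function(**looper[0])]
    if run_parallel:
        return joblib.Parallel(n_jobs=-1, verbose=10)(
            joblib.delayed(compute_function)(**kwargs) for kwargs in looper)
    return [compute_function(**kwargs) for kwargs in looper]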