Example #1
def make_random_fast():
    ''' Link /dev/random to /dev/urandom.

        See
            Myths about /dev/urandom
                http://www.2uo.de/myths-about-urandom/
            Hacker news discussion where cperciva does not disagree with "Myths about /dev/urandom"
                https://news.ycombinator.com/item?id=10149019

        The risk of using urandom is that it can be deterministic.
        If you know the seed, you may know all of urandom's output forever.

        That is why we prefer to add entropy at runtime, using e.g. haveged.
        But does haveged affect urandom? The urandom seed may be set just
        once and saved, or set at boot before haveged runs.
    '''
    
    # delete /dev/random
    if os.path.exists('/dev/random'):
        if os.path.isfile('/dev/random') or os.path.islink('/dev/random'):
            os.remove('/dev/random')
        else:
            try:
                sh.umount('/dev/random')
            except:
                sh.rm('--force', '--recursive', '/dev/random')

    # "rngd -r /dev/urandom" should only be used during testing, if ever
    # if not is_program_running('rngd'):
    #     sh.rngd('-r', '/dev/urandom')

    sh.ln('--symbolic', '/dev/urandom', '/dev/random')
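
The rm-then-ln sequence above leaves a brief window in which /dev/random does not exist at all. For comparison, here is a minimal stdlib-only sketch (the function name and the rename-based approach are mine, not the original author's) that swaps the link in atomically:

import os
import tempfile

def make_random_fast_atomic(dev_dir='/dev'):
    ''' Sketch: point /dev/random at /dev/urandom with no gap.

        Build the symlink under a temporary name in the same directory,
        then rename it over /dev/random; rename(2) replaces the old
        entry atomically on POSIX.
    '''
    fd, tmp_name = tempfile.mkstemp(dir=dev_dir)
    os.close(fd)
    os.remove(tmp_name)  # keep only the reserved name
    os.symlink(os.path.join(dev_dir, 'urandom'), tmp_name)
    os.replace(tmp_name, os.path.join(dev_dir, 'random'))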
Example #2
    def tree_construction(self,root = None, sccs = False):
        threads = 16 
        print "build a tree"
        if os.path.exists(self.base + "RAxML/" ):
            sh.rm("-r", self.base + "RAxML/")
        os.makedirs(self.base + "RAxML/")

        if self.seq_type == "proteins" :
            model = "PROTGAMMALG"
        else:
            model = "GTRGAMMA"

        alignment = self.base + "_scc_cat_align.fasta" if sccs else self.base + "_cat_align.fasta"
        
        sh.raxmlHPC_PTHREADS_AVX("-w", self.base + "RAxML/", "-T", threads-2, "-m", model, "-p", self.seed, "-#", 20, "-s", alignment, "-n", "T13", "-o", root) 
        print "boostrap dat tree"
        sh.raxmlHPC_PTHREADS_AVX("-w", self.base + "RAxML/", "-T", threads-2, "-m", model, "-p", self.seed, "-b", self.seed, "-#", 100, "-s", alignment, "-n", "T14", "-o", root)
        print "combine"
        sh.raxmlHPC_AVX("-m", "GTRCAT", "-w", self.base + "RAxML/", "-p", self.seed, "-f", "b", "-t", self.base + "RAxML/"+"RAxML_bestTree.T13", "-z",self.base + "RAxML/"+ "RAxML_bootstrap.T14", "-n", "T15", "-o", root)
        print "clean up"
        if os.path.exists(self.base + "_branches_labeled.tree"):
            os.remove(self.base + "_branches_labeled.tree")
            os.remove(self.base + "_nodes_labeled.tree")
        sh.ln("-s",  self.base + "RAxML/RAxML_bipartitionsBranchLabels.T15", self.base +"_branches_labeled.tree")
        sh.ln("-s",  self.base + "RAxML/RAxML_bipartitions.T15", self.scg_tree)
Example #3
    def run_test_suite(self):
        print 'Working in ' + self.temp_dir
        print suite_started(self.suite)
        self.clean_failure_files()

        results = self.run(run_individual_test, self.test_files, self.temp_dir,
                           self.do_pdf, self.cache)
        failures = [
            file_name for (has_failed, file_name) in results if has_failed
        ]

        if not failures:
            self.clean_temp_files()
            print green("All tests passed.")
        else:
            if IS_TEAMCITY:
                print "Creating symbolic link to artefact working directory."
                sh.ln('-sf', self.temp_dir, 'working')
            print "%d test(s) failed." % len(failures)
            print "To run the failed tests again:"
            cmdline = "%s%s%s" % (sys.argv[0], (self.do_pdf and ' --pdf '
                                                or ' '), shell_join(failures))
            print cmdline
            print >> open('/tmp/rerun-failed-typesetr-test', 'w'), cmdline

        print suite_finished(self.suite)
        return len(failures)
Example #4
    def run_test_suite(self):
        print 'Working in ' + self.temp_dir
        print suite_started(self.suite)
        self.clean_failure_files()

        results = self.run(run_individual_test, self.test_files,
                           self.temp_dir, self.do_pdf, self.cache)
        failures = [file_name for (has_failed, file_name) in results
                    if has_failed]

        if not failures:
            self.clean_temp_files()
            print green("All tests passed.")
        else:
            if IS_TEAMCITY:
                print "Creating symbolic link to artefact working directory."
                sh.ln('-sf', self.temp_dir, 'working')
            print "%d test(s) failed." % len(failures)
            print "To run the failed tests again:"
            cmdline = "%s%s%s" % (sys.argv[0],
                                  (self.do_pdf and ' --pdf ' or ' '),
                                  shell_join(failures))
            print cmdline
            print >> open('/tmp/rerun-failed-typesetr-test', 'w'), cmdline

        print suite_finished(self.suite)
        return len(failures)
Example #5
 def import_map(self, path, mode='symlink'):
     """
     Populate the maps dir with a SAM or BAM file.
     TODO: other modes might be: copy, move
     """
     mkdir('-p', "%s/maps" % self.sample.path)
     if mode == 'symlink':
         ln('-s', path, self.sam_path)
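
The TODO in the docstring hints at copy and move modes; a sketch of how they might look, assuming shutil and the same self.sample.path / self.sam_path attributes used in the snippet:

import shutil
from sh import mkdir, ln

def import_map(self, path, mode='symlink'):
    """Populate the maps dir with a SAM or BAM file."""
    mkdir('-p', "%s/maps" % self.sample.path)
    if mode == 'symlink':
        ln('-s', path, self.sam_path)
    elif mode == 'copy':
        shutil.copy2(path, self.sam_path)  # hypothetical 'copy' mode
    elif mode == 'move':
        shutil.move(path, self.sam_path)   # hypothetical 'move' mode
    else:
        raise ValueError("unknown mode: %r" % mode)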
Example #6
 def build_arch(self, arch):
     env = self.get_recipe_env(arch)
     with current_directory(self.get_build_dir(arch.arch)):
         # Remove source in this pypi package
         sh.rm('-rf', 'leveldb', 'leveldb.egg-info', 'snappy')
         # Use source from leveldb recipe
         sh.ln('-s', self.get_recipe('leveldb', self.ctx).get_build_dir(arch.arch), 'leveldb')
     # Build and install python bindings
     super(PyLevelDBRecipe, self).build_arch(arch)
Example #7
def secure_link(from_path, to_path):
    """
    Safely recreate the symlink, removing to_path first if it already exists.
    """
    try:
        sh.unlink(to_path)
    except sh.ErrorReturnCode_1:
        pass
    sh.ln("-s", from_path, to_path)
Example #8
	def install_tool(self):
		results = sh.unzip("-o", DOWNLOAD, "-d", TOOL_HOME)
		parts = results.split('\n')
		for part in parts:
			if part.find("inflating") > -1:
				path = self._return_path_bit(part.strip().split(" ")[1])
				break
		sh.rm("-f", AMI_HOME)
		sh.ln("-s", TOOL_HOME + "/" + path, AMI_HOME)
Example #9
def create_emu_env(run_dir, bin_file):
    c.safe_mkdir(run_dir)
    template_files = os.listdir(template_dir)
    for f in template_files:
        if os.path.isfile(pjoin(template_dir, f)):
            sh.ln('-sf', pjoin(template_dir, f), run_dir)
    text_file = pjoin(run_dir, 'bin.txt')
    if not c.is_older(bin_file, text_file):
        c.hex_dump(bin_file, text_file)
Example #10
 def _darwin_create_dir_structure(self):
     mkdirp = mkdir.bake("-p")
     app_path = os.path.join(self._basedir, "Bitmask.app")
     mkdirp(app_path)
     mkdirp(os.path.join(app_path, "Contents", "MacOS"))
     mkdirp(os.path.join(app_path, "Contents", "Resources"))
     mkdirp(os.path.join(app_path, "Contents", "PlugIns"))
     mkdirp(os.path.join(app_path, "Contents", "StartupItems"))
     ln("-s", "/Applications", os.path.join(self._basedir, "Applications"))
Example #11
	def install_tool(self):
		sh.rm("-rf", S3CMD_HOME)
		sh.rm("-f", TOOL_HOME + "/bin/s3cmd")
		sh.mkdir("-p", S3CMD_HOME)
		sh.virtualenv(S3CMD_HOME)
		sh.mkdir("-p", TOOL_HOME + "/bin")
		pip = sh.Command(S3CMD_HOME + "/bin/pip")
		pip("install", "s3cmd")
		sh.ln("-s", S3CMD_HOME + "/bin/s3cmd", TOOL_HOME + "/bin/s3cmd")
Example #13
File: aws.py Project: Knewton/k.aws
	def install_tool(self):
		sh.rm("-rf", AWS_CLI_HOME)
		sh.rm("-f", TOOL_HOME + "/bin/aws")
		sh.mkdir("-p", AWS_CLI_HOME)
		sh.virtualenv(AWS_CLI_HOME)
		sh.mkdir("-p", TOOL_HOME + "/bin")
		pip = sh.Command(AWS_CLI_HOME + "/bin/pip")
		pip("install", "awscli")
		sh.ln("-s", AWS_CLI_HOME + "/bin/aws", TOOL_HOME + "/bin/aws")
Example #14
def ln_all_files(all_files, base_directory):
    for each_file in all_files:
        gcmtid = basename(each_file).split(".")[0]
        to_path = join(base_directory, gcmtid, "SEM", "adjoint.h5")
        # in the second iteration, the already existing adjoint.h5 will prevent linking
        try:
            sh.unlink(to_path)
        except sh.ErrorReturnCode_1:
            pass
        sh.ln("-s", each_file, to_path)
Example #15
def build(cwd, site_dir):

    cfg = config.load_config()

    # sanity check - the version dirs exist as named
    for version in cfg['extra']['versions']:
        if not 'separate' in version or not version['separate']:
            d = os.path.join('versions', version['dir'])
            print('Verifying dir %s' % (d))
            if not os.path.isdir(d):
                print("The directory %s does not exist" % (d))
                return

    # sanity check - dependent_repos exist in '..'
    for repo in dependent_repos:
        d = os.path.join(cwd, '..', repo)
        print('Verifying repo dependency in %s' % (d))
        if not os.path.isdir(d):
            print("The directory %s does not exist" % (d))
            return

    # sanity check - only one latest
    latest = False
    for version in cfg['extra']['versions']:
        if not latest and 'latest' in version and version['latest']:
            print('Latest is %s' % (version['dir']))
            latest = True
        elif latest and 'latest' in version and version['latest']:
            print('ERROR: More than one version is latest.')
            print('Only one version can be latest: True.')
            print('Check mkdocs.yml.')
            return

    print("Building site pages")
    sh.rm('-rf', site_dir)
    sh.mkdocs('build', '--clean', '--site-dir', site_dir)

    for version in cfg['extra']['versions']:
        print("Building doc pages for: %s" % (version['dir']))
        if not 'separate' in version or not version['separate']:
            sh.mkdocs('build',
                      '--site-dir',
                      os.path.join(site_dir, version['dir']),
                      _cwd=os.path.join("versions", version['dir']))
        else:
            repo_dir = os.path.join(cwd, '..', 'mynewt-documentation')
            if version['dir'] != 'master':
                repo_dir = os.path.join(repo_dir, 'versions', version['dir'],
                                        'mynewt-documentation')
            sh.make('clean', _cwd=repo_dir)
            sh.make('docs', _cwd=repo_dir)
            sh.mv(os.path.join(repo_dir, '_build', 'html'),
                  os.path.join(site_dir, version['dir']))
        if 'latest' in version and version['latest']:
            sh.ln('-s', version['dir'], 'latest', _cwd=site_dir)
Example #16
def fetch_all_external_docs_from_file(filename):
    with open(filename) as f:
        external_docs = [parse_external_doc_line(l) for l in f]
    for name, repository, doc_directory in external_docs:
        tmpdir = osp.join('/tmp', name)
        print('Fetching %s...' % name)
        fetch_external_doc(repository, tmpdir)
        src_dir = osp.join('src', name)
        sh.rm('-f', src_dir)
        print('Linking %s...' % name)
        sh.ln('-s', osp.join(tmpdir, doc_directory), src_dir)
Example #17
 def build_arch(self, arch):
     env = self.get_recipe_env(arch)
     with current_directory(self.get_build_dir(arch.arch)):
         # Remove source in this pypi package
         sh.rm("-rf", "./leveldb", "./leveldb.egg-info", "./snappy")
         # Use source from leveldb recipe
         sh.ln("-s", self.get_recipe("leveldb", self.ctx).get_build_dir(arch.arch), "leveldb")
         # Build python bindings
         hostpython = sh.Command(self.hostpython_location)
         shprint(hostpython, "setup.py", "build", _env=env)
     # Install python bindings
     super(PyLevelDBRecipe, self).build_arch(arch)
Example #18
File: repo.py Project: thesps/ipbb
def symlink(env, path):

    lRepoName = basename(path)
    lRepoLocalPath = join(env.work.path, kSourceDir, lRepoName)

    if exists(lRepoLocalPath):
        raise click.ClickException('Repository already exists \'%s\'' %
                                   lRepoLocalPath)

    echo('Adding symlink ' + style(path, fg='blue') + ' as ' +
         style(lRepoName, fg='blue'))

    sh.ln('-s', abspath(path), lRepoLocalPath)
Example #19
def test_clone_replace_symlink_by_file(git_dir, hg_repo):
    sh.cd(hg_repo)
    sh.ln("-s", "test_file", "link_or_file")
    sh.hg.add("link_or_file")
    sh.hg.commit(message="b")
    sh.hg.rm("link_or_file")
    make_hg_commit("c", filename="link_or_file")

    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(git_repo)

    assert p('link_or_file').isfile()
    assert not p('link_or_file').islink()
Example #20
    def start(self):
        sh.rm('-rf', wpath('bin/tp'), **SHARG)
        sh.ln('-s', wpath('bin/command.py'), wpath('bin/tp'), **SHARG)
        rows, columns = os.popen('stty size', 'r').read().split()
        terminal_width = int(columns)
        for module in [
                'zsh', 'vim', 'git', 'ssh', 'tmux', 'python', 'clang',
                'dircolors'
        ]:
            print('{0:-^{1}}'.format(' {} environment '.format(module).upper(),
                                     terminal_width))
            getattr(self, '_{}_environment'.format(module))()

        sh.zsh(hpath('.zshrc'), **SHARG)
Example #21
def main(base_directory, output_directory):
    """
    ln subdirectories that have no synthetic.h5 to a new base directory.
    """
    all_dirs = sorted(glob(join(base_directory, "*")))
    all_syncs = sorted(
        glob(join(base_directory, "*", "OUTPUT_FILES", "synthetic.h5")))
    all_dirs_with_sync = ["/".join(item.split("/")[:-2]) for item in all_syncs]
    all_dirs_no_syncs = sorted(set(all_dirs) - set(all_dirs_with_sync))
    for each_dir in all_dirs_no_syncs:
        thebasename = basename(each_dir)
        from_path = each_dir
        to_path = join(output_directory, thebasename)
        sh.ln("-s", from_path, to_path)
Example #22
def test_clone_replace_dir_by_symlink(git_dir, hg_repo):
    sh.cd(hg_repo)
    sh.mkdir("dir_or_link")
    make_hg_commit("b", filename="dir_or_link/test_file")
    sh.hg.rm("dir_or_link/test_file")
    sh.ln("-s", "test_file", "dir_or_link")
    sh.hg.add("dir_or_link")
    sh.hg.commit(message="c")

    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(git_repo)

    assert p('dir_or_link').isfile()
    assert p('dir_or_link').islink()
Example #23
    def create_link(self, log_link, log_file):
        if log_link == log_file:
            return
        if not (log_link and log_file):
            return

        if os.path.lexists(log_link):
            try:
                sh.rm(log_link)
            except:
                return
        try:
            sh.ln('-s', log_file, log_link)
        except:
            return
Example #25
 def fixssh(self):
     import pwd
     import glob
     # find /tmp -path '/tmp/ssh-*' -name 'agent*' -user ${USER}
     for path in glob.glob('/tmp/ssh-*/agent*', recursive=True):
         if os.stat(path).st_uid == pwd.getpwnam(self._default_user).pw_uid:
             link_file = os.path.expanduser('~/.ssh/ssh_auth_sock')
             # rm -rf ~/.ssh/ssh_auth_sock
             sh.rm('-rf', link_file, _fg=True)
             # ln -s ${ssh_agent_file} ~/.ssh/ssh_auth_sock
             sh.ln('-s', path, link_file, _fg=True)
             # ssh-add -l
             sh.Command('ssh-add')('-l', _fg=True)
             return
     print('ssh agent tmp file not found, please log in again.')
Example #26
def fli(env, dev, ipbuspkg):
    """
    Build the Modelsim-ipbus foreign language interface
    """

    # -------------------------------------------------------------------------
    # Must be in a build area
    if env.project is None:
        raise click.ClickException(
            'Project area not defined. Move into a project area and try again')

    if env.projectConfig['toolset'] != 'sim':
        raise click.ClickException(
            "Work area toolset mismatch. Expected 'sim', found '%s'" %
            env.projectConfig['toolset'])
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    if not which('vsim'):
        raise click.ClickException(
            "ModelSim is not available. Have you sourced the environment script?"
        )
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    if ipbuspkg not in env.getSources():
        raise click.ClickException(
            "Package %s not found in source/. The FLI cannot be built." %
            ipbuspkg)
    # -------------------------------------------------------------------------

    # Set ModelSim root based on vsim's path
    os.environ['MODELSIM_ROOT'] = (dirname(dirname(which('vsim'))))
    # Apply set
    # os.environ['MTI_VCO_MODE']='64'

    lFliSrc = join(env.src, ipbuspkg, 'components', 'ipbus_eth', 'firmware',
                   'sim', 'modelsim_fli')

    import sh
    # Clean-up
    sh.rm('-rf', 'modelsim_fli', 'mac_fli.so', _out=sys.stdout)
    # Copy
    sh.cp('-a', lFliSrc, './', _out=sys.stdout)
    # Make
    sh.make('-C', 'modelsim_fli', 'TAP_DEV={0}'.format(dev), _out=sys.stdout)
    # Link
    sh.ln('-s', 'modelsim_fli/mac_fli.so', '.', _out=sys.stdout)
Example #27
def grade(uniqname, link):
    print("Grading {}".format(uniqname))
    with pushd("373-f15-linked-list"):
        wget(link, "-O", "{}.c".format(uniqname))
        rm("-f", "list.c", "list.o", "list")
        ln("-s", "{}.c".format(uniqname), "list.c")
        make("run")
        try:
            diff("list.out", "golden.out")
            perfect_grade(uniqname)
        except sh.ErrorReturnCode_1:
            try:
                diff("list.out", "naive.out")
                no_change(uniqname)
            except sh.ErrorReturnCode_1:
                handgrade(uniqname)
Example #28
 def restore_attachments(self, zipfile, docker=False):
     unzip = sh.unzip.bake('-x', '-qq', '-n')
     restore_folder = os.path.join(self.data_dir,
                                   'filestore',
                                   self.target_db)
     sh.mkdir('-p', restore_folder)
     # unzip will place files in <datadir>/filestore/<dbname>/filestore,
     # we create a symlink to <datadir>/filestore/<dbname> so they wind up
     # in the right spot
     restore_folder_faulty = os.path.join(restore_folder, 'filestore')
     sh.ln('-s', restore_folder, restore_folder_faulty)
     unzip(zipfile, 'filestore/*', '-d', restore_folder)
     # cleanup the symlink
     sh.rm(restore_folder_faulty)
     # When running in docker mode, change permissions
     if docker:
         sh.chown('-R', '999:999', self.data_dir)
Example #29
 def test_correct_relative_paths(self, tmpdir):
     # - make a tempdir
     # - add two directories A and B
     # - add a file to A
     # - make a link to A/the_file from in B using python
     # - make a link to A/the_file from in B using relative paths in sh
     # - test that the the two links have the same representation
     with sh.pushd(tmpdir):
         sh.mkdir("A")
         sh.mkdir("B")
         target = "A/some_file"
         sh.touch(target)
         link1 = "B/link1"
         link2 = "B/link2"
         add_relative_symlink(target, link1)
         sh.ln("--symbolic", "--relative", target, link2)
         assert os.readlink(link1) == os.readlink(link2)
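
add_relative_symlink itself is not shown in the snippet; a plausible pure-Python equivalent (an assumption on my part) computes the same relative path that ln --symbolic --relative stores:

import os

def add_relative_symlink(target, link_name):
    # express target relative to the directory that will hold the link
    link_dir = os.path.dirname(os.path.abspath(link_name))
    rel_target = os.path.relpath(os.path.abspath(target), start=link_dir)
    os.symlink(rel_target, link_name)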
Example #30
def build(site_dir):
    # make sure there are no local mods outstanding
    repo = Repo(os.getcwd())
    if repo.is_dirty() or repo.untracked_files:
        print "ERROR: Your working directory has outstanding changes."
        print "Commit or stash them."
        return

    cfg = config.load_config()

    # sanity check that the version dirs exist as named
    for version in cfg['extra']['versions']:
        d = os.path.join('versions', version['dir'])
        print 'Verifying dir %s' % (d)
        if not os.path.isdir(d):
            print "The directory %s does not exist" % (d)
            return

    # sanity check - only one latest
    latest = False
    for version in cfg['extra']['versions']:
        if not latest and 'latest' in version and version['latest']:
            print 'Latest is %s' % (version['dir'])
            latest = True
        elif latest and 'latest' in version and version['latest']:
            print 'ERROR: More than one version is latest.'
            print 'Only one version can be latest: True.'
            print 'Check mkdocs.yml.'
            return

    print "Building site pages"
    sh.rm('-rf', site_dir)
    sh.mkdocs('build', '--clean', '--site-dir', site_dir)

    for version in cfg['extra']['versions']:
        print "Building doc pages for: %s" % (version['dir'])
        sh.mkdocs('build',
                  '--site-dir',
                  os.path.join(site_dir, version['dir']),
                  _cwd=os.path.join("versions", version['dir']))
        if 'latest' in version and version['latest']:
            sh.ln('-s', version['dir'], 'latest', _cwd=site_dir)

    # Make sure old links still work
    sh.ln('-s', 'master', 'develop', _cwd=site_dir)
Example #31
def test_sym_link(git_dir, hg_repo):
    sh.cd(hg_repo)
    write_to_test_file('b')
    sh.hg.add('test_file')
    sh.hg.commit(message="b")
    sh.ln('-s', 'test_file', 'linked')
    sh.hg.add('linked')
    sh.hg.commit(message="c")
    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(git_repo)
    assert p('linked').islink()
    sh.ln('-s', 'test_file', 'second_link')
    sh.git.add('second_link')
    sh.git.commit(message="d")
    sh.git.push()
    sh.cd(hg_repo)
    sh.hg.update()
    assert p('second_link').islink()
Example #32
def build(site_dir):
    # make sure there are no local mods outstanding
    repo = Repo(os.getcwd())
    if repo.is_dirty() or repo.untracked_files:
        print "ERROR: Your working directory has outstanding changes."
        print "Commit or stash them."
        return

    cfg = config.load_config()

    # sanity check that the version dirs exist as named
    for version in cfg['extra']['versions']:
        d = os.path.join('versions', version['dir'])
        print 'Verifying dir %s' % (d)
        if not os.path.isdir(d):
            print "The directory %s does not exist" % (d)
            return

    # sanity check - only one latest
    latest = False
    for version in cfg['extra']['versions']:
        if not latest and 'latest' in version and version['latest']:
            print 'Latest is %s' % (version['dir'])
            latest = True
        elif latest and 'latest' in version and version['latest']:
            print 'ERROR: More than one version is latest.'
            print 'Only one version can be latest: True.'
            print 'Check mkdocs.yml.'
            return

    print "Building site pages"
    sh.rm('-rf', site_dir)
    sh.mkdocs('build', '--clean', '--site-dir', site_dir)

    for version in cfg['extra']['versions']:
        print "Building doc pages for: %s" % (version['dir'])
        sh.mkdocs('build', '--site-dir', os.path.join(site_dir, version['dir']), _cwd = os.path.join("versions", version['dir']))

    # Make sure old links still work
    sh.ln('-s', 'latest', 'develop', _cwd = site_dir)
    sh.ln('-s', 'latest', 'master', _cwd = site_dir)
Example #33
        def build(self):

            self.change_to_parent_dir()
            self.init_virtualenv()
            print('created {}'.format(self.virtualenv_dir()))

            if self.virtualenv_dir_exists():
                os.chdir(self.VIRTUAL_SUBDIR)
                if IS_PY2:
                    self.link_supervisord_files()
                    print('linked supervisord')

                # activate the virtualenv
                with venv(dirname=self.virtualenv_dir()):
                    print('configuring virtualenv')

                    os.chdir('lib')
                    if IS_PY2:
                        sh.ln('-s', 'python2.7', 'python')
                    else:
                        sh.ln('-s', 'python3.4', 'python')

                    print('   installing requirements')
                    with open(self.get_requirements()) as f:
                        for line in f.readlines():
                            if len(line.strip()) > 0:
                                print('     {}'.format(line.strip()))
                                if IS_PY2:
                                    sh.pip('install', line.strip())
                                else:
                                    sh.pip3('install', line.strip())

                    print('   linking our packages')
                    os.chdir('python/site-packages')
                    self.link_packages()

                self.finish_build()

                print('Finished creating virtualenv')
            else:
                print('!!!Error: Unable to create virtualenv')
Example #34
def swap_image(filepath):
    assert exists(filepath), "File does not exist."

    new_filepath = join(tempdir, "camimg_new.png")
    old_filepath = join(tempdir, "camimg_old.png")
    link_filepath = join(tempdir, "camimg.png")
    
    convert(filepath, "-thumbnail", "x480", "-gravity", "center",
            "-crop", "640x480+0+0!",
            "-background", "black", "-flatten",
            new_filepath,
    )
    cp( 
        link_filepath,
        old_filepath,
        f=True,
    )
    ln( new_filepath,
        link_filepath,
        s=True, 
        f=True,
    )
Example #35
    def _install_android_packages(self):

        # if any of these values change in buildozer.spec, redo the
        # update
        cache_key = 'android:sdk_installation'
        cache_value = [
            self.android_api, self.android_minapi, self.android_ndk_version,
            self.android_sdk_dir, self.android_ndk_dir
        ]
        if self.buildozer.state.get(cache_key, None) == cache_value:
            return True

        # 3 pass installation.
        if not os.access(self.android_cmd, os.X_OK):
            self.buildozer.cmd('chmod ug+x {}'.format(self.android_cmd))

        # 1. update the tool and platform-tools if needed
        packages = self._android_list_sdk()
        skip_upd = self.buildozer.config.getdefault('app',
                                                    'android.skip_update', False)
        if 'tools' in packages or 'platform-tools' in packages:
            if not skip_upd:
                if WSL:
                    # WSL (Windows Subsystem for Linux) allows running
                    # Linux from Windows 10, but some Windows limitations
                    # still apply: you can't rename a directory that a
                    # program was started from, which is exactly what the
                    # tools update does, leaving an empty dir behind. The
                    # workaround is to move the tools dir aside, symlink
                    # it back into place (the updater still looks for and
                    # renames 'tools'), and run the updater from the
                    # moved copy.
                    sh.mv(
                        join(self.android_sdk_dir, 'tools'),
                        join(self.android_sdk_dir, 'tools.save')
                    )
                    sh.ln(
                        '-s',
                        join(self.android_sdk_dir, 'tools.save'),
                        join(self.android_sdk_dir, 'tools')
                    )
                old_android_cmd = self.android_cmd
                self.android_cmd = join(
                    self.android_sdk_dir,
                    'tools.save',
                    self.android_cmd.split('/')[-1]
                )
                self._android_update_sdk('tools,platform-tools')
                self.android_cmd = old_android_cmd
                if WSL:
                    sh.rm('-rf', join(self.android_sdk_dir, 'tools.save'))
            else:
                self.buildozer.info('Skipping Android SDK update due to spec file setting')

        # 2. install the latest build tool
        v_build_tools = self._read_version_subdir(self.android_sdk_dir,
                                                  'build-tools')
        packages = self._android_list_sdk(include_all=True)
        ver = self._find_latest_package(packages, 'build-tools-')
        if ver and ver > v_build_tools and not skip_upd:
            self._android_update_sdk(self._build_package_string('build-tools', ver))
        # 2b. check that aidl can be run
        self._check_aidl(v_build_tools)

        # 3. finally, install the android for the current api
        android_platform = join(self.android_sdk_dir, 'platforms', 'android-{0}'.format(self.android_api))
        if not self.buildozer.file_exists(android_platform):
            packages = self._android_list_sdk()
            android_package = 'android-{}'.format(self.android_api)
            if android_package in packages and not skip_upd:
                self._android_update_sdk(android_package)

        self.buildozer.info('Android packages installation done.')

        self.buildozer.state[cache_key] = cache_value
        self.buildozer.state.sync()
Example #36
#! /usr/bin/python

import os
import sys
import sh

from config import SRC, PATHS

for path in PATHS:
	for folder, subfolders, filenames in os.walk(path):
		for filename in filenames:
			try:
				if filename.endswith('.pdf') and not os.path.exists(os.path.join(SRC, filename)):
					sh.ln('-s', os.path.join(path, filename), os.path.join(SRC, filename))
			except Exception as e:
				sys.stderr.write(u"Can't create symlink to file '{}'.\n{}".format(filename.decode('utf8'),e))
				pass
Example #37
def main():
    arguments = docopt(__doc__, version='makemd 1.0')
    # print arguments

    # up to root
    if not up_to_main():
        print(".main file not exist, bort. Please creat a .main file in the main folder.")
        return

    main_config=yaml.load(open(".main",'r'))
    if not main_config:
        main_config={}

    if arguments.get('-l') or arguments.get('--log'):
        logger=mylog.set_logger(filename='makemd.log', level=mylog.logging.INFO)
    elif arguments.get('-q') or arguments.get('--quiet'):
        logger=mylog.set_logger(filename='makemd.log', level=mylog.logging.ERROR)
    elif arguments.get('-d') or arguments.get('--debug'):
        logger=mylog.set_logger(filename='makemd.log', level=mylog.logging.DEBUG)
    else:
        logger=mylog.set_logger(level=mylog.logging.INFO)

    logger.debug(arguments)

    # load main_config
    if main_config.has_key('output_type_list'):
        logger.info('output_type_list are %s' % main_config['output_type_list'])
## A .main config file sample. using yaml.
## output_type_list:
##    - latex_article
##    #- latex_report
##    #- rtf
##    #- docx

    # set filename variables
    main_title=os.path.basename(os.getcwd())
    if not os.path.exists('.build'):
        sh.mkdir('.build')
    sh.cd('.build')
    if not os.path.exists('images') and os.path.exists('../images'):
        sh.ln('-s','../images','./')
    markdown_file_name=os.path.join(os.getcwd(),main_title+'.markdown')
    # markdown_file_name=os.path.join(os.getcwd(),main_title+'.markdown')
    docx_file_name=main_title+'.docx'
    rtf_file_name=main_title+'.rtf'


    # generate main_title.markdown file
    markdown_file=open(markdown_file_name, 'w')
    # markdown_file.write('#'+os.path.basename(os.getcwd())+'\n')
    # sh.cd('..')
    dfs_dir_byplist(os.pardir,markdown_file,0,logger)
    markdown_file.close()
    sh.open(markdown_file_name)

    markdown_file=open(markdown_file_name, 'r')

    if main_config.has_key('output_type_list'):

        # generate latex file
        if main_config['output_type_list'] and ('latex_report' in main_config['output_type_list'] or 'latex_article' in main_config['output_type_list']):
            content=markdown_file.read()
            encoding=chardet.detect(content)['encoding']
            if encoding == 'utf-8' and 'latex_report' in main_config['output_type_list']:
                # generate latex & pdf file by article
                mmd2tex(markdown_file_name,main_title,'report','cn')
            if encoding == 'utf-8' and 'latex_article' in main_config['output_type_list']:
                # generate latex & pdf file by article
                mmd2tex(markdown_file_name,main_title,'article','cn')

            if encoding != 'utf-8' and 'latex_report' in main_config['output_type_list']:
                mmd2tex(markdown_file_name,main_title,'report','en')
            if encoding != 'utf-8' and 'latex_article' in main_config['output_type_list']:
                # generate latex & pdf file by article
                mmd2tex(markdown_file_name,main_title,'article','en')
            logger.info("tex & pdf file generated")


        # generate rtf file
        if  main_config['output_type_list'] and 'rtf' in main_config['output_type_list']:
            mmd2rtf(markdown_file_name,rtf_file_name)
            sh.open(rtf_file_name)
            logger.info("rtf file generated")

        # generate docx file
        if  main_config['output_type_list'] and 'docx' in main_config['output_type_list']:
            mmd2docx(markdown_file_name,docx_file_name)
            sh.open(docx_file_name)
            logger.info("docx file generated")
Example #38
def mmd2tex(markdown_file_name,main_title,type,lang):
    if not os.path.exists('latex'):
        sh.mkdir('latex')
    sh.cd('latex')
    if not os.path.exists('images') and os.path.exists('../images'):
        sh.ln('-s','../images','./')

    tex_file_name=main_title+'_'+type+'_'+lang+'.tex'

    # sh.pandoc(markdown_file_name,f="markdown_mmd",t="latex",o=tex_file_name)

    # generate proper tex file
    # main_title=str(sh.sed('-n','s/^# //p',markdown_file_name))
    tmp_markdown_file_name=tex_file_name.replace('.tex','.markdown')
    if type=='report':
        # report_markdown=main_title+'_report.markdown'
        sh.cp(markdown_file_name,tmp_markdown_file_name)
        # sh.sed('-i','-e','s/^#/'+'#/',tmp_markdown_file_name)

        sh.mmd2tex(tmp_markdown_file_name)
        # sh.mv(report_markdown,tex_file_name)
    elif type=='article':
        # article_markdown=main_title+'_article.markdown'
        sh.cp(markdown_file_name,tmp_markdown_file_name)
        sh.sed('-i','-e','s/^#/'+'#'*2+'/',tmp_markdown_file_name)
        sh.mmd2tex(tmp_markdown_file_name)
        # sh.mv(article_markdown.replace(,tex_file_name)
    else:
        return


    tex_file=open(tex_file_name,'r')
    content_list=tex_file.readlines()
    tex_file.close()
    if type=='report' and lang=='cn':
        prefix='''
% -*- coding: utf-8 -*-
\documentclass[UTF8,nofonts]{ctexrep}
\setCJKmainfont[BoldFont=STHeiti,ItalicFont=STKaiti]{STSong}
\setCJKsansfont[BoldFont=STHeiti]{STXihei}
\setCJKmonofont{STFangsong}

'''

    elif type=='article' and lang=='cn':
        prefix='''
% -*- coding: utf-8 -*-
\documentclass[UTF8,nofonts]{ctexart}
\setCJKmainfont[BoldFont=STHeiti,ItalicFont=STKaiti]{STSong}
\setCJKsansfont[BoldFont=STHeiti]{STXihei}
\setCJKmonofont{STFangsong}

'''

    elif type=='article' and lang=='en':
        prefix='''
% -*- coding: utf-8 -*-
\documentclass[UTF8]{article}

'''

    elif type=='report' and lang=='en':
        prefix='''
% -*- coding: utf-8 -*-
\documentclass[UTF8]{report}
'''

    secfix='''
\usepackage{graphicx}

\\begin{document}
\\title{%s} \\author{Haorui Lu}  \maketitle
\\tableofcontents
''' % (main_title)
    surfix='''
\end{document}
'''

    content_list.insert(0,secfix)
    content_list.insert(0,prefix)
    content_list.append(surfix)

    tex_file=open(tex_file_name,'w')
    tex_file.writelines(content_list)
    tex_file.close()
    _s=open(tex_file_name,'r').read().replace('includegraphics{','includegraphics[width=\\textwidth]{')
    open(tex_file_name,'w').write(_s)
    try:
        run_mytex=sh.Command("mytex.py")
        run_mytex(tex_file_name)
    except Exception, e:
        pass
Example #39
    tex_file=open(tex_file_name,'w')
    tex_file.writelines(content_list)
    tex_file.close()
    _s=open(tex_file_name,'r').read().replace('includegraphics{','includegraphics[width=\\textwidth]{')
    open(tex_file_name,'w').write(_s)
    try:
        run_mytex=sh.Command("mytex.py")
        run_mytex(tex_file_name)
    except Exception, e:
        pass
    pdf_file_name=tex_file_name.replace('tex','pdf')
    if os.path.exists(pdf_file_name):
        sh.cd('..')
        if not os.path.exists(pdf_file_name):
            sh.ln('-s',os.path.join('latex',pdf_file_name))
        sh.open(pdf_file_name)


def mmd2rtf(markdown_file_name,rtf_file_name):
    sh.pandoc(markdown_file_name,f="markdown_mmd",t="rtf",o=rtf_file_name)
def mmd2docx(markdown_file_name,docx_file_name):
    sh.pandoc(markdown_file_name,f="markdown_mmd",t="docx",o=docx_file_name)

def add_quotation(s):
    return '\"'+s+'\"'


def main():
    arguments = docopt(__doc__, version='makemd 1.0')
    # print arguments
Example #40
def get_input_data(dest_dir):
    """
    Set up input data.
    """

    sh.ln('-s', '/short/v45/nah599/more_home/Download/datasets', dest_dir)
Example #41
def main():
    log = logging.getLogger("zulip-provisioner")

    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
        phantomjs_arch = 'x86_64'
    elif platform.architecture()[0] == '32bit':
        arch = "i386"
        phantomjs_arch = 'i686'
    else:
        log.critical("Only x86 is supported; ping [email protected] if you want another architecture.")
        sys.exit(1)

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"], assume_yes=True, **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget(
        "{}/{}_{}_{}.deb".format(
            TSEARCH_URL_BASE,
            TSEARCH_PACKAGE_NAME["trusty"],
            TSEARCH_VERSION,
            arch,
        ),
        output_document=temp_deb_path,
        **LOUD
    )

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_BASENAME = "phantomjs-1.9.8-linux-%s" % (phantomjs_arch,)
        PHANTOMJS_TARBALL_BASENAME = PHANTOMJS_BASENAME + ".tar.bz2"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH, PHANTOMJS_TARBALL_BASENAME)
        PHANTOMJS_URL = "https://bitbucket.org/ariya/phantomjs/downloads/%s" % (PHANTOMJS_TARBALL_BASENAME,)
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        if not os.path.exists(PHANTOMJS_TARBALL):
            sh.wget(PHANTOMJS_URL, output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln("-sf", os.path.join(PHANTOMJS_PATH, PHANTOMJS_BASENAME, "bin", "phantomjs"),
              "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join((
            os.path.join(ZULIP_PATH, "tools"),
            os.path.join(ZULIP_PATH, "scripts", "setup"),
            orig_path
    ))


    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"), **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # npm install and management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    sh.npm.install(**LOUD)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    if "--travis" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    elif "--docker" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo pg_dropcluster --stop 9.3 main")
        os.system("sudo pg_createcluster -e utf8 --start 9.3 main")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_dev_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
    return 0
Example #42
def lnk(src, dst):
    if os.path.isfile(dst) or os.path.islink(dst):
        print('WARNING: File at \'{}\' already exists, ignoring...'.format(dst))
        return
    sh.ln('-s', src, dst)
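
The isfile-or-islink guard misses the case where dst is an existing directory; os.path.lexists covers every variant, including dangling symlinks, in a single test. A sketch:

import os
import sh

def lnk(src, dst):
    if os.path.lexists(dst):  # any existing entry, even a broken symlink
        print('WARNING: File at \'{}\' already exists, ignoring...'.format(dst))
        return
    sh.ln('-s', src, dst)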
Example #43
try:
	import sh
except:
	print("Could not find the sh module:")
	print("  sudo pip install sh")
	sys.exit(1)

from sh import ln
from sh import rm
from sh import docker


DOCKERFILES = [
	('bleScannerHCI', 'lab11/wearabouts-ble-scanner-js'),
	('bleScanner', 'lab11/wearabouts-ble-scanner-py')
]


# Get rid of the Dockerfile in the root
print('Removing existing Dockerfile if it exists')
rm('Dockerfile', '-f')

# Build each docker image
for dockerfile in DOCKERFILES:
	print('Building {}'.format(dockerfile[1]))
	ln('-s', dockerfile[0]+'/Dockerfile', 'Dockerfile')
	for chunk in docker('build', '-t', dockerfile[1], '.'):
		print(chunk, end="")

	rm('Dockerfile')
Example #44
def main():
    log = logging.getLogger("zulip-provisioner")
    # TODO: support other architectures
    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
    else:
        log.critical("Only amd64 is supported.")

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"], assume_yes=True, **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget(
        "{}/{}_{}_{}.deb".format(
            TSEARCH_URL_BASE,
            TSEARCH_PACKAGE_NAME["trusty"],
            TSEARCH_VERSION,
            arch,
        ),
        output_document=temp_deb_path,
        **LOUD
    )

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64.tar.bz2")
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        sh.wget("https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.8-linux-x86_64.tar.bz2",
                output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln("-sf", os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64", "bin", "phantomjs"),
              "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join((
            os.path.join(ZULIP_PATH, "tools"),
            os.path.join(ZULIP_PATH, "scripts", "setup"),
            orig_path
    ))


    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"), **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # Add additional node packages for test-js-with-node.
    with sh.sudo:
        sh.npm.install(*NPM_DEPENDENCIES["trusty"], g=True, prefix="/usr", **LOUD)

    # Management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
Example #45
            content_base_path = syncer_dir + path('/content/%s' % key)

            if not path.exists(backup_base_path):
                sh.mkdir(backup_base_path)

            for f in paths:
                f_path = path(f)
                backup_path = backup_base_path + '/' +  f_path.name
                content_path = content_base_path + '/' + f_path.name

                # save a backup first
                if not path.exists(backup_path):
                    sh.cp('-r', path.expand(f_path), backup_path)

                # create symlink from content_path to f_path
                sh.ln('-s', content_path, path.expand(f_path)) 

elif args.command == TRACK:
    if not (args.key or args.files):
        raise Exception("Track is missing key and files")

    ensure_syncer_dir()
    sh.cd(syncer_dir)

    json_data = {}
    with open(path('manifest.json'), 'r') as manifest_file:
        try:
            json_data = json.load(manifest_file)
        except:
            json_data = {}
Example #46
def build_site():
	print("Building website...")
	je = jinja.Environment(
		loader=jinja.FileSystemLoader(['.', 'templates']))

	BUILT_DIR = 'built'
	DIRECTORIES = ('demos', 'work-in-progress', 'raw', '.')

	try:
		WORKING_DIR = tempfile.mkdtemp()

		demos = {}
		demo_data = {} # dictionary of filename -> [name, description]

		for directory in DIRECTORIES:
			demos[directory] = []
			demo_data[directory] = {}

			for filename in glob.glob(directory + '/*.jinja'):
				basedir, name = os.path.split(filename)
				name = os.path.splitext(name)[0]
				outputname = os.path.join(WORKING_DIR, name + '.html')

				output = je.get_template(filename).render()

				with open(outputname, 'w') as f:
					f.write(output)

				if name != 'index':
					html_name = name + '.html'
					demos[directory].append(html_name)

					# default name and description
					name = os.path.splitext(html_name)[0].replace('_', ' ')
					name = titlecase.titlecase(name)
					desc = name

					# open file to read from it
					with open(filename, 'r') as f:
						for line in f:
							if line[0:8] == '{#Name: ':
								# '{#Name: <name>#}\n'
								name = line[8:-3]
							if line[0:8] == '{#Desc: ':
								# '{#Desc: <description>#}\n'
								desc = line[8:-3]

					demo_data[directory][html_name] = [name, desc]
				
			for filename in glob.glob(directory + '/*.html'):
				basedir, html_name = os.path.split(filename)
				dst = os.path.join(WORKING_DIR, html_name)
				demos[directory].append(html_name)
				cp(filename, dst)

				# default name and description
				name = os.path.splitext(html_name)[0].replace('_', ' ')
				name = titlecase.titlecase(name)
				desc = name

				# open file to read from it
				with open(filename, 'r') as f:
					for line in f:
						if line[0:8] == '{#Name: ':
							# '{#Name: <name>#}\n'
							name = line[8:-3]
						if line[0:8] == '{#Desc: ':
							# '{#Desc: <description>#}\n'
							desc = line[8:-3]

				demo_data[directory][html_name] = [name, desc]

		categories = []
		for directory in DIRECTORIES:
			demo_list = ''

			# sort demo_data by name
			for filename, [name, desc] in sorted(demo_data[directory].items(), key=lambda e: e[1][0]):
				demo_list += je.get_template('demo_item.jinja').render(
						name=name, desc=desc, path=filename)

			if demo_list == '':
				# No demos in this category, skip it
				continue
			category = je.get_template('demo_category_{}.jinja'
						.format(directory)).render(
							demos=demo_list,
							)
			categories.append(category)

		index = je.get_template('index.jinja').render(
			categories=categories
			)
		with open(os.path.join(WORKING_DIR, 'index.html'), 'w') as f:
			f.write(index)

		rm('-rf', BUILT_DIR)
		mv(WORKING_DIR, BUILT_DIR)
		for d in ('bower_components', 'css', 'images', 'js'):
			ln('-s', '../'+d, BUILT_DIR)

		find(BUILT_DIR, '-type', 'd', '-exec', 'chmod', '755', '{}', ';')
		find(BUILT_DIR, '-type', 'f', '-exec', 'chmod', '644', '{}', ';')


	except:
		print("Unexpected error building site. Working build directory at:")
		print("\t{0}".format(WORKING_DIR))
		print("")
		raise