Example No. 1
def test_gmprocess():
    eid = 'usb000syza'
    tpath = pkg_resources.resource_filename('gmprocess', 'tests')
    gmprocess = os.path.abspath(os.path.join(tpath, '..', '..',
                                             'bin', 'gmprocess'))
    dpath = os.path.join('data', 'testdata', 'knet', eid)
    cfgpath = os.path.join('data',
                           'testdata',
                           'knet',
                           eid,
                           'config.yml')
    knetdir = pkg_resources.resource_filename('gmprocess', dpath)
    cfgfile = pkg_resources.resource_filename('gmprocess', cfgpath)
    tdir = tempfile.mkdtemp()
    try:
        fmt = '%s %s -i %s --directory %s -c %s'
        tpl = (gmprocess, tdir, eid, knetdir, cfgfile)
        cmd = fmt % tpl
        res, stdout, stderr = get_command_output(cmd)
        assert res
    finally:
        shutil.rmtree(tdir)
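
Every example on this page assumes a get_command_output helper that runs a
shell command and returns a (success, stdout, stderr) tuple, with the two
streams as bytes. A minimal sketch of what such a helper might look like
(the real implementations live in the packages these examples come from):

import subprocess

def get_command_output(cmd):
    # run a shell command, returning (success_flag, stdout_bytes, stderr_bytes);
    # shell=True because the examples pass whole command strings, some of
    # which rely on shell features such as backticks
    proc = subprocess.run(cmd, shell=True,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return (proc.returncode == 0, proc.stdout, proc.stderr)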
Example No. 2
def test_rjb_WC94():
    cmd = "progs/RjbMeanVar.py tests/config/test_Rjb_WC94.ini"
    rc, so, se = get_command_output(cmd)
    r1 = pd.DataFrame.from_csv("tests/data/test_Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv", header=6)
    v1 = pd.DataFrame.from_csv("tests/data/test_Rjb_WC94_mechA_ar1p7_seis0_20_Var.csv", header=6)
    r2 = pd.DataFrame.from_csv("TestData/test_Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv", header=6)
    v2 = pd.DataFrame.from_csv("TestData/test_Rjb_WC94_mechA_ar1p7_seis0_20_Var.csv", header=6)

    pd.util.testing.assert_frame_equal(r1, r2)
    pd.util.testing.assert_frame_equal(v1, v2)

    # Clean up
    shutil.rmtree('TestData')
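
pd.DataFrame.from_csv used above was deprecated in pandas 0.21 and removed in
1.0, and pd.util.testing has been superseded by pd.testing. Under current
pandas the same comparison could be written as follows (index_col=0 mirrors
the old from_csv default; the file layout is assumed unchanged):

import pandas as pd

r1 = pd.read_csv("tests/data/test_Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv",
                 header=6, index_col=0)
r2 = pd.read_csv("TestData/test_Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv",
                 header=6, index_col=0)
pd.testing.assert_frame_equal(r1, r2)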
Example No. 3
def test_rrup_WC94_R_F():
    conf = 'fast_rrup_WC94_R_F.ini'
    cmd = "progs/RrupMeanVar.py tests/config/%s" %conf
    rc, so, se = get_command_output(cmd)
    r1 = pd.DataFrame.from_csv("tests/data/test_Rrup_WC94_mechR_ar1p0_seis0_15_Ratios.csv", header=6)
    v1 = pd.DataFrame.from_csv("tests/data/test_Rrup_WC94_mechR_ar1p0_seis0_15_Var.csv", header=6)
    r2 = pd.DataFrame.from_csv("TestData/test_Rrup_WC94_mechR_ar1p0_seis0_15_Ratios.csv", header=6)
    v2 = pd.DataFrame.from_csv("TestData/test_Rrup_WC94_mechR_ar1p0_seis0_15_Var.csv", header=6)

    pd.util.testing.assert_frame_equal(r1, r2)
    pd.util.testing.assert_frame_equal(v1, v2)

    # Clean up
    shutil.rmtree('TestData')
Example No. 4
def test_gm2table():
    tpath = pkg_resources.resource_filename('gmprocess', 'tests')
    gm2table = os.path.abspath(os.path.join(tpath, '..', '..',
                                            'bin', 'gm2table'))
    dpath = os.path.join('data', 'testdata', 'knet')
    knetdir = pkg_resources.resource_filename('gmprocess', dpath)

    tfile = tempfile.mkstemp()[1]
    cmd = '%s %s %s -f csv' % (gm2table, knetdir, tfile)
    res, stdout, stderr = get_command_output(cmd)
    os.remove(tfile)

    tfile = tempfile.mkstemp()[1]
    cmd = '%s %s %s -f xlsx' % (gm2table, knetdir, tfile)
    res, stdout, stderr = get_command_output(cmd)
    os.remove(tfile)

    cwbdir = os.path.join(knetdir, '..', 'cwb')
    lon = 21.69
    lat = 24.14
    tfile = tempfile.mkstemp()[1]
    cmd = '%s %s %s --lat %s --lon %s' % (gm2table, cwbdir, tfile, lat, lon)
    res, stdout, stderr = get_command_output(cmd)
    os.remove(tfile)
Example No. 5
    def send(self):
        # we can really only support sending of one file and/or one directory, so error out
        # if someone has specified more than one of either.
        if len(self.files) > 1:
            raise ShakeMapException(
                'For PDL, you may only send one file at a time.')
        if len(self.directories) > 1:
            raise ShakeMapException(
                'For PDL, you may only send one directory at a time.')

        # make sure we have all the required properties
        for prop in self.__required_properties:
            if prop not in list(self.properties.keys()):
                raise ShakeMapException(
                    '"%s" property must be supplied to send via PDL' % prop)

        # build pdl command line from properties
        self.properties['command'] = 'send'
        self.properties['status'] = 'UPDATE'
        if self.files:
            self.properties['file'] = self.files[0]
        else:
            self.properties['file'] = ''
        if self.directories:
            self.properties['directory'] = self.directories[0]
        else:
            self.properties['directory'] = ''
        cmd = self.__pdlcmd
        for propkey, propvalue in self.properties.items():
            cmd = cmd.replace('[' + propkey.upper() + ']', propvalue)

        # call PDL on the command line
        retcode, stdout, stderr = get_command_output(cmd)
        if not retcode:
            fmt = 'Could not send product "%s" due to error "%s"'
            # 'code' is assumed to be one of the required product properties
            tpl = (self.properties['code'], stdout + stderr)
            raise ShakeMapException(fmt % tpl)

        # return the number of files we just sent
        nfiles = 0
        if self.properties['file']:
            nfiles += 1
        if self.properties['directory']:
            nfiles += len(os.listdir(self.properties['directory']))

        return nfiles
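
The send() method above fills in a command-line template by replacing
bracketed, upper-cased tokens with property values. A self-contained sketch
of that mechanism (the template text here is hypothetical; the real __pdlcmd
is defined elsewhere in the class):

template = ('java -jar [JARFILE] --send --status=[STATUS] '
            '--code=[CODE] --file=[FILE]')
properties = {'jarfile': 'ProductClient.jar', 'status': 'UPDATE',
              'code': 'us1000abcd', 'file': 'grid.xml'}
cmd = template
for propkey, propvalue in properties.items():
    cmd = cmd.replace('[' + propkey.upper() + ']', propvalue)
print(cmd)
# java -jar ProductClient.jar --send --status=UPDATE --code=us1000abcd --file=grid.xml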
Example No. 6
def test_amps2xml():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    excelfile = os.path.join(homedir, '..', 'data', 'minimum_mmi.xlsx')
    amps2xml = os.path.join(homedir, '..', '..', 'bin', 'amps2xml')
    tmpdir = tempfile.mkdtemp()

    try:
        cmd = '%s foo --debug-dir=%s %s' % (amps2xml, tmpdir, excelfile)
        res, stdout, stderr = get_command_output(cmd)
        if not res:
            raise AssertionError(
                'amps2xml command %s failed with errors "%s"' % (cmd, stderr))
        print(stdout)
    except Exception:
        raise
    finally:
        shutil.rmtree(tmpdir)
Example No. 7
def test_rrup_WC94_SS_T():
    conf = 'fast_WC94_SS_T.ini'
    cmd = "bin/run_ps2ff -w rrup tests/config/%s" % conf
    rc, so, se = get_command_output(cmd)
    r1 = pd.DataFrame.from_csv(
        "tests/data/test_Rrup_WC94_mechSS_LW_seis0_15_Ratios.csv", header=6)
    v1 = pd.DataFrame.from_csv(
        "tests/data/test_Rrup_WC94_mechSS_LW_seis0_15_Var.csv", header=6)
    r2 = pd.DataFrame.from_csv(
        "TestData/Rrup_WC94_mechSS_LW_seis0_15_Ratios.csv", header=6)
    v2 = pd.DataFrame.from_csv("TestData/Rrup_WC94_mechSS_LW_seis0_15_Var.csv",
                               header=6)

    pd.util.testing.assert_frame_equal(r1, r2)
    pd.util.testing.assert_frame_equal(v1, v2)

    # Clean up
    shutil.rmtree('TestData')
Example No. 8
    def delete(self):
        for prop in self.__required_properties:
            if prop not in list(self.properties.keys()):
                raise ShakeMapException(
                    '"%s" property must be supplied to send via PDL' % prop)

        # build pdl command line from properties
        self.properties['status'] = 'DELETE'
        self.properties['files'] = ''
        self.properties['directories'] = ''
        cmd = self.__pdlcmd
        for propkey, propvalue in self.properties.items():
            cmd = cmd.replace('[' + propkey.upper() + ']', propvalue)

        retcode, stdout, stderr = get_command_output(cmd)
        if not retcode:
            fmt = 'Could not delete product "%s" due to error "%s"'
            # 'code' is assumed to be one of the required product properties
            tpl = (self.properties['code'], stdout + stderr)
            raise ShakeMapException(fmt % tpl)
Example No. 9
    def cancel(self, cancel_content=None):
        """Send a delete message out via PDL regarding the product in question.

        Args:
            cancel_content: String containing cancel message. This is NOT used
                in the implementation for this class.

        Returns:
            Standard output from PDL DELETE command.
        """
        # build pdl command line from properties
        self._properties['status'] = 'DELETE'
        self._properties['file'] = ''
        self._properties['directory'] = ''
        cmd = self._pdlcmd

        prop_nuggets = []
        for propkey, propvalue in self._properties.items():
            if isinstance(propvalue, float):
                prop_nuggets.append('--property-%s=%.4f' %
                                    (propkey, propvalue))
            elif isinstance(propvalue, int):
                prop_nuggets.append('--property-%s=%i' % (propkey, propvalue))
            elif isinstance(propvalue, datetime.datetime):
                prop_nuggets.append(
                    '--property-%s=%s' %
                    (propkey, propvalue.strftime(DATE_TIME_FMT)[0:23]))
            elif isinstance(propvalue, str):
                prop_nuggets.append('--property-%s="%s"' %
                                    (propkey, propvalue))
            else:
                prop_nuggets.append('--property-%s=%s' %
                                    (propkey, str(propvalue)))

        cmd = cmd.replace('[PRODUCT_PROPERTIES]', ' '.join(prop_nuggets))

        retcode, stdout, stderr = get_command_output(cmd)
        if not retcode:
            fmt = 'Could not delete product "%s" due to error "%s"'
            tpl = (retcode, stdout + stderr)
            raise Exception(fmt % tpl)

        return stdout
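
The cancel() method above renders each property as a --property-KEY=VALUE
command-line flag, with the format chosen by the value's type. A standalone
rendition of those rules (datetime branch omitted; the property names in the
demo call are hypothetical):

def format_props(properties):
    # floats get four decimals, ints plain %i, strings are quoted,
    # and anything else falls back to str()
    nuggets = []
    for key, value in properties.items():
        if isinstance(value, float):
            nuggets.append('--property-%s=%.4f' % (key, value))
        elif isinstance(value, int):
            nuggets.append('--property-%s=%i' % (key, value))
        elif isinstance(value, str):
            nuggets.append('--property-%s="%s"' % (key, value))
        else:
            nuggets.append('--property-%s=%s' % (key, str(value)))
    return ' '.join(nuggets)

print(format_props({'depth': 10.0, 'version': 2, 'eventsource': 'us'}))
# --property-depth=10.0000 --property-version=2 --property-eventsource="us"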
Example No. 10
def test_parallel():
    gmprocess = pkg_resources.resource_filename(
        'gmprocess', os.path.join('..', 'bin', 'gmprocess'))

    data_dir = pkg_resources.resource_filename(
        'gmprocess', os.path.join('data', 'testdata', 'demo'))
    out_dir = 'temp_dir'

    try:
        cmd = ('%s -o %s --assemble --directory %s -n 2' %
               (gmprocess, out_dir, data_dir))
        rc, so, se = get_command_output(cmd)
        assert rc

    except Exception as e:
        print(so.decode())
        print(se.decode())
        raise e
    finally:
        shutil.rmtree(out_dir)
Example No. 11
def test_fetchflat():
    tpath = pkg_resources.resource_filename('gmprocess', 'tests')
    fetchflat = os.path.abspath(
        os.path.join(tpath, '..', '..', 'bin', 'fetchflat'))
    dpath = os.path.join('data', 'testdata', 'geonet2')
    cfgpath = os.path.join('data', 'testdata', 'geonet2', 'config.yml')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)
    cfgfile = pkg_resources.resource_filename('gmprocess', cfgpath)
    tdir = tempfile.mkdtemp()
    try:
        fmt = '%s %s --directory %s -c %s'
        tpl = (fetchflat, tdir, datadir, cfgfile)
        cmd = fmt % tpl
        res, stdout, stderr = get_command_output(cmd)
        assert res
    finally:
        shutil.rmtree(tdir)
Example No. 12
    def sendAction(self, action, eventid):
        if action not in ALLOWED_ACTIONS:
            fmt = 'Action "%s" not in list of allowed actions: "%s"'
            raise Exception(fmt % (action, str(ALLOWED_ACTIONS)))

        pdl_cmd = self._pdlcmd.replace('[JAVA]', self._init_params['java'])
        pdl_cmd = pdl_cmd.replace('[JARFILE]', self._init_params['jarfile'])
        pdl_cmd = pdl_cmd.replace('[privatekey]',
                                  self._init_params['privatekey'])
        pdl_cmd = pdl_cmd.replace('[configfile]',
                                  self._init_params['configfile'])
        pdl_cmd = pdl_cmd.replace('[CODE]', eventid)
        source, source_code = split_event(eventid)
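        # split_event presumably splits an event id such as 'us1000abcd'
        # into network and code parts, e.g. ('us', '1000abcd')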
        pdl_cmd = pdl_cmd.replace('[EVENTSOURCE]', source)
        pdl_cmd = pdl_cmd.replace('[EVENTSOURCECODE]', source_code)
        user = getpass.getuser()
        action_time = datetime.datetime.utcnow().strftime(self._date_time_fmt)
        pdl_cmd = pdl_cmd.replace('[ACTION]', action)
        pdl_cmd = pdl_cmd.replace('[USER]', user)
        pdl_cmd = pdl_cmd.replace('[ACTION-TIME]', action_time)
        res, stdout, stderr = get_command_output(pdl_cmd)
        return (res, stdout, stderr)
Example No. 13
def test_gmprocess():
    eid = 'usb000syza'
    tpath = pkg_resources.resource_filename('gmprocess', 'tests')
    gmprocess = os.path.abspath(
        os.path.join(tpath, '..', '..', 'bin', 'gmprocess'))
    dpath = os.path.join('data', 'testdata', 'knet', eid)
    cfgpath = os.path.join('data', 'testdata', 'knet', eid, 'config.yml')
    knetdir = pkg_resources.resource_filename('gmprocess', dpath)
    cfgfile = pkg_resources.resource_filename('gmprocess', cfgpath)
    tdir = tempfile.mkdtemp()
    try:
        fmt = '%s %s -i %s --directory %s -c %s'
        tpl = (gmprocess, tdir, eid, knetdir, cfgfile)
        cmd = fmt % tpl
        res, stdout, stderr = get_command_output(cmd)
        assert res
    finally:
        shutil.rmtree(tdir)
Example No. 14
def test_sm2xml():
    # TEST for GeoNet
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    indir = os.path.join(homedir, '..', 'data', 'geonet')
    sm2xml = os.path.join(homedir, '..', '..', 'bin', 'sm2xml')
    eventid = 'us1000778i'
    tmpdir = tempfile.mkdtemp()
    try:
        cmd = '%s %s %s --debug-dir=%s' % (sm2xml, eventid, indir, tmpdir)
        res, stdout, stderr = get_command_output(cmd)
        if not res:
            raise AssertionError(
                'sm2xml command %s failed with errors "%s"' % (cmd, stderr))
        print(stdout)
        # assert target_file.encode() in stdout
        # excel, ref = read_excel(target_file.replace('xml', 'xlsx'))
    except Exception:
        raise
    finally:
        shutil.rmtree(tmpdir)
Example No. 15
def test_rjb_WC94():
    cmd = "bin/run_ps2ff -w rjb tests/config/test_WC94.ini"
    rc, so, se = get_command_output(cmd)
    r1 = pd.DataFrame.from_csv(
            "tests/data/test_Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv",
            header=6)
    v1 = pd.DataFrame.from_csv(
            "tests/data/test_Rjb_WC94_mechA_ar1p7_seis0_20_Var.csv",
            header=6)
    r2 = pd.DataFrame.from_csv(
            "TestData/Rjb_WC94_mechA_ar1p7_seis0_20_Ratios.csv",
            header=6)
    v2 = pd.DataFrame.from_csv(
            "TestData/Rjb_WC94_mechA_ar1p7_seis0_20_Var.csv",
            header=6)

    pd.util.testing.assert_frame_equal(r1, r2)
    pd.util.testing.assert_frame_equal(v1, v2)

    # Clean up
    shutil.rmtree('TestData')
Example No. 16
def test_eventfile():
    out_dir = 'temp_dir'

    conf_file = pkg_resources.resource_filename(
        'gmprocess', os.path.join('data', 'testdata', 'conf_small.yml'))

    eventfile = pkg_resources.resource_filename(
        'gmprocess', os.path.join('data', 'testdata', 'example_eventfile.txt'))

    try:
        cmd = ('gmprocess2 -o %s --assemble --textfile %s --config %s' %
               (out_dir, eventfile, conf_file))
        rc, so, se = get_command_output(cmd)
        assert rc

    except Exception as e:
        print(so.decode())
        print(se.decode())
        raise e
    finally:
        shutil.rmtree(out_dir)
Example No. 17
def test_single():
    cmd = "progs/RrupRjbMeanVar_SingleEvent.py tests/config/test_single.ini"
    rc, so, se = get_command_output(cmd)

    rjb1 = pd.DataFrame.from_csv("tests/data/Rjb_bytheta_Ratios.csv", header=6)
    vjb1 = pd.DataFrame.from_csv("tests/data/Rjb_bytheta_Var.csv", header=6)
    rjb2 = pd.DataFrame.from_csv("DataSingle/Rjb_bytheta_Ratios.csv", header=6)
    vjb2 = pd.DataFrame.from_csv("DataSingle/Rjb_bytheta_Var.csv", header=6)

    rrup1 = pd.DataFrame.from_csv("tests/data/Rrup_bytheta_Ratios.csv", header=6)
    vrup1 = pd.DataFrame.from_csv("tests/data/Rrup_bytheta_Var.csv", header=6)
    rrup2 = pd.DataFrame.from_csv("DataSingle/Rrup_bytheta_Ratios.csv", header=6)
    vrup2 = pd.DataFrame.from_csv("DataSingle/Rrup_bytheta_Var.csv", header=6)

    pd.util.testing.assert_frame_equal(rjb1, rjb2)
    pd.util.testing.assert_frame_equal(vjb1, vjb2)
    pd.util.testing.assert_frame_equal(rrup1, rrup2)
    pd.util.testing.assert_frame_equal(vrup1, vrup2)

    # Clean up
    shutil.rmtree('DataSingle')
Example No. 18
    def cancel(self, cancel_content=None):
        """Send a delete message out via PDL regarding the product in question.

        Args:
            cancel_content: String containing cancel message. This is NOT used
                in the implementation for this class.

        Returns:
            Standard output from PDL DELETE command.
        """
        # build pdl command line from properties
        self._properties["file"] = ""
        self._properties["directory"] = ""
        cmd = self._pdlcmd

        # make this a delete status
        cmd = cmd.replace("[STATUS]", "DELETE")

        # fill out the required properties
        cmd = self._replace_required_properties(cmd)

        # fill out any files or directories we'll be sending
        cmd = self._replace_files(cmd)

        # fill in all the product properties
        cmd = self._replace_product_properties(cmd)

        # fill in all the optional properties
        cmd = self._replace_optional_properties(cmd)

        retcode, stdout, stderr = get_command_output(cmd)
        if not retcode:
            ptype = self._properties["type"]
            fmt = (
                f'Could not delete product "{ptype}" due to error '
                f'"{stdout + stderr}"'
            )
            raise PDLError(fmt)

        return stdout
Example No. 19
def test_single():
    cmd = "bin/run_ps2ff_single_event tests/config/test_single.ini"
    rc, so, se = get_command_output(cmd)

    rjb1 = pd.DataFrame.from_csv("tests/data/Rjb_bytheta_Ratios.csv", header=6)
    vjb1 = pd.DataFrame.from_csv("tests/data/Rjb_bytheta_Var.csv", header=6)
    rjb2 = pd.DataFrame.from_csv("DataSingle/Rjb_bytheta_Ratios.csv", header=6)
    vjb2 = pd.DataFrame.from_csv("DataSingle/Rjb_bytheta_Var.csv", header=6)

    rrup1 = pd.DataFrame.from_csv("tests/data/Rrup_bytheta_Ratios.csv",
                                  header=6)
    vrup1 = pd.DataFrame.from_csv("tests/data/Rrup_bytheta_Var.csv", header=6)
    rrup2 = pd.DataFrame.from_csv("DataSingle/Rrup_bytheta_Ratios.csv",
                                  header=6)
    vrup2 = pd.DataFrame.from_csv("DataSingle/Rrup_bytheta_Var.csv", header=6)

    pd.util.testing.assert_frame_equal(rjb1, rjb2)
    pd.util.testing.assert_frame_equal(vjb1, vjb2)
    pd.util.testing.assert_frame_equal(rrup1, rrup2)
    pd.util.testing.assert_frame_equal(vrup1, vrup2)

    # Clean up
    shutil.rmtree('DataSingle')
Example No. 20
def test_gmsetup():

    out_dir = 'temp_dir'

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    try:
        # Create config file:
        fname = os.path.join(out_dir, 'test.yml')
        cmd = ('gmsetup %s -f %s -e %s'
               % (fname, 'Test User', '*****@*****.**'))

        rc, so, se = get_command_output(cmd)
        assert rc

    except Exception as e:
        print(so.decode())
        print(se.decode())
        raise e
    finally:
        shutil.rmtree(out_dir)
Example No. 21
def main(args):

    #-------------------------------------------------------------
    # where should .rst files, Makefile, _build folder be written?
    #-------------------------------------------------------------
    API_DIR = os.path.join(os.path.expanduser('~'), '__api-doc')
    shutil.rmtree(API_DIR, ignore_errors=True)

    #-------------------------------------------------------------
    # where should the temporary clone of the ground failure gh-pages repo live?
    #-------------------------------------------------------------
    CLONE_DIR = os.path.join(os.path.expanduser('~'), '__gf-doc')
    shutil.rmtree(CLONE_DIR, ignore_errors=True)

    #-------------------------------------------------------------
    # Some additional useful directories
    #-------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    PACKAGE_DIR = os.path.join(REPO_DIR, 'groundfailure')

    #-------------------------------------------------------------
    # what is the package called and who are the authors
    #-------------------------------------------------------------
    PACKAGE = "GroundFailure"
    AUTHORS = 'Kate Allstadt, Eric Thompson, Mike Hearne, Katie Biegel'

    # find the make command on this system
    res, stdout, stderr = get_command_output('which make')
    if not res:
        print('Could not find the "make" command on your system. Exiting.')
        sys.exit(1)
    make_cmd = stdout.decode().strip()

    #-------------------------------------------------------------
    # clone the repository
    #-------------------------------------------------------------
    if args.post:
        sys.stderr.write('Cloning groundfailure gh-pages branch...\n')
        if os.path.isdir(CLONE_DIR):
            shutil.rmtree(CLONE_DIR)
        clonecmd = 'git clone -b gh-pages https://github.com/usgs/'\
                   'groundfailure.git %s' % CLONE_DIR
        res, stdout, stderr = get_command_output(clonecmd)
        if not res:
            raise Exception('Could not clone gh-pages branch.')

        # Delete everything in the repository (except hidden git files)
        cmd = 'rm -fr %s/*' % CLONE_DIR
        res, stdout, stderr = get_command_output(cmd)

    #-------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    #-------------------------------------------------------------
    sys.stderr.write('Building groundfailure API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -l -d 12 -F -H %s -A "%s"'\
                 ' %s' % (API_DIR, PACKAGE, AUTHORS, PACKAGE_DIR)

    res, stdout, stderr = get_command_output(sphinx_cmd)

    if not res:
        raise Exception('Could not build GroundFailure API documentation'
                        ' - error "%s".' % stderr)

    #-------------------------------------------------------------
    # Edit the conf.py file to include the theme.
    #-------------------------------------------------------------
    fname = os.path.join(API_DIR, 'conf.py')
    f = open(fname, 'at')
    f.write("import os\nimport sys\n")
    f.write("sys.path.insert(0, os.path.abspath('%s'))\n" % (REPO_DIR))
    f.write("sys.path.insert(0, os.path.abspath('.'))\n")
    f.write("temp = sys.executable\n")
    f.write("EXECPATH='/'.join(temp.split('/')[:-2])\n")
    f.write("sys.path.append(os.path.join(os.path.expanduser('~'), EXECPATH, 'lib'))\n")

    #-------------------------------------
    # RTD theme
    #-------------------------------------
    f.write("import sphinx_rtd_theme\n")
    f.write("html_theme = 'sphinx_rtd_theme'\n")
    f.write("html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n")
    f.write("html_theme_options = {\n")
    f.write("    'collapse_navigation': False,\n")
    f.write("}\n")
    #-------------------------------------

    # Napoleon extension? Supports Google and NumPy style docstrings, but it
    # also has some side effects such as restrictions on what sections are
    # allowed and it seems to suppress the [source] link to code; maybe this
    # is a configurable option though.
#    f.write("extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.napoleon']\n")

    # This line is needed to include __init__ methods in documentation
    f.write("autoclass_content = 'both'\n")
    f.write("autodoc_member_order = 'bysource'\n")
    f.write("html_show_copyright = False\n")
#    f.write("extensions = extensions + [ 'sphinx.ext.autodoc', "\
#            "'sphinx.ext.napoleon', 'sphinx.ext.todo' ] \n")
    f.write("napoleon_include_special_with_doc = False\n")
    f.write("todo_include_todos = True\n")
    f.close()

    #-------------------------------------------------------------
    # Go to the api directory and build the html
    #-------------------------------------------------------------
    sys.stderr.write('Building groundfailure manual (HTML)...\n')
    os.chdir(API_DIR)
    res, stdout, stderr = get_command_output('%s html' % make_cmd)
    if not res:
        raise Exception('Could not build HTML for API documentation. - error "%s"' % stderr)
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    #-------------------------------------------------------------
    # Copy the generated content to the gh-pages branch we created
    # earlier
    #-------------------------------------------------------------
    htmldir = os.path.join(API_DIR, '_build', 'html')
    if not os.path.isdir(CLONE_DIR):
        os.makedirs(CLONE_DIR)
    copy_tree(htmldir, CLONE_DIR)

    if args.post:
        #-------------------------------------------------------------
        # Post to gh-pages
        #-------------------------------------------------------------

        # cd to directory above where html content was pushed
        os.chdir(CLONE_DIR)
        res, stdout, stderr = get_command_output('touch .nojekyll')
        res1, stdout, stderr1 = get_command_output('git add --all')
        res2, stdout, stderr2 = get_command_output(
            'git commit -am"Pushing to GitHub pages"')
        res3, stdout, stderr3 = get_command_output(
            'git push -u origin +gh-pages')
        if res1 + res2 + res3 < 3:
            stderr = stderr1 + stderr2 + stderr3
            print('Something bad happened when attempting to add, commit, '
                  'or push gh-pages content to GitHub - error "%s". Exiting.'
                  % stderr)
            sys.exit(1)
        print('You can inspect the GroundFailure API docs by looking '
              'here: http://usgs.github.io/groundfailure/index.html')
    else:
        if not args.clean:
            indexpage = os.path.join(CLONE_DIR, 'index.html')
            print('GroundFailure documentation index: %s' % indexpage)
Example No. 22
def transfer(event_dir,
             version,
             pdl_conf,
             pdl_bin=None,
             source="us",
             dryrun=False,
             status='UPDATE'):
    """
    This is to transfer the event's 'pdl_directory' to comcat. PDL must be
    installed separately, see https://usgs.github.io/pdl/ for information.

    Args:
        event_dir (str): File path to location of results for event.
        version (int): Version number of ground-failure run.
        pdl_conf (str): Path to PDL conf file.
        pdl_bin (str): Path to 'ProductClient.jar'. If None it guesses that it
            is installed in the user's home directory:
            ``~/ProductClient/ProductClient.jar``.
        source (str): PDL 'source'. This is 'us' for products coming from NEIC,
            and is tied to the authentication key but this only matters once
            we start sending to production servers rather than dev servers, at
            which point we'll need to add a configuration option for the key.
        dryrun (bool): If True, the PDL command is constructed and printed but
            not executed.
        status (str): Status of ground-failure product being sent to comcat.
            Default is "UPDATE" but can also be "WARNING" so that the product
            page displays the warning banner.

    Returns:
        dict or str: Dictionary of pdl return code, standard out, and standard
        error for dryrun=False; PDL command for dryrun=True.
    """
    if pdl_bin is None:
        pdl_bin = os.path.join(os.path.expanduser('~'), 'ProductClient',
                               'ProductClient.jar')

    pdl_dir = os.path.join(event_dir, 'pdl_directory')

    # Load info.json
    with open(os.path.join(pdl_dir, 'info.json')) as f:
        info_dict = json.load(f)

    # Get some event info for pdl send command
    lat = info_dict['Summary']['lat']
    lon = info_dict['Summary']['lon']
    dep = info_dict['Summary']['depth']
    mag = info_dict['Summary']['magnitude']
    time_stamp = info_dict['Summary']['time']
    code = info_dict['Summary']['code']
    eventsourcecode = info_dict['Summary']['code']
    eventsource = info_dict['Summary']['net']
    shake_version = info_dict['Summary']['shakemap_version']
    xmin, xmax, ymin, ymax = info_dict['Summary']['zoom_extent']

    pdl_type = 'ground-failure'

    # PDL properties
    # Get preferred models to extract PDL properties
    lqs = info_dict['Liquefaction']
    for lq in lqs:
        if lq['preferred']:
            lq_pref = lq
    lss = info_dict['Landslides']
    for ls in lss:
        if ls['preferred']:
            ls_pref = ls

    ls_alert = '"--property-landslide-alert=%s" ' % \
        ls_pref['alert']
    lq_alert = '"--property-liquefaction-alert=%s" ' % \
        lq_pref['alert']

    lq_haz_alert = '"--property-liquefaction-hazard-alert-color=%s" ' % \
        lq_pref['hazard_alert']['color']
    ls_haz_alert = '"--property-landslide-hazard-alert-color=%s" ' % \
        ls_pref['hazard_alert']['color']
    lq_pop_alert = '"--property-liquefaction-population-alert-color=%s" ' % \
        lq_pref['population_alert']['color']
    ls_pop_alert = '"--property-landslide-population-alert-color=%s" ' % \
        ls_pref['population_alert']['color']

    lq_haz_alert_value = '"--property-liquefaction-hazard-alert-value=%s" ' % \
        lq_pref['hazard_alert']['value']
    ls_haz_alert_value = '"--property-landslide-hazard-alert-value=%s" ' % \
        ls_pref['hazard_alert']['value']
    lq_pop_alert_value = '"--property-liquefaction-population-alert-value=%s" ' % \
        lq_pref['population_alert']['value']
    ls_pop_alert_value = '"--property-landslide-population-alert-value=%s" ' % \
        ls_pref['population_alert']['value']

    lq_haz_alert_parameter = '"--property-liquefaction-hazard-alert-parameter=%s" ' % \
        lq_pref['hazard_alert']['parameter']
    ls_haz_alert_parameter = '"--property-landslide-hazard-alert-parameter=%s" ' % \
        ls_pref['hazard_alert']['parameter']
    lq_pop_alert_parameter = '"--property-liquefaction-population-alert-parameter=%s" ' % \
        lq_pref['population_alert']['parameter']
    ls_pop_alert_parameter = '"--property-landslide-population-alert-parameter=%s" ' % \
        ls_pref['population_alert']['parameter']

    lq_overlay = '"--property-liquefaction-overlay=%s" ' % lq_pref['overlay']
    ls_overlay = '"--property-landslide-overlay=%s" ' % ls_pref['overlay']

    lq_extent = lq_pref['extent']
    ls_extent = ls_pref['extent']

    # Liquefaction extent
    lq_xmin = '"--property-liquefaction-minimum-longitude=%s" ' % lq_extent[0]
    lq_xmax = '"--property-liquefaction-maximum-longitude=%s" ' % lq_extent[1]
    lq_ymin = '"--property-liquefaction-minimum-latitude=%s" ' % lq_extent[2]
    lq_ymax = '"--property-liquefaction-maximum-latitude=%s" ' % lq_extent[3]

    # Landslide extent
    ls_xmin = '"--property-landslide-minimum-longitude=%s" ' % ls_extent[0]
    ls_xmax = '"--property-landslide-maximum-longitude=%s" ' % ls_extent[1]
    ls_ymin = '"--property-landslide-minimum-latitude=%s" ' % ls_extent[2]
    ls_ymax = '"--property-landslide-maximum-latitude=%s" ' % ls_extent[3]

    # Product extent --  note, for now I'm just setting this to the landslide
    # extent, which seems a bit pointless. But by providing the property, it
    # gives us the ability to update it later if we can come up with a more
    # sensible product extent.
    prod_xmin = '"--property-minimum-longitude=%s" ' % xmin
    prod_xmax = '"--property-maximum-longitude=%s" ' % xmax
    prod_ymin = '"--property-minimum-latitude=%s" ' % ymin
    prod_ymax = '"--property-maximum-latitude=%s" ' % ymax

    rupt_warn = '"--property-rupture-warning=%s" ' % \
                info_dict['Summary']['rupture_warning']

    # Check for PDL key:
    defaults = os.path.join(os.path.expanduser('~'), '.gfail_defaults')
    configs = ConfigObj(defaults)
    if 'key' in configs.keys():
        pdl_key = configs['key']
    else:
        pdl_key = None

    # Construct PDL command
    pdl_cmd = ('java -jar %s ' % pdl_bin +
               '--send --configFile=%s ' % pdl_conf + '--source=%s ' % source +
               '--eventsource=%s ' % eventsource + '--code=%s ' % code +
               '--status=%s ' % status +
               '--eventsourcecode=%s ' % eventsourcecode +
               '--version=%s ' % version + '--latitude=%s ' % lat +
               '--longitude=%s ' % lon + '--magnitude=%s ' % mag +
               '--depth=%s ' % dep + '--eventtime=%s ' % time_stamp +
               '--type=%s ' % pdl_type + '--directory=%s ' % pdl_dir +
               ls_alert + lq_alert + lq_haz_alert + ls_haz_alert +
               lq_pop_alert + ls_pop_alert + lq_haz_alert_value +
               ls_haz_alert_value + lq_pop_alert_value + ls_pop_alert_value +
               lq_haz_alert_parameter + ls_haz_alert_parameter +
               lq_pop_alert_parameter + ls_pop_alert_parameter + lq_overlay +
               ls_overlay + lq_xmin + lq_xmax + lq_ymin + lq_ymax + ls_xmin +
               ls_xmax + ls_ymin + ls_ymax + prod_xmin + prod_xmax +
               prod_ymin + prod_ymax + rupt_warn +
               '"--property-shakemap-version=%s" ' % shake_version)

    if pdl_key is not None:
        pdl_cmd = pdl_cmd + " --privateKey=%s " % pdl_key

    if not dryrun:
        rc, so, se = get_command_output(pdl_cmd)
        print('PDL return code: %s ' % rc)
        print('PDL standard output:\n%s ' % so)
        print('PDL standard error:\n%s ' % se)
        return {'rc': rc, 'so': so, 'se': se}
    else:
        print(pdl_cmd)
        return {'rc': True, 'so': b'', 'se': b''}
Example No. 23
def main(args):
    """
    Generate API docs.

    Args:
        args: Output of argparse. Currently only holds the verbose flag.

    Returns:
        Nothing. Function will exit upon success or failure.

    """
    verbose = args.verbose

    #-------------------------------------------------------------
    # Some useful directories
    #-------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    DOCS_DIR = os.path.join(REPO_DIR, 'docs')
    API_DIR = os.path.join(REPO_DIR, 'doc_source')
    PACKAGE_DIR = os.path.join(REPO_DIR, 'ps2ff')

    #-------------------------------------------------------------
    # what is the package called and who are the authors
    #-------------------------------------------------------------
    PACKAGE = "ps2ff"
    AUTHORS = 'Eric Thompson, Bruce Worden'
    verstr = '1.1'

    #-------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    #-------------------------------------------------------------
    sys.stderr.write('Building ps2ff API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -l -M -d 12 -H %s -A "%s"'\
                 ' -V %s %s' % (API_DIR, PACKAGE, AUTHORS, verstr,
                                PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)

    if not res:
        raise Exception('Could not build ps2ff API documentation'
                        ' - error "%s".' % stderr)

    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    #--------------------------------------------
    # try to clean up some of the excess labeling
    #--------------------------------------------
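    # note: the empty string after -i below is BSD/macOS sed syntax for
    # "no backup file"; GNU sed would be just 'sed -i'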
    clean_cmd = "sed -i '' -e 's/ module//g' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i '' -e 's/ package//g' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i '' -e '/Subpackages/d' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i '' -e '/-.*-/d' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    #-------------------------------------------------------------
    # Build the html
    #-------------------------------------------------------------
    sys.stderr.write('Building ps2ff pages (HTML)...\n')
    res, stdout, stderr = get_command_output(
        'sphinx-build -a -E doc_source docs')
    if not res:
        raise Exception('Could not build HTML for API documentation. - '
                        'error "%s"' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    pathlib.Path(os.path.join(DOCS_DIR, '.nojekyll')).touch(exist_ok=True)
Example No. 24
def create_twopager(pdata, hazinfo, version_dir):
    """
    :param pdata:
      PagerData object.
    :param hazinfo:
      HazusInfo object.
    :param version_dir:
      Path of event version directory.
    """

    # ---------------------------------------------------------------------------
    # Sort out some paths
    # ---------------------------------------------------------------------------

    # Location of this module
    mod_dir, dummy = os.path.split(__file__)

    # losspager package directory
    losspager_dir = os.path.join(mod_dir, '..')

    # Repository root directory
    root_dir = os.path.join(losspager_dir, '..')

    # Logos directory
    logos_dir = os.path.join(losspager_dir, 'logos')

    # twopager latex template file
    template_file = os.path.join(logos_dir, 'twopager.tex')

    # ---------------------------------------------------------------------------
    # Read in pager data, Hazus data, and latex template
    # ---------------------------------------------------------------------------

    json_dir = os.path.join(version_dir, 'json')
    pdict = pdata._pagerdict
    edict = pdata.getEventInfo()

    with open(template_file, 'r') as f:
        template = f.read()

    # ---------------------------------------------------------------------------
    # Fill in template values
    # ---------------------------------------------------------------------------

    # Sort out origin time
    olat = edict['lat']
    olon = edict['lon']
    otime_utc = edict['time']
    date_utc = datetime.strptime(otime_utc, "%Y-%m-%d %H:%M:%S")

    date_local = pdata.local_time
    DoW = date_local.strftime('%a')
    otime_local = date_local.strftime('%H:%M:%S')
    otime_local = DoW + ' ' + otime_local
    template = template.replace("[ORIGTIME]", otime_utc)
    template = template.replace("[LOCALTIME]", otime_local)

    # Some paths
    template = template.replace("[VERSIONFOLDER]", version_dir)
    template = template.replace("[HOMEDIR]", root_dir)

    # Magnitude location string under USGS logo
    magloc = 'M %.1f, %s' % (edict['mag'], texify(edict['location']))
    template = template.replace("[MAGLOC]", magloc)

    # Pager version
    ver = "Version " + str(pdict['pager']['version_number'])
    template = template.replace("[VERSION]", ver)

    # Epicenter location
    lat = edict['lat']
    lon = edict['lon']
    dep = edict['depth']
    if lat > 0:
        hlat = "N"
    else:
        hlat = "S"
    if lon > 0:
        hlon = "E"
    else:
        hlon = "W"
    template = template.replace("[LAT]", '%.4f' % abs(lat))
    template = template.replace("[LON]", '%.4f' % abs(lon))
    template = template.replace("[HEMILAT]", hlat)
    template = template.replace("[HEMILON]", hlon)
    template = template.replace("[DEPTH]", '%.1f' % dep)

    # Tsunami warning? --- need to fix to be a function of tsunami flag
    if edict['tsunami']:
        template = template.replace(
            "[TSUNAMI]", "FOR TSUNAMI INFORMATION, SEE: tsunami.gov")
    else:
        template = template.replace("[TSUNAMI]", "")

    # Elapsed time
    if pdata.isScenario():
        elapse = ''
    else:
        elapse = "Created: " + \
            pdict['pager']['elapsed_time'] + " after earthquake"
    template = template.replace("[ELAPSED]", elapse)

    # Summary alert color
    template = template.replace("[SUMMARYCOLOR]",
                                pdata.summary_alert.capitalize())
    template = template.replace("[ALERTFILL]", pdata.summary_alert)

    # Summary comment
    template = template.replace("[IMPACT1]",
                                texify(pdict['comments']['impact1']))
    template = template.replace("[IMPACT2]",
                                texify(pdict['comments']['impact2']))

    # Hazus arrow color and relative position
    hazdel = (hazinfo.hazloss) / LOSS_CONV
    if hazdel < 0.1:
        hazdelval = 0.1
    elif hazdel > 1000000:
        hazdelval = 1000000
    else:
        hazdelval = hazdel
    arrowloc = (((6 - log10(hazdelval)) * 0.83) - 0.07)
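    # e.g. hazdelval = 1000 gives log10(hazdelval) = 3, so
    # arrowloc = (6 - 3) * 0.83 - 0.07 = 2.42 (cm)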

    # distance (in cm) to the left from right end of the econ histogram
    template = template.replace("[ARROWSHIFT]", '%.2f' % arrowloc)
    shift = arrowloc + 1.75
    # value is ARROWSHIFT plus 1.75
    # white box around the arrow and text to "open" the lines between values
    template = template.replace("[BOXSHIFT]", '%.2f' % shift)
    # color of the Hazus econ loss value using PAGER color scale
    template = template.replace("[HAZUS_SUMMARY]", hazinfo.summary_color)

    # MMI color palette
    pal = ColorPalette.fromPreset('mmi')

    # get all of the tag tables
    (green_tag_table, yellow_tag_table,
     red_tag_table) = hazinfo.createTaggingTables()

    # Building Tags by occupancy
    template = template.replace('[GREEN_TAG_TABLE]', green_tag_table)
    template = template.replace('[YELLOW_TAG_TABLE]', yellow_tag_table)
    template = template.replace('[RED_TAG_TABLE]', red_tag_table)

    # Direct economic losses table
    econ_losses_table = hazinfo.createEconTable()
    template = template.replace('[DEL_TABLE]', econ_losses_table)

    # Non-fatal injuries table
    injuries_table = hazinfo.createInjuryTable()
    template = template.replace('[NFI_TABLE]', injuries_table)

    # Shelter needs table
    shelter_table = hazinfo.createShelterTable()
    template = template.replace('[SHELTER_TABLE]', shelter_table)

    # Earthquake Debris table
    debris_table = hazinfo.createDebrisTable()
    template = template.replace('[DEBRIS_TABLE]', debris_table)

    eventid = edict['eventid']

    # query ComCat for information about this event
    # fill in the url, if we can find it
    try:
        ccinfo = ComCatInfo(eventid)
        eventid, allids = ccinfo.getAssociatedIds()
        event_url = ccinfo.getURL() + '#pager'
    except Exception:
        event_url = DEFAULT_PAGER_URL

    eventid = "Event ID: " + eventid
    template = template.replace("[EVENTID]", texify(eventid))
    template = template.replace("[EVENTURL]", texify(event_url))
    template = template.replace("[HAZUSURL]", texify(DEFAULT_FEMA_URL))

    # Write latex file
    tex_output = os.path.join(version_dir, 'twopager.tex')
    with open(tex_output, 'w') as f:
        f.write(template)

    pdf_output = os.path.join(version_dir, 'twopager.pdf')
    stderr = ''
    try:
        cwd = os.getcwd()
        os.chdir(version_dir)
        cmd = '%s -interaction nonstopmode --output-directory %s %s' % (
            LATEX_TO_PDF_BIN, version_dir, tex_output)
        logging.info('Running %s...' % cmd)
        res, stdout, stderr = get_command_output(cmd)
        os.chdir(cwd)
        if not res:
            if os.path.isfile(pdf_output):
                msg = 'pdflatex created output file with non-zero exit code.'
                return (pdf_output, msg)
            return (None, stderr)
        else:
            if os.path.isfile(pdf_output):
                return (pdf_output, stderr)
    except Exception:
        pass
    finally:
        os.chdir(cwd)
    return (None, stderr)
Example No. 25
def main(args):

    #-------------------------------------------------------------
    # Some additional useful directories
    #-------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    PACKAGE_DIR = os.path.join(REPO_DIR, 'libcomcat')
    DOC_SRC_DIR = os.path.join(REPO_DIR, 'doc_source')
    API_DIR = os.path.join(DOC_SRC_DIR, 'apidoc')
    DOCS_DIR = os.path.join(REPO_DIR, 'docs')

    #-------------------------------------------------------------
    # get the human-friendly version of the libcomcat version
    #-------------------------------------------------------------
    verstr = '0.5'

    #-------------------------------------------------------------
    # what is the package called and who are the authors
    #-------------------------------------------------------------
    PACKAGE = "libcomcat 0.5 API"
    AUTHORS = 'Mike Hearne'

    #-------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    #-------------------------------------------------------------
    sys.stderr.write('Building libcomcat API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -d 12 -H "%s" -A "%s"'\
                 ' -V %s -T %s' % (API_DIR, PACKAGE, AUTHORS, verstr,
                                   PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)

    if not res:
        raise Exception('Could not build libcomcat API documentation'
                        ' - error "%s".' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    #--------------------------------------------
    # try to clean up some of the excess labeling
    #--------------------------------------------
    clean_cmd = "sed -e 's/ module//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e 's/ package//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/Subpackages/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/-.*-/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    #-------------------------------------------------------------
    # Go to the api directory and build the html
    #-------------------------------------------------------------
    sys.stderr.write('Building HTML...\n')
    res, stdout, stderr = get_command_output('sphinx-build -a -E %s %s'
                                             % (DOC_SRC_DIR, DOCS_DIR))
    if not res:
        raise Exception('Could not build HTML. - '
                        'error "%s"' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    pathlib.Path(os.path.join(DOCS_DIR, '.nojekyll')).touch(exist_ok=True)
Example No. 26
def main(args):

    # -------------------------------------------------------------
    # Some additional useful directories
    # -------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    SHAKEMAP_PACKAGE_DIR = os.path.join(REPO_DIR, 'shakemap')
    SHAKELIB_PACKAGE_DIR = os.path.join(REPO_DIR, 'shakelib')
    DOC_SRC_DIR = os.path.join(REPO_DIR, 'doc_source')
    SHAKEMAP_API_DIR = os.path.join(DOC_SRC_DIR, 'apidoc')
    SHAKELIB_API_DIR = os.path.join(DOC_SRC_DIR, 'shakelib')
    DOCS_DIR = os.path.join(REPO_DIR, 'docs')

    # -------------------------------------------------------------
    # get the human-friendly version of the ShakeMap version
    # -------------------------------------------------------------
    verstr = '4.0a'

    # -------------------------------------------------------------
    # what is the package called and who are the authors
    # -------------------------------------------------------------
    SHAKEMAP_PACKAGE = "ShakeMap 4.0a API"
    SHAKELIB_PACKAGE = "ShakeLib API"
    AUTHORS = 'Bruce Worden, Eric Thompson, Mike Hearne, David Wald'

    # -------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    # -------------------------------------------------------------

    # First clear out the apidoc and shakelib directories
    for f in os.listdir(SHAKEMAP_API_DIR):
        fpath = os.path.join(SHAKEMAP_API_DIR, f)
        if os.path.isfile(fpath):
            os.unlink(fpath)
    for f in os.listdir(SHAKELIB_API_DIR):
        fpath = os.path.join(SHAKELIB_API_DIR, f)
        if os.path.isfile(fpath):
            os.unlink(fpath)

    sys.stderr.write('Building shakemap API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -d 12 -H "%s" -A "%s"'\
                 ' -V %s -T %s' % (SHAKEMAP_API_DIR, SHAKEMAP_PACKAGE,
                                   AUTHORS, verstr,
                                   SHAKEMAP_PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)
    if not res:
        raise Exception('Could not build ShakeMap API documentation'
                        ' - error "%s".' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    sys.stderr.write('Building shakelib API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -d 12 -H "%s" -A "%s"'\
                 ' -V %s -T %s shakelib/rupture/gc2.py' % \
                 (SHAKELIB_API_DIR, SHAKELIB_PACKAGE, AUTHORS, verstr,
                  SHAKELIB_PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)
    if not res:
        raise Exception('Could not build ShakeLib API documentation'
                        ' - error "%s".' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    # --------------------------------------------
    # try to clean up some of the excess labeling
    # --------------------------------------------
    clean_cmd = "sed -e 's/ module//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e 's/ package//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/Subpackages/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/-.*-/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    clean_cmd = "sed -e 's/ module//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e 's/ package//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/Subpackages/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/-.*-/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    # -------------------------------------------------------------
    # Go to the api directory and build the html
    # -------------------------------------------------------------
    sys.stderr.write('Building shakemap manual (HTML)...\n')
    res, stdout, stderr = get_command_output('sphinx-build -a -E %s %s'
                                             % (DOC_SRC_DIR, DOCS_DIR))
    if not res:
        raise Exception('Could not build HTML. - '
                        'error "%s"' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    pathlib.Path(os.path.join(DOCS_DIR, '.nojekyll')).touch(exist_ok=True)
Example No. 27
def test_viewdb(tmpdir):
    # Make a copy of current defaults
    default_file = os.path.join(os.path.expanduser("~"), ".gfail_defaults")
    if os.path.exists(default_file):
        shutil.copy(default_file, default_file + '_bak')

    try:
        # Point to test database file
        pathcmd = 'gfail --set-default-paths -db %s' % \
            os.path.join(datadir, 'testevents.db')
        rc, so, se = get_command_output(pathcmd)

        # Run some examples
        runcmd1 = "viewdb -r -c -n 10 --all -s"
        rc1, so1, se1 = get_command_output(runcmd1)

        runcmd2 = "viewdb -r -c -n 10 -p --LShazmin yellow"
        rc2, so2, se2 = get_command_output(runcmd2)

        runcmd3 = "viewdb -e us1000h3p4 us1000h5el -s"  # --timeplots"
        rc3, so3, se3 = get_command_output(runcmd3)

        runcmd4 = "viewdb -p -c --minmag 7.5 --color"
        rc4, so4, se4 = get_command_output(runcmd4)

        runcmd5 = "viewdb -s --summaryplotfile %s" % \
            os.path.join(tmpdir.name, 'test.png')
        rc5, so5, se5 = get_command_output(runcmd5)

        runcmd6 = "viewdb -s --csvfile %s" % \
            os.path.join(tmpdir.name, 'test.csv')
        rc6, so6, se6 = get_command_output(runcmd6)

        # Test that everything ran
        np.testing.assert_equal(True, rc1,
                                '%s did not run successfully' % runcmd1)
        np.testing.assert_equal(True, rc2,
                                '%s did not run successfully' % runcmd2)
        np.testing.assert_equal(True, rc3,
                                '%s did not run successfully' % runcmd3)
        np.testing.assert_equal(True, rc4,
                                '%s did not run successfully' % runcmd4)
        np.testing.assert_equal(True, rc5,
                                '%s did not run successfully' % runcmd5)

        # Make sure figure was created
        np.testing.assert_equal(
            True, os.path.isfile(os.path.join(tmpdir.name,
                                              'test_overall.png')))
        # Make sure csv file was created
        np.testing.assert_equal(
            True, os.path.isfile(os.path.join(tmpdir.name, 'test.csv')))

    except Exception as e:
        print(e)

    # Put defaults back
    if os.path.exists(default_file + '_bak'):
        shutil.copy(default_file + '_bak', default_file)

    # Remove backup
    if os.path.exists(default_file + '_bak'):
        os.remove(default_file + '_bak')
Example No. 28
def test_zhu2015_web(tmpdir):
    shakegrid = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    pathcmd = """
        gfail --set-default-paths \
        -d %s/loma_prieta/model_inputs \
        -o [TMPOUT] \
        -c %s/defaultconfigfiles/models \
        -m %s/defaultconfigfiles/mapconfig.ini \
        -md %s/loma_prieta/mapping_inputs
    """ % (datadir, upone, upone, datadir)

    # Make a copy of current defaults
    default_file = os.path.join(os.path.expanduser("~"), ".gfail_defaults")
    if os.path.exists(default_file):
        shutil.copy(default_file, default_file + '_bak')

    try:
        try:
            p = os.path.join(str(tmpdir.name), "sub")
        except Exception:
            p = os.path.join(str(tmpdir), "sub")
        if not os.path.exists(p):
            os.makedirs(p)
        else:
            shutil.rmtree(p)
            os.makedirs(p)

        # Clear paths
        rc, so, se = get_command_output('gfail -reset')
        np.testing.assert_equal(True, rc, 'gfail reset failed')

        # Modify paths
        pathcmd = pathcmd.replace('[TMPOUT]', p)
        rc, so, se = get_command_output(pathcmd)
        np.testing.assert_equal(True, rc, 'gfail path modification failed')

        with open(default_file, "a") as f:
            f.write("popfile = %s" %
                    os.path.join(datadir, 'loma_prieta/lspop2016_lp.flt'))

        # Run model
        conf = os.path.join(datadir, 'test_conf')
        runcmd = "gfail %s %s -w --hdf5 -ext" % (conf, shakegrid)
        rc, so, se = get_command_output(runcmd)
        np.testing.assert_equal(True, rc, se.decode())

        # event_dir = os.path.join(p, '19891018000415')

        # # Make png
        # cmd = 'create_png -e %s' % event_dir
        # rc1, so1, se1 = get_command_output(cmd)
        # np.testing.assert_equal(True, rc1, se1.decode())

        # # Make info
        # cmd = 'create_info -e %s' % event_dir
        # rc2, so2, se2 = get_command_output(cmd)
        # np.testing.assert_equal(True, rc2, se2.decode())

        # Make PDL directory
        # pdldir = os.path.join(p, '19891018000415')
        pdldir = p
        pdl.prepare_pdl_directory(pdldir)

        # Transfer dry run
        pdl_out = pdl.transfer(pdldir, 1, 'None', dryrun=True)
    except Exception as e:  # So that defaults are put back even if something goes wrong
        print(e)

    # Put defaults back
    if os.path.exists(default_file + '_bak'):
        shutil.copy(default_file + '_bak', default_file)

    # Remove backup
    if os.path.exists(default_file + '_bak'):
        os.remove(default_file + '_bak')

    # Then do test
    assert pdl_out['rc'] is True

    # Remove tempfile
    shutil.rmtree(p)
Example #29
def build_report_latex(sc, directory, origin, config=None):
    """
    Build latex summary report.

    Args:
        sc (StreamCollection):
            StreamCollection of data.
        directory (str):
            Directory for saving report.
        origin (ScalarEvent):
            ScalarEvent object.
        config (dict):
            Config dictionary.
    Returns:
        tuple:
            - Name of pdf or latex report file created.
            - boolean indicating whether PDF creation was successful.

    """
    # Need to get config to know where the plots are located
    if config is None:
        config = get_config()

    # Check if directory exists, and if not, create it.
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Initialize report string with PREAMBLE
    report = PREAMBLE
    timestr = origin.time.strftime('%m/%d/%Y %H:%M:%S')

    # Does the map exist?
    map_file = os.path.join(directory, 'stations_map.png')
    if os.path.isfile(map_file):
        TB = TITLEBLOCK.replace('[MAPPATH]', 'stations_map.png')
        TB = TB.replace('[VERSION]', gmprocess.__version__)
        moveout_file = os.path.join(directory, 'moveout_plot.png')
        if os.path.isfile(moveout_file):
            TB = TB.replace('[MOVEOUTPATH]', 'moveout_plot.png')
        report += TB

    # Loop over each StationStream and append its page to the report;
    # do not include more than three.
    for st in sc:
        plot_path = os.path.join('plots',
                                 origin.id + '_' + st.get_id() + '.png')
        SB = STREAMBLOCK.replace('[PLOTPATH]', plot_path)
        SB = SB.replace(
            '[EVENT]',
            'M %s - %s - %s' % (origin.magnitude, origin.id, timestr))
        SB = SB.replace('[STATION]', st.get_id())
        report += SB

        prov_latex = get_prov_latex(st)

        report += prov_latex
        report += '\n'
        if st[0].hasParameter('signal_split'):
            pick_method = st[0].getParameter('signal_split')['picker_type']
            report += 'Pick Method: %s\n\n' % str_for_latex(pick_method)
        if 'nnet_qa' in st.getStreamParamKeys():
            score_lq = st.getStreamParam('nnet_qa')['score_LQ']
            score_hq = st.getStreamParam('nnet_qa')['score_HQ']
            report += ('Neural Network LQ score: %s\n\n' %
                       str_for_latex(str(score_lq)))
            report += ('Neural Network HQ score: %s\n\n' %
                       str_for_latex(str(score_hq)))
        if not st.passed:
            for tr in st:
                if tr.hasParameter('failure'):
                    report += (
                        'Failure reason: %s\n\n' %
                        str_for_latex(tr.getParameter('failure')['reason']))
                    break
        report += '\\newpage\n\n'

    # Finish the latex file
    report += POSTAMBLE

    res = False
    # Do not save report if running tests
    if 'CALLED_FROM_PYTEST' not in os.environ:

        # Set working directory to be the event subdirectory
        current_directory = os.getcwd()
        os.chdir(directory)

        # File name relative to current location
        file_name = ('report_%s.tex' % (origin.id))

        # File name, relative to the base directory, for printing out later
        latex_file = os.path.join(directory, file_name)
        with open(file_name, 'w') as f:
            f.write(report)

        # Can we find pdflatex?
        try:
            pdflatex_bin = which('pdflatex')
            pdflatex_options = '-interaction=nonstopmode -halt-on-error'
            cmd = '%s %s %s' % (pdflatex_bin, pdflatex_options, file_name)
            res, stdout, stderr = get_command_output(cmd)
            report_file = latex_file
            if res:
                base, ext = os.path.splitext(file_name)
                pdf_file = base + '.pdf'
                if os.path.isfile(pdf_file):
                    report_file = pdf_file
                    auxfiles = glob.glob(base + '*')
                    auxfiles.remove(pdf_file)
                    for auxfile in auxfiles:
                        os.remove(auxfile)
                else:
                    res = False
            else:
                print('pdflatex output:')
                print(stdout.decode())
                print(stderr.decode())
        except Exception:
            report_file = ''
        finally:
            os.chdir(current_directory)
    else:
        report_file = 'not run'

    # make report file an absolute path
    report_file = os.path.join(directory, report_file)

    return (report_file, res)
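# A minimal usage sketch for build_report_latex(), assuming a gmprocess
# StreamWorkspace file already exists. The workspace path, event id, output
# directory, and the StreamWorkspace import path are assumptions for
# illustration, not something build_report_latex() itself requires.
from gmprocess.io.asdf.stream_workspace import StreamWorkspace

ws = StreamWorkspace.open('workspace.h5')   # hypothetical workspace file
origin = ws.getEvent('us1000abcd')          # hypothetical event id
sc = ws.getStreams('us1000abcd')
report_file, pdf_ok = build_report_latex(sc, 'report_dir', origin)
if not pdf_ok:
    print('PDF build failed; LaTeX source is at %s' % report_file)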
Example #30
def quickcut(filename,
             gdict,
             tempname=None,
             extrasamp=5.,
             method='bilinear',
             precise=True,
             cleanup=True,
             verbose=False,
             override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (This cannot read ShakeMap .xml files; save them as .bil
    first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (float): Number of extra cells to cut around each edge of
            geodict to leave a resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If True, will resample to the gdict as closely as
            possible; if False, will just roughly cut around the area of
            interest without changing the resolution.
        cleanup (bool): If True, delete tempname after reading it back in.
        verbose (bool): If True, print more details.
        override (bool): If True and the filename extent is not fully
            contained by gdict, read in the entire file (only used for
            ShakeMaps).

    Returns:
        Grid2D: New Grid2D layer.

    Note: This function uses the subprocess approach because ``gdal.Translate``
        doesn't block until the file is created, which causes problems in
        the next steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except Exception:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except Exception:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    # if os.path.exists(tempname):
    #     os.remove(tempname)
    #     print('Temporary file already there, removing file')

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2).
    # Note this must be an if/elif chain: with separate if statements, the
    # final else would overwrite method2 for 'linear' and 'nearest'.
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(gdict.xmin,
                                              gdict.xmax,
                                              gdict.ymin,
                                              gdict.ymax,
                                              filegdict.dx,
                                              filegdict.dy,
                                              inside=True)

        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff '
                   '-projwin %1.8f %1.8f %1.8f %1.8f -r %s %s %s'
                   % (ulx, uly, lrx, lry, method2, filename, tempname))
        except Exception as e:
            if override:
                # When ShakeMap is being loaded, sometimes they won't align
                # right because it's already cut to the area, so just load
                # the whole file
                cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s' \
                      % (method2, filename, tempname)
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)

        if deltemp:
            shutil.rmtree(tempdir)

    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
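# A minimal usage sketch for quickcut(): cut a small window out of a large
# global raster. The input file name is hypothetical; GeoDict.createDictFromBox
# is the same mapio helper quickcut() uses internally.
from mapio.geodict import GeoDict

sample_gdict = GeoDict.createDictFromBox(-122.5, -121.5, 36.5, 37.5,
                                         0.01, 0.01, inside=True)
grid = quickcut('global_layer.tif', sample_gdict,  # hypothetical raster
                method='bilinear', precise=True, verbose=True)
print(grid.getGeoDict())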
Example #31
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.

    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)

    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')

    if args.list_default_paths:
        list_default_paths()
        return

    if args.reset_default_paths:
        reset_default_paths()
        return

    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % shakefile)
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
            if not os.path.exists(outfolder):
                os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in
        #     production the shake grids are not archived, so if we ever
        #     need the exact grid used for the calculation (if there's ever
        #     a question about how the calculation was done), the safest
        #     thing is to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        shake_file = open(shakename, "wt")
        shake_file.write(shake_copy)
        shake_file.close()
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line

        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (sd.ymin, sd.ymax, sd.xmin,
                                                     180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            with open(config, 'r') as f:
                configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')

        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                temp = eval(set_bounds)
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('trimfile defined does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists

        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                                os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False

            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully covered'
                          ' by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)

                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function specified in config for %s '
                      'model, skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname, args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = filename + '.hdf5'
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, os.path.join(outfolder, filenameh))
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))

                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and not key.startswith(('quantile', 'std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf, keys=['model'])
                        maxprob = np.nanmax(maplayers[key]['grid'].getData())
                        if key == 'model':
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping kmz '
                                  'file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                #  # Make binary output for ShakeCast
                #  filef = os.path.join(outfolder, '%s_model.flt'
                #                       % filename)
                #  # And get name of header
                #  filefh = os.path.join(outfolder, '%s_model.hdr'
                #                        % filename)
                #  # Make file
                #  write_floats(filef, maplayers['model']['grid'])
                #  filenames.append(filef)
                #  filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

#        # create transparent png file
#        outputs = create_png(outdir)
#        filenames = filenames + outputs
#
#        # create info file
#        infofile = create_info(outdir)
#        filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)

        return filenames
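# A minimal usage sketch for run_gfail(), with hypothetical file paths. The
# function reads many attributes off the args Namespace/dict, so all of the
# flags below must be present; in practice this dictionary is normally built
# by the bin/gfail argument parser rather than by hand.
args = {
    'config': 'zhu_2015.ini',        # hypothetical model config
    'shakefile': 'grid.xml',         # hypothetical ShakeMap grid
    'output_filepath': 'gf_output',
    'hdf5': True, 'gis': False, 'kmz': False, 'make_webpage': False,
    'set_default_paths': False, 'list_default_paths': False,
    'reset_default_paths': False, 'extract_contents': False,
    'uncertfile': None, 'set_bounds': None, 'config_filepath': None,
    'data_path': None, 'trimfile': None, 'finite_fault': None,
    'save_inputs': False, 'appendname': None,
}
created_files = run_gfail(args)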
Example #32
def main(args):

    # -------------------------------------------------------------
    # Some additional useful directories
    # -------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    SHAKEMAP_PACKAGE_DIR = os.path.join(REPO_DIR, 'shakemap')
    SHAKELIB_PACKAGE_DIR = os.path.join(REPO_DIR, 'shakelib')
    DOC_SRC_DIR = os.path.join(REPO_DIR, 'doc_source')
    SHAKEMAP_API_DIR = os.path.join(DOC_SRC_DIR, 'apidoc')
    SHAKELIB_API_DIR = os.path.join(DOC_SRC_DIR, 'shakelib')
    DOCS_DIR = os.path.join(REPO_DIR, 'docs')

    # -------------------------------------------------------------
    # get the human-friendly version of the ShakeMap version
    # -------------------------------------------------------------
    verstr = '4.0a'

    # -------------------------------------------------------------
    # what is the package called and who are the authors
    # -------------------------------------------------------------
    SHAKEMAP_PACKAGE = "ShakeMap 4.0a API"
    SHAKELIB_PACKAGE = "ShakeLib API"
    AUTHORS = 'Bruce Worden, Eric Thompson, Mike Hearne'

    # -------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    # -------------------------------------------------------------

    # First clear out the apidoc and shakelib  directory
    for f in os.listdir(SHAKEMAP_API_DIR):
        fpath = os.path.join(SHAKEMAP_API_DIR, f)
        if os.path.isfile(fpath):
            os.unlink(fpath)
    for f in os.listdir(SHAKELIB_API_DIR):
        fpath = os.path.join(SHAKELIB_API_DIR, f)
        if os.path.isfile(fpath):
            os.unlink(fpath)

    sys.stderr.write('Building shakemap API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -d 12 -H "%s" -A "%s"'\
                 ' -V %s -T %s' % (SHAKEMAP_API_DIR, SHAKEMAP_PACKAGE, 
                                   AUTHORS, verstr,
                                   SHAKEMAP_PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)
    if not res:
        raise Exception('Could not build ShakeMap API documentation'
                        ' - error "%s".' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    sys.stderr.write('Building shakelib API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -d 12 -H "%s" -A "%s"'\
                 ' -V %s -T %s shakelib/rupture/gc2\.py' % \
                 (SHAKELIB_API_DIR, SHAKELIB_PACKAGE, AUTHORS, verstr,
                 SHAKELIB_PACKAGE_DIR)
    res, stdout, stderr = get_command_output(sphinx_cmd)
    if not res:
        raise Exception('Could not build ShakeLib API documentation'
                        ' - error "%s".' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    # --------------------------------------------
    # try to clean up some of the excess labeling
    # --------------------------------------------
    clean_cmd = "sed -e 's/ module//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e 's/ package//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/Subpackages/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/-.*-/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKEMAP_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)


    clean_cmd = "sed -e 's/ module//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e 's/ package//g' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/Subpackages/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -e '/-.*-/d' -i '' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % SHAKELIB_API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    # -------------------------------------------------------------
    # Go to the api directory and build the html
    # -------------------------------------------------------------
    sys.stderr.write('Building shakemap manual (HTML)...\n')
    res, stdout, stderr = get_command_output('sphinx-build -a -E %s %s'
                                             % (DOC_SRC_DIR, DOCS_DIR))
    if not res:
        raise Exception('Could not build HTML. - '
                        'error "%s"' % stderr.decode())
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    pathlib.Path(os.path.join(DOCS_DIR, '.nojekyll')).touch(exist_ok=True)
Example #33
def test_charlevoix_0(tmpdir):
    # make a temporary directory; read in rupture file
    p = os.path.join(str(tmpdir), "sub")
    if not os.path.exists(p):
        os.makedirs(p)
    old_shakedir = set_shakehome(p)
    v = os.path.join(shakedir, 'tests/data/CharlevoixVs30.grd')
    old_vs30file = set_vs30file(v)
    old_gmpe = set_gmpe('stable_continental_nshmp2014_rlme')
    jsonfile = os.path.join(
        shakedir, 'rupture_sets/BSSC2014/bssc2014_ceus.json')

    # directory holding test and target data for this event
    testinput = os.path.join(p, 'data/charlevoix_0_m7p_se/input')
    targetinput = os.path.join(
        shakedir, 'tests/output/charlevoix_0_m7p_se/input')

    #---------------------------------------------------------------------------
    # First test mkinputdir
    #---------------------------------------------------------------------------

    # Run mkinputdir
    cmd = 'mkinputdir -f %s -i 0 ' % jsonfile
    rc, so, se = get_command_output(cmd)
    if se != b'':
        print(so.decode())
        print(se.decode())

    # Check output files

    # Note: Not checking event.xml because the timestamp breaks cmp
    #       comparison. Would need to parse and compare tags. Not worth it.

    target = os.path.join(targetinput, 'charlevoix_0_m7p_se_for-map_fault.txt')
    test = os.path.join(testinput, 'charlevoix_0_m7p_se_for-map_fault.txt')
    assert filecmp.cmp(test, target) is True

    #---------------------------------------------------------------------------
    # Test mkscenariogrids
    #---------------------------------------------------------------------------
    datadir = os.path.join(p, 'data')
    cmd = 'mkscenariogrids -e charlevoix_0_m7p_se -r 0.1 '
    rc, so, se = get_command_output(cmd)

    # Check output files
    target = os.path.join(targetinput, 'mi_estimates.grd')
    test = os.path.join(testinput, 'mi_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'mi_sd.grd')
    test = os.path.join(testinput, 'mi_sd.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'pga_estimates.grd')
    test = os.path.join(testinput, 'pga_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'pga_sd.grd')
    test = os.path.join(testinput, 'pga_sd.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'pgv_estimates.grd')
    test = os.path.join(testinput, 'pgv_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'pgv_sd.grd')
    test = os.path.join(testinput, 'pgv_sd.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa03_estimates.grd')
    test = os.path.join(testinput, 'psa03_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa03_sd.grd')
    test = os.path.join(testinput, 'psa03_sd.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa10_estimates.grd')
    test = os.path.join(testinput, 'psa10_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa10_sd.grd')
    test = os.path.join(testinput, 'psa10_sd.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa30_estimates.grd')
    test = os.path.join(testinput, 'psa30_estimates.grd')
    grdcmp(test, target)

    target = os.path.join(targetinput, 'psa30_sd.grd')
    test = os.path.join(testinput, 'psa30_sd.grd')
    grdcmp(test, target)

    # Clean up
    set_shakehome(old_shakedir)
    set_vs30file(old_vs30file)
    set_gmpe(old_gmpe)
    shutil.rmtree(p)
Example #34
def tes_main():
    # NOTE: named 'tes_main' rather than 'test_main', presumably so that
    # pytest does not collect and run it.
    cmd = 'gfail -i -pd -pn -pi %s %s' % (configfile, shakefile)
    retcode, stdout, stderr = get_command_output(cmd)
    temp = stdout.decode('utf-8')
    print(temp)
Example #35
def test_zhu2015(tmpdir):
    shakegrid = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    pathcmd = """
        gfail --set-default-paths \
        -d %s/loma_prieta/model_inputs \
        -o [TMPOUT] \
        -c %s/defaultconfigfiles/models \
        -m %s/defaultconfigfiles/mapconfig.ini \
        -md %s/loma_prieta/mapping_inputs
    """ % (datadir, upone, upone, datadir)

    trimfile = '%s/loma_prieta/mapping_inputs/ne_10m_ocean/ne_10m_ocean.shp' \
               % datadir

    # Make a copy of current defaults
    default_file = os.path.join(os.path.expanduser("~"), ".gfail_defaults")
    if os.path.exists(default_file):
        shutil.copy(default_file, default_file + '_bak')

    try:
        try:
            p = os.path.join(str(tmpdir.name), "sub")
        except AttributeError:  # py.path tmpdir objects have no .name
            p = os.path.join(str(tmpdir), "sub")
        if not os.path.exists(p):
            os.makedirs(p)

        # Clear paths
        rc, so, se = get_command_output('gfail -reset')
        # Modify paths
        pathcmd = pathcmd.replace('[TMPOUT]', p)
        rc1, so1, se1 = get_command_output(pathcmd)

        with open(default_file, "a") as f:
            f.write("popfile = %s" %
                    os.path.join(datadir, 'loma_prieta/lspop2016_lp.flt'))

        # List paths
        rc3, so3, se3 = get_command_output('gfail --list-default-paths')

        # Run model with bounds
        runcmd = "gfail %s/test_conf %s -b 'zoom, pga, 2' --hdf5 -tr  %s -ext"\
                 % (datadir, shakegrid, trimfile)
        rc4, so4, se4 = get_command_output(runcmd)

        # Run model
        runcmd = "gfail %s/test_conf %s --gis -pn -pi -pd --hdf5 -ext" \
                 % (datadir, shakegrid)
        rc2, so2, se2 = get_command_output(runcmd)

        # Read in the testing data
        test_file = os.path.join(p, '19891018000415_zhu_2015_model.tif')
        test_grid = GDALGrid.load(test_file)
        test_data = test_grid.getData()

        # Read in target file
        target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                                   '19891018000415_zhu_2015_model.tif')

        if changetarget:
            # To change target data:
            test_grid.save(test_file)
            cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % (
                test_file, target_file)
            rc, so, se = get_command_output(cmd)

        target_grid = GDALGrid.load(target_file)
        target_data = target_grid.getData()

    except Exception as e:
        print(e)

    # Put defaults back
    if os.path.exists(default_file + '_bak'):
        shutil.copy(default_file + '_bak', default_file)

    # Remove backup and tempfile
    if os.path.exists(default_file + '_bak'):
        os.remove(default_file + '_bak')
    shutil.rmtree(p)

    # Test that everything ran
    np.testing.assert_equal(True, rc, 'gfail reset failed')
    np.testing.assert_equal(True, rc1, 'gfail path modification failed')
    np.testing.assert_equal(True, rc2, se2.decode())
    np.testing.assert_equal(True, rc3, 'gfail list-default-paths failed')
    np.testing.assert_equal(True, rc4, se4.decode())

    # Then do test of values
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
Example #36
def run_one_old_shakemap(eventid, topo=True, genex=True):
    """
    Convenience method for running the old (v3.5) ShakeMap with new estimates.
    This allows us to generate all the products with the old code (since the
    new code cannot do this yet) while using the new code to compute the
    ground motions.

    Args:
        eventid (str): Specifies the id of the event to process.
        topo (bool): Include topography shading?
        genex (bool): Should genex be run?

    Returns:
        dictionary: Each entry holds the return code and output of one of the
            ShakeMap 3.5 calls.

    """
    config = ConfigObj(os.path.join(os.path.expanduser('~'), 'scenarios.conf'))
    shakehome = config['system']['shakehome']
    log = {}
    shakebin = os.path.join(shakehome, 'bin')
    datadir = os.path.join(shakehome, 'data')
    # Read event.xml
    eventdir = os.path.join(datadir, eventid)
    inputdir = os.path.join(eventdir, 'input')
    xml_file = os.path.join(inputdir, 'event.xml')
    # Read in event.xml
    event = read_event_file(xml_file)

    # Read in gmpe set name
    gmpefile = open(os.path.join(inputdir, "gmpe_set_name.txt"), "r")
    set_name = gmpefile.read()
    gmpefile.close()

    # Add scenario-specific fields:
    eventtree = ET.parse(xml_file)
    eventroot = eventtree.getroot()
    for eq in eventroot.iter('earthquake'):
        description = eq.attrib['description']
        directivity = eq.attrib['directivity']
        if 'reference' in eq.attrib.keys():
            reference = eq.attrib['reference']
        else:
            reference = ''

    event['description'] = description
    event['directivity'] = directivity
    event['reference'] = reference

    grd = os.path.join(inputdir, 'pgv_estimates.grd')
    gdict = GMTGrid.getFileGeoDict(grd)[0]

    # Tolerance is a bit hacky but necessary to prevent GMT
    # from barfing because it thinks that the estimates files
    # do not cover the desired area sampled by grind's call
    # with grdsample.
    tol = gdict.dx
    W = gdict.xmin + tol
    E = gdict.xmax - tol
    S = gdict.ymin + tol
    N = gdict.ymax - tol

    # Put into grind.conf (W S E N)
    confdir = os.path.join(eventdir, 'config')
    if not os.path.isdir(confdir):
        os.mkdir(confdir)

    # need to copy default grind.conf
    default_grind_conf = os.path.join(shakehome, 'config', 'grind.conf')
    grind_conf = os.path.join(confdir, 'grind.conf')
    shutil.copyfile(default_grind_conf, grind_conf)

    # Set strictbound and resolution to match estimates.grd files
    with open(grind_conf, 'a') as f:
        f.write('x_grid_interval : %.16f\n' % gdict.dx)
        f.write('y_grid_interval : %.16f\n' % gdict.dy)
        f.write('strictbound : %.9f %.9f %.9f %.9f\n' % (W, S, E, N))

    # Grind
    callgrind = os.path.join(shakebin, 'grind') + \
        ' -event ' + eventid + ' -psa'
    rc, so, se = get_command_output(callgrind)
    log['grind'] = {'rc': rc, 'so': so, 'se': se}

    # Add GMPE set name to info.json
    cmd = os.path.join(shakebin, 'edit_info') + ' -event ' + eventid + \
        ' -tag gmpe_reference' + ' -value ' + set_name
    rc, so, se = get_command_output(cmd)
    log['edit_info'] = {'rc': rc, 'so': so, 'se': se}

    # Tag
    calltag = os.path.join(shakebin, 'tag') + \
        ' -event ' + eventid + ' -name \"' + event['locstring'] + ' - ' + \
        event['description'] + '\"'
    rc, so, se = get_command_output(calltag)
    log['tag'] = {'rc': rc, 'so': so, 'se': se}

    # Copy rock_grid.xml from input to output directory
    rg_scr = os.path.join(inputdir, 'rock_grid.xml')
    rg_dst = os.path.join(eventdir, 'output', 'rock_grid.xml')
    shutil.copy(rg_scr, rg_dst)

    # Mapping
    if topo is True:
        topostr = '-itopo'
    else:
        topostr = ''
    callmapping = os.path.join(shakebin, 'mapping') + ' -event ' + \
        eventid + ' -timestamp -nohinges ' + topostr
    rc, so, se = get_command_output(callmapping)
    log['mapping'] = {'rc': rc, 'so': so, 'se': se}

    # Genex
    if genex is True:
        callgenex = os.path.join(shakebin, 'genex') + ' -event ' + \
            eventid + ' -metadata -zip -verbose -shape shape -shape hazus'
        rc, so, se = get_command_output(callgenex)
        log['genex'] = {'rc': rc, 'so': so, 'se': se}

    return log
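# A minimal usage sketch for run_one_old_shakemap(). The event id is
# hypothetical and must already have an input directory (with event.xml,
# gmpe_set_name.txt, and *_estimates.grd files) under the shakehome
# configured in ~/scenarios.conf.
log = run_one_old_shakemap('us1000abcd', topo=True, genex=False)
if not log['grind']['rc']:
    print(log['grind']['se'].decode())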
Example #37
def trim_ocean(grid2D, mask, all_touched=True, crop=False):
    """Use the mask (a shapefile) to trim offshore areas

    Args:
        grid2D: MapIO grid2D object of results that need trimming
        mask: list of shapely polygon features already loaded in, or path
            to a shapefile to use for clipping
        all_touched (bool): if True, won't mask cells that touch any part of
            polygon edge
        crop (bool): crop boundaries of raster to new masked area

    Returns:
        grid2D file with ocean masked
    """
    gdict = grid2D.getGeoDict()

    tempdir = tempfile.mkdtemp()

    # Get shapes ready
    if isinstance(mask, str):
        with fiona.open(mask, 'r') as shapefile:
            bbox = (gdict.xmin, gdict.ymin, gdict.xmax, gdict.ymax)
            hits = list(shapefile.items(bbox=bbox))
            features = [feature[1]["geometry"] for feature in hits]
            # hits = list(shapefile)
            # features = [feature["geometry"] for feature in hits]
    elif isinstance(mask, list):
        features = mask
    else:
        raise Exception('mask is neither a path to a shapefile nor a list '
                        'of shapely shapes, cannot proceed')

    if len(features) == 0:
        print('No coastlines in ShakeMap area')
        return grid2D

    tempfilen = os.path.join(tempdir, 'temp.bil')
    tempfile1 = os.path.join(tempdir, 'temp.tif')
    tempfile2 = os.path.join(tempdir, 'temp2.tif')
    GDALGrid.copyFromGrid(grid2D).save(tempfilen)
    cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % \
        (tempfilen, tempfile1)
    rc, so, se = get_command_output(cmd)

    if rc:
        with rasterio.open(tempfile1, 'r') as src_raster:
            out_image, out_transform = rasterio.mask.mask(
                src_raster, features, all_touched=all_touched, crop=crop)
            out_meta = src_raster.meta.copy()
            out_meta.update({
                "driver": "GTiff",
                "height": out_image.shape[1],
                "width": out_image.shape[2],
                "transform": out_transform
            })
            with rasterio.open(tempfile2, "w", **out_meta) as dest:
                dest.write(out_image)

        newgrid = GDALGrid.load(tempfile2)

    else:
        print(se)
        raise Exception('ocean trimming failed')

    shutil.rmtree(tempdir)
    return newgrid
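# A minimal usage sketch for trim_ocean(); the raster and shapefile paths
# are hypothetical (any ocean polygon shapefile, such as the Natural Earth
# ne_10m_ocean layer used in the tests above, should work).
from mapio.gdal import GDALGrid

model_grid = GDALGrid.load('model_output.tif')   # hypothetical model raster
trimmed = trim_ocean(model_grid, 'ne_10m_ocean/ne_10m_ocean.shp')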
Example #38
def build_report_latex(
    st_list, directory, origin, prefix="", config=None, gmprocess_version="unknown"
):
    """
    Build latex summary report.

    Args:
        st_list (list):
            List of streams.
        directory (str):
            Directory for saving report.
        origin (ScalarEvent):
            ScalarEvent object.
        prefix (str):
            String to prepend to report file name.
        config (dict):
            Config dictionary.
        gmprocess_version:
            gmprocess version.
    Returns:
        tuple:
            - Name of pdf or latex report file created.
            - boolean indicating whether PDF creation was successful.

    """
    # Need to get config to know where the plots are located
    if config is None:
        config = get_config()

    # Check if directory exists, and if not, create it.
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Initialize report string with PREAMBLE
    report = PREAMBLE
    timestr = origin.time.strftime("%m/%d/%Y %H:%M:%S")

    # Does the map exist?
    map_file = os.path.join(directory, "stations_map.png")
    if os.path.isfile(map_file):
        TB = TITLEBLOCK.replace("[MAPPATH]", "stations_map.png")

        TB = TB.replace("[VERSION]", gmprocess_version)
        moveout_file = os.path.join(directory, "moveout_plot.png")
        if os.path.isfile(moveout_file):
            TB = TB.replace("[MOVEOUT_PAGE]", moveout_page_tex)
            TB = TB.replace("[MOVEOUTPATH]", "moveout_plot.png")
        else:
            TB = TB.replace("[MOVEOUT_PAGE]", "")
        report += TB

    # Loop over each StationStream and append its page to the report;
    # do not include more than three.

    # sort list of streams:
    st_list.sort(key=lambda x: x.id)

    for st in st_list:
        # Do NOT use os.path.join() here because even on Windows, latex needs
        # the path to use linux-style forward slashes.
        streamid = st.get_id()
        plot_path = f"plots/{origin.id}_{streamid}.png"
        SB = STREAMBLOCK.replace("[PLOTPATH]", plot_path)
        SB = SB.replace(
            "[EVENT]",
            f"M {origin.magnitude} - {str_for_latex(origin.id)} - {timestr}",
        )
        SB = SB.replace("[STATION]", st.get_id())
        report += SB

        prov_latex = get_prov_latex(st)

        report += prov_latex
        report += "\n"
        if st[0].hasParameter("signal_split"):
            pick_method = st[0].getParameter("signal_split")["picker_type"]
            report += f"Pick Method: {str_for_latex(pick_method)}\n\n"
        if "nnet_qa" in st.getStreamParamKeys():
            score_lq = st.getStreamParam("nnet_qa")["score_LQ"]
            score_hq = st.getStreamParam("nnet_qa")["score_HQ"]
            report += f"Neural Network LQ score: {str_for_latex(str(score_lq))}\n\n"
            report += f"Neural Network HQ score: {str_for_latex(str(score_hq))}\n\n"
        if not st.passed:
            for tr in st:
                if tr.hasParameter("failure"):
                    report += "Failure reason: %s\n\n" % str_for_latex(
                        tr.getParameter("failure")["reason"]
                    )
                    break
        report += "\\newpage\n\n"

    # Finish the latex file
    report += POSTAMBLE

    res = False
    # Do not save report if running tests
    if "CALLED_FROM_PYTEST" not in os.environ:

        # Set working directory to be the event subdirectory
        current_directory = os.getcwd()
        os.chdir(directory)

        # File name relative to current location
        file_name = f"{prefix}_report_{origin.id}.tex"

        # File name, relative to the base directory, for printing out later
        latex_file = os.path.join(directory, file_name)
        with open(file_name, "w", encoding="utf-8") as f:
            f.write(report)

        # Can we find pdflatex?
        try:
            pdflatex_bin = which("pdflatex")
            if os.name == "nt":
                # seems that windows needs two dashes for the program options
                flag = "--"
            else:
                flag = "-"
            pdflatex_options = f"{flag}interaction=nonstopmode {flag}halt-on-error"
            cmd = f"{pdflatex_bin} {pdflatex_options} {file_name}"
            res, stdout, stderr = get_command_output(cmd)
            report_file = latex_file
            if res:
                base, ext = os.path.splitext(file_name)
                pdf_file = base + ".pdf"
                if os.path.isfile(pdf_file):
                    report_file = pdf_file
                    auxfiles = glob.glob(base + "*")
                    auxfiles.remove(pdf_file)
                    for auxfile in auxfiles:
                        os.remove(auxfile)
                else:
                    res = False
            else:
                print("pdflatex output:")
                print(stdout.decode())
                print(stderr.decode())
        except BaseException:
            report_file = ""
        finally:
            os.chdir(current_directory)
    else:
        report_file = "not run"

    # make report file an absolute path
    report_file = os.path.join(directory, report_file)

    return (report_file, res)
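# A minimal usage sketch for this newer build_report_latex(), which takes a
# plain list of StationStreams rather than a StreamCollection. Here st_list
# and origin are assumed to come from earlier processing, and the directory,
# prefix, and version string are hypothetical.
report_file, pdf_ok = build_report_latex(
    st_list, 'report_dir', origin,
    prefix='default', gmprocess_version='1.2.0')
print('report written to %s (PDF ok: %s)' % (report_file, pdf_ok))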
Example #39
def create_onepager(pdata, version_dir, debug=False):
    """
    :param pdata:
      PagerData object.
    :param version_dir: 
      Path of event version directory.
    :param debug:
      bool for whether or not to add textpos boxes to onepager.
    """

    #---------------------------------------------------------------------------
    # Sort out some paths
    #---------------------------------------------------------------------------

    # Location of this module
    mod_dir, dummy = os.path.split(__file__)

    # losspager package directory
    losspager_dir = os.path.join(mod_dir, '..')

    # Repository root directory
    root_dir = os.path.join(losspager_dir, '..')

    # Data directory
    data_dir = os.path.join(losspager_dir, 'data')

    # Onepager latex template file
    template_file = os.path.join(data_dir, 'onepager2.tex')

    #---------------------------------------------------------------------------
    # Read in pager data and latex template
    #---------------------------------------------------------------------------

    json_dir = os.path.join(version_dir, 'json')
    pdict = pdata._pagerdict
    edict = pdata.getEventInfo()

    with open(template_file, 'r') as f:
        template = f.read()

    #---------------------------------------------------------------------------
    # Fill in template values
    #---------------------------------------------------------------------------

    # Sort out origin time
    olat = edict['lat']
    olon = edict['lon']
    otime_utc = edict['time']
    date_utc = datetime.strptime(otime_utc, "%Y-%m-%d %H:%M:%S")

    date_local = pdata.local_time
    DoW = date_local.strftime('%a')
    otime_local = date_local.strftime('%H:%M:%S')
    otime_local = DoW + ' ' + otime_local
    template = template.replace("[ORIGTIME]", otime_utc)
    template = template.replace("[LOCALTIME]", otime_local)

    # Some paths
    template = template.replace("[VERSIONFOLDER]", version_dir)
    template = template.replace("[HOMEDIR]", root_dir)

    # Magnitude location string under USGS logo
    magloc = 'M %.1f, %s' % (edict['mag'], texify(edict['location']))
    template = template.replace("[MAGLOC]", magloc)

    # Pager version
    ver = "Version " + str(pdict['pager']['version_number'])
    template = template.replace("[VERSION]", ver)
    template = template.replace("[VERSIONX]", "2.5")

    # Epicenter location
    lat = edict['lat']
    lon = edict['lon']
    dep = edict['depth']
    if lat > 0:
        hlat = "N"
    else:
        hlat = "S"
    if lon > 0:
        hlon = "E"
    else:
        hlon = "W"
    template = template.replace("[LAT]", '%.4f' % abs(lat))
    template = template.replace("[LON]", '%.4f' % abs(lon))
    template = template.replace("[HEMILAT]", hlat)
    template = template.replace("[HEMILON]", hlon)
    template = template.replace("[DEPTH]", '%.1f' % dep)

    # Tsunami warning? --- need to fix to be a function of the tsunami flag
    if edict['tsunami']:
        template = template.replace(
            "[TSUNAMI]", "FOR TSUNAMI INFORMATION, SEE: tsunami.gov")
    else:
        template = template.replace("[TSUNAMI]", "")

    if pdata.isScenario():
        elapse = ''
    else:
        elapse = "Created: " + pdict['pager'][
            'elapsed_time'] + " after earthquake"
    template = template.replace("[ELAPSED]", elapse)
    template = template.replace("[IMPACT1]",
                                texify(pdict['comments']['impact1']))
    template = template.replace("[IMPACT2]",
                                texify(pdict['comments']['impact2']))
    template = template.replace("[STRUCTCOMMENT]",
                                texify(pdict['comments']['struct_comment']))

    # Summary alert color
    template = template.replace("[SUMMARYCOLOR]",
                                pdata.summary_alert.capitalize())
    template = template.replace("[ALERTFILL]", pdata.summary_alert)

    # fill in exposure values
    max_border_mmi = pdata._pagerdict['population_exposure'][
        'maximum_border_mmi']
    explist = pdata.getTotalExposure()
    pophold = 0
    for mmi in range(1, 11):
        iexp = mmi - 1
        if mmi == 2:
            pophold += explist[iexp]
            continue
        elif mmi == 3:
            pop = explist[iexp] + pophold
            macro = '[MMI2-3]'
        else:
            pop = explist[iexp]
            macro = '[MMI%i]' % mmi
        if pop < 1000:
            pop = round_to_nearest(pop, round_value=1000)
        if max_border_mmi > mmi and mmi <= 4:
            if pop == 0:
                popstr = '--*'
            else:
                if pop < 1000:
                    pop = round_to_nearest(pop, round_value=1000)
                popstr = pop_round_short(pop) + '*'
        else:
            popstr = pop_round_short(pop)
        template = template.replace(macro, popstr)

    # MMI color palette
    pal = ColorPalette.fromPreset('mmi')

    # Historical table
    htab = pdata.getHistoricalTable()
    if htab[0] is None:
        # use pdata.getHistoricalComment()
        htex = pdata.getHistoricalComment()
    else:
        # build latex table
        htex = """
\\begin{tabularx}{7.25cm}{lrc*{1}{>{\\centering\\arraybackslash}X}*{1}{>{\\raggedleft\\arraybackslash}X}}
\hline
\\textbf{Date} &\\textbf{Dist.}&\\textbf{Mag.}&\\textbf{Max}    &\\textbf{Shaking}\\\\
\\textbf{(UTC)}&\\textbf{(km)} &              &\\textbf{MMI(\#)}&\\textbf{Deaths} \\\\
\hline
[TABLEDATA]
\hline
\multicolumn{5}{p{7.2cm}}{\\small [COMMENT]}
\end{tabularx}"""
        comment = pdata._pagerdict['comments']['secondary_comment']
        htex = htex.replace("[COMMENT]", texify(comment))
        tabledata = ""
        nrows = len(htab)
        for i in range(nrows):
            date = htab[i]['Time'].split()[0]
            dist = str(int(htab[i]['Distance']))
            mag = str(htab[i]['Magnitude'])
            mmi = dec_to_roman(np.round(htab[i]['MaxMMI'], 0))
            col = pal.getDataColor(htab[i]['MaxMMI'])
            texcol = "%s,%s,%s" % (col[0], col[1], col[2])
            nmmi = pop_round_short(htab[i]['NumMaxMMI'])
            mmicell = '%s(%s)' % (mmi, nmmi)
            shakedeath = htab[i]['ShakingDeaths']
            if np.isnan(shakedeath):
                death = "--"
            else:
                death = pop_round_short(shakedeath)
            row = '%s & %s & %s & \\cellcolor[rgb]{%s} %s & %s \\\\ '\
                  '\n' % (date, dist, mag, texcol, mmicell, death)
            tabledata = tabledata + row
        htex = htex.replace("[TABLEDATA]", tabledata)
    template = template.replace("[HISTORICAL_BLOCK]", htex)

    # City table
    ctex = """
\\begin{tabularx}{7.25cm}{lXr}
\hline
\\textbf{MMI} & \\textbf{City} & \\textbf{Population}  \\\\
\hline
[TABLEDATA]
\hline
\end{tabularx}"""
    ctab = pdata.getCityTable()
    nrows = len(ctab.index)
    tabledata = ""
    for i in range(nrows):
        mmi = dec_to_roman(np.round(ctab['mmi'].iloc[i], 0))
        city = ctab['name'].iloc[i]
        if ctab['pop'].iloc[i] == 0:
            pop = '$<$1k'
        else:
            if ctab['pop'].iloc[i] < 1000:
                popnum = round_to_nearest(ctab['pop'].iloc[i],
                                          round_value=1000)
            else:
                popnum = ctab['pop'].iloc[i]
            pop = pop_round_short(popnum)
        col = pal.getDataColor(ctab['mmi'].iloc[i])
        texcol = "%s,%s,%s" % (col[0], col[1], col[2])
        if ctab['on_map'].iloc[i] == 1:
            if ctab['pop'].iloc[i] == 0:
                pop = '\\boldmath$<$\\textbf{1k}'
                row = '\\rowcolor[rgb]{%s}\\textbf{%s} & \\textbf{%s} & '\
                      '%s\\\\ \n' % (texcol, mmi, city, pop)
            else:
                row = '\\rowcolor[rgb]{%s}\\textbf{%s} & \\textbf{%s} & '\
                      '\\textbf{%s}\\\\ \n' % (texcol, mmi, city, pop)
        else:
            row = '\\rowcolor[rgb]{%s}%s & %s & '\
                  '%s\\\\ \n' % (texcol, mmi, city, pop)
        tabledata = tabledata + row
    ctex = ctex.replace("[TABLEDATA]", tabledata)
    template = template.replace("[CITYTABLE]", ctex)

    eventid = edict['eventid']

    # query ComCat for information about this event
    # fill in the url, if we can find it
    try:
        ccinfo = ComCatInfo(eventid)
        eventid, allids = ccinfo.getAssociatedIds()
        event_url = ccinfo.getURL() + '#pager'
    except Exception:
        event_url = DEFAULT_PAGER_URL

    eventid = "Event ID: " + eventid
    template = template.replace("[EVENTID]", texify(eventid))
    template = template.replace("[EVENTURL]", texify(event_url))

    # Write latex file
    tex_output = os.path.join(version_dir, 'onepager.tex')
    with open(tex_output, 'w') as f:
        f.write(template)

    pdf_output = os.path.join(version_dir, 'onepager.pdf')
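    # Compile the LaTeX file with LATEX_TO_PDF_BIN; '-interaction
    # nonstopmode' keeps pdflatex from pausing to prompt for input when
    # it encounters an error in the document.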
    stderr = ''
    cwd = os.getcwd()
    try:
        os.chdir(version_dir)
        cmd = '%s -interaction nonstopmode --output-directory %s %s' % (
            LATEX_TO_PDF_BIN, version_dir, tex_output)
        print('Running %s...' % cmd)
        res, stdout, stderr = get_command_output(cmd)
        if res and os.path.isfile(pdf_output):
            return (pdf_output, stderr)
    except Exception:
        # fall through to the failure return below
        pass
    finally:
        os.chdir(cwd)
    return (None, stderr)
Exemplo n.º 40
def main(args):

    #-------------------------------------------------------------
    # where should .rst files, Makefile, _build folder be written?
    #-------------------------------------------------------------
    API_DIR = os.path.join(os.path.expanduser('~'), '__api-doc')
    shutil.rmtree(API_DIR, ignore_errors=True)

    #-------------------------------------------------------------
    # where should the temporary clone of the shakemap gh-pages repo live?
    #-------------------------------------------------------------
    TOP_DIR = os.path.join(os.path.expanduser('~'), '__shake-doc')
    CLONE_DIR = os.path.join(TOP_DIR, 'html')
    shutil.rmtree(TOP_DIR, ignore_errors=True)

    #-------------------------------------------------------------
    # Some additional useful directories
    #-------------------------------------------------------------
    REPO_DIR = os.path.dirname(os.path.abspath(__file__))
    DOC_DIR = os.path.join(REPO_DIR, 'doc')
    REST_DIR = os.path.join(REPO_DIR, 'rest')
    PACKAGE_DIR = os.path.join(REPO_DIR, 'shakemap')

    #-------------------------------------------------------------
    # get the human-friendly version of the ShakeMap version
    #-------------------------------------------------------------
    res, verstr, stderr = get_command_output('git describe --tags')
    verstr = verstr.decode().strip()
    if not len(verstr):
        verstr = DEFAULT_TAG
    else:
        if verstr[0] == 'v':
            verstr = verstr[1:]
        spl = re.findall(r"[\w']+", verstr)
        verstr = "%s.%s.%s" % (spl[0], spl[1], spl[2])

    #-------------------------------------------------------------
    # what is the package called and who are the authors
    #-------------------------------------------------------------
    PACKAGE = "shakemap"
    AUTHORS = 'Bruce Worden, Eric Thompson, Mike Hearne'

    # find the make command on this system
    res, stdout, stderr = get_command_output('which make')
    if not res:
        print('Could not find the "make" command on your system. Exiting.')
        sys.exit(1)
    make_cmd = stdout.decode().strip()

    #-------------------------------------------------------------
    # clone the repository
    #-------------------------------------------------------------
    if args.post:
        sys.stderr.write('Cloning shakemap gh-pages branch...\n')
        if os.path.isdir(CLONE_DIR):
            shutil.rmtree(CLONE_DIR)
        clonecmd = 'git clone -b gh-pages https://github.com/usgs/'\
                   'shakemap.git %s' % CLONE_DIR
        res, stdout, stderr = get_command_output(clonecmd)
        if not res:
            raise Exception('Could not clone gh-pages branch.')

        # Delete everything in the repository (except hidden git files,
        # which the shell glob does not match)
        cmd = 'rm -fr %s/*' % CLONE_DIR
        res, stdout, stderr = get_command_output(cmd)

    #-------------------------------------------------------------
    # run the api doc command; this creates the .rst files
    #-------------------------------------------------------------
    sys.stderr.write('Building shakemap API documentation (REST)...\n')
    sphinx_cmd = 'sphinx-apidoc -o %s -f -e -l -d 12 -F -H %s -A "%s"'\
                 ' -V %s %s' % (API_DIR, PACKAGE, AUTHORS, verstr,
                                PACKAGE_DIR)
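    # sphinx-apidoc flags used above: -o output directory, -f force
    # overwrite, -e one page per module, -l follow symlinks, -d 12 max
    # TOC depth, -F generate a full project (conf.py, Makefile),
    # -H project name, -A authors, -V version.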

    res, stdout, stderr = get_command_output(sphinx_cmd)

    if not res:
        raise Exception('Could not build ShakeMap API documentation'
                        ' - error "%s".' % stderr)

    #-------------------------------------------------------------
    # change index.rst to api_index.rst
    #-------------------------------------------------------------
    move_cmd = 'mv %s/index.rst %s/api_index.rst' % (API_DIR, API_DIR)
    res, stdout, stderr = get_command_output(move_cmd)

    # Change name of API documentation in api_index.rst
    cmd = "sed -i -e 's/Welcome to shakemap.*/ShakeMap 4.0 API/g' "\
          "%s/api_index.rst" % API_DIR
    res, stdout, stderr = get_command_output(cmd)
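    # NOTE: 'sed -i -e' here and in the cleanup commands below assumes
    # GNU sed; BSD/macOS sed treats the argument after -i as a backup
    # suffix, so on macOS these commands would leave '*-e' backup files.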

    #--------------------------------------------
    # try to clean up some of the excess labeling
    #--------------------------------------------
    clean_cmd = "sed -i -e 's/ module//g' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i -e 's/ package//g' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i -e '/Subpackages/d' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)
    clean_cmd = "sed -i -e '/-.*-/d' `find %s/*.rst -type f "\
                "-maxdepth 0 -print`" % API_DIR
    res, stdout, stderr = get_command_output(clean_cmd)

    #-------------------------------------------------------------
    # run the make command to build the shakemap manual (pdf version)
    #-------------------------------------------------------------
    if not args.nopdf:
        sys.stderr.write('Building shakemap manual (PDF)...\n')
        os.chdir(DOC_DIR)
        # Need to run HTML to create __shakedoc/html/_static
        manualcmd = '%s latexpdf' % make_cmd
        res, stdout, stderr = get_command_output(manualcmd)
        if args.verbose:
            print(stdout.decode('utf-8'))
            print(stderr.decode('utf-8'))
        if not res:
            raise Exception('Could not build the PDF version of the ShakeMap '
                            'manual - error "%s".' % stderr)

        # Move PDF to API_DIR/_static
        spdf = '%s/latex/ShakeMapManual.pdf' % TOP_DIR
        dpdf = '%s/_static/ShakeMapManual.pdf' % API_DIR
        shutil.copy2(spdf, dpdf)

    #-------------------------------------------------------------
    # Edit the conf.py file to include the theme.
    #-------------------------------------------------------------
    fname = os.path.join(API_DIR, 'conf.py')
    f = open(fname, 'at')
    f.write("sys.path.insert(0, os.path.abspath('%s'))\n" % (REPO_DIR))

    #-------------------------------------
    # Built in theme:
    #-------------------------------------
#    f.write("html_theme = 'haiku'\n")
    #-------------------------------------

    #-------------------------------------
    # RTD theme
    #-------------------------------------
    f.write("import sphinx_rtd_theme\n")
    f.write("html_theme = 'sphinx_rtd_theme'\n")
    f.write("html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n")
    f.write("html_theme_options = {\n")
    f.write("    'collapse_navigation': False,\n")
    f.write("}\n")
    #-------------------------------------

    #-------------------------------------
    # Bootstrap theme
    #-------------------------------------
#    f.write("import sphinx_bootstrap_theme\n")
#    f.write("html_theme = 'bootstrap'\n")
#    f.write("html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()\n")
#    f.write("html_theme_options = {\n")
#    f.write("    'bootswatch_theme': \"Sandstone\",\n")
#    f.write("    'navbar_site_name': \"API Documentation\",\n")
#    f.write("    'navbar_sidebarrel': False,\n")
#    f.write("    'navbar_pagenav': False,\n")
#    f.write("    'navbar_links': [\n")
#    f.write("        (\"Manual\", \"../shake_index.html\", 1),\n")
#    f.write("    ],\n")
#    f.write("}\n")
    #-------------------------------------

    # Napoleon extension? Supports Google and NumPy style docstrings, but it
    # also has some side effects, such as restrictions on what sections are
    # allowed, and it seems to suppress the [source] link to code; maybe this
    # is a configurable option, though.
#    f.write("extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.napoleon']\n")

    # This line is needed to include __init__ methods in the documentation
    f.write("autoclass_content = 'both'\n")
    f.write("autodoc_member_order = 'bysource'\n")
    f.write("html_show_copyright = False\n")
#    f.write("extensions = extensions + [ 'sphinx.ext.autodoc', "\
#            "'sphinx.ext.napoleon', 'sphinx.ext.todo' ] \n")
    f.write("napoleon_include_special_with_doc = False\n")
    f.write("todo_include_todos = True\n")
    f.close()

    #-------------------------------------------------------------
    # Copy the manual REST files to the API directory
    #-------------------------------------------------------------

    # Get rid of the automatic conf.py so our preferred one is used
    os.remove('%s/conf.py' % (API_DIR))

    # put aside Makefile so it doesn't get overwritten
    oldmake = '%s/Makefile' % API_DIR
    tmpmake = '%s/Makefile_save' % API_DIR
    os.rename(oldmake, tmpmake)

    # move files into the API directory; update=1 means a file is only
    # copied when it is newer than an existing destination copy
    copy_tree(DOC_DIR, API_DIR, update=1)

    # put Makefile back
    os.rename(tmpmake, oldmake)

    # Move index.rst to manual_index.rst
    ind1 = '%s/index.rst' % API_DIR
    ind2 = '%s/manual_index.rst' % API_DIR
    os.rename(ind1, ind2)

    # Copy index.rst from rest/ directory into build directory
    restind = '%s/index.rst' % REST_DIR
    apiind = '%s/index.rst' % API_DIR
    shutil.copy2(restind, apiind)

    #-------------------------------------------------------------
    # Go to the api directory and build the html
    #-------------------------------------------------------------
    sys.stderr.write('Building shakemap manual (HTML)...\n')
    os.chdir(API_DIR)
    res, stdout, stderr = get_command_output('%s html' % make_cmd)
    if not res:
        raise Exception('Could not build HTML for API documentation. - '
                        'error "%s"' % stderr)
    if args.verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))

    #-------------------------------------------------------------
    # Copy the generated content to the gh-pages branch we created
    # earlier
    #-------------------------------------------------------------
    htmldir = os.path.join(API_DIR, '_build', 'html')
    if not os.path.isdir(CLONE_DIR):
        os.makedirs(CLONE_DIR)
    copy_tree(htmldir, CLONE_DIR)

    if args.post:
        #-------------------------------------------------------------
        # Post to gh-pages
        #-------------------------------------------------------------

        # cd to directory above where html content was pushed
        os.chdir(CLONE_DIR)
        res, stdout, stderr = get_command_output('touch .nojekyll')
        res1, stdout, stderr1 = get_command_output('git add --all')
        res2, stdout, stderr2 = get_command_output(
            'git commit -am"Pushing version %s to GitHub pages"' % verstr)
        res3, stdout, stderr3 = get_command_output(
            'git push -u origin +gh-pages')
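        # get_command_output() appears to return a boolean success flag
        # first; True sums as 1, so a total under 3 means at least one of
        # the add/commit/push steps failed.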
        if res1 + res2 + res3 < 3:
            stderr = stderr1 + stderr2 + stderr3
            print('Something bad happened when attempting to add, commit, '
                  'or push gh-pages content to GitHub - error "%s". Exiting.'
                  % stderr)
            sys.exit(1)
        print('You can inspect the ShakeMap manual and API docs by looking '
              'here: http://usgs.github.io/shakemap/index.html')
    else:
        if not args.clean:
            indexpage = os.path.join(CLONE_DIR, 'index.html')
            print('ShakeMap documentation index: %s' % indexpage)