Ejemplo n.º 1
0
def plot_isos(isos='default', emissions='all'):
    """
    Create a plot with both the CMIP6 emissions and frozen emissions for each
    emission species for a selection of ISOs

    Parameters
    -----------
    isos : str or list of str, optional
        ISOs to plot. Default is 'default', which results in
        USA, Canada, China, & Russia being plotted.
    emissions : str or list of str, optional
        Emission species to plot. Default is 'all'
    """
    logger = logging.getLogger('main')
    logger.debug('In summary.py::plot_isos')
    # Directory holding the final frozen emissions files to read & plot
    in_dir  = os.path.join(utils.get_root_dir(), 'output', 'final-emissions')
    # Directory that the frozen emissions plots will be written to
    out_dir = os.path.join(utils.get_root_dir(), 'output', 'diagnostic')

    # Normalize the `emissions` argument to a list of species names
    if emissions == 'all':
        emissions = ['BC', 'CH4', 'CO', 'CO2', 'NH3', 'NMVOC', 'NOx', 'OC', 'SO2']
    elif not isinstance(emissions, list):
        emissions = [emissions]

    # Normalize the `isos` argument to a list of ISO codes.
    # BUG FIX: the original guard tested `emissions != 'default'` here, so a
    # single ISO passed as a bare string was never wrapped in a list.
    if isos == 'default':
        isos = ['usa', 'can', 'chn', 'rus']
    elif not isinstance(isos, list):
        isos = [isos]

    # BUG FIX: the original format strings had no '{}' placeholder, so the
    # values were silently dropped from the log messages.
    logger.debug('emissions = {}'.format(emissions))
    logger.debug('isos = {}'.format(isos))

    # Create 3x3 facet plot
    fig, ((ax1, ax2, ax3), (ax4, ax5, ax6), (ax7, ax8, ax9)) = plt.subplots(3, 3)

    # BUG FIX: the original line was missing the `for` keyword (SyntaxError);
    # the unused `idx` from enumerate() is dropped as well.
    for em in emissions:
        # TODO: Make function for writing to logger & printing to console
        msg = 'Processing diagnostics for {}'.format(em)
        print(msg)
        logger.debug(msg)

        frzn_fname = 'CEDS_{}_emissions_by_country_sector_{}.csv'.format(em, CEDS_VERSION)
        frzn_fname = os.path.join(utils.get_root_dir(), 'output', 'final-emissions', frzn_fname)

        cmip_fname = '{}_CEDS_emissions_by_sector_country_{}.csv'.format(em, CMIP_VERSION)
        cmip_fname = os.path.join(utils.get_root_dir(), 'input', 'cmip', 'final-emissions', cmip_fname)

        msg = 'Reading {}...'.format(frzn_fname)
        print(msg)
        logger.debug(msg)
        frzn_df = pd.read_csv(frzn_fname, sep=',', header=0)

        msg = 'Reading {}...'.format(cmip_fname)
        print(msg)
        logger.debug(msg)
        cmip_df = pd.read_csv(cmip_fname, sep=',', header=0)
        # NOTE(review): the visible snippet ends here — the dataframes are read
        # but never plotted onto the axes created above; the function appears
        # truncated in this excerpt.
    def execute_with_params(self, r, q):
        """Run a single VR-complex build on the iris data and log its timing.

        Parameters
        ----------
        r : max edge length passed to the Rips complex
        q : max dimension passed to create_simplex_tree

        Timing results are appended to results/hyper_params_<timestamp>.txt;
        memory figures go to a separate *_MEMORIA_* file.
        """
        self.init_data_with_iris()

        _now = time.strftime("%Y.%m.%d__%H.%M.%S")
        filename = "%s/results/hyper_params_%s.txt" % (utils.get_root_dir(),
                                                       _now)
        filenameMem = "%s/results/hyper_params_MEMORIA_%s.txt" % (
            utils.get_root_dir(), _now)
        result_file = open(filename, "w")
        # Opened and immediately closed only to truncate/create the memory
        # file; get_object_size/save_size append to it later.
        result_file_mem = open(filenameMem, "w")
        result_file_mem.close()

        result_file.write(
            "\nself.rips = gudhi.RipsComplex(points=self.training,max_edge_length=r)\n\n"
        )

        result_file.write(
            "self.simplex_tree = self.rips.create_simplex_tree(max_dimension=q)"
        )

        for k in [5, 10, 15]:
            for n in range(k):
                self.split_dataset(k, n)
                result_file.write("\nCROSS VALIDATION VALUES k=%s, n=%s," %
                                  (k, n))
                # BUG FIX: the original labels read "k, q, r, q" while the
                # values written are (k, n, r, q); labels corrected here and
                # in the duplicate write inside the try block below.
                result_file.write("\nk=%s, n=%s, r=%s, q=%s," % (k, n, r, q))
                try:
                    result_file.write("\nk=%s, n=%s, r=%s, q=%s," %
                                      (k, n, r, q))
                    t1 = time.time()
                    # self.build_vr_complex(r, q)
                    resp = utils.exec_with_timeout(self.build_vr_complex,
                                                   [r, q], 120)
                    t2 = time.time()
                    # Measure object size only once, on the first fold of k=5.
                    if k == 5 and n == 0:
                        resp2 = utils.exec_with_timeout(
                            self.get_object_size, [filenameMem, k, n, r, q],
                            120)
                        if not resp2:
                            self.save_size(filenameMem, None, k, n, r, q)
                    self.destroy()
                    # None signals that build_vr_complex timed out.
                    t = (t2 - t1) if resp else None
                except BaseException as e:
                    # Best-effort sweep: record the failure and keep going.
                    # (Removed dead locals m/wd that were never read.)
                    t = None
                    print("Error jejeje: {0}".format(e))

                result_file.write("timing=%s" % (t))
                print("\nk=%s, n=%s, r=%s, q=%s, timing=%s seg" %
                      (k, n, r, q, t))
                result_file.flush()
                # Only the first fold (n=0) of each k is actually executed.
                break
        result_file.close()
    def execute(self):
        """Sweep (r, q) hyper-parameters of the VR complex on the iris data.

        For each radius r in [1, D) and each max dimension q in [Q0, Q),
        builds the complex, times it, and appends the parameters to
        results/hyper_params_<timestamp>.txt.
        """
        self.init_data_with_iris()
        _D = self.get_maximal_distance()
        _Q0 = 1  #int(len(self.dataset)/2)
        _Q = len(self.dataset)

        _now = time.strftime("%Y.%m.%d__%H.%M.%S")
        filename = "%s/results/hyper_params_%s.txt" % (utils.get_root_dir(),
                                                       _now)
        filenameMem = "%s/results/hyper_params_MEMORIA_%s.txt" % (
            utils.get_root_dir(), _now)
        result_file = open(filename, "w")
        # Opened and closed only to truncate/create the memory log file.
        result_file_mem = open(filenameMem, "w")
        result_file_mem.close()

        result_file.write(
            "\nself.rips = gudhi.RipsComplex(points=self.training,max_edge_length=r)\n\n"
        )

        result_file.write(
            "self.simplex_tree = self.rips.create_simplex_tree(max_dimension=q)"
        )
        # Kept for the commented-out exec_with_timeout variant below.
        timeout = 24000
        # Single 5-fold split, first fold only.
        for k in [5]:
            for n in [0]:
                self.split_dataset(k, n)
                result_file.write("\nCROSS VALIDATION VALUES k=%s, n=%s," %
                                  (k, n))
                for r in range(1, _D):
                    for q in range(_Q0, _Q):
                        try:
                            result_file.write("\nk=%s, n=%s, r=%s, q=%s," %
                                              (k, n, r, q))
                            t1 = time.time()
                            self.build_vr_complex(filenameMem, k, n, r, q)
                            # resp = utils.exec_with_timeout(self.build_vr_complex, [filenameMem, k, n, r, q], timeout*3)
                            t2 = time.time()

                            self.destroy()
                            # t = (t2 - t1) if resp else timeout
                            print("\nk=%s, n=%s, r=%s, q=%s, timing=%s seg" %
                                  (k, n, r, q, t2 - t1))
                        except BaseException as e:
                            # Best-effort sweep: report and continue.
                            # (Removed dead assignments t/m/wd — nothing reads
                            # them since the timing write below is commented.)
                            print("Error jejeje: {0}".format(e))

                        # result_file.write("timing=%s" % (t))

                        result_file.flush()

        result_file.close()
Ejemplo n.º 4
0
 def test_get_root_dir(self):
     """
     Verify that utils.get_root_dir() resolves to the expected repo root
     """
     expected = os.path.join('C:\\', 'Users', 'nich980', 'code',
                             'frozen-emissions')
     actual = utils.get_root_dir()
     self.assertEqual(actual, expected)
 def __init__(self, data_file_name=None):
     """Initialize empty dataset/complex state.

     Parameters
     ----------
     data_file_name : str, optional
         Path to the csv dataset; defaults to the bundled iris csv under
         the repository root.
     """
     path = utils.get_root_dir()
     self.data_file_name = data_file_name if data_file_name else "%s/dataset/iris.csv" % path
     self.dataset = []
     self.training = []
     self.test = []
     self.filtrations = None
     # BUG FIX: self.simplex_tree was assigned None twice in the original;
     # the duplicate assignment is removed.
     self.simplex_tree = None
     self.rips = None
     self.memory = None
 def __init__(self, link: str) -> None:
     """15x15 icon-only help button that opens *link* when clicked."""
     super().__init__()
     self.link = link
     self.setFixedWidth(15)
     self.setFixedHeight(15)
     self.setStyleSheet("""
             QPushButton {border: none;}
         """)
     icon_path = os.path.join(get_root_dir(), ICONS_FOLDER, "question.png")
     self.setIcon(QIcon(icon_path))
     self.setIconSize(QSize(15, 15))
     self.clicked.connect(self.open)
Ejemplo n.º 7
0
# TODO
# set output directory

import os, sys
import click

from utils import load_data, set_dir, get_root_dir

from reproschema.models.item import ResponseOption

# Path to a local checkout of reproschema-py's model definitions; currently
# unused because the sys.path insertion below is commented out.
local_reproschema = os.path.join(get_root_dir(), "..", "reproschema-py",
                                 "reproschema", "models")
# sys.path.insert(0, local_reproschema)


@click.command()
@click.option(
    "--filename",
    default="mri_softwares",
    help="Name of the response options to create.",
)
@click.option(
    "--out_dir",
    default=os.path.join(get_root_dir(), "schemas"),
    help="Name of the response options to create.",
)
def create_response_options(filename, out_dir):
    """Build reproschema response options from the named csv file.

    NOTE(review): the visible body loads the data and collects the unique
    response names but never constructs the ResponseOption — the snippet
    appears truncated here.
    """

    df = load_data(filename, out_dir)

    responses = df.name.unique()
Ejemplo n.º 8
0
 def compute(self):
     """Parse the FWGS output csv from the input port and publish the dict."""
     csv_path = self.getInputFromPort("input_csv").name
     parsed = parse_fwgs_output(csv_path, utils.get_root_dir())
     self.setResult('outputs_dictionary', parsed)
Ejemplo n.º 9
0
def get_basic_image_file_name():
    """Return the absolute path of the configured basic image."""
    basic_image = cfg.Config().config["basic_image"]
    return os.path.join(get_root_dir(), basic_image)
Ejemplo n.º 10
0
def get_hue_image_file_name():
    """Return the hue image path if it exists, else the basic image path."""
    candidate = os.path.join(get_root_dir(), cfg.Config().config["hue_image"])
    if not os.path.exists(candidate):
        return get_basic_image_file_name()
    return candidate
Ejemplo n.º 11
0
    def compute(self):
        """Parse FWGS output and expose it on the 'outputs_dictionary' port."""
        input_csv = self.getInputFromPort("input_csv")
        parsed = parse_fwgs_output(input_csv.name, utils.get_root_dir())
        self.setResult('outputs_dictionary', parsed)
Ejemplo n.º 12
0
import pandas as pd

from constants import MovieGross
from utils import get_root_dir

# Repository root, resolved once at import time and reused by load_data().
ROOT_DIR = get_root_dir()


class DataOverview:
    """Light-weight exploration helper for the movie-gross training data."""

    def __init__(self):

        # Training csv file name (relative to ROOT_DIR/data).
        self.train_data = MovieGross.TRAIN_DATA
        # Holds the loaded dataframe after load_data() is called.
        self.body = pd.DataFrame()

    def load_data(self):
        """Read the training csv into self.body and return it."""

        # load the data onto the body df
        self.body = pd.read_csv("{}/data/{}".format(ROOT_DIR, self.train_data))
        return self.body

    def show_nulls(self):
        """Collect (column, null count, null fraction) for every column.

        NOTE(review): the visible body builds null_values but never returns
        or prints it — the snippet appears truncated here.
        """

        null_values = []
        total_records = self.body.shape[0]

        for column in self.body:

            null_counts = self.body[column].isna().sum()
            null_percents = null_counts / total_records
            data_to_append = (column, null_counts, null_percents)
            null_values.append(data_to_append)
Ejemplo n.º 13
0
import pandas as pd

from item import get_item_info, define_new_item
from utils import (
    snake_case,
    set_dir,
    print_info,
    print_item_info,
    get_root_dir,
    get_landing_page,
)

from reproschema.models.activity import Activity
from reproschema.models.protocol import Protocol

# Path to a local checkout of reproschema-py's model definitions; currently
# unused because the sys.path insertion below is commented out.
# NOTE(review): `os` is used here but no `import os` is visible in this
# snippet's import block — confirm it is imported in the full file.
local_reproschema = os.path.join(get_root_dir(), "..", "reproschema-py",
                                 "reproschema", "models")
# sys.path.insert(0, local_reproschema)


def create_schema(this_schema, out_dir, debug=False):
    """
    This takes the content of the a csv file and turns it into a
    reproschema protocol.
    This loops through the items of the csv and creates a new reproschema
    activity with every new checklist "section" it encouters: this new activity
    will be added to the protocol.
    Every new item encountered is added to the current activity.
    """

    protocol, protocol_path = initialize_protocol(this_schema, out_dir)