Example #1
    def setup_weight(self, weight_mode="num_wins"):
        """
        Use Window information to setup weight.
        :returns:
        """
        logger.info("*" * 15)
        logger.info("Start weighting...")
        if self.config.weight_data:
            # first calculate azimuth and distance for each data pair
            self.prepare_for_weighting()
            # then calculate azimuth weighting
            for idx, window in enumerate(self.window):
                if weight_mode.lower() == "num_files":
                    # weighted by the number of files in each azimuth bin
                    self.setup_weight_for_location(window, self.naz_files,
                                                   self.naz_files_all)
                else:
                    # weighted by the number of windows in each azimuth bin
                    self.setup_weight_for_location(window, self.naz_wins,
                                                   self.naz_wins_all)

                if self.config.normalize_category:
                    self.setup_weight_for_category(window)

            # normalization of data weights
            self.normalize_weight()

        # prepare the weight array
        self.weight_array = np.zeros([self.data_container.nwins])
        _idx = 0
        for window in self.window:
            for win_idx in range(window.num_wins):
                self.weight_array[_idx] = window.weight[win_idx]
                _idx += 1
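For reference, the flattening step at the end can be reproduced in isolation; the list below is a hypothetical stand-in for the per-window weight vectors held by self.window:

import numpy as np

window_weights = [np.array([1.0, 2.0]), np.array([0.5]), np.array([3.0, 1.5])]

nwins = sum(w.size for w in window_weights)
weight_array = np.zeros(nwins)
_idx = 0
for weights in window_weights:
    for w in weights:
        weight_array[_idx] = w
        _idx += 1
# np.concatenate(window_weights) builds the same array in one call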
Example #2
def fetch_election(year):
    '''
    Fetch all papers related to an election campaign; year should be one of
    the strings: '2016', '2012', '2008', '2004', '1960'
    '''
    year_html = get_html(base_url + '/' + year + '_election.php')
    if year == '2008':
        # fix weird issue in Fred Thompson's entry
        year_html = year_html.replace(
            'Status: withdrew on <span class="docdate">',
            'Status: <span class="docdate">withdrew on ')
    soup = BeautifulSoup(year_html, 'html.parser')
    container = soup.find('td', class_='doctext').find_parent('table')
    for td in container.find_all('td', class_='doctext'):
        paragraphs = td.find_all('p')
        if len(paragraphs) > 0:
            info_paragraph, links_paragraph = paragraphs
            candidate = _get_candidate_info(info_paragraph)
            for category, category_url in _iter_candidate_categories(
                    links_paragraph):
                logger.info('Fetching papers from category "%s"', category)
                category_soup = get_soup(category_url)
                category_pids = _get_pids(category_soup)
                for pid in category_pids:
                    paper = fetch(pid)
                    if candidate['name'] != paper['author']:
                        logger.warning(
                            'candidate name "%s" does not match paper author "%s" (%s)',
                            candidate['name'], paper['author'], pid)
                    paper['category'] = category
                    yield paper
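A possible driver for this generator, assuming get_html, fetch, base_url and logger from the surrounding module are importable and the site is reachable; note the year is passed as a string:

for paper in fetch_election('2008'):
    print(paper['author'], '-', paper['category'])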
Example #3
def current_version(binary, version_modpath):
    """
    Summary:
        Returns current binary package version if locally
        installed, master branch __version__ if the binary
        being built is not installed locally
    Args:
        :binary (str): Name of main project executable
        :version_modpath (str): path to __version__ module
    Returns:
        current version number of the project, TYPE: str
    """
    pkgmgr = 'apt'
    pkgmgr_bkup = 'apt-cache'

    if which(binary):

        if which(pkgmgr):
            cmd = pkgmgr + ' show ' + binary + ' 2>/dev/null | grep Version | head -n1'

        elif which(pkgmgr_bkup):
            cmd = pkgmgr_bkup + ' policy ' + binary + ' 2>/dev/null | grep Installed'

        else:
            cmd = ''    # no apt tooling found; handled by the except below

        try:

            installed_version = subprocess.getoutput(cmd).split(':')[1].strip()
            return greater_version(installed_version, __version__)

        except Exception:
            logger.info(
                '%s: Build binary %s not installed, comparing current branch version to master branch version'
                % (inspect.stack()[0][3], binary))
    return greater_version(masterbranch_version(version_modpath), __version__)
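The version-parsing step above splits an apt-style Version line on its first colon; a toy sketch with a hard-coded line instead of a real apt call (note an epoch such as '1:2.3' would be truncated by this split):

line = 'Version: 1.2.3-1ubuntu1'
installed_version = line.split(':')[1].strip()
print(installed_version)  # -> 1.2.3-1ubuntu1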
Example #4
    def grid_search_energy(self):

        logger.info('Energy Search...')

        m00_s = self.config.m00_s
        m00_e = self.config.m00_e
        dm00 = self.config.dm00
        m00_array = np.arange(m00_s, m00_e+dm00, dm00)
        nm00 = m00_array.shape[0]
        misfit = np.zeros(nm00)

        for i in range(nm00):
            m00 = m00_array[i]
            dlnA_array, cc_amp_array, kai_array = \
                self.calculate_misfit_for_m00(m00)
            if self.config.energy_misfit_function == 'energy':
                misfit[i] = \
                    np.sum((0.25 * dlnA_array ** 2 + 0.0 * cc_amp_array ** 2)
                           * self.weight_array)
            else:
                misfit[i] = np.sum(kai_array * self.weight_array)

        # find minimum
        min_idx = misfit.argmin()
        m00_best = m00_array[min_idx]

        logger.info("best m00: %6.3f" % m00_best)
        self.m00_best = m00_best
        self.m00_misfit = misfit
        self.m00_array = m00_array
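The same grid-search pattern in a self-contained sketch, with a toy quadratic misfit standing in for calculate_misfit_for_m00:

import numpy as np

m00_s, m00_e, dm00 = 0.8, 1.2, 0.01
m00_array = np.arange(m00_s, m00_e + dm00, dm00)
misfit = (m00_array - 1.03) ** 2          # toy misfit curve
m00_best = m00_array[misfit.argmin()]
print("best m00: %6.3f" % m00_best)       # ~1.030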
Example #5
def ospackages(pkg_list):
    """Summary
        Install OS Package Prerequisites
    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        for pkg in pkg_list:

            if is_installed(pkg):
                logger.info(f'{pkg} binary is already installed - skip')
                continue

            elif which('yum'):
                cmd = 'sudo yum install ' + pkg + ' 2>/dev/null'
                print(subprocess.getoutput(cmd))

            elif which('dnf'):
                cmd = 'sudo dnf install ' + pkg + ' 2>/dev/null'
                print(subprocess.getoutput(cmd))

            else:
                logger.warning(
                    '%s: Dependent OS binaries not installed - package manager not identified'
                    % inspect.stack()[0][3])

    except OSError:
        logger.exception('{}: Problem installing os package {}'.format(
            inspect.stack()[0][3], pkg))
        return False
    return True
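A hypothetical call, assuming is_installed, which and logger are defined as in the module above and the host uses yum or dnf with sudo rights:

if ospackages(['git', 'curl']):
    print('prerequisites installed')
else:
    print('installation failed')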
Example #6
def merge_graph_2_paraphrases(node_dict: Dict[str, Node]) -> Dict[str, Node]:

    single_node_token_words = []

    is_graph_changed = True
    while is_graph_changed:
        is_graph_changed = False
        if len(node_dict) % 50 == 0:
            logger.info(f"- merging turn with {len(node_dict)} nodes")

        for curr_node_id, curr_node_obj in node_dict.items():
            curr_token_word = curr_node_obj.token_word
            if curr_token_word in single_node_token_words:
                continue
            node_id_list_with_curr_token_word = _get_node_id_list_with_token_word(
                curr_token_word, curr_node_id, node_dict)
            if len(node_id_list_with_curr_token_word) < 1:
                single_node_token_words.append(curr_token_word)
            else:
                is_graph_changed = _changing_match(
                    curr_node_obj, node_id_list_with_curr_token_word,
                    node_dict, "incoming")
                if is_graph_changed:
                    break

                is_graph_changed = _changing_match(
                    curr_node_obj, node_id_list_with_curr_token_word,
                    node_dict, "outgoing")
                if is_graph_changed:
                    break
            if is_graph_changed:
                break

    return node_dict
Example #7
def fetch_speeches():
    for author, title, date, href in _iter_speeches():
        speech_url = base_url + href
        speech_html = requests.get(speech_url).text
        # Lincoln's "Cooper Union Address" has some issues :(
        speech_html = speech_html.replace(
            '<div id="_mcePaste" style="position: absolute; left: -10000px; top: 120px; width: 1px; height: 1px; overflow-x: hidden; overflow-y: hidden;">',
            '<p>')
        soup = BeautifulSoup(speech_html, 'html.parser')
        # Herbert Hoover's "Campaign speech in Indianapolis, Indiana" has even worse issues :(
        if author == 'Herbert Hoover' and title == 'Campaign speech in Indianapolis, Indiana.':
            logger.info("Fixing Hoover's Indianapolis speech")
            transcript_p = soup.find(id='description').next_sibling.extract()
            soup.find(id='transcript').append(transcript_p)
        transcript = soup.find(id='transcript')
        # two of the speeches have missing transcripts :(
        if transcript:
            paragraphs = [
                paragraph.strip() for paragraph in _iter_paragraphs(transcript)
                if not paragraph.isspace()
            ]
            # replace &nbsp; + space with just the space
            text = '\n'.join(paragraphs).replace(u'\xA0 ', ' ')
            timestamp = date.date().isoformat() if date else None
            yield {
                'author': author,
                'title': title,
                'timestamp': timestamp,
                'text': text,
                'source': speech_url,
            }
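A possible driver for this generator, assuming fetch_speeches and its helpers are importable and the source site is reachable; it writes one JSON object per line:

import json

with open('speeches.jsonl', 'w') as f:
    for speech in fetch_speeches():
        f.write(json.dumps(speech) + '\n')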
Example #8
    def calculate_variance(self):
        """
        Calculate variance reduction based on old and new source solution

        :return:
        """
        npar = self.config.npar
        dm = self.new_cmt_par[0:npar] - self.cmt_par[0:npar]

        var_all = 0.0
        var_all_new = 0.0

        self.stats_before = {}
        self.stats_after = {}
        for _idx, window in enumerate(self.window):
            obsd = window.datalist['obsd']
            synt = window.datalist['synt']
            dt = obsd.stats.delta
            self.compute_new_syn(window.datalist, dm)
            new_synt = window.datalist['new_synt']
            # calculate old variance
            [v1, d1, nshift1, cc1, dlnA1, cc_amp_value1] = \
                self.calculate_var_one_trace(obsd, synt, window.win_time)
            # calculate new variance
            [v2, d2, nshift2, cc2, dlnA2, cc_amp_value2] = \
                self.calculate_var_one_trace(obsd, new_synt, window.win_time)

            var_all += np.sum(0.5 * v1 * window.weight * obsd.stats.delta)
            var_all_new += np.sum(0.5 * v2 * window.weight * obsd.stats.delta)

            # prepare stats
            tag = window.tag['obsd']
            if tag not in self.stats_before:
                self.stats_before[tag] = []
            if tag not in self.stats_after:
                self.stats_after[tag] = []
            for _i in range(window.num_wins):
                self.stats_before[tag].append(
                    [nshift1[_i]*dt, cc1[_i], dlnA1[_i], cc_amp_value1[_i],
                     v1[_i]/d1[_i]])
                self.stats_after[tag].append(
                    [nshift2[_i]*dt, cc2[_i], dlnA2[_i], cc_amp_value2[_i],
                     v2[_i]/d2[_i]])

        for tag in self.stats_before:
            self.stats_before[tag] = np.array(self.stats_before[tag])
            self.stats_after[tag] = np.array(self.stats_after[tag])

        logger.info(
            "Total Variance Reduced from %e to %e ===== %f %%"
            % (var_all, var_all_new, (var_all - var_all_new) / var_all * 100))
        self.var_all = var_all
        self.var_all_new = var_all_new
        self.var_reduction = (var_all - var_all_new) / var_all
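The variance-reduction arithmetic reported at the end, in isolation and with made-up numbers:

var_all, var_all_new = 2.0e-3, 1.2e-3
var_reduction = (var_all - var_all_new) / var_all
print("Total Variance Reduced from %e to %e ===== %f %%"
      % (var_all, var_all_new, var_reduction * 100))  # 40 %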
Example #9
    def add_measurements_from_sac(self, flexwinfile, tag="untaged",
                                  initial_weight=1.0,
                                  load_mode="obsolute_time"):
        """
        Add measurments(window and data) from the given flexwinfile
        and the data format should be sac

        :param flexwinfile:
        :return:
        """
        load_mode = load_mode.lower()
        if load_mode not in ["obsolute_time", "relative_time"]:
            raise ValueError("load mode incorrect: must be 'obsolute_time' "
                             "or 'relative_time'")

        t1 = time.time()
        win_list = self.load_winfile(flexwinfile,
                                     initial_weight=initial_weight)
        for win_obj in win_list:
            self.load_data_from_sac(win_obj, tag=tag, mode=load_mode)

        self.window += win_list
        # count the total number of files and windows
        nfiles = len(win_list)
        self.nfiles += nfiles
        nwins = 0
        for window in win_list:
            nwins += window.win_time.shape[0]
        self.nwins += nwins
        t2 = time.time()
        logger.info("="*10 + " Measurements Loading " + "="*10)
        logger.info("Data loaded in sac format: %s" % flexwinfile)
        logger.info("Elapsed time: %5.2f s" % (t2-t1))
        logger.info("Number of files and window added: [%d, %d]"
                    % (nfiles, nwins))
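A hypothetical call, assuming dc is an instance of the data container this method belongs to and the window file and SAC data exist on disk; the filename is illustrative only:

dc.add_measurements_from_sac('MEASUREMENT.WINDOWS', tag='body_wave',
                             initial_weight=1.0)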
Example #10
 def wrapper(*args, **kwargs):
     global base_url
     kwargs['url'] = f"{base_url}{kwargs['url']}"
     logger.info(
         '{type_query}: {url}\t\tparams: {params}\t\tjson={json}\t\theaders: {headers}'
         .format(
             type_query=func.__name__.upper(),
             url=f"{kwargs['url']}",
             params=kwargs.get('query_params'),
             json=kwargs.get('json_data'),
             headers=kwargs.get('headers')))
     response = func(*args, **kwargs)
     logger.info('Response: {resp}'.format(resp=response.json()))
     return response
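This wrapper is the inner function of a logging decorator; a self-contained sketch of the full pattern it implies, with a hypothetical base_url and requests performing the actual call:

import functools
import logging

import requests

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
base_url = 'https://api.example.com'   # hypothetical

def log_query(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # prepend the base URL and log the outgoing request
        kwargs['url'] = f"{base_url}{kwargs['url']}"
        logger.info('%s: %s\t\tparams: %s', func.__name__.upper(),
                    kwargs['url'], kwargs.get('query_params'))
        response = func(*args, **kwargs)
        logger.info('Response: %s', response.json())
        return response
    return wrapper

@log_query
def get(url=None, query_params=None):
    return requests.get(url, params=query_params)

# usage (hypothetical endpoint): get(url='/users', query_params={'page': 1})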
Example #11
    def setup_matrix(self):
        """
        Calculate A and b for all windows

        :return:
        """
        logger.info("*" * 15)
        logger.info("Set up inversion matrix")

        for window in self.window:
            # loop over data pairs
            dsyn = self.calculate_dsyn(window.datalist)
            for win_idx in range(window.num_wins):
                # loop over each window
                # here, A and b are exact measurements
                # and no weightings are applied
                [A1, b1] = self.compute_A_b(window, win_idx, dsyn)
                self.A1_all.append(A1)
                self.b1_all.append(b1)
Example #12
    def grid_search_origin_time(self):

        logger.info("Origin time search...")

        self.calculate_tshift()

        t00_s = self.config.t00_s
        t00_e = self.config.t00_e
        dt00 = self.config.dt00_over_dt * \
            self.window[0].datalist['obsd'].stats.delta
        logger.info("Grid search dt00: %6.3f" % dt00)

        t00_array = np.arange(t00_s, t00_e+dt00, dt00)
        nt00 = t00_array.shape[0]
        misfit = np.zeros(nt00)

        for i in range(nt00):
            t00 = t00_array[i]
            misfit[i] = self.calculate_tshift_misfit(t00)

        # find minimum
        min_idx = misfit.argmin()
        t00_best = t00_array[min_idx]

        logger.info("minimum t00: %6.3f" % t00_best)
        self.t00_best = t00_best
        self.t00_misfit = misfit
        self.t00_array = t00_array
Example #13
def main():

    args = get_arguments(sys.argv)

    config_fn = args.config
    config = get_config(config_fn)

    input_fn = args.input_filename

    end_points = args.end_points
    ep = " with additional start/end points" if end_points else ""
    logger.info(f"Reading sentences from '{input_fn}'{ep}.")

    output_dir = args.output_dir if args.output_dir else os.path.dirname(
        os.path.realpath(input_fn))
    os.makedirs(output_dir, exist_ok=True)
    ep = "_we" if end_points else ""
    output_fn = os.path.join(
        output_dir,
        os.path.splitext(os.path.basename(input_fn))[0] + ep + "_fsa.graphml")

    nodes, edges = build_graphml_automaton(input_fn, config, end_points)
    logger.info(
        f"Automaton graph with {len(nodes)} nodes and {len(edges)} edges built."
    )
    write_graphml(nodes, edges, output_fn)
    logger.info(f"See output in '{output_fn}'.")
Example #14
    def provision(self, create_or_deploy):
        try:
            self.setup_keypair()
        except LibcloudError as e:
            logger.warning('{cls}: {msg}'.format(cls=e.__class__.__name__, msg=str(e)))

        if 'ex_securitygroup' in self.node_specs and self.provider_dict['provider']['name'].startswith('EC2'):
            print(self.node_specs['ex_securitygroup'])

        if create_or_deploy == 'deploy':
            with open(self.provider_dict['ssh']['public_key_path'], mode='rt') as f:
                public_ssh_key = f.read()
            self.node_specs.update({'deploy': SSHKeyDeployment(public_ssh_key)})

        self.node_name = self.strategy.get_node_name()
        try:
            self.node = getattr(
                self, '{0}_node'.format(create_or_deploy)
            )(name=self.node_name, **self.node_specs)
        except NotImplementedError as e:
            if create_or_deploy != 'deploy':
                raise
            error_message = 'deploy_node not implemented for this driver'
            if str(e) != error_message:
                raise
            logger.info('{error_message}, so running `create_node` instead.'.format(
                error_message=error_message.replace('deploy_node', '`deploy_node`')
            ))
            self.node = self.create_node(name=self.node_name, **self.node_specs)
            # logger.info('SoftLayer billing is giving error, will remove condition once resolved.')
        except LibcloudError as e:
            logger.warning('{cls}: {msg}'.format(cls=e.__class__.__name__, msg=str(e)))
        except Exception as e:
            if str(e).startswith('InvalidGroup.NotFound'):
                print('InvalidGroup.NotFound')
                exit(1)
            else:
                raise
Example #15
    def add_measurements_from_asdf(self, flexwinfile, asdf_file_dict,
                                   obsd_tag=None, synt_tag=None,
                                   external_stationfile=None,
                                   initial_weight=1.0,
                                   winfile_format="txt"):
        """
        Add measurments(window and data) from the given flexwinfile and
        the data format should be asdf. Usually, you can leave the
        obsd_tag=None and synt_tag=None unless if you have multiple tags in
        asdf file.

        :param flexwinfile:
        :param asdf_file_dict:
        :return:
        """
        t1 = time.time()
        # load window information
        win_list = self.load_winfile(flexwinfile,
                                     initial_weight=initial_weight,
                                     file_format=winfile_format)
        # load in the asdf data
        asdf_dataset = self.check_and_load_asdf_file(asdf_file_dict)
        if external_stationfile is not None:
            station_info = \
                self.load_station_from_text(external_stationfile)
        else:
            station_info = None
        # load data for each window
        for win_obj in win_list:
            self.load_data_from_asdf(
                win_obj, asdf_dataset, obsd_tag=obsd_tag,
                synt_tag=synt_tag, station_dict=station_info)

        self.window += win_list
        # count the total number of files and windows
        nfiles = len(win_list)
        self.nfiles += nfiles
        nwins = 0
        for window in win_list:
            nwins += window.win_time.shape[0]
        self.nwins += nwins
        t2 = time.time()
        logger.info("="*10 + " Measurements Loading " + "="*10)
        logger.info("Data loaded in asdf format: %s" % flexwinfile)
        logger.info("Elapsed time: %5.2f s" % (t2-t1))
        logger.info("Number of files and window added: [%d, %d]"
                    % (nfiles, nwins))
Example #16
    def attempt_provision(self, create_or_deploy='create', prefer_provider=None, prefer_image=None):
        if ping_port() is not True:
            raise EnvironmentError('etcd server not up')

        if prefer_provider:
            self.strategy.strategy['provider']['options'] = (next(
                filter(
                    lambda obj: next(iter(obj)) == prefer_provider,
                    self.strategy.strategy['provider']['options']
                )
            ),)
            '''
            # Prefer syntax
            self.strategy.strategy['provider']['options'].insert(
                0, self.strategy.strategy['provider']['options'].pop(
                    next(
                        ifilter(
                            lambda (idx, obj): obj.keys()[0] == prefer_provider,
                            enumerate(self.strategy.strategy['provider']['options'])
                        )
                    )[0]
                )
            )
            '''
        for i in range(len(self.strategy.strategy['provider']['options'])):  # Threshold
            logger.info('Attempting to create node "{node_name}" on: {provider}'.format(
                node_name=self.strategy.get_node_name(), provider=self.provider_dict['provider']['name']
            ))
            self.provision(create_or_deploy)

            if self.node:
                save_node_info(self.node_name, node_to_dict(self.node), marshall=json)
                return self.node
            self.restrategise()

        raise LibcloudError('Failed to provision node')
Example #17
    def invert_solver(self, A, b, print_mode=False):
        """
        Solver part. Hession matrix A and misfit vector b will be
        reconstructed here based on different constraints.

        :param A: basic Hessian matrix
        :param b: basic misfit vector
        :param print_mode: if True, then print out log information;
        if False, then no log information
        :return:
        """

        npar = self.config.npar
        old_par = self.cmt_par[0:npar] / self.config.scale_par[0:npar]

        # scale the A and b matrix
        max_row = np.amax(abs(A), axis=1)
        for i in range(len(b)):
            A[i, :] /= max_row[i]
            b[i] /= max_row[i]

        # setup inversion schema
        if self.config.double_couple:
            linear_inversion = False
            na = npar + 2
        elif self.config.zero_trace:
            linear_inversion = True
            na = npar + 1
        else:
            linear_inversion = True
            na = npar

        # add damping
        trace = np.trace(A)
        damp_matrix = np.zeros([npar, npar])
        np.fill_diagonal(damp_matrix, trace * self.config.lamda_damping)
        A = A + damp_matrix
        if print_mode:
            logger.info("Condition number of new A: %10.2f"
                        % np.linalg.cond(A))

        if linear_inversion:
            if print_mode:
                logger.info("Linear Inversion...")
            new_par = self.linear_solver(old_par, A, b, npar, na)
        else:
            if print_mode:
                logger.info("Nonlinear Inversion...")
            new_par = self.nonlinear_solver(old_par, A, b, npar, na)

        new_cmt_par = np.copy(self.cmt_par)
        new_cmt_par[0:npar] = new_par[0:npar] * self.config.scale_par[0:npar]

        return new_cmt_par
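The trace-scaled damping step in isolation; adding a multiple of the trace to the diagonal of a symmetric positive-definite matrix lowers its condition number, as a toy Hessian shows:

import numpy as np

A = np.array([[4.0, 1.0],
              [1.0, 3.0]])
lamda_damping = 0.01
damp_matrix = np.zeros_like(A)
np.fill_diagonal(damp_matrix, np.trace(A) * lamda_damping)
A_damped = A + damp_matrix
print(np.linalg.cond(A), np.linalg.cond(A_damped))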
Example #18
def main():

    args = get_arguments(sys.argv)

    config_fn = args.config
    config = get_config(config_fn)

    input_fn = args.input_filename

    end_points = args.end_points
    ep = " with additional start/end points" if end_points else ""
    logger.info(f"Reading sentences from '{input_fn}'{ep}.")

    output_dir = args.output_dir if args.output_dir else os.path.dirname(
        os.path.realpath(input_fn))
    os.makedirs(output_dir, exist_ok=True)
    ep = "_we" if end_points else ""
    output_fn = os.path.join(
        output_dir,
        os.path.splitext(os.path.basename(input_fn))[0] + ep + "_prp.graphml")

    tokenized_sentences = read_sentences(input_fn, config, end_points)
    initial_node_dict = build_initial_graph(tokenized_sentences)

    # build the paraphrase graph
    logger.info(f"Initial graph with {len(initial_node_dict)} nodes built.")
    if len(initial_node_dict) > 1000:
        very = ""
        if len(initial_node_dict) > 2000:
            very = "very "
        logger.warning(
            f"! Generation of the paraphrase graphs with this initial size could be {very}slow."
        )
    node_dict_paraphrases = merge_graph_2_paraphrases(initial_node_dict)

    logger.info(
        f"Paraphrase graph with {len(node_dict_paraphrases)} nodes built.")
    write_graphml(node_dict_paraphrases, config, output_fn)
    logger.info(f"See output in '{output_fn}'.")
Example #19
def run_command_with_output(command_description, command):
    logger.info("Starting process: " + command_description)
    logger.info("Running command: " + command)

    with subprocess.Popen(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True,
                          shell=True) as process:
        # communicate() drains stdout and stderr concurrently, avoiding the
        # deadlock that sequential reads risk when a pipe buffer fills up
        process_output, errors = process.communicate()
    if process.returncode != 0:
        logger.error(command_description + " failed!" + os.linesep + errors)
        raise subprocess.CalledProcessError(process.returncode, process.args)
    else:
        logger.info(command_description + " completed successfully")
    return process_output
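A hypothetical call; because the command runs through the shell, pipelines work as well:

listing = run_command_with_output('List directory', 'ls -l | head -n 5')
print(listing)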
Example #20
    def prepare_for_weighting(self):
        """
        Prepare necessary information for weighting, e.x., 
        calculating azimuth, distance and energty of a window.
        Also, based on the tags, sort window into different categories.

        :return:
        """
        for window in self.window:
            # calculate energy
            #window.win_energy(mode=self.config.norm_mode)
            # calculate location
            window.get_location_info(self.cmtsource)

        self.naz_files, self.naz_wins = self.calculate_azimuth_bin()
        # add all category together
        # if not weighted by category, then use total number
        self.naz_files_all = np.zeros(const.NREGIONS)
        self.naz_wins_all = np.zeros(const.NREGIONS)
        for key in self.naz_files:
            self.naz_files_all += self.naz_files[key]
            self.naz_wins_all += self.naz_wins[key]
            logger.info("Category: %s" % key)
            logger.info("Azimuth file bin: [%s]" 
                        % (', '.join(map(str, self.naz_files[key]))))
            logger.info("Azimuth win bin: [%s]" 
                        % (', '.join(map(str, self.naz_wins[key]))))

        # count windows per category
        bin_category = {}
        for window in self.window:
            tag = window.tag['obsd']
            if tag in bin_category:
                bin_category[tag] += window.num_wins
            else:
                bin_category[tag] = window.num_wins
        self.bin_category = bin_category
Example #21
def builddir_structure(param_dict, version):
    """
    Summary.

        - Updates paths in binary exectuable
        - Updates

    Args:
        :root (str): full path to root directory of the git project
        :builddir (str): name of current build directory which we need to populate

    Vars:
        :lib_path (str): src path to library modules in project root
        :builddir_path (str): dst path to root of the current build directory
         (/<path>/nlines-1.X.X dir)

    Returns:
        Success | Failure, TYPE: bool

    """
    root = git_root()
    project_dirname = root.split('/')[-1]
    core_dir = root + '/' + 'core'
    config_dir = root + '/' + 'config'
    build_root = TMPDIR

    # files
    binary = param_dict['Executable']
    control_file = param_dict['ControlFile']['Name']
    compfile = param_dict['BashCompletion']
    builddir = param_dict['ControlFile']['BuildDirName']

    # full paths
    builddir_path = build_root + '/' + builddir
    deb_src = root + '/packaging/deb'
    debian_dir = 'DEBIAN'
    debian_path = deb_src + '/' + debian_dir
    binary_path = builddir_path + '/usr/local/bin'
    lib_path = builddir_path + '/usr/local/lib/' + PROJECT
    comp_src = root + '/' + 'bash'
    comp_dst = builddir_path + '/etc/bash_completion.d'

    arrow = yl + Colors.BOLD + '-->' + rst

    try:

        stdout_message(
            f'Assembling build directory artifacts in {bn + builddir + rst}')

        # create build directory
        if os.path.exists(builddir_path):
            rmtree(builddir_path)
        os.makedirs(builddir_path)
        stdout_message(
            message='Created builddir_path: {}'.format(yl + builddir_path +
                                                       rst),
            prefix='OK')

        if not os.path.exists(builddir_path + '/' + debian_dir):
            copytree(debian_path, builddir_path + '/' + debian_dir)
            # status msg
            _src_path = '../' + project_dirname + debian_path.split(
                project_dirname)[1]
            _dst_path = builddir_path + '/' + debian_dir
            stdout_message(message='Copied: {} {} {}'.format(
                lk + _src_path + rst, arrow, lk + _dst_path + rst),
                           prefix='OK')

        if not os.path.exists(binary_path):
            os.makedirs(binary_path)
            _dst_path = binary_path
            stdout_message(message='Created: {}'.format(lk + _dst_path + rst),
                           prefix='OK')

        if not os.path.exists(binary_path + '/' + PROJECT):
            binary_src = PROJECT_ROOT + '/' + binary
            binary_dst = binary_path + '/' + binary
            copyfile(binary_src, binary_dst)
            # status msg
            _src_path = '../' + project_dirname + '/' + os.path.split(
                binary_src)[1]
            _dst_path = '../' + project_dirname + '/' + os.path.split(
                binary_dst)[1]
            stdout_message(message='Copied:\t{} {} {}'.format(
                lk + _src_path + rst, arrow, lk + _dst_path + rst),
                           prefix='OK')

        if not os.path.exists(lib_path):

            os.makedirs(lib_path)  # create library dir in builddir

            # status msg branching
            _dst_path = '../' + project_dirname + lib_path.split(
                project_dirname)[1]
            if os.path.exists(lib_path):
                stdout_message(message='Created:\t{}'.format(lk + _dst_path +
                                                             rst),
                               prefix='OK')
            else:
                stdout_message(
                    message='Failed to create:\t{}'.format(lk + _dst_path +
                                                           rst),
                    prefix='FAIL')

        for libfile in os.listdir(core_dir):
            if os.path.exists(lib_path + '/' + libfile):
                stdout_message(
                    f'{libfile} target exists - skip adding to builddir')
                continue

            if libfile.endswith('.log'):
                # log file, do not place in build
                logger.info(f'{libfile} is log file - skip adding to builddir')

            else:
                # place lib files in build
                lib_src = core_dir + '/' + libfile
                lib_dst = lib_path + '/' + libfile
                copyfile(lib_src, lib_dst)
                # status msg
                _src_path = '../' + project_dirname + lib_src.split(
                    project_dirname)[1]
                _dst_path = '../' + project_dirname + lib_dst.split(
                    project_dirname)[1]
                stdout_message(message='Copied:\t{} {} {}'.format(
                    lk + _src_path + rst, arrow, lk + _dst_path + rst),
                               prefix='OK')

        for confile in os.listdir(config_dir):

            if not os.path.exists(lib_path + '/config'):
                os.makedirs(lib_path +
                            '/config')  # create config dir in builddir

            _src = config_dir + '/' + confile
            _dst = lib_path + '/config/' + confile
            copyfile(_src, _dst)

            # status msg
            _src_path = '../' + project_dirname + _src.split(
                project_dirname)[1]
            _dst_path = '../' + project_dirname + _dst.split(
                project_dirname)[1]
            stdout_message(message='Copied:\t{} {} {}'.format(
                lk + _src_path + rst, arrow, lk + _dst_path + rst),
                           prefix='OK')

        if not os.path.exists(comp_dst):
            # create path
            os.makedirs(comp_dst)
            _dst_path = '../' + project_dirname + comp_dst.split(
                project_dirname)[1]
            stdout_message(message='Created:\t{}'.format(lk + _dst_path + rst),
                           prefix='OK')

            # copy
            for artifact in list(
                    filter(lambda x: x.endswith('.bash'),
                           os.listdir(comp_src))):
                copyfile(comp_src + '/' + artifact, comp_dst + '/' + artifact)

    except OSError:
        logger.exception('{}: Problem creating dirs on local fs'.format(
            inspect.stack()[0][3]))
        return False
    return True
Example #22
def validate_imported_contigs(assembly_properties_file, config_file):
    config = get_args_from_private_config_file(config_file)
    config.update(
        get_args_from_assembly_properties_file(assembly_properties_file))
    # We need to rename the keys because string interpolation won't work if there is a dot character in them
    config["assembly_report_path"] = config[
        "parameters.assemblyReportUrl"].split("file:")[-1]
    config["assembly_md5"] = hashlib.md5(
        config["parameters.assemblyName"].encode("utf-8")).hexdigest()
    config["taxonomy_accession"] = config["parameters.taxonomyAccession"]

    config[
        "contig_chr_mismatch_table"] = "dbsnp_ensembl_species.dbsnp_species_with_contig_chromosome_start_mismatch"
    config[
        "contig_chr_match_table"] = "dbsnp_ensembl_species.dbsnp_species_with_contig_chromosome_start_match"
    final_formatting_genbank_accessions_cmd = r"cut -d$'\t' -f5 | sort | uniq | paste -s - | sed s/$'\t'/\",\"/g | sed s/$/\"/g | sed s/^/\"/g"
    get_contigs_start_mismatch_cmd = "psql -A -t -h {metahost} -U {metauser} -d {metadb} -c " \
                                     "\"select distinct contig_name from {contig_chr_mismatch_table} " \
                                     "where table_name like '%{assembly_md5}%'" \
                                     " and schema_name like '%{taxonomy_accession}%'\" -P pager=off " \
                                     "| grep {assembly_report_path} -f - |".format(**config) + \
                                     final_formatting_genbank_accessions_cmd
    get_contigs_start_match_cmd = "psql -A -t -h {metahost} -U {metauser} -d {metadb} -c " \
                                  "\"select distinct contig_name from {contig_chr_match_table} " \
                                  "where table_name like '%{assembly_md5}%'" \
                                  " and schema_name like '%{taxonomy_accession}%'\" -P pager=off " \
                                  "| grep {assembly_report_path} -f - | grep -v assembled-molecule |".format(**config) \
                                  + final_formatting_genbank_accessions_cmd
    mongo_run_command_template = "mongo --quiet --host {0} --port {1} --username {2} " \
                                 "--password {3} --authenticationDatabase=admin " \
                                 "{4} --eval 'db.{5}.findOne({{\"{6}\": \"{7}\", " \
                                 "\"contig\": {{$in: [{8}]}}}})'"

    mismatch_contig_set = run_command_with_output(
        "Get contigs with start mismatch against chromosome:",
        get_contigs_start_mismatch_cmd).strip()
    match_contig_set = run_command_with_output(
        "Get contigs with start match against chromosome:",
        get_contigs_start_match_cmd).strip()

    collections_to_check = {
        "dbsnpSubmittedVariantEntity": "seq",
        "dbsnpClusteredVariantEntity": "asm"
    }
    if mismatch_contig_set != '""':
        for collection, asm_col in collections_to_check.items():
            mongo_run_command = mongo_run_command_template.format(
                config["mongo_host"], config["mongo_port"],
                config["mongo_user"], config["mongo_password"],
                config["mongo_acc_db"], collection, asm_col,
                config["parameters.assemblyAccession"], mismatch_contig_set)
            mongo_run_command_output = run_command_with_output(
                "Check if mismatched contigs from above " + "are present in " +
                collection + " for the assembly", mongo_run_command)
            logger.info("Mongo command output:" + os.linesep +
                        mongo_run_command_output)
    else:
        logger.info("No mismatch contig set available!")

    if match_contig_set != '""':
        for collection, asm_col in collections_to_check.items():
            mongo_run_command = mongo_run_command_template.format(
                config["mongo_host"], config["mongo_port"],
                config["mongo_user"], config["mongo_password"],
                config["mongo_acc_db"], collection, asm_col,
                config["parameters.assemblyAccession"], match_contig_set)
            mongo_run_command_output = run_command_with_output(
                "Check if matched contigs from above " + "are present in " +
                collection + " for the assembly", mongo_run_command)
            logger.info("Mongo command output:" + os.linesep +
                        mongo_run_command_output)
    else:
        logger.info("No matched non-chromosome contig set available!")
Example #23
    def print_summary(self):
        """
        Print function of configuration

        :return:
        """
        npar = self.npar
        logger.info("="*10 + "  Config Summary  " + "="*10)
        logger.info("Number of Inversion Par: %d" % npar)
        logger.info("   Par: [%s]" % (', '.join(self.par_name[0:npar])))
        logger.info("   Delta: [%s]" % (
            ', '.join(map(str, self.dcmt_par[0:npar]*self.scale_par[0:npar]))))

        logger.info("Weighting scheme")
        if self.weight_data:
            if self.weight_function == default_weight_function:
                logger.info("   Weighting data ===> "
                            "Using Default weighting function")
            else:
                logger.info("   Weighting data ===> "
                            "Using user-defined weighting function")
        else:
            logger.info("   No weighting applied")
        logger.info("Inversion Scheme")
        if self.double_couple:
            logger.info("   invert for double-couple source ===> "
                        "Non-linear Inversion")
        elif self.zero_trace:
            logger.info("   invert for zero-trace source ===> "
                        "Linear Inversion")
        else:
            logger.info("   No constraints applied ===> Linear Inversion ")
        logger.info("   inversion dampling lambda: %f" % self.lamda_damping)
Example #24
def github():
    """ Entry point for github webhook """
    sha, signature = request.headers.get('X-Hub-Signature').split('=')
    logger.info(f'signature:{signature}' + str(request.data))
    secret = str.encode(current_app.config.get('GITHUB_SECRET'))
    hashhex = hmac.new(secret, request.data, digestmod='sha1').hexdigest()
    logger.info(f'hashhex:{hashhex}')
    if hmac.compare_digest(hashhex, signature):
        try:
            if 'GIT_DIR' in os.environ:
                del os.environ['GIT_DIR']
            logger.info('hash comparison succeeded: ' + current_app.config.get('REPO_PATH'))
            logger.info('starting to pull the repository')
            # repo = Repo(current_app.config.get('REPO_PATH'))
            repo = Repo("/usr/local/python3/graduate998/backStageNew")
            # repo.git.pull()
            origin = repo.remotes.origin
            origin.pull()
            # alternative: pull from the default remote
            # remote = repo.remote()
            # remote.pull()
            logger.info('pull completed!')
            # commit = request.json['after'][0:6]
            # logger.info('Repository updated with commit {}'.format(commit))
        except Exception:
            logger.info(traceback.format_exc())
            return jsonify({"error": str(traceback.format_exc())}), 500
    return jsonify({}), 200
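The signature check at the core of this handler, as a standalone sketch with made-up secret and payload:

import hmac

secret = b'my-webhook-secret'                      # made-up values
payload = b'{"ref": "refs/heads/master"}'
signature = hmac.new(secret, payload, digestmod='sha1').hexdigest()

# the server recomputes the digest and compares in constant time
hashhex = hmac.new(secret, payload, digestmod='sha1').hexdigest()
print(hmac.compare_digest(hashhex, signature))     # True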
Example #25
    def print_summary(self):
        """
        Print summary of data container

        :return:
        """
        nfiles_r = 0
        nfiles_t = 0
        nfiles_z = 0
        nwins_r = 0
        nwins_t = 0
        nwins_z = 0
        for window in self.window:
            if window.component[2:3] == "R":
                nfiles_r += 1
                nwins_r += window.num_wins
            elif window.component[2:3] == "T":
                nfiles_t += 1
                nwins_t += window.num_wins
            elif window.component[2:3] == "Z":
                nfiles_z += 1
                nwins_z += window.num_wins
            else:
                raise ValueError(
                    "Unrecognized compoent in windows: %s.%s.%s"
                    % (window.station, window.network, window.component))

        logger.info("="*10 + "  Data Summary  " + "="*10)
        logger.info("Number of Deriv synt: %d" % len(self.par_list))
        logger.info("   Par: [%s]" % (', '.join(self.par_list)))
        logger.info("Number of data pairs: %d" % self.nfiles)
        logger.info("   [Z, R, T] = [%d, %d, %d]"
                    % (nfiles_z, nfiles_r, nfiles_t))
        logger.info("Number of windows: %d" % self.nwins)
        logger.info("   [Z, R, T] = [%d, %d, %d]"
                    % (nwins_z, nwins_r, nwins_t))
        logger.info("Loading takes %6.2f seconds" % self.elapse_time)
Example #26
    def print_cmtsource_summary(cmt):
        """
        Print CMTSolution source summary

        :return:
        """
        logger.info("=" * 10 + "  Event Summary  " + "=" * 10)
        logger.info("Event name: %s" % cmt.eventname)
        logger.info("   Latitude and longitude: %.2f, %.2f" % (
            cmt.latitude, cmt.longitude))
        logger.info("   Depth: %.1f km" % (cmt.depth_in_m / 1000.0))
        logger.info("   Region tag: %s" % cmt.region_tag)
        logger.info("   Trace: %.3e" % (
            (cmt.m_rr + cmt.m_tt + cmt.m_pp) / cmt.M0))
        logger.info("   Moment Magnitude: %.2f" % cmt.moment_magnitude)
Example #27
    def inversion_result_table(self):
        """
        Print out the inversion table

        :return:
        """
        title = "*" * 20 + " Inversion Result Table(%d npar) " % \
            self.config.npar + "*" * 20
        logger.info(title)

        if not self.config.bootstrap:
            logger.info("PAR         Old_CMT        New_CMT")
            logger.info("Mrr:  %15.6e  %15.6e" % (
                self.cmtsource.m_rr, self.new_cmtsource.m_rr))
            logger.info("Mtt:  %15.6e  %15.6e" % (
                self.cmtsource.m_tt, self.new_cmtsource.m_tt))
            logger.info("Mpp:  %15.6e  %15.6e" % (
                self.cmtsource.m_pp, self.new_cmtsource.m_pp))
            logger.info("Mrt:  %15.6e  %15.6e" % (
                self.cmtsource.m_rt, self.new_cmtsource.m_rt))
            logger.info("Mrp:  %15.6e  %15.6e" % (
                self.cmtsource.m_rp, self.new_cmtsource.m_rp))
            logger.info("Mtp:  %15.6e  %15.6e" % (
                self.cmtsource.m_tp, self.new_cmtsource.m_tp))
            logger.info(
                "dep:  %15.3f  %15.3f" % (
                    self.cmtsource.depth_in_m / 1000.0,
                    self.new_cmtsource.depth_in_m / 1000.0))
            logger.info("lon:  %15.3f  %15.3f" % (
                self.cmtsource.longitude, self.new_cmtsource.longitude))
            logger.info("lat:  %15.3f  %15.3f" % (
                self.cmtsource.latitude, self.new_cmtsource.latitude))
            logger.info("ctm:  %15.3f  %15.3f" % (
                self.cmtsource.time_shift, self.new_cmtsource.time_shift))
            logger.info("hdr:  %15.3f  %15.3f" % (
                self.cmtsource.half_duration,
                self.new_cmtsource.half_duration))
        else:
            logger.info("PAR         Old_CMT          New_CMT     "
                        "Bootstrap_Mean     Bootstrap_STD     STD/Mean")
            logger.info(
                "Mrr:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_rr, self.new_cmtsource.m_rr,
                    self.par_mean[0], self.par_std[0],
                    self.std_over_mean[0] * 100))
            logger.info(
                "Mtt:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_tt, self.new_cmtsource.m_tt,
                    self.par_mean[1], self.par_std[1],
                    self.std_over_mean[1] * 100))
            logger.info(
                "Mpp:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_pp, self.new_cmtsource.m_pp,
                    self.par_mean[2], self.par_std[2],
                    self.std_over_mean[2] * 100))
            logger.info(
                "Mrt:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_rt, self.new_cmtsource.m_rt,
                    self.par_mean[3], self.par_std[3],
                    self.std_over_mean[3] * 100))
            logger.info(
                "Mrp:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_rp, self.new_cmtsource.m_rp,
                    self.par_mean[4], self.par_std[4],
                    self.std_over_mean[4] * 100))
            logger.info(
                "Mtp:  %15.6e  %15.6e  %15.6e  %15.6e   %10.2f%%" % (
                    self.cmtsource.m_tp, self.new_cmtsource.m_tp,
                    self.par_mean[5], self.par_std[5],
                    self.std_over_mean[5] * 100))
            logger.info("dep:  %15.3f  %15.3f  %15.3f  %15.3f   %10.2f%%" % (
                self.cmtsource.depth_in_m / 1000.0,
                self.new_cmtsource.depth_in_m / 1000.0,
                self.par_mean[6], self.par_std[6],
                self.std_over_mean[6] * 100))
            logger.info("lon:  %15.3f  %15.3f  %15.3f  %15.3f   %10.2f%%" % (
                self.cmtsource.longitude, self.new_cmtsource.longitude,
                self.par_mean[7], self.par_std[7],
                self.std_over_mean[7] * 100))
            logger.info("lat:  %15.3f  %15.3f  %15.3f  %15.3f   %10.2f%%" % (
                self.cmtsource.latitude, self.new_cmtsource.latitude,
                self.par_mean[8], self.par_std[8],
                self.std_over_mean[8] * 100))
            logger.info("ctm:  %15.3f  %15.3f  %15.3f  %15.3f   %10.2f%%" % (
                self.cmtsource.time_shift, self.new_cmtsource.time_shift,
                self.par_mean[9], self.par_std[9],
                self.std_over_mean[9] * 100))
            logger.info("hdr:  %15.3f  %15.3f  %15.3f  %15.3f   %10.2f%%" % (
                self.cmtsource.half_duration, self.new_cmtsource.half_duration,
                self.par_mean[10], self.par_std[10],
                self.std_over_mean[10] * 100))
Example #28
    def stats_energy(self):

        logger.info('Energy Search...')

        self.calculate_misfit_for_m00(1.00)
Example #29
 def one(*ignore):
     compute = Compute(args.strategy)
     logger.info(compute.attempt_provision('create',
                                           prefer_provider=args.provider,
                                           prefer_image=args.image))
Example #30
def _build_parser():
    parser = ArgumentParser(description='Create compute nodes')
    parser.add_argument('-s', '--strategy', help='strategy file [strategy.sample.json]',
                        default=config_join('strategy.sample.json'))
    parser.add_argument('-n', '--number_of_nodes', help='number of nodes to create [1]',
                        default=1, type=int)
    parser.add_argument('--provider', help='Try this provider first')
    parser.add_argument('--image', help='Try this image first')
    return parser


if __name__ == '__main__':
    args = _build_parser().parse_args()


    def one(*ignore):
        compute = Compute(args.strategy)
        logger.info(compute.attempt_provision('create',
                                              prefer_provider=args.provider,
                                              prefer_image=args.image))


    logger.info('Provisioning {} node{}'.format(
        args.number_of_nodes, 's' if args.number_of_nodes > 1 else ''))
    if args.number_of_nodes == 1:
        one()
    else:
        p = Pool(args.number_of_nodes)
        p.map(one, range(args.number_of_nodes))
Example #31
    def print_inversion_summary(self):
        """
        Print out the inversion summary

        :return:
        """
        logger.info("*" * 20)
        logger.info("Invert cmt parameters(%d par)" % self.config.npar)

        logger.info("Old CMT par: [%s]" % (
            ', '.join(map(str, self.cmt_par))))
        logger.info("dm: [%s]" % (
            ', '.join(map(str, self.new_cmt_par - self.cmt_par))))
        logger.info("New CMT par: [%s]" % (
            ', '.join(map(str, self.new_cmt_par))))

        logger.info("Trace: %e" % (np.sum(self.new_cmt_par[0:3])))
        logger.info("Energy change(scalar moment): %5.2f%%" % (
            (self.new_cmtsource.M0 - self.cmtsource.M0) /
            self.cmtsource.M0 * 100.0))

        self.inversion_result_table()
Example #32
def test_second():
    global supported_commands
    try:
        tests_file_path = 'tests_descriptions.json'
        logger.info(
            f'---------------------- run test {tests_file_path} ----------------------'
        )
        saved_values = {}
        test_data = {}
        with open(tests_file_path, 'r', encoding='utf-8') as file:
            test_data = json.load(file)

        for query in test_data:
            if query['commands_before_running'] != {}:
                logger.info(
                    f"commands_before_running: {query['commands_before_running']}"
                )
                for command in query['commands_before_running']:
                    logger.info(
                        f"run: {query['commands_before_running']}{query['commands_before_running'][command]}"
                    )
                    supported_commands[command]['func'](
                        **(query['commands_before_running'][command]))

            if 'saved_before' in query['params'].keys():
                for type_param, array_name_params in query['params'][
                        'saved_before'].items():
                    for name in array_name_params:
                        query['params'][type_param][name] = saved_values[name]

            if 'saved_before' in query['custom_headers'].keys():
                for name_header in query['custom_headers']['saved_before']:
                    query['custom_headers']['headers'][
                        name_header] = saved_values[name_header]

            # print(f"params: {query['params']}")
            # print(f"custom_headers: {query['custom_headers']}")

            url = str(query['url']).format(**query['params']['path'])
            response = http_query.http_query_by_type[str(
                query['method']).lower()](
                    url=url,
                    headers=query['custom_headers']['headers'],
                    query_params=query['params']['query'],
                    json_data=query['request_body_json'],
                )
            # checks.check_mimetype(mimetype=response.content_type)
            checks.check_mimetype(mimetype=response.headers['content-type'])
            checks.check_http_code(code=response.status_code)
            response_data_json = response.json()
            checks.check_structure_successful_response(
                response=response_data_json)
            for scan_area, pattern in query['checks'].items():
                if pattern is not None and pattern != {} and pattern != []:
                    area = str(scan_area).replace('in_', '')
                    ok, res_comparison = compare(pattern,
                                                 response_data_json[area])

                    assert ok is True, f'A result that does not match the pattern was found.\nResult of comparing the url [{url}] response:\n{res_comparison}'

            for param_for_save in query['saves_from_body']:
                saved_values[param_for_save] = response_data_json['body'][
                    param_for_save]
        # show_dialog()
        return True
    except AssertionError as ex:
        logger.error(ex)
        print(f'[Error]{ex}')
        raise MyValueError(f'Test failed: {ex}')
Example #33
 def stats_tshift(self):

     logger.info("Origin time search...")

     self.calculate_tshift()
Example #34
    def invert_cmt(self):
        """
        ensemble all measurements together to form Matrix A and vector
        b to solve the A * (dm) = b
        A is the Hessian Matrix and b is the misfit

        :return:
        """
        logger.info("*"*15)
        logger.info("CMT Inversion")
        logger.info("*"*15)
        # ensemble A and b
        A = util.sum_matrix(self.weight_array, self.A1_all)
        b = util.sum_matrix(self.weight_array, self.b1_all)
        logger.info("Inversion Matrix A is as follows:")
        logger.info("\n%s" % ('\n'.join(map(self._float_array_to_str, A))))
        logger.info("Condition number of A: %10.2f" % (np.linalg.cond(A)))
        logger.info("RHS vector b is as follows:")
        logger.info("[%s]" % (self._float_array_to_str(b)))

        # source inversion
        self.new_cmt_par = self.invert_solver(A, b, print_mode=True)
        self.convert_new_cmt_par()
Example #35
def destroy(config_filename, restrict_provider_to=None):
    with open(config_filename, 'rt') as f:
        config_contents = f.read()

    config_dict = loads(replace_variables(config_contents))
    del config_contents

    providers = tuple(obj for obj in config_dict['provider']['options']
                      if obj['provider']['name'] in restrict_provider_to
                      or obj['provider']['name'] == restrict_provider_to) if restrict_provider_to \
        else tuple(obj for obj in config_dict['provider']['options'])

    client = (lambda etcd_server_location: Client(
        protocol=etcd_server_location.scheme,
        host=etcd_server_location.hostname,
        port=etcd_server_location.port
    ))(urlparse(config_dict['etcd_server']))

    provider2conf_and_driver = dict(
        map(lambda provider_dict: (provider_dict['provider']['name'],
                                   namedtuple('_', 'conf driver_cls')(
                                       provider_dict,
                                       (lambda provider_cls: provider_cls(
                                           region=provider_dict['provider']['region'],
                                           **provider_dict['auth']
                                       ))(get_driver(
                                           getattr(Provider, provider_dict['provider']['name'])
                                           if hasattr(Provider, provider_dict['provider']['name'])
                                           else next(getattr(Provider, prov_name)
                                                     for prov_name in dir(Provider)
                                                     if getattr(Provider, prov_name)
                                                     == provider_dict['provider']['name'].lower())
                                       )))), providers)
    )

    # Map nodes to their provider, including ones outside etcd
    provider2nodes = {
        provider: tuple(
            namedtuple('_', 'uuid node')(node.uuid, node) for node in
            driver.driver_cls.list_nodes(*((driver.conf['create_with']['ex_cloud_service_name'],)
                                           if driver.conf['provider']['name'] == 'AZURE'
                                           else tuple()
                                           ))
            if driver.driver_cls.NODE_STATE_MAP and node.state in (
                driver.driver_cls.NODE_STATE_MAP.get(
                    'running',
                    next((node.state for k, v in driver.driver_cls.NODE_STATE_MAP.items()
                          if 'running' in v), None)
                ),
                driver.driver_cls.NODE_STATE_MAP.get('active')
            ) or not driver.driver_cls.NODE_STATE_MAP and node.state in ('running',)
        )
        for provider, driver in provider2conf_and_driver.items()}

    uuid2key = {loads(client.get(key).value)['uuid']: key
                for key in flatten(etcd_ls(client))
                if (lambda v: isinstance(v, str) and v.startswith('{'))(client.get(key).value)}
    # TODO: Only call `client.get` once per `key` ^

    # Filter to just ones inside etcd; then deprovision and delete from etcd
    logger.info('Dropped: {}'.format(
        {
            provider: tuple(rm_prov_etcd(client, n.node)
                            for n in nodes if n.uuid in uuid2key)
            for provider, nodes in provider2nodes.items()
            }
    ))

    # Delete all empty etcd directories.
    for i in range(20):  # TODO: walk the tree rather than hackily rerun
        it_consumes(
            logger.info('rmdir {directory}'.format(directory=directory, res=client.delete(directory, dir=True)))
            for directory in flatten(etcd_empty_dirs(client))
        )

    return client
Example #36
    def print_summary(self):
        """
        Print function of configuration

        :return:
        """
        logger.info("="*10 + "  Config Summary  " + "="*10)
        logger.info("Origin time inversion: %s" % self.origin_time_inversion)
        logger.info("Energy inversion: %s" % self.energy_inversion)
        logger.info("Weighting scheme")
        if self.weight_data:
            if self.weight_function == default_weight_function:
                logger.info("   Weighting data ===> Using Default "
                            "weighting function")
            else:
                logger.info("   Weighting data ===> Using user-defined"
                            "weighting function")
        else:
            logger.info("   No weighting applied")
        if self.origin_time_inversion:
            logger.info("Time start, grid ratio and end: [%6.3f %6.3f]"
                        % (self.t00_s, self.t00_e))
        if self.energy_inversion:
            logger.info("Energy start, grid, and end   : [%6.3f %6.3f %6.3f]"
                        % (self.m00_s, self.dm00, self.m00_e))
Example #37
 def get_extracts(self):
     logger.info('extracts: {}'.format(self.extracts))
     onlydirs = [
         f for f in listdir(self.extracts) if isdir(join(self.extracts, f))
     ]
     return onlydirs
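The same subdirectory listing in standalone form; '.' stands in for self.extracts:

from os import listdir
from os.path import isdir, join

extracts = '.'
onlydirs = [f for f in listdir(extracts) if isdir(join(extracts, f))]
print(onlydirs)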