def ensure_ceph_storage(service, pool, rbd_img, sizemb, mount_point,
                        blk_device, fstype, system_services=[]):
    """
    To be called from the current cluster leader.
    Ensures the given pool and RBD image exist, that the image is mapped to a
    block device, and that the device is formatted and mounted at the given
    mount_point.

    If the device is being formatted for the first time, any data existing at
    mount_point will be migrated onto the RBD device before the mount point is
    remounted.

    All services listed in system_services will be stopped prior to data
    migration and restarted when complete.
    """
    # Ensure pool, RBD image, RBD mappings are in place.
    if not pool_exists(service, pool):
        utils.juju_log('INFO', 'ceph: Creating new pool %s.' % pool)
        create_pool(service, pool)

    if not rbd_exists(service, pool, rbd_img):
        utils.juju_log('INFO', 'ceph: Creating RBD image (%s).' % rbd_img)
        create_rbd_image(service, pool, rbd_img, sizemb)

    if not image_mapped(rbd_img):
        utils.juju_log('INFO', 'ceph: Mapping RBD Image as a Block Device.')
        map_block_storage(service, pool, rbd_img)

    # make file system
    # TODO: What happens if for whatever reason this is run again and
    # the data is already in the rbd device and/or is mounted??
    # When it is mounted already, it will fail to make the fs
    # XXX: This is really sketchy!  Need to at least add an fstab entry,
    #      otherwise this hook will blow away existing data if it's executed
    #      after a reboot.
    if not filesystem_mounted(mount_point):
        make_filesystem(blk_device, fstype)

        for svc in system_services:
            if utils.running(svc):
                utils.juju_log('INFO',
                               'Stopping service %s prior to migrating '
                               'data' % svc)
                utils.stop(svc)

        place_data_on_ceph(service, blk_device, mount_point, fstype)

        for svc in system_services:
            utils.start(svc)
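
A minimal usage sketch for the helper above, assuming it is imported alongside the ceph helpers it calls (pool_exists, create_pool, create_rbd_image, map_block_storage, place_data_on_ceph); every name, size, and path below is an illustrative placeholder rather than a value from the source.

# Illustrative only: call this from the hook that runs on the current
# cluster leader; all names, sizes, and paths here are placeholders.
ensure_ceph_storage(service='mysql',
                    pool='mysql-pool',
                    rbd_img='mysql-data',
                    sizemb=1024,
                    mount_point='/var/lib/mysql',
                    blk_device='/dev/rbd/mysql-pool/mysql-data',
                    fstype='ext4',
                    system_services=['mysql'])
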
Example #2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul  5 22:36:10 2017

@author: konodera
"""

import pandas as pd
import numpy as np
from tqdm import tqdm
import utils
utils.start(__file__)

#==============================================================================
# load
#==============================================================================

col = ['order_id', 'user_id', 'product_id', 'order_number_rev']
log = utils.read_pickles('../input/mk/log', col).sort_values('user_id')

streak = pd.read_pickle('../input/mk/streak_order-product.p')
#==============================================================================
# def
#==============================================================================
def make(T):
    """
    T = 0
    folder = 'trainT-0'
    """
    if T==-1:
Example #3
def start(message):
    if message.chat.id != config.group_id:
        config.the_bot.forward_message(config.group_id, message.chat.id,
                                       message.message_id)
    utils.start(message)
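
A registration sketch for the handler above, assuming the bot is a pyTelegramBotAPI (telebot) instance exposed as config.the_bot; the decorator and polling call are assumptions and do not appear in the source.

# Hypothetical wiring, assuming config.the_bot is a telebot.TeleBot instance.
@config.the_bot.message_handler(commands=['start'])
def handle_start(message):
    start(message)  # delegate to the handler defined above

config.the_bot.infinity_polling()  # assumed entry point, not shown in the source
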
Example #4
import numpy as np
import pandas as pd
import gc

import lightgbm as lgb
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import StandardScaler

from multiprocessing import cpu_count
from tqdm import tqdm

import sys

import utils
utils.start(__file__)
#==============================================================================

# parameters

params = {
    'bagging_freq': 5,
    'bagging_fraction': 1.0,
    'boost_from_average': 'false',
    'boost': 'gbdt',
    'feature_fraction': 1.0,
    'learning_rate': 0.005,
    'max_depth': -1,
    'metric': 'binary_logloss',
    'min_data_in_leaf': 30,
    'min_sum_hessian_in_leaf': 10.0,
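
The parameter dict above is cut off in the listing. Purely as an illustrative sketch (not the author's training code), this is how such a dict is typically passed to lgb.train, reusing the lightgbm and roc_auc_score imports already present; X_train, y_train, X_valid, and y_valid are assumed to exist.

# Illustrative only: typical use of a params dict like the one above.
# X_train, y_train, X_valid, y_valid are assumed, not taken from the source.
dtrain = lgb.Dataset(X_train, label=y_train)
dvalid = lgb.Dataset(X_valid, label=y_valid, reference=dtrain)

model = lgb.train(params, dtrain,
                  num_boost_round=1000,
                  valid_sets=[dvalid])

pred = model.predict(X_valid)
print('valid AUC:', roc_auc_score(y_valid, pred))
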
Example #5
MLM partners                94
Jewelry                     41
Tourism                      2
Name: revo_NAME_SELLER_INDUSTRY, dtype: int64
XNA    193164
Name: revo_NAME_YIELD_GROUP, dtype: int64
Card Street    112582
Card X-Sell     80582
Name: revo_PRODUCT_COMBINATION, dtype: int64
#========================================================================
'''

#========================================================================
# Start
#========================================================================
utils.start(sys.argv[0])

app = utils.read_df_pkl(path='../input/clean_app*.p')[[key, target]]

fname_list = [
    #  'bureau'
    'prev'
    #  ,'install'
    #  ,'pos'
    #  ,'ccb'
]
for fname in fname_list:
    logger.info(f"{fname} Start!")
    df_feat = utils.read_df_pkl(path=f'../input/clean_{fname}*.p')

    # Data Check