Example #1
    def __init__(self, rules_file, s3_base_url=None, s3_atch_dir=None, aws_access_key_id=None, aws_secret_access_key=None):
        with open(rules_file, "rb") as file_handle:
            self.rules = json.loads(file_handle.read().decode("utf-8"))
        # '__rootdir__' is the root directory in s3 we expect to find the email
        # files. save it to the object, then delete it from the rules (since
        # it's not actually a rule)
        if "__rootdir__" in self.rules:
            del self.rules["__rootdir__"]
        for dest in self.rules:
            rule_sets = to_list(self.rules[dest])
            for rule_set in rule_sets:
                for key in rule_set:
                    rule_set[key] = to_list(rule_set[key])
            self.rules[dest] = rule_sets
        self.client = None
        if not s3_base_url:
            return

        # split the 's3_base_url' into the bucket / directory format that the
        # boto library expects.
        s3_base_url = s3_base_url.strip("/ ")
        if s3_base_url.startswith("s3://"):
            s3_base_url = s3_base_url[5:]
        parts = s3_base_url.split("/")
        self.s3_bucket = parts[0]
        self.s3_base_dir = "/".join(parts[1:])
        self.attachment_dest_dir = self.s3_base_dir + "/" + (s3_atch_dir or "")  # s3_atch_dir defaults to None
        # logging.info("email processor %s %s %s", self.s3_bucket, self.s3_base_dir, self.attachment_dest_dir)
        self.client = s3.Client(key=aws_access_key_id, secret=aws_secret_access_key)
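None of the snippets on this page include the helper's definition, and each project ships its own variant, but a minimal sketch consistent with most call sites here (names and behavior assumed, not taken from any one project) would be:

def to_list(value):
    # Sketch only: normalize "nothing", "one item", or "many items"
    # into a list so callers can always iterate.
    if value is None:
        return []
    if isinstance(value, (list, tuple)):
        return list(value)
    return [value]

Note that some variants do more; Example #15 below evidently parses a comma-separated string of numbers, so treat this as illustrative.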
Example #2
def _build_project(project, env):
    children = []
    children += to_list(project.pre_build(env))
    children += to_list(project.build(env))
    children += to_list(project.post_build(env))
    children += to_list(project.install(env))
    return children
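to_list is what lets each build hook return nothing, a single task, or a list of tasks. A hypothetical project illustrating the three shapes (method names follow the calls above; return values are invented):

class NoopProject:
    def pre_build(self, env):  return None             # -> [] after to_list
    def build(self, env):      return 'compile'        # -> ['compile']
    def post_build(self, env): return ['pkg', 'sign']  # -> ['pkg', 'sign']
    def install(self, env):    return None             # -> []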
Example #3
def index():
    bundles = db.fetch("SELECT bundle AS path, COUNT(id) AS count FROM " + db.tbl_image + " GROUP BY bundle ORDER BY path")
    current_labels = to_list(request.args.get('labels', None))
    current_not_labels = to_list(request.args.get('notlabels', None))
    related_labels = get_related_labels(current_labels, current_not_labels)
    labels = db.fetch("SELECT id, name, COUNT(image) AS count FROM " + db.tbl_label + " INNER JOIN " +
                      db.tbl_image_label + " ON (label = id) GROUP BY id ORDER BY name ASC")
    return render_template('index.html', config=config, bundles=bundles, labels=labels,
                           current_labels=current_labels, current_not_labels=current_not_labels,
                           related_labels=related_labels)
Example #4
def authorize(auth, group_name, region):
    has_from = 'from' in auth
    has_to = 'to' in auth
    if has_from and has_to:
        raise Exception("illegal authorization block: using both 'from' and 'to' is not allowed: " + str(auth))
    if not has_from and not has_to:
        raise Exception("illegal authorization block: neither 'from' nor 'to' was specified: " + str(auth))

    if has_from:
        from_addrs = util.to_list(auth['from'])
        for from_addr in from_addrs:
            do_authorize(from_addr, group_name, auth, region)
    else:
        to_addrs = util.to_list(auth['to'])
        for to_addr in to_addrs:
            do_authorize(group_name, to_addr, auth, region)
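For reference, hedged examples of auth blocks this function would accept (the key names come from the checks above; group names and ports are invented):

auth_inbound  = {'from': ['web-sg', 'admin-sg'], 'ports': [80, 443]}  # several sources
auth_outbound = {'to': 'db-sg', 'ports': 5432}                        # scalar is fine; to_list wraps it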
Example #5
    def __init__(self, manager, key, uselist, callable_, typecallable, trackparent=False, extension=None, **kwargs):
        self.manager = manager
        self.key = key
        self.uselist = uselist
        self.callable_ = callable_
        self.typecallable = typecallable
        self.trackparent = trackparent
        self.extensions = util.to_list(extension or [])
Example #6
def init():
    """
    Clear old files and iterate posts and collect basic info
    """
    global _post_page_count
    global _post_file_names
    global _tags
    global _categories

    # remove old static files
    if os.path.exists(_deploy_dir):
        for file in os.listdir(_deploy_dir):
            if file.startswith('.'):
                continue
            path = os.path.join(_deploy_dir, file)
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
    else:
        os.mkdir(_deploy_dir)

    file_list = util.get_posts_list()

    # get post count
    if C.entry_count_one_page == 0:
        _post_page_count = 1
    else:
        # ceiling division: an exact multiple should not add an empty extra page
        total = len(file_list)
        _post_page_count = (total + C.entry_count_one_page - 1) // C.entry_count_one_page

    for file in file_list:
        # get post file names
        _post_file_names.append(os.path.splitext(file)[0])

        # get tags and categories
        file_path = os.path.join('posts', file)
        d = util.render_yaml(util.read_md_file_head(file_path))
        if 'categories' in d:
            _categories |= set(util.to_list(d['categories']))
        if 'tags' in d:
            _tags |= set(util.to_list(d['tags']))

    return True
Example #7
def init():
    """
    Clear old files and iterate posts and collect basic info
    """
    global _post_page_count
    global _post_file_names
    global _tags
    global _categories

    # remove old static files
    if os.path.exists(_deploy_dir):
        for file in os.listdir(_deploy_dir):
            if file.startswith('.'):
                continue
            path = os.path.join(_deploy_dir, file)
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
    else:
        os.mkdir(_deploy_dir)

    file_list = util.get_posts_list()

    # get post count
    if C.entry_count_one_page == 0:
        _post_page_count = 1
    else:
        total = len(file_list)
        _post_page_count = total // C.entry_count_one_page \
                           + (1 if total % C.entry_count_one_page != 0 else 0)

    for file in file_list:
        # get post file names
        _post_file_names.append(os.path.splitext(file)[0])

        # get tags and categories
        file_path = os.path.join('posts', file)
        d = util.render_yaml(util.read_md_file_head(file_path))
        if 'categories' in d:
            _categories |= set(util.to_list(d['categories']))
        if 'tags' in d:
            _tags |= set(util.to_list(d['tags']))
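The two-term expression above is ceiling division. An equivalent, more compact form (a standalone sketch, not the project's code):

import math

per_page = 10  # stand-in for C.entry_count_one_page
for total in (0, 9, 10, 11):
    pages = -(-total // per_page)  # upside-down floor division equals ceil for positive divisors
    assert pages == math.ceil(total / per_page)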
Example #8
def do_authorize(from_group, to_group, auth, region):
    protocols = [ 'tcp' ]
    if 'protocols' in auth:
        protocols = util.to_list(auth['protocols'])

    account = str(region['aws_account'])
    region_name = str(region['EC2_REGION']).strip()

    auths = 0
    for protocol in protocols:
        if protocol == 'icmp':
            authorize_icmp(from_group, to_group, auth['icmp_types'], account, region_name)
            auths += 1
        else:
            if 'ports' not in auth:
                raise Exception("no ports specified in auth: "+str(auth))

            ports = util.to_list(auth['ports'])
            for port in ports:
                authorize_normal(from_group, to_group, port, protocol, account, region_name)
                auths += 1

    if auths == 0:
        raise Exception("no authorizations made for auth: "+str(auth))
Example #9
    def __init__(self,
                 prob=1.0,
                 words_substr_triggers=None,
                 words_trigger=None,
                 exact_trigger=None,
                 substrings_trigger=None,
                 custom_trigger=None,
                 bot_only=False,
                 active_guild=None,
                 active_channel=None,
                 active_users=None,
                 regex_trigger=None,
                 delete_message=False,
                 log=False):
        if words_trigger is None:
            words_trigger = []
        if exact_trigger is None:
            exact_trigger = []
        if substrings_trigger is None:
            substrings_trigger = []
        if words_substr_triggers is None:
            words_substr_triggers = []

        if regex_trigger is not None:
            regex_trigger = re.compile(regex_trigger)

        words_trigger = to_list(words_trigger)
        exact_trigger = to_list(exact_trigger)
        substrings_trigger = to_list(substrings_trigger)
        active_guild = to_list(active_guild)
        active_channel = to_list(active_channel)
        active_users = to_list(active_users)
        words_substr_triggers = to_list(words_substr_triggers)

        extra_words, extra_substr = split_words_phrases(words_substr_triggers)
        words_trigger += extra_words
        substrings_trigger += extra_substr

        self.prob = prob
        self.words_trigger = words_trigger
        self.exact_trigger = exact_trigger
        self.substrings_trigger = substrings_trigger
        self.custom_trigger = custom_trigger
        self.bot_only = bot_only  # if True, reacts only to bots; if False, never reacts to bots
        self.active_guilds = active_guild  # List of guild name in which it can get triggered
        self.active_channels = active_channel  # List of channel ids in which it can get triggered
        self.active_users = active_users
        self.regex_trigger = regex_trigger
        self.delete_message = delete_message
        self.log = log
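An illustrative instantiation (the enclosing class name Reaction is assumed; all values are invented). Scalars are accepted everywhere because to_list wraps them:

greet = Reaction(prob=0.5,
                 words_trigger='hello',       # single word, wrapped by to_list
                 active_guild='My Server',
                 regex_trigger=r'\bhi+\b')    # compiled in __init__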
Example #10
    def run(self, env):
        sh = env.shell

        def _expand_vars(cmd):
            cmd_type = type(cmd)
            if cmd_type == str:
                cmd = replace_variables(cmd, env.config['variables'])
            elif cmd_type == list:
                cmd = [
                    replace_variables(sub, env.config['variables'])
                    for sub in cmd
                ]
            return cmd

        # Interpolate any variables
        self.commands = [_expand_vars(cmd) for cmd in self.commands]

        # Run each of the commands
        children = []
        for cmd in self.commands:
            cmd_type = type(cmd)
            # See if the string is actually an action
            if cmd_type == str:
                action_cls = Scripts.find_action(cmd)
                if action_cls:
                    cmd = action_cls()
                    cmd_type = type(cmd)

            if cmd_type == str:
                result = sh.exec(*cmd.split(' '))
                if result.returncode != 0:
                    print('Command failed, exiting')
                    sys.exit(12)
            elif cmd_type == list:
                result = sh.exec(*cmd)
                if result.returncode != 0:
                    print('Command failed, exiting')
                    sys.exit(12)
            elif isinstance(cmd, Action):
                Scripts.run_action(cmd, env)
            elif callable(cmd):
                children += to_list(cmd(env))
            else:
                print('Unknown script sub command: {}: {}'.format(cmd_type, cmd))
                sys.exit(4)
        return children
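The commands list can therefore mix four shapes; a hypothetical script config (the action name and commands are invented):

commands = [
    'install-deps',                # looked up via Scripts.find_action first
    'cmake --build ${build_dir}',  # plain string: variables expanded, split on spaces, exec'd
    ['git', 'clean', '-fdx'],      # argv list: exec'd as-is
    lambda env: [],                # callable: its return value is flattened via to_list
]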
Example #11
def create_tokenizer():
    """
    Build a tokenizer from the training-set image names and their
    corresponding captions. The LSTM's inputs/outputs must be numeric,
    so a dictionary is used to map between words and integers.
    :return: the fitted tokenizer
    https://keras-cn.readthedocs.io/en/latest/legacy/preprocessing/text/#tokenizer
    """

    train_image_names = util.load_image_names('{}{}{}'.format(current_path, os.sep, 'Flickr_8k.trainImages.txt'))
    description_path = '{}{}{}'.format(current_path, os.sep, 'descriptions.txt')
    train_descriptions = util.load_clean_captions(description_path, train_image_names)
    lines = util.to_list(train_descriptions)

    tokenizer = Tokenizer()
    tokenizer.fit_on_texts(lines)
    return tokenizer
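Typical downstream use of the returned tokenizer (standard Keras Tokenizer API; the caption is invented):

tokenizer = create_tokenizer()
vocab_size = len(tokenizer.word_index) + 1  # +1 for the reserved 0 index
seqs = tokenizer.texts_to_sequences(['a dog runs across the grass'])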
Example #12
def run_instance(unit, instance_number):

    config = unit.config

    region_config = unit.region_context.region_config
    region_name = unit.region_context.region

    args = ["ec2-run-instances", unit.get_ami(),
            "--instance-initiated-shutdown-behavior", "terminate",
            "--region", region_config['EC2_REGION'],
            "-g", unit.env_group_name,
            "-g", unit.group_name]

    if 'extra_groups' in config:
        for group in util.to_list(config['extra_groups']):
            args.append("-g")
            args.append(group)

    args += [ "-t", unit.get_instance_size(), "-k", unit.get_keypair() ]

    data = unit.get_user_data()
    if data is not None:
        data_file = util.write_temp_file("run_"+unit.unit_name+"_"+unit.role_name+"_userdata_", suffix=".json")
        args += [ "-f", data_file ]

    azone = unit.get_availability_zone()
    if azone is not None:
        args += [ "-z", azone]

    elif 'default_availability_zone' in region_config:
        args += [ "-z", region_config['default_availability_zone'] ]

    output = util.execute_shell(args)
    if util.is_noop():
        iid = "i-debug"
    else:
        iid = util.find_element_on_line_starting_with(output[0], "INSTANCE", 1)
    return iid
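A hypothetical unit config showing how extra_groups tolerates a scalar or a list (the key name comes from the code above; values are invented):

config = {'extra_groups': 'monitoring'}           # -> ["-g", "monitoring"]
config = {'extra_groups': ['monitoring', 'nat']}  # -> ["-g", "monitoring", "-g", "nat"]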
Example #13
def bootstrap(unit, host, iid):
    node_name = unit.group_name + "-" + iid
    region_config = unit.region_context.region_config
    # print ">>>>>bootstrap: region_context.region_config="+str(region_config)
    knife_args = ["knife", "bootstrap", host,
                  "--run-list", "role["+unit.role_name+"]",
                  "--node-name", node_name,
                  "--ssh-user", "ubuntu",
                  "-i", util.subst_homedir(region_config['KEY_DIR']) +"/"+region_config['keypair']+".pem",
                  "--sudo", "--no-host-key-verify",
                  "--environment", unit.env_name]
    util.execute_shell(knife_args)

    util.execute_shell(["ec2-create-tags", iid, "--tag", "Name=" + node_name])
    util.execute_shell(["create-cname", node_name+".unit", host+".", "86400"])

    if POST_BOOTSTRAP in unit.config:
        add_to_haproxy(unit, host, iid)
        for type in ['sg', 'asg', 'tag']:
            key = 'refresh_' + type + '_hosts'
            if key in unit.config[POST_BOOTSTRAP]:
                for group in util.to_list(unit.config[POST_BOOTSTRAP][key]):
                    util.execute_shell(["ssh-"+type+"-hosts", group, "sudo", "chef-client"])
Example #14
file = './datasets/iris/iris.data'
#file =  './datasets/pendigits/pendigits.tra'
#file = './datasets/satimage/sat.trn'
#file = './datasets/letter-recognition/letter-recognition.data'
# given

dataset = util.load_data(file, delimiter=',')

def remove_label(data):
    return map(lambda x: x[:-1],
               data)

data = remove_label(dataset)
data = util.to_number(data)
data = util.to_list(data)
data = util.rescale(data)

def get_label(data):
    return map(lambda x: x[-1],
               data)

target = get_label(dataset)
target = util.to_list(target)

#cluster_count_list = [26, 52, 104, 208, 416, 832]
cluster_count_list = [3]

for cluster_count in cluster_count_list:
    print('cluster count:', cluster_count)
# cluster_count = 8
Example #15
        return tf.transpose(tf.concat([first, hats, last], axis=1))

    def net_pred(self, x):
        u = NNUtil.neural_net(x, self.weights, self.biases)
        return u


if __name__ == "__main__":
    working_dir = "./Test_Collection/CB/"
    paras = util.load_parameter(working_dir + "paras.txt")
    N = int(paras["N"])
    width = int(paras["width"])
    depth = int(paras["depth"])
    max_epoch = int(paras["max_epoch"])
    num_tested_per_element = int(paras["num_tested_per_element"])
    pivot = util.to_list(paras["pivot"])

    upper_bound = pivot[-1]
    lower_bound = pivot[0]
    num_element = len(pivot) - 1
    hidden_layers = [width] * depth

    new_dir = "{}_{}_{}/N={}/{}_{}/".format(lower_bound, upper_bound,
                                            num_element, N, depth, width)
    dump_dir = working_dir + new_dir
    if not os.path.exists(dump_dir):
        os.makedirs(dump_dir)
    shutil.copy(working_dir + "paras.txt", dump_dir)

    model = CBSolutionNN(N, pivot, hidden_layers)
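Here util.to_list evidently parses a value read from paras.txt into a list of numbers. An illustrative paras.txt and a matching parsing sketch, consistent with the int() casts and the pivot usage above (format assumed, not taken from the project):

# Illustrative paras.txt entries (format assumed):
#   N = 10
#   width = 20
#   depth = 4
#   max_epoch = 5000
#   num_tested_per_element = 100
#   pivot = -1.0, 0.0, 1.0
def to_list(value):
    # parse a comma-separated string into floats (sketch only)
    return [float(tok) for tok in str(value).split(',')]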
Example #16
def evaluate(args, model, dev_dataset, data_loader, gold_dict, tokenizer,
             device, max_len, use_squad_v2):
    dataset, examples, features = dev_dataset.dataset, dev_dataset.examples, dev_dataset.features
    model.eval()
    all_results = []
    with torch.no_grad(), \
            tqdm(total=len(data_loader.dataset)) as progress_bar:
        for batch in data_loader:
            batch = tuple(t.to(device) for t in batch)
            inputs = {
                "input_ids": batch[0],
                "attention_mask": batch[1],
                "token_type_ids": batch[2],
            }
            if args.bidaf:
                inputs['char_ids'] = batch[6]
            example_indices = batch[3]
            batch_size = inputs["input_ids"].size(0)

            # Forward
            outputs = model(**inputs)
            for i, example_index in enumerate(example_indices):
                eval_feature = features[example_index.item()]
                unique_id = int(eval_feature.unique_id)

                output = [to_list(output[i]) for output in outputs]
                start_logits, end_logits = output
                result = SquadResult(unique_id, start_logits, end_logits)

                all_results.append(result)

            # Log info
            progress_bar.update(batch_size)

    if args.dev_logits_save_file is not None:
        with open(args.dev_logits_save_file, 'wb') as f:
            pickle.dump(all_results, f)

    pred_dict = compute_predictions_logits(
        examples,
        features,
        all_results,
        1,
        args.max_ans_len,
        True,
        'save/temp/predict_temp.json',
        'save/temp/nbest_temp.json',
        'save/temp/nlog_odd.log',
        False,
        args.use_squad_v2,
        args.null_score_diff_threshold,
        tokenizer,
    )
    model.train()

    results = util.eval_dicts(gold_dict, pred_dict, use_squad_v2)
    results_list = [('F1', results['F1']), ('EM', results['EM'])]
    if use_squad_v2:
        results_list.append(('AvNA', results['AvNA']))
    results = OrderedDict(results_list)

    return results, pred_dict
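In this example to_list converts a per-example logits tensor into a plain Python list. The usual helper in SQuAD-style eval scripts looks like this (a sketch; the project's own definition isn't shown):

def to_list(tensor):
    # Detach from the graph, move off the GPU, convert to nested Python lists
    return tensor.detach().cpu().tolist()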
Example #17
def select_only(label, data):
    return filter(lambda x: x[4] == label,
                  data)

data = select_only(label, data)

def remove_label(data):
    return map(lambda x: x[:-1],
               data)

data = remove_label(data)

data = util.to_number(data)

data = util.to_list(data)

data = util.rescale(data)

# data
x = list(map(lambda x: x[features[0]],
             data))
y = list(map(lambda x: x[features[1]],
             data))

nullfmt = NullFormatter()         # no labels

# definitions for the axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
bottom_h = left_h = left + width + 0.02
Example #18
import keras.layers as klay
import keras.backend as K
import keras.callbacks as kclbk

import util
import polylib
import skbasis as skb

_epsilon = 1.0e-14

## ========================================= ##
root_dir = "./Test_Collection/"
result_dir = root_dir + "CB_keras/"
paras = util.load_parameter(root_dir + "paras.txt")
# domain contains coordinates of element boundaries
domain = util.to_list(paras["pivot"])

depth = int(sys.argv[2])
width = int(sys.argv[3])
max_epoch = int(sys.argv[4])

# domain parameters
N_elem = len(domain) - 1  # number of elements
upper_bound = domain[-1]
lower_bound = domain[0]

# C_k continuity
CK = 0  # C^k continuity

LAMBDA_COEFF = 1.0  # lambda constant
aa = 3.0