Example No. 1
def scrape_news_contents():
    checkpoint = read_file("data/scraped/cp/news-links-cp.txt")
    start = int(checkpoint[0])
    if start == 500:  # all 500 links scraped; matches the corrected checkpoint below
        print("Status: Finished!")
        return

    urls = read_file("data/scraped/news-links.txt", start=start)
    contents = []
    for url in urls:
        start += 1
        print("Link [" + str(start) + "]: " + url)
        page = urlopen(url)
        soup = BeautifulSoup(page, 'html.parser')
        div = soup.find('div', {
            'class': 'field-item even',
            'property': 'content:encoded'
        })
        for child in div.findChildren():
            contents.append(child.getText())
        write_file("data/scraped/news-raw-nc.txt",
                   contents=contents,
                   per_line=True,
                   mode="a")
        contents = []
        # `start` already holds the index of the next unscraped link;
        # the original wrote start + 1, which skips a link on resume.
        endpoints = [str(start)]

        write_file("data/scraped/cp/news-links-cp.txt",
                   contents=endpoints,
                   mode="w")
Example No. 2
def scrape_news_links():
    links = read_file('data/scraped/news-links.txt')
    if len(links) == 500:
        print("Status: Finished!\n")
        return

    url = "http://www.sunstar.com.ph/superbalita-cebu/balita"
    main_url = urlparse.urlparse(url).scheme + '://' + urlparse.urlparse(
        url).hostname
    stop_scraping_process = False
    i = 0
    limit = 500
    while i < limit and not stop_scraping_process:
        page = urlopen(url)
        soup = BeautifulSoup(page, 'html.parser')
        titles = soup.findAll('h3', {'class': 'title'})
        for title in titles:
            child = title.findChildren()[0]
            write_file("data/scraped/news-links.txt",
                       contents=[main_url + child.get('href')],
                       mode="a")
            print(main_url + child.get('href'))
            print("\n")
            i += 1
            if i == limit:
                break

        next_page = soup.find('a', {'title': 'Go to next page'})
        if next_page:
            url = main_url + next_page.get('href')
        else:
            stop_scraping_process = True
Example No. 3
def correct_ADV():
    contents = read_file('data/new-ADV.txt', strip=True, dict_format=True)
    entries = read_file('data/cebposdict-nc.txt', strip=True, dict_format=True)

    for key, value in contents.iteritems():
        if 'REM' in value:
            if key in entries:
                del entries[key]
        else:
            entries[key] = value

    if len(contents):
        write_file('data/cebposdict-nc.txt',
                   contents=[''],
                   add_newline=False,
                   mode='w')
        for key, value in sorted(entries.iteritems()):
            new_entry = [key + ' ']
            value = list(set(value))
            new_entry.append(' '.join(value))
            new_entry.append('\n')
            write_file('data/cebposdict-nc.txt',
                       contents=new_entry,
                       add_newline=False,
                       mode='a')
            new_entry = []
Example No. 4
 def compile_and_write(self, idl_filename):
     interface_name = idl_filename_to_interface_name(idl_filename)
     definitions = self.reader.read_idl_definitions(idl_filename)
     output_code_list = self.code_generator.generate_code(
         definitions, interface_name)
     for output_path, output_code in output_code_list:
         write_file(output_code, output_path, self.only_if_changed)
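Note: the Blink/Chromium bindings examples in this listing (compile_and_write and the various generate_* functions) use the opposite argument order: write_file(new_text, destination_filename, only_if_changed=False). Below is a minimal sketch of that convention, assuming only_if_changed exists to avoid touching a file whose content is unchanged (so dependent build steps see no spurious mtime change); the real helper in bindings/scripts/utilities.py may differ in detail.

import os

def write_file(new_text, destination_filename, only_if_changed=False):
    # Skip the write when the file already holds exactly this content.
    if only_if_changed and os.path.isfile(destination_filename):
        with open(destination_filename) as f:
            if f.read() == new_text:
                return
    with open(destination_filename, 'w') as f:
        f.write(new_text)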
Example No. 5
def main():
    options = parse_options()

    idl_file_names = read_idl_files_list_from_file(
        options.idl_files_list, is_gyp_format=options.gyp_format_list)

    meta_data_list = extract_meta_data(idl_file_names)
    interface_names = [
        'V8%sPartial' % meta_data['name'] for meta_data in meta_data_list
    ]
    interface_names.sort()

    includes = [
        '#include "bindings/modules/v8/%s.h"' % interface_name
        for interface_name in interface_names
    ]
    initialize_calls = [
        '  %s::initialize();' % interface_name
        for interface_name in interface_names
    ]

    content = _INIT_PARTIAL_INTERFACE % (_COPYRIGHT, '\n'.join(includes),
                                         '\n'.join(initialize_calls))

    write_file(content, options.output)
Example No. 6
def main():
    # Check input
    if len(sys.argv) != 5:
        print "usage: python run_ML_FW.py [train file] [setting file] [model folder] [test data folder]"
        exit()
    # Get command-line arguments
    train_file = sys.argv[1]
    setting_file = sys.argv[2]
    model_folder = sys.argv[3]
    test_data_folder = sys.argv[4]
    tops = 10  # int(sys.argv[5])
    # Recreate the model folder (any previous contents are removed)
    if os.path.exists(model_folder):
        shutil.rmtree(model_folder)
    os.makedirs(model_folder)
    # Read settings
    print 'reading setting ...'
    ddict = utilities.read_setting(setting_file)
    print 'write setting ...'
    file_name = '%s/setting.txt' % (model_folder)
    utilities.write_setting(ddict, file_name)
    # Read data for computing perplexities
    print 'read data for computing perplexities ...'
    (wordids_1, wordcts_1, wordids_2, wordcts_2) = \
        utilities.read_data_for_perpl(test_data_folder)
    # Initialize the algorithm
    print 'initialize the algorithm ...'
    ml_fw = ML_FW.MLFW(ddict['num_terms'], ddict['num_topics'],
                       ddict['tau0'], ddict['kappa'], ddict['iter_infer'])
    # Start
    print 'start!!!'
    i = 0
    while i < ddict['iter_train']:
        i += 1
        print '\n***iter_train:%d***\n' % (i)
        datafp = open(train_file, 'r')
        j = 0
        while True:
            j += 1
            (wordids, wordcts) = utilities.read_minibatch_list_frequencies(
                datafp, ddict['batch_size'])
            # Stop condition
            if len(wordids) == 0:
                break
            print '---num_minibatch:%d---' % (j)
            (time_e, time_m, theta) = ml_fw.static_online(
                ddict['batch_size'], wordids, wordcts)
            # Compute sparsity
            sparsity = utilities.compute_sparsity(
                theta, theta.shape[0], theta.shape[1], 't')
            # Compute perplexities
            LD2 = utilities.compute_perplexities_fw(
                ml_fw.beta, ddict['iter_infer'],
                wordids_1, wordcts_1, wordids_2, wordcts_2)
            # Search top words of each topic
            list_tops = utilities.list_top(ml_fw.beta, tops)
            # Write files
            utilities.write_file(i, j, ml_fw.beta, time_e, time_m, theta,
                                 sparsity, LD2, list_tops, tops, model_folder)
        datafp.close()
    # Write final model to file
    file_name = '%s/beta_final.dat' % (model_folder)
    utilities.write_topics(ml_fw.beta, file_name)
    # Finish
    print 'done!!!'
Example No. 7
 def generate_callback_function_impl(output_directory, component):
     generator = CodeGeneratorCallbackFunction(
         component_info_providers[component], cache_dir=None,
         output_dir=output_directory, target_component=component)
     outputs = generator.generate_code()
     for output_path, output_code in outputs:
         write_file(output_code, output_path)
Example No. 8
 def generate_union_type_containers(output_directory, component):
     generator = CodeGeneratorUnionType(
         component_info_providers[component], cache_dir=None,
         output_dir=output_directory, target_component=component)
     outputs = generator.generate_code()
     for output_path, output_code in outputs:
         write_file(output_code, output_path)
Example No. 9
def create_pygame_classes(GAME_TITLE, NEW_GAME_FOLDER, color_dict):
    """This method will create pygame classes for each dictionary object"""

    print("[newgame] Creating PyGame classes...")

    # new_lines contains the data we will be writing to the output folder
    new_lines = ["import pygame as pg\n", "from settings import *\n"]

    # append data to new_lines.  This data is gathered from the color_dict.
    for color in color_dict:
        new_lines.append("\n")
        new_lines.append(
            "class {}(pg.sprite.Sprite):\n".format(
                color_dict[color][0]))
        new_lines.append("    def __init__(self, game, x, y):\n")
        new_lines.append("        self.groups = game.background_sprites\n")
        new_lines.append(
            "        pg.sprite.Sprite.__init__(self, self.groups)\n")
        new_lines.append("        self.game = game\n")
        new_lines.append(
            "        self.image = game.{}_img\n".format(
                color_dict[color][0]))
        new_lines.append("        self.rect = self.image.get_rect()\n")
        new_lines.append("        self.x = x\n")
        new_lines.append("        self.y = y\n")
        new_lines.append("        self.rect.x = x * TILE_SIZE\n")
        new_lines.append("        self.rect.y = y * TILE_SIZE\n")

    # declare a file in our new game called "classes.py"
    CLASS_FILE = os.path.join(NEW_GAME_FOLDER, "classes.py")
    # write the content of new_lines to the CLASS_FILE
    utilities.write_file(new_lines, CLASS_FILE)
Example No. 10
    def _before_install_new_packages(self):
        """
        Specific actions before new packages installation
        """
        print "The installation of MySQL for db_governor has started"

        check_file("/usr/local/directadmin/custombuild/build")
        check_file("/usr/local/directadmin/custombuild/options.conf")

        # MYSQL_DA_TYPE=`cat /usr/local/directadmin/custombuild/options.conf | grep mysql_inst= | cut -d= -f2`
        try:
            MYSQL_DA_TYPE = grep("/usr/local/directadmin/custombuild/options.conf", "mysql_inst=")[0].split("=")[1]
        except IndexError:
            MYSQL_DA_TYPE = ""

        if os.path.exists("/usr/share/lve/dbgovernor/da.tp.old"):
            if MYSQL_DA_TYPE == "no":
                MYSQL_DA_TYPE = read_file("/usr/share/lve/dbgovernor/da.tp.old")
            else:
                write_file("/usr/share/lve/dbgovernor/da.tp.old", MYSQL_DA_TYPE)
        else:
            write_file("/usr/share/lve/dbgovernor/da.tp.old", MYSQL_DA_TYPE)

        exec_command_out("/usr/local/directadmin/custombuild/build set mysql_inst no")

        self._mysqlservice("stop")
Example No. 11
def generate_training_set(follow, followed, ratio, solution_file, data_file):
    """ Uses the solution file to generate training set to train
    the model, hoping this method can get better result.
    Ratio controls the fraction of pos and neg data sets, if ratio is -1,
    the fraction is the origion fraction."""

    raw_solution = utilities.read_file(solution_file, False)
    dict_solution = {}
    for i in range(len(raw_solution)):
        row = raw_solution[i]
        dict_solution[int(row[0])] = set(int(n) for n in row[1::])

    x_train = [['spring brother is a true man']]
    for node in dict_solution.keys():
        nodes_pos = dict_solution[node]
        for n in nodes_pos:
            features = rank.get_features(follow, followed, node, n)
            x_train.append([1] + features)

        nodes_neg = candidate.get_candidates(follow, followed, node)
        nodes_neg.difference_update(nodes_pos)
        nodes_neg = list(nodes_neg)
        perm = random.permutation(len(nodes_neg))
        if ratio != -1:
            num = min(int(len(nodes_pos) * ratio), len(nodes_neg))
        else:
            num = len(nodes_neg)
        for i in range(num):
            # keep `node` as the source; the sampled candidate is the target
            # (the original clobbered `node` and reused a stale `n` here)
            neg = nodes_neg[perm[i]]
            features = rank.get_features(follow, followed, node, neg)
            x_train.append([0] + features)

    utilities.write_file(data_file, x_train)
Example No. 12
def generate_conditional_features(info_provider, options, idl_filenames):
    reader = IdlReader(info_provider.interfaces_info, options.cache_directory)
    jinja_env = initialize_jinja_env(options.cache_directory)

    # Extract the bidirectional mapping of conditional features <-> interfaces
    # from the global info provider and the supplied list of IDL files.
    feature_info = conditional_features_info(info_provider,
                                             reader, idl_filenames,
                                             options.target_component.lower())

    # Convert that mapping into the context required for the Jinja2 templates.
    template_context = conditional_features_context(
        MODULE_PYNAME, feature_info)

    # Generate and write out the header file
    header_text = render_template(jinja_env.get_template(
        "ConditionalFeaturesFor%s.h.tmpl" % options.target_component.title()), template_context)
    header_path = posixpath.join(options.output_directory,
                                 "ConditionalFeaturesFor%s.h" % options.target_component.title())
    write_file(header_text, header_path)

    # Generate and write out the implementation file
    cpp_text = render_template(jinja_env.get_template(
        "ConditionalFeaturesFor%s.cpp.tmpl" % options.target_component.title()), template_context)
    cpp_path = posixpath.join(options.output_directory,
                              "ConditionalFeaturesFor%s.cpp" % options.target_component.title())
    write_file(cpp_text, cpp_path)
Example No. 14
def generate_origin_trial_features(info_provider, options, idl_filenames):
    reader = IdlReader(info_provider.interfaces_info, options.cache_directory)
    jinja_env = initialize_jinja_env(options.cache_directory)

    # Extract the bidirectional mapping of conditional features <-> interfaces
    # from the global info provider and the supplied list of IDL files.
    feature_info = origin_trial_features_info(info_provider, reader,
                                              idl_filenames,
                                              options.target_component)

    # Convert that mapping into the context required for the Jinja2 templates.
    template_context = origin_trial_features_context(MODULE_PYNAME,
                                                     feature_info)

    file_basename = 'origin_trial_features_for_%s' % options.target_component

    # Generate and write out the header file
    header_text = render_template(
        jinja_env.get_template(file_basename + '.h.tmpl'), template_context)
    header_path = posixpath.join(options.output_directory,
                                 file_basename + '.h')
    write_file(header_text, header_path)

    # Generate and write out the implementation file
    cpp_text = render_template(
        jinja_env.get_template(file_basename + '.cc.tmpl'), template_context)
    cpp_path = posixpath.join(options.output_directory, file_basename + '.cc')
    write_file(cpp_text, cpp_path)
Example No. 15
def generate_conditional_features(info_provider, options, idl_filenames):
    reader = IdlReader(info_provider.interfaces_info, options.cache_directory)
    jinja_env = initialize_jinja_env(options.cache_directory)

    # Extract the bidirectional mapping of conditional features <-> interfaces
    # from the global info provider and the supplied list of IDL files.
    feature_info = conditional_features_info(info_provider, reader,
                                             idl_filenames,
                                             options.target_component.lower())

    # Convert that mapping into the context required for the Jinja2 templates.
    template_context = conditional_features_context(MODULE_PYNAME,
                                                    feature_info)

    # Generate and write out the header file
    header_text = render_template(
        jinja_env.get_template("ConditionalFeaturesFor%s.h.tmpl" %
                               options.target_component.title()),
        template_context)
    header_path = posixpath.join(
        options.output_directory,
        "ConditionalFeaturesFor%s.h" % options.target_component.title())
    write_file(header_text, header_path)

    # Generate and write out the implementation file
    cpp_text = render_template(
        jinja_env.get_template("ConditionalFeaturesFor%s.cpp.tmpl" %
                               options.target_component.title()),
        template_context)
    cpp_path = posixpath.join(
        options.output_directory,
        "ConditionalFeaturesFor%s.cpp" % options.target_component.title())
    write_file(cpp_text, cpp_path)
Example No. 16
def write_event_interfaces_file(event_idl_files, destination_filename, only_if_changed):
    def extended_attribute_string(name, value):
        if name == 'RuntimeEnabled':
            value += 'Enabled'
        return name + '=' + value

    def interface_line(full_path):
        relative_path_local, _ = os.path.splitext(os.path.relpath(full_path, source_dir))
        relative_path_posix = relative_path_local.replace(os.sep, posixpath.sep)

        idl_file_contents = get_file_contents(full_path)
        extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
        extended_attributes_list = [
            extended_attribute_string(name, extended_attributes[name])
            for name in EXPORTED_EXTENDED_ATTRIBUTES
            if name in extended_attributes]

        return '%s %s\n' % (relative_path_posix,
                            ', '.join(extended_attributes_list))

    lines = ['namespace="Event"\n',
             '\n']
    interface_lines = [interface_line(event_idl_file)
                       for event_idl_file in event_idl_files]
    interface_lines.sort()
    lines.extend(interface_lines)
    write_file(''.join(lines), destination_filename, only_if_changed)
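Note: Examples 16 and 57 emit the same flat "event interfaces" format. For a hypothetical core/events/FooEvent guarded by RuntimeEnabled=Foo, the generated file would read (names invented for illustration):

namespace="Event"

core/events/FooEvent RuntimeEnabled=FooEnabled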
Example No. 18
def resolve_equals():
    write_file('data/cebposdict-4.txt',
               contents=[''],
               no_encode=True,
               add_newline=False,
               mode='w')
    entries = read_file('data/cebposdict-3.txt', dict_format=True)
    result = []
    for key, value in entries.iteritems():
        words = nltk.word_tokenize(" ".join(value))
        new_entry = [key + ' ']
        related_words = []
        for word in words:
            if word in ['PART', 'ADJ', 'PRON', 'VERB', 'NOUN', 'NUM']:
                new_entry.append(word + ' ')
            elif word != '=':
                related_words.append(word)

        for rel_word in related_words:
            if rel_word in entries:
                values = entries[rel_word]
                words = nltk.word_tokenize(" ".join(values))
                for word in words:
                    if word in ['PART', 'ADJ', 'PRON', 'VERB', 'NOUN', 'NUM']:
                        new_entry.append(word + ' ')

        new_entry.append('\n')
        write_file('data/cebposdict-4.txt',
                   contents=new_entry,
                   add_newline=False,
                   mode='a')
        new_entry = []

    print('resolve_equals: Finished!')
Example No. 19
def save_shopping_articles_from_json_to_file(articles: dict):
    path = os.getcwd()
    path = os.path.join(path, "data/")
    file = "shopping_articles.txt"
    content = json.dumps(articles, indent=4)
    utils.write_file(base_path=path, filename=file, content=content)
    logging.debug("File successfully written to system.")
    return None
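Note: Example 19 assumes yet another convention, utils.write_file(base_path=..., filename=..., content=...) taking one pre-serialized string. A plausible sketch under that assumption (the directory-creation step is a guess):

import os

def write_file(base_path, filename, content):
    # Hypothetical helper: ensure the target directory exists, then
    # write the whole string in a single call.
    os.makedirs(base_path, exist_ok=True)
    with open(os.path.join(base_path, filename), 'w', encoding='utf-8') as f:
        f.write(content)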
Example No. 20
def generate_union_type_containers(code_generator_class, info_provider,
                                   options):
    generator = code_generator_class(info_provider, options.cache_directory,
                                     options.output_directory,
                                     options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 21
 def compile_and_write(self, idl_filename):
     interface_name = idl_filename_to_interface_name(idl_filename)
     definitions = self.reader.read_idl_definitions(idl_filename)
     target_definitions = definitions[self.target_component]
     output_code_list = self.code_generator.generate_code(
         target_definitions, interface_name)
     for output_path, output_code in output_code_list:
         write_file(output_code, output_path)
Example No. 22
 def generate_union_type_containers(output_directory, component):
     generator = CodeGeneratorUnionType(component_info_providers[component],
                                        cache_dir=None,
                                        output_dir=output_directory,
                                        target_component=component)
     outputs = generator.generate_code()
     for output_path, output_code in outputs:
         write_file(output_code, output_path)
Example No. 23
 def _save_previous_version(self):
     """
     Save current installed mysql version to cache file.
     It will be previous version after new installation.
     """
     version = mysql_version()
     if version:
         write_file(self.CACHE_VERSION_FILE, version)
Example No. 26
def generate_callback_function_impl(code_generator_class, info_provider,
                                    options):
    generator = code_generator_class(info_provider, options.cache_directory,
                                     options.output_directory,
                                     options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 27
 def generate_callback_function_impl(output_directory, component):
     generator = CodeGeneratorCallbackFunction(
         component_info_providers[component],
         cache_dir=None,
         output_dir=output_directory,
         target_component=component)
     outputs = generator.generate_code()
     for output_path, output_code in outputs:
         write_file(output_code, output_path)
Example No. 28
    def compile_file(self, idl_filename):
        definitions = self.reader.read_idl_definitions(idl_filename)

        files = self.code_generator.generate_code(definitions)

        for file_data in files:
            file_name = os.path.join(self.output_directory,
                                     PREFIX + file_data['file_name'])
            write_file(file_data['contents'], file_name)
Example No. 29
    def compile_file(self, idl_filename):
        definitions = self.reader.read_idl_definitions(idl_filename)

        files = self.code_generator.generate_code(definitions)

        for file_data in files:
            file_name = os.path.join(self.output_directory,
                                     file_data['file_name'])
            write_file(file_data['contents'], file_name)
Example No. 30
def generate_callback_function_impl(code_generator_class, info_provider,
                                    options):
    generator = code_generator_class(
        info_provider,
        options.cache_directory,
        options.output_directory,
        options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 31
def generate_union_type_containers(code_generator_class, info_provider,
                                   options):
    generator = code_generator_class(
        info_provider,
        options.cache_directory,
        options.output_directory,
        options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 32
def generate_callback_function_impl(options):
    info_provider = create_component_info_provider(options.info_dir,
                                                   options.target_component)
    generator = CodeGeneratorCallbackFunction(info_provider,
                                              options.cache_directory,
                                              options.output_directory,
                                              options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 33
def write_global_constructors_partial_interface(interface_name, idl_filename, constructor_attributes_list):
    # FIXME: replace this with a simple Jinja template
    lines = (['partial interface %s {\n' % interface_name] +
             ['    %s;\n' % constructor_attribute
              # FIXME: sort by interface name (not first by extended attributes)
              for constructor_attribute in sorted(constructor_attributes_list)] +
             ['};\n'])
    write_file(''.join(lines), idl_filename)
    header_filename = os.path.splitext(idl_filename)[0] + '.h'
    idl_basename = os.path.basename(idl_filename)
    write_file(HEADER_FORMAT.format(idl_basename=idl_basename), header_filename)
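Note: for illustration, a hypothetical call with interface_name='Window' and constructor_attributes_list=['attribute BarConstructor Bar', 'attribute FooConstructor Foo'] would write the IDL file as:

partial interface Window {
    attribute BarConstructor Bar;
    attribute FooConstructor Foo;
};

plus a companion .h stub rendered from HEADER_FORMAT (not shown in this listing).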
Example No. 35
 def run(self):
     # Initialize the algorithm
     print 'initialize the algorithm ...'
     online_vb = Online_VB.OnlineVB(
         self.settings['num_docs'], self.settings['num_terms'],
         self.settings['num_topics'], self.settings['alpha'],
         self.settings['eta'], self.settings['tau0'],
         self.settings['kappa'], self.settings['conv_infer'],
         self.settings['iter_infer'])
     # Start
     print 'start!!!'
     i = 0
     while i < self.settings['iter_train']:
         i += 1
         print '\n***iter_train:%d***\n' % (i)
         datafp = open(self.train_file, 'r')
         j = 0
         while True:
             j += 1
             (wordids, wordcts) = utilities.read_minibatch_list_frequencies(
                 datafp, self.settings['batch_size'])
             # Stop condition
             if len(wordids) == 0:
                 break
             #
             print '---num_minibatch:%d---' % (j)
             (time_e, time_m,
              theta) = online_vb.static_online(self.settings['batch_size'],
                                               wordids, wordcts)
             # Compute sparsity
             sparsity = utilities.compute_sparsity(theta, theta.shape[0],
                                                   theta.shape[1], 't')
             # Compute perplexities
             LD2 = utilities.compute_perplexities_vb(
                 online_vb._lambda, self.settings['alpha'],
                 self.settings['eta'], self.settings['iter_infer'],
                 self.test_data)
              # Search top words of each topic
             list_tops = utilities.list_top(online_vb._lambda, self.tops)
             # Write files
             utilities.write_file(i, j, online_vb._lambda, time_e, time_m,
                                  theta, sparsity, LD2, list_tops,
                                  self.tops, self.model_folder)
         datafp.close()
     # Write settings
     print 'write setting ...'
     file_name = '%s/setting.txt' % (self.model_folder)
     utilities.write_setting(self.settings, file_name)
     # Write final model to file
     print 'write final model ...'
     file_name = '%s/beta_final.dat' % (self.model_folder)
     utilities.write_topics(online_vb._lambda, file_name)
     # Finish
     print 'done!!!'
Example No. 36
def write_global_constructors_partial_interface(interface_name, destination_filename, constructor_attributes_list, only_if_changed):
    # FIXME: replace this with a simple Jinja template
    lines = (['[\n'] +
             ['    NoHeader,\n'] +
             [']\n'] +
             ['partial interface %s {\n' % interface_name] +
             ['    %s;\n' % constructor_attribute
              # FIXME: sort by interface name (not first by extended attributes)
              for constructor_attribute in sorted(constructor_attributes_list)] +
             ['};\n'])
    write_file(''.join(lines), destination_filename, only_if_changed)
Example No. 37
    def compile_and_write(self, idl_filename, output_filenames):
        interface_name = idl_filename_to_interface_name(idl_filename)
        idl_pickle_filename = os.path.join(
            self.output_directory, '%s_globals.pickle' % interface_name)
        definitions = self.reader.read_idl_definitions(idl_filename)
        output_code_list = self.code_generator.generate_code(
            definitions, interface_name, idl_filename, idl_pickle_filename,
            self.only_if_changed)

        for output_code, output_filename in zip(output_code_list,
                                                output_filenames):
            write_file(output_code, output_filename, self.only_if_changed)
Example No. 38
    def set_mysql_version(self, version):
        """
        Set new mysql version for next install
        """
        # check available versions
        versions = ["auto"] + self.REPO_NAMES.keys()
        if version not in versions:
            print >> sys.stderr, "Invalid mysql version."
            print >> sys.stderr, "Available versions: %s" % ", ".join(versions)
            sys.exit(2)

        write_file(self.NEW_VERSION_FILE, version)
Example No. 39
    def compile_and_write(self, idl_filename, output_filenames):
        interface_name = idl_filename_to_interface_name(idl_filename)
        idl_pickle_filename = os.path.join(self.output_directory,
                                           '%s_globals.pickle' % interface_name)
        definitions = self.reader.read_idl_definitions(idl_filename)
        output_code_list = self.code_generator.generate_code(definitions,
                                                             interface_name,
                                                             idl_pickle_filename,
                                                             self.only_if_changed)

        for output_code, output_filename in zip(output_code_list, output_filenames):
            write_file(output_code, output_filename, self.only_if_changed)
Example No. 40
def bi_freq():
    freq1 = 1000
    freq2 = 1550
    magn = {}
    data = util.generate_frequency([freq1, freq2])

    for freq in range(600, 1400, 1):
        magn1 = rec.goetzl([data], freq)
        magn[freq] = [magn1]

        print(str(freq) + ":" + str(magn1))
    util.write_file(magn, "bifreq100_155.csv")
Example No. 41
    def update_conan_config_yml(self, conan_approvaltests_dir,
                                new_version_without_v):
        conan_data_file = os.path.join(conan_approvaltests_dir, 'config.yml')
        conandata_yml_text = read_file(conan_data_file)

        conan_data = (f'  {new_version_without_v}:\n'
                      '    folder: all\n')
        conandata_yml_text += conan_data

        write_file(conan_data_file, conandata_yml_text)
Example No. 43
def generate_union_type_containers(options):
    info_provider = create_component_info_provider(options.info_dir,
                                                   options.target_component)
    if not info_provider.interfaces_info:
        raise Exception('Interfaces info is required to generate '
                        'union types containers')
    generator = CodeGeneratorUnionType(info_provider, options.cache_directory,
                                       options.output_directory,
                                       options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path)
Example No. 44
def new_round() -> str:
    # Returns a status message: announces the winner when only one player
    # remains; otherwise resets the bomb state and prompts the next player.
    bomb_dict = util.load_file(file_string)
    if len(bomb_dict) == 2:
        winner = get_players[0]
        points.change_points(winner, 300, '+')
        return f'{winner} is the last man standing and has won 300 points!'
    else:
        bomb_dict = {x: 0 for x in bomb_dict}  # resets all values to 0
        bomb_dict['bad_wire'] = util.rng(1, len(bomb_dict) - 1)
        util.write_file(file_string, bomb_dict)
        return f'{get_players[active_player_pos]}, you\'re up next. Cut one of these wires with !bomb cut: {bomb_squad.get_avail_wires()}'
Example No. 45
def generate_test_set(follow, followed, test_file, validation_file,
                      solution_file, num, max_remove_num):
    """ Generates the test set for analysis. """

    nodes_exclude = utilities.read_nodes_list(test_file)

    print 'Generating test nodes...'
    nodes_test = generate_test_nodes(follow, nodes_exclude, num)
    writable_nodes_test = [[n] for n in nodes_test]
    solution = generate_solution(follow, followed, nodes_test, max_remove_num)

    utilities.write_file(validation_file, writable_nodes_test)
    utilities.write_file(solution_file, solution)
Example No. 46
    def compile_and_write(self, idl_filename):
        definitions = self.reader.read_idl_definitions(idl_filename)
        target_definitions = definitions[self.target_component]
        interface_name = target_definitions.first_name
        output_code_list = self.code_generator.generate_code(
            target_definitions, interface_name)

        # Generator may choose to omit the file.
        if output_code_list is None:
            return

        for output_path, output_code in output_code_list:
            write_file(output_code, output_path)
Example No. 47
def main():
    opts, _ = parse_args()
    # TODO(peria): get rid of |info_provider|
    info_provider = create_component_info_provider(
        opts.info_dir, opts.target_component)
    generator = ExternalReferenceTableGenerator(opts, info_provider)

    idl_files = utilities.read_idl_files_list_from_file(opts.idl_files_list, False)
    for idl_file in idl_files:
        generator.process_idl_file(idl_file)
    output_code = generator.generate()
    output_path = opts.output
    write_file(output_code, output_path)
Example No. 48
def generate_union_type_containers(options):
    info_provider = create_component_info_provider(
        options.info_dir, options.target_component)
    if not info_provider.interfaces_info:
        raise Exception('Interfaces info is required to generate '
                        'union types containers')
    generator = CodeGeneratorUnionType(
        info_provider,
        options.cache_directory,
        options.output_directory,
        options.target_component)
    output_code_list = generator.generate_code()
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path, options.write_file_only_if_changed)
Example No. 49
 def _get_mysqlup():
     """
      Set value for panel update MYSQLUP option
     """
     if os.path.exists("/etc/cpupdate.conf"):
         shutil.copy2("/etc/cpupdate.conf", "/etc/cpupdate.conf.governor")
         is_mysqlup = grep("/etc/cpupdate.conf", "MYSQLUP")
         if is_mysqlup:
             if not grep(is_mysqlup, "never$", True):
                 replace_lines("/etc/cpupdate.conf", "".join(is_mysqlup),
                               "MYSQLUP=never")
         else:
             add_line("/etc/cpupdate.conf", "\nMYSQLUP=never\n")
     else:
         write_file("/etc/cpupdate.conf.governor", "")
         write_file("/etc/cpupdate.conf", "MYSQLUP=never\n")
Example No. 50
def generate_union_type_containers(options):
    if not (options.interfaces_info_file and options.component_info_file):
        raise Exception('Interfaces info is required to generate '
                        'union types containers')
    with open(options.interfaces_info_file) as interfaces_info_file:
        interfaces_info = pickle.load(interfaces_info_file)
    with open(options.component_info_file) as component_info_file:
        component_info = pickle.load(component_info_file)
    generator = CodeGeneratorUnionType(
        interfaces_info,
        options.cache_directory,
        options.output_directory,
        options.target_component)
    output_code_list = generator.generate_code(component_info['union_types'])
    for output_path, output_code in output_code_list:
        write_file(output_code, output_path, options.write_file_only_if_changed)
Example No. 51
def generateFullMap(dimension, tile_size):
    """Generates a fully open map with walls bordering it."""
    map_data = []
    row_max = dimension["width"]//tile_size["width"]
    column_max = dimension["height"]//tile_size["height"]
    for row_index in range(row_max):
        row_data = []
        for column_index in range(column_max):
            if row_index == 0 or row_index == row_max - 1:
                row_data.append("0")
            elif column_index == 0 or column_index == column_max - 1:
                row_data.append("0")
            else:
                row_data.append("1")
        map_data.append("".join(row_data))
    write_file("__new__.map", "\n".join(map_data))
    print "Map generated to __new__.map"
Example No. 52
def write_event_interfaces_file(event_idl_files, destination_filename, suffix):
    def interface_line(full_path):
        relative_path_local, _ = os.path.splitext(os.path.relpath(full_path, source_dir))
        relative_path_posix = relative_path_local.replace(os.sep, posixpath.sep)

        idl_file_contents = get_file_contents(full_path)
        extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
        extended_attributes_list = [
            (name, extended_attributes[name])
            for name in EXPORTED_EXTENDED_ATTRIBUTES
            if name in extended_attributes]

        return (relative_path_posix, extended_attributes_list)

    lines = [
        '{',
        'metadata: {',
        '  namespace: "Event",'
    ]
    if suffix:
        lines.append('  suffix: "' + suffix + '",')
        lines.append('  export: "%s_EXPORT",' % suffix.upper())
    else:
        lines.append('  export: "CORE_EXPORT",')
    lines.extend([
        '},',
        'data: ['
    ])
    interface_lines = [interface_line(event_idl_file)
                       for event_idl_file in event_idl_files]
    interface_lines.sort()
    for name, attributes in interface_lines:
        lines.extend([
            '  {',
            '    name: "%s",' % name
        ])
        for param, value in attributes:
            if param == 'RuntimeEnabled':
                value += 'Enabled'
            lines.append('    %s: "%s",' % (param, value))
        lines.append('  },')
    lines.extend([
        ']',
        '}'
    ])
    write_file('\n'.join(lines), destination_filename)
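Note: for a single hypothetical modules-side event with suffix='Modules' and a RuntimeEnabled=Gamepad attribute, the function above would emit roughly the following (names invented for illustration):

{
metadata: {
  namespace: "Event",
  suffix: "Modules",
  export: "MODULES_EXPORT",
},
data: [
  {
    name: "modules/gamepad/GamepadEvent",
    RuntimeEnabled: "GamepadEnabled",
  },
],
}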
Example No. 53
def write_global_constructors_partial_interface(
    interface_name, idl_filename, constructor_attributes_list, only_if_changed
):
    # FIXME: replace this with a simple Jinja template
    lines = (
        ["partial interface %s {\n" % interface_name]
        + [
            "    %s;\n" % constructor_attribute
            # FIXME: sort by interface name (not first by extended attributes)
            for constructor_attribute in sorted(constructor_attributes_list)
        ]
        + ["};\n"]
    )
    write_file("".join(lines), idl_filename, only_if_changed)
    header_filename = os.path.splitext(idl_filename)[0] + ".h"
    idl_basename = os.path.basename(idl_filename)
    write_file(HEADER_FORMAT.format(idl_basename=idl_basename), header_filename, only_if_changed)
Example No. 54
 def run(self):
     # Initialize the algorithm
     print 'initialize the algorithm ...'
     new2_online_ope = New2Online_OPE.New2OnlineOPE(
         self.settings['num_docs'], self.settings['num_terms'],
         self.settings['num_topics'], self.settings['alpha'],
         self.settings['eta'], self.settings['tau0'],
         self.settings['kappa'], self.settings['iter_infer'])
     # Start
     print 'start!!!'
     i = 0
     while i < self.settings['iter_train']:
         i += 1
         print '\n***iter_train:%d***\n' % (i)
         datafp = open(self.train_file, 'r')
         j = 0
         while True:
             j += 1
             (wordids, wordcts) = utilities.read_minibatch_list_frequencies(
                 datafp, self.settings['batch_size'])
             # Stop condition
             if len(wordids) == 0:
                 break
             print '---num_minibatch:%d---' % (j)
             (time_e, time_m, theta) = new2_online_ope.static_online(
                 wordids, wordcts)
             # Compute sparsity
             sparsity = utilities.compute_sparsity(theta, theta.shape[0],
                                                   theta.shape[1], 't')
             # Compute perplexities
             LD2 = utilities.compute_perplexities_vb(
                 new2_online_ope._lambda, self.settings['alpha'],
                 self.settings['eta'], self.settings['iter_infer'],
                 self.test_data)
             # Search top words of each topic
             list_tops = utilities.list_top(new2_online_ope._lambda,
                                            self.tops)
             # Write files
             utilities.write_file(i, j, new2_online_ope._lambda, time_e,
                                  time_m, theta, sparsity, LD2, list_tops,
                                  self.tops, self.model_folder)
         datafp.close()
     # Write settings
     print 'write setting ...'
     file_name = '%s/setting.txt' % (self.model_folder)
     utilities.write_setting(self.settings, file_name)
     # Write final model to file
     print 'write final model ...'
     file_name = '%s/beta_final.dat' % (self.model_folder)
     utilities.write_topics(new2_online_ope._lambda, file_name)
     # Finish
     print 'done!!!'
Example No. 55
def baseline(training_file, submission_file, output_file):
    data = utilities.read_file(training_file)
    sub_data = utilities.read_file(submission_file, True)

    print 'Calculating hour averages...'
    hour_avg_by_chunk = utilities.get_hour_avg_by_chunk(data)
    hour_avg = utilities.get_hour_avg(data)

    print 'Filling submission file...'
    for i in range(1, len(sub_data)):
        chunk_id = sub_data[i][1]
        hour = sub_data[i][3]
        for j in range(5, len(sub_data[i])):
            if sub_data[i][j] == '0':
                if chunk_id in hour_avg_by_chunk:
                    sub_data[i][j] = hour_avg_by_chunk[chunk_id][hour][j - 5]
                else:
                    sub_data[i][j] = hour_avg[hour][j - 5]

    utilities.write_file(output_file, sub_data)
Example No. 56
def main():
    options = parse_options()

    idl_file_names = read_idl_files_list_from_file(options.idl_files_list, is_gyp_format=options.gyp_format_list)

    meta_data_list = extract_meta_data(idl_file_names)
    interface_names = ['V8%sPartial' % meta_data['name']
                       for meta_data in meta_data_list]
    interface_names.sort()

    includes = ['#include "bindings/modules/v8/%s.h"' % interface_name
                for interface_name in interface_names]
    initialize_calls = ['    %s::initialize();' % interface_name
                        for interface_name in interface_names]

    content = _INIT_PARTIAL_INTERFACE % (
        _COPYRIGHT,
        '\n'.join(includes),
        '\n'.join(initialize_calls))

    write_file(content, options.output)
Example No. 57
def write_event_interfaces_file(interfaces_info, destination_filename, only_if_changed):
    # Event interfaces are interfaces that inherit from Event, and Event itself
    event_interfaces = set(
        interface_name
        for interface_name, interface_info in interfaces_info.iteritems()
        if (interface_name == 'Event' or
            (interface_info['ancestors'] and
             interface_info['ancestors'][-1] == 'Event')))

    def extended_attribute_string(name, value):
        if name == 'RuntimeEnabled':
            value += 'Enabled'
        return name + '=' + value

    def interface_line(interface_name):
        full_path = interfaces_info[interface_name]['full_path']

        relative_path_local, _ = os.path.splitext(os.path.relpath(full_path, source_dir))
        relative_path_posix = relative_path_local.replace(os.sep, posixpath.sep)

        idl_file_contents = get_file_contents(full_path)
        extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
        extended_attributes_list = [
            extended_attribute_string(name, extended_attributes[name])
            for name in EXPORTED_EXTENDED_ATTRIBUTES
            if name in extended_attributes]

        return '%s %s\n' % (relative_path_posix,
                            ', '.join(extended_attributes_list))

    lines = ['namespace="Event"\n',
             '\n']
    interface_lines = [interface_line(interface_name)
                       for interface_name in event_interfaces]
    interface_lines.sort()
    lines.extend(interface_lines)
    write_file(''.join(lines), destination_filename, only_if_changed)
Example No. 58
def avg(training_file, submission_file, output_file):
    data = utilities.read_file(training_file)

    train_data, cv_data = preprocess.get_train_cv_data_by_chunk(data)
    targets_train, targets_cv = preprocess.get_train_cv_targets(
        train_data, cv_data)

    (chunk_avg, hour_avg_by_chunk, weekday_avg_by_chunk,
     hour_avg, weekday_avg) = feature_extraction.get_avg_maps(train_data)

    x_train_all, x_cv_all = feature_extraction.get_x_by_avg(
        train_data, cv_data, chunk_avg, hour_avg_by_chunk,
        weekday_avg_by_chunk, hour_avg, weekday_avg)

    clfs = regression.linear_regression(
        x_train_all, x_cv_all, targets_train, targets_cv)
    # The random-forest models overwrite the linear ones; only they are
    # used for the predictions below.
    clfs = regression.random_forest(
        x_train_all, x_cv_all, targets_train, targets_cv)

    print 'Filling submission file...'
    sub_data = utilities.read_file(submission_file, True)
    for i in range(1, len(sub_data)):
        chunk_id = sub_data[i][1]
        hour = sub_data[i][3]
        weekday = ''
        all_features = feature_extraction.get_features(
            chunk_id, weekday, hour, chunk_avg, hour_avg_by_chunk,
            weekday_avg_by_chunk, hour_avg, weekday_avg)

        for j in range(5, len(sub_data[i])):
            if sub_data[i][j] == '0':
                feature = []
                for f in all_features:
                    feature.append(f[j - 5])
                sub_data[i][j] = clfs[j - 5].predict([feature])[0]

    utilities.write_file(output_file, sub_data)
Example No. 59
 def generate_dart_blink_and_write(self, global_entries, output_filename):
     output_code = self.code_generator.generate_dart_blink(global_entries)
     write_file(output_code, output_filename, self.only_if_changed)