def main():
    options, args = parse_options()

    # File paths of input IDL files are passed in a file, which is generated at
    # GN time. It is OK because the target IDL files are static.
    idl_files = read_file_to_list(options.idl_files_list)

    # Output IDL files (to generate) are passed at the command line, since
    # these are in the build directory, which is determined at build time, not
    # GN time.
    # These are passed as pairs of GlobalObjectName, global_object.idl
    interface_name_idl_filename = [(args[i], args[i + 1])
                                   for i in range(0, len(args), 2)]

    interface_name_to_global_names.update(read_pickle_file(options.global_objects_file))

    for idl_filename in idl_files:
        record_global_constructors(idl_filename)

    # Check for [Exposed] / [Global] mismatch.
    known_global_names = EXPOSED_EXECUTION_CONTEXT_METHOD.keys()
    exposed_global_names = frozenset(global_name_to_constructors)
    if not exposed_global_names.issubset(known_global_names):
        unknown_global_names = exposed_global_names.difference(known_global_names)
        raise ValueError('The following global names were used in '
                         '[Exposed=xxx] but do not match any global names: %s'
                         % list(unknown_global_names))

    # Write partial interfaces containing constructor attributes for each
    # global interface.
    for interface_name, idl_filename in interface_name_idl_filename:
        constructors = interface_name_to_constructors(interface_name)
        write_global_constructors_partial_interface(
            interface_name, idl_filename, constructors)
Example 2
def main():
    options, args = parse_options()

    # Input IDL files are passed in a file, due to OS command line length
    # limits. This file is generated at GYP time, which is OK because the
    # input files are static.
    idl_files = read_file_to_list(options.idl_files_list)

    # Output IDL files (to generate) are passed at the command line, since
    # these are in the build directory, which is determined at build time, not
    # GYP time.
    # These are passed as pairs of GlobalObjectName, GlobalObject.idl
    interface_name_idl_filename = [(args[i], args[i + 1])
                                   for i in range(0, len(args), 2)]

    interface_name_to_global_names.update(read_pickle_file(options.global_objects_file))

    for idl_filename in idl_files:
        record_global_constructors(idl_filename)

    # Check for [Exposed] / [Global] mismatch.
    known_global_names = EXPOSED_EXECUTION_CONTEXT_METHOD.keys()
    exposed_global_names = frozenset(global_name_to_constructors)
    if not exposed_global_names.issubset(known_global_names):
        unknown_global_names = exposed_global_names.difference(known_global_names)
        raise ValueError('The following global names were used in '
                         '[Exposed=xxx] but do not match any [Global] / '
                         '[PrimaryGlobal] interface: %s'
                         % list(unknown_global_names))

    # Write partial interfaces containing constructor attributes for each
    # global interface.
    for interface_name, idl_filename in interface_name_idl_filename:
        constructors = interface_name_to_constructors(interface_name)
        write_global_constructors_partial_interface(
            interface_name, idl_filename, constructors)
Example 3
def main():
    options, _ = parse_options()

    # IDL files are passed in a file, due to OS command line length limits
    idl_files = read_idl_files_list_from_file(options.idl_files_list)

    # Compute information for individual files
    # Information is stored in global variables interfaces_info and
    # partial_interface_files.
    info_collector = InterfaceInfoCollector(options.cache_directory)
    for idl_filename in idl_files:
        info_collector.collect_info(idl_filename)

    write_pickle_file(options.interfaces_info_file,
                      info_collector.get_info_as_dict())
    runtime_enabled_features = read_pickle_file(
        options.runtime_enabled_features_file)
    write_pickle_file(
        options.component_info_file,
        info_collector.get_component_info_as_dict(runtime_enabled_features))
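These snippets all lean on small project-local pickle helpers. As a point of reference, here is a minimal sketch of read_pickle_file and write_pickle_file, assuming they are thin wrappers around Python's pickle module (the real helpers in each project may add caching or format checks):

import pickle


def read_pickle_file(pickle_filename):
    # Load a previously pickled object (e.g. the interfaces-info dict).
    with open(pickle_filename, 'rb') as handle:
        return pickle.load(handle)


def write_pickle_file(pickle_filename, data):
    # Serialize |data| so a later build step can read it back.
    with open(pickle_filename, 'wb') as handle:
        pickle.dump(data, handle)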
Example 4
def main():
    options, args = parse_options()

    # Input IDL files are passed in a file, due to OS command line length
    # limits. This file is generated at GYP time, which is OK because the
    # input files are static.
    idl_files = read_file_to_list(options.idl_files_list)

    # Output IDL files (to generate) are passed at the command line, since
    # these are in the build directory, which is determined at build time, not
    # GYP time.
    # These are passed as pairs of GlobalObjectName, GlobalObject.idl
    interface_name_idl_filename = [(args[i], args[i + 1])
                                   for i in range(0, len(args), 2)]

    interface_name_to_global_names.update(read_pickle_file(options.global_objects_file))

    for idl_filename in idl_files:
        record_global_constructors(idl_filename)

    # Check for [Exposed] / [Global] mismatch.
    known_global_names = EXPOSED_EXECUTION_CONTEXT_METHOD.keys()
    exposed_global_names = frozenset(global_name_to_constructors)
    if not exposed_global_names.issubset(known_global_names):
        unknown_global_names = exposed_global_names.difference(known_global_names)
        raise ValueError('The following global names were used in '
                         '[Exposed=xxx] but do not match any [Global] / '
                         '[PrimaryGlobal] interface: %s'
                         % list(unknown_global_names))

    # Write partial interfaces containing constructor attributes for each
    # global interface.
    for interface_name, idl_filename in interface_name_idl_filename:
        # Work around gyp's path relativization for this parameter that is not
        # a path, but gets interpreted as such.
        interface_name = os.path.basename(interface_name)
        constructors = interface_name_to_constructors(interface_name)
        write_global_constructors_partial_interface(
            interface_name, idl_filename, constructors)
Example 5
import os

from pymongo import MongoClient
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras.models import load_model
from underthesea import pos_tag, word_tokenize

from utilities import filter_stopword, read_pickle_file, BRANDS

BRAND = 'brand'
GENDER = 'gender'
COLOR = 'color'
SIZE = 'size'
MEN = 'nam'
WOMEN = 'nữ'
MONGODB_URI = os.getenv('MONGODB_URI')

client = MongoClient(MONGODB_URI)
collection = client['nlp']['test']

intents = read_pickle_file('data/intents.pkl')
# Load trained model
model = load_model('saved_models/model_1')
# Load bag of words and tags
bag_of_words = read_pickle_file('data/words.pkl')
tags = read_pickle_file('data/tags.pkl')

# Initialize LabelBinarizer
data_lb = LabelBinarizer()
data_lb.fit(bag_of_words)
label_lb = LabelBinarizer()
label_lb.fit(tags)


def classify_question(tokens):
    temp = [
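Example 5 is cut off inside classify_question. Purely as a hedged sketch of how the objects loaded above (model, data_lb, label_lb) are typically combined at inference time, and not the project's actual implementation (it assumes the training labels were binarized with the same label_lb):

import numpy as np


def predict_tag(question):
    # Hypothetical helper: tokenize, encode against the fitted bag of
    # words, and map the model's prediction back to a tag.
    tokens = word_tokenize(question)
    one_hot_rows = data_lb.transform(tokens)       # one row per known token
    vector = np.sum(one_hot_rows, axis=0)          # multi-hot count vector
    probabilities = model.predict(np.array([vector]))
    return label_lb.inverse_transform(probabilities)[0]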
Example 6
from underthesea import word_tokenize
import time
from functools import reduce
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras.layers import Dense, Flatten, Dropout
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.models import Sequential
from utilities import read_pickle_file

if __name__ == "__main__":
    data = []
    labels = read_pickle_file('data/labels.pkl')
    documents = read_pickle_file('data/documents.pkl')
    bag_of_words = read_pickle_file('data/words.pkl')
    tags = read_pickle_file('data/tags.pkl')

    data_lb = LabelBinarizer()
    data_lb.fit(bag_of_words)
    for doc in documents:
        temp = data_lb.transform(doc)
        temp = reduce(np.add, list(temp))
        data.append(temp)
    data = np.array(data)
    labels = np.array(labels)

    (train_data, test_data, train_labels,
     test_labels) = train_test_split(data, labels, test_size=0.2)

    # train_data = data
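The loop above converts each tokenized document into a fixed-length count vector over the vocabulary: LabelBinarizer produces one one-hot row per token, and reduce(np.add, ...) sums the rows. A small self-contained illustration of that encoding, using a hypothetical four-word vocabulary:

from functools import reduce

import numpy as np
from sklearn.preprocessing import LabelBinarizer

vocabulary = ['giá', 'màu', 'size', 'áo']      # hypothetical bag of words
encoder = LabelBinarizer()
encoder.fit(vocabulary)

document = ['áo', 'màu', 'áo']                 # a tokenized document
one_hot_rows = encoder.transform(document)     # one one-hot row per token
vector = reduce(np.add, list(one_hot_rows))    # summed into per-word counts
# vector now holds one count per vocabulary word, e.g. 2 for 'áo'.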