def __init__(self, database):
    """Discover all WebHandler subclasses, register them by URL, and
    create the Flask application that will serve them.

    NOTE(review): ``cache`` is not a parameter here — it is presumably a
    module-level object in this file; confirm it is defined before any
    instance is constructed.
    """
    handler_classes = get_all_classes(["web_handlers.py"], WebHandler)
    instances = (cls(database, cache) for cls in handler_classes)
    self._web_handlers = {handler.url: handler for handler in instances}
    self._web_server = Flask("web_server")
    self._register(database)
def __init__(self, database):
    """Collect every WebHandler subclass declared in ``web_handlers.py``,
    index the instances by their URL, then build and wire up the Flask
    web server.
    """
    discovered = get_all_classes(["web_handlers.py"], WebHandler)
    self._web_handlers = {h.url: h for h in (cls(database) for cls in discovered)}
    self._web_server = Flask("web_server")
    self._register(database)
def __init__(self, database):
    """Keep a handle on the database and its ``article`` collection, and
    build a flag-indexed registry of every DatabaseWriter subclass found
    in ``database_writers.py``.
    """
    self._database = database
    self._articles = database.get_collection("article")
    writer_classes = get_all_classes(["database_writers.py"], DatabaseWriter)
    self._database_writers = {w.flag: w for w in (cls(database) for cls in writer_classes)}
    # No file selected yet; set by later calls.
    self._file_path = ""
def __init__(self):
    """Create the markdown parser and a flag-indexed registry of every
    MetaDataParser subclass declared in ``meta_parsers.py``.
    """
    self._markdown_parser = MarkdownParser()
    parser_classes = get_all_classes(["meta_parsers.py"], MetaDataParser)
    self._meta_parsers = {parser.flag: parser for parser in (cls() for cls in parser_classes)}
    # No file selected yet; set by later calls.
    self._file_path = ""
def train_model(train_file, classifier, batch_size):
    """Incrementally train *classifier* on fixed-size batches read from a file.

    Each line of *train_file* is a Python-literal record (parsed with
    ``literal_eval``); the label and description-vector positions within a
    record come from ``get_column_position(form=1)``.  After every full batch
    the model is updated via ``update_model`` and progress is logged.

    Args:
        train_file: path to the training file, one literal record per line.
        classifier: estimator accepted by ``update_model`` / ``.score``.
        batch_size: number of records accumulated per model update.

    BUG FIX: the original remainder check was ``line_count % batch_size > 1``,
    which silently dropped a final partial batch of exactly one record; any
    non-empty remainder must be trained on, hence ``> 0``.
    """
    all_classes = get_all_classes(train_file)
    col_label, col_description = get_column_position(form=1)
    with open(train_file, "r") as input_file:
        # Peek at the first record only to learn the feature-vector width.
        first_entry = literal_eval(input_file.readline().strip())
        vector_size = len(first_entry[col_description])
        input_file.seek(0)  # reset the file pointer after reading first line
        y_train = []
        X_train = np.full(shape=(batch_size, vector_size), fill_value=0, dtype=float)
        line_count = 0
        idx = 0  # next free row within the current batch
        start_time = time.time()
        for line in input_file:
            line_count += 1
            entry = literal_eval(line)
            y_train.append(entry[col_label])
            X_train[idx, :] = entry[col_description]
            idx += 1
            if line_count % batch_size == 0:
                update_model(classifier, X_train, y_train, all_classes)
                # Single-string print: identical output on Py2 and Py3.
                print("training model for lines =  %d time= %d s"
                      % (line_count, int(time.time() - start_time)))
                print("precision score %s" % classifier.score(X_train, y_train))
                # Start a fresh batch.
                del y_train, X_train
                y_train = []
                X_train = np.full(shape=(batch_size, vector_size), fill_value=0, dtype=float)
                idx = 0
        if line_count % batch_size > 0:
            # Train on the trailing partial batch (truncate unused rows).
            X_train = X_train[:idx, :]
            update_model(classifier, X_train, y_train, all_classes)
            print("training model for lines =  %d time= %d s"
                  % (line_count, int(time.time() - start_time)))
            print("precision score %s" % classifier.score(X_train, y_train))
def train_model(train_file, classifier, batch_size):
    """Incrementally train *classifier* on fixed-size batches read from a file.

    Each line of *train_file* is a Python-literal record (parsed with
    ``literal_eval``); the label and description-vector positions within a
    record come from ``get_column_position(form=1)``.  After every full batch
    the model is updated via ``update_model`` and progress is logged.

    Args:
        train_file: path to the training file, one literal record per line.
        classifier: estimator accepted by ``update_model`` / ``.score``.
        batch_size: number of records accumulated per model update.

    BUG FIX: the original remainder check was ``line_count % batch_size > 1``,
    which silently dropped a final partial batch of exactly one record; any
    non-empty remainder must be trained on, hence ``> 0``.
    """
    all_classes = get_all_classes(train_file)
    col_label, col_description = get_column_position(form=1)
    with open(train_file, "r") as input_file:
        # Peek at the first record only to learn the feature-vector width.
        first_entry = literal_eval(input_file.readline().strip())
        vector_size = len(first_entry[col_description])
        input_file.seek(0)  # reset the file pointer after reading first line
        y_train = []
        X_train = np.full(shape=(batch_size, vector_size), fill_value=0, dtype=float)
        line_count = 0
        idx = 0  # next free row within the current batch
        start_time = time.time()
        for line in input_file:
            line_count += 1
            entry = literal_eval(line)
            y_train.append(entry[col_label])
            X_train[idx, :] = entry[col_description]
            idx += 1
            if line_count % batch_size == 0:
                update_model(classifier, X_train, y_train, all_classes)
                # Single-string print: identical output on Py2 and Py3.
                print("training model for lines =  %d time= %d s"
                      % (line_count, int(time.time() - start_time)))
                print("precision score %s" % classifier.score(X_train, y_train))
                # Start a fresh batch.
                del y_train, X_train
                y_train = []
                X_train = np.full(shape=(batch_size, vector_size), fill_value=0, dtype=float)
                idx = 0
        if line_count % batch_size > 0:
            # Train on the trailing partial batch (truncate unused rows).
            X_train = X_train[:idx, :]
            update_model(classifier, X_train, y_train, all_classes)
            print("training model for lines =  %d time= %d s"
                  % (line_count, int(time.time() - start_time)))
            print("precision score %s" % classifier.score(X_train, y_train))
def __init__(self):
    """Build a flag-indexed registry of every WebCache subclass declared
    in ``web_caches.py``.
    """
    cache_classes = get_all_classes(["web_caches.py"], WebCache)
    self._web_caches = {cache.flag: cache for cache in (cls() for cls in cache_classes)}
def __init__(self):
    """Build a flag-indexed registry of every SlugWrapper subclass
    declared in ``slug_wrappers.py``.
    """
    wrapper_classes = get_all_classes(["slug_wrappers.py"], SlugWrapper)
    self._slug_wrappers = {w.flag: w for w in (cls() for cls in wrapper_classes)}