def __init__(self, filename):
    """Open a (possibly compressed) corpus file and parse its header line.

    initialize() presumably binds the constructor arguments as instance
    attributes. The first line of the file is a key/value header which
    must contain the sentence count under key 'S'.

    Raises AssertionError if the header lacks the 'S' key.
    """
    initialize(self, locals())
    self.reader = iter(possibly_compressed_file(filename))
    # Use the next() builtin rather than the Python 2-only .next()
    # method (the sibling reader class in this file already does this).
    self.header = parse_kv_list(next(self.reader))
    assert 'S' in self.header
    self.num_sentences = self.header['S']
def __init__(self, filename):
    """Open a (possibly compressed) corpus file and read its header.

    initialize() presumably stores the constructor arguments as
    attributes; the header line is expected to carry the sentence
    count under key 'S'.
    """
    initialize(self, locals())
    # Intermediates are introduced only AFTER initialize(), so they do
    # not leak into the locals() snapshot it receives.
    source = possibly_compressed_file(filename)
    self.reader = iter(source)
    header_line = next(self.reader)
    self.header = parse_kv_list(header_line)
    assert 'S' in self.header
    self.num_sentences = self.header['S']
def __init__(self, corpus_filename1, corpus_filename2, mapping1, mapping2,
             merged_corpus_filename, merged_mapping_filename, verbose=True,
             warn_only=False):
    """Merge two corpora and their feature mappings into one.

    initialize() presumably binds every argument as an instance
    attribute; the three merge phases then run in order, reading and
    writing the filenames given above.
    """
    initialize(self, locals())
    # Phase 1: load both input mappings.
    self.read_mappings()
    # Phase 2: build the combined mapping.
    self.make_merged_mapping()
    # Phase 3: rewrite feature values against the combined mapping.
    self.make_merged_feature_values()
def __init__(self, file_id, id, event_type, event_term, args):
    """Record an event: its type, trigger term, and arguments.

    file_id, when given, is presumably a path-like object; it is
    reduced to its basename and coerced to int when numeric. No new
    locals may be introduced before initialize(), since it snapshots
    locals() to set attributes.
    """
    if file_id is not None:
        file_id = file_id.basename()
        try:
            file_id = int(file_id)
        except ValueError:
            pass  # non-numeric basenames are kept as strings
    initialize(self, locals())
    # Identifying fields are suppressed from the repr output.
    self._not_in_repr = ['file_id', 'id']
def __init__(self, file_id, *terms):
    """Group a variable number of terms under a synthetic unique ID.

    file_id, when given, is presumably a path-like object; it is
    reduced to its basename and coerced to int when numeric. The
    (file_id, terms) pair then serves as a unique identifier. No new
    locals may be introduced before initialize(), since it snapshots
    locals() to set attributes.
    """
    if file_id is not None:
        file_id = file_id.basename()
        try:
            file_id = int(file_id)
        except ValueError:
            pass  # non-numeric basenames are kept as strings
    # this is so we have some unique ID
    id = (file_id, terms)
    initialize(self, locals())
    # Identifying fields are suppressed from the repr output.
    self._not_in_repr = ['file_id', 'id']
def __init__(self, file_id, id, term_type, start_offset, end_offset, name,
             given):
    """Record a typed term spanning [start_offset, end_offset).

    Offsets are coerced to int up front. file_id, when given, is
    presumably a path-like object; it is reduced to its basename and
    coerced to int when numeric. No new locals may be introduced
    before initialize(), since it snapshots locals() to set
    attributes.
    """
    start_offset = int(start_offset)
    end_offset = int(end_offset)
    if file_id is not None:
        file_id = file_id.basename()
        try:
            file_id = int(file_id)
        except ValueError:
            pass  # non-numeric basenames are kept as strings
    initialize(self, locals())
    # Identifying fields are suppressed from the repr output.
    self._not_in_repr = ['file_id', 'id']
    # Expansion state flag; set elsewhere once this term is expanded.
    self._already_expanded = False
def __init__(self, gold_brackets, parses, index):
    """Bind the gold brackets, candidate parses, and sentence index.

    initialize() presumably stores each argument as an instance
    attribute; no further setup is needed.
    """
    initialize(self, locals())
def __init__(self, proposed_brackets, matched_brackets, features):
    """Record bracket counts and a feature map for one parse.

    The feature map is rewrapped so that missing keys read as 0.
    This must stay a single rebinding statement — introducing a
    temporary name here would leak into the locals() snapshot that
    initialize() presumably uses to set attributes.
    """
    features = defaultdict(int, features)
    initialize(self, locals())
def __init__(self, proposed_brackets, matched_brackets, features):
    """Record bracket counts and a feature map for one parse.

    initialize() presumably stores each argument as an instance
    attribute. NOTE(review): unlike the same-signature sibling
    initializer elsewhere in this file, features is stored exactly as
    passed (not rewrapped as a defaultdict) — confirm this asymmetry
    is intentional.
    """
    initialize(self, locals())