Example #1
def extract_features(parsed_file, unified_file, cube_file, cube_context_file, freq_data_file, stop_word_file, freq_pattern_file, base_dir, total_docs, filtered_cell_str):
  print(filtered_cell_str)
  freq_patterns = load_freq_patterns(freq_pattern_file)
  freq_data = load_freq(freq_data_file)
  cubes = load_cube(cube_file)
  contexts = load_context(cube_context_file)
  unified_list = load_unified_list(unified_file)
  print(contexts.keys())
  # full document id list and phrase counts aggregated over the whole corpus
  all_docs = [i for i in range(total_docs)]
  total_cnt = agg_phrase_cnt(freq_data, all_docs)

  print(sum(total_cnt.values()))
 
  #extract the features of phrases in each cube
  phrase_feature_all = {}
  idx = 0
  for att in cubes:
    if att != filtered_cell_str:  # only process the requested cell, e.g. 'Topic|Sports;Location|Illinois;'
      continue
    print("start processing " + att)
    selected_doc = cubes[att]
    selected_context = contexts[att]
    feature_extractor = FeatureExtractor(parsed_file, selected_doc, selected_context, freq_data, stop_word_file, freq_patterns, total_cnt, unified_list)
    phrase_features = feature_extractor.extract_features()
    for phrase in phrase_features:
      norm_phrase = normalize(phrase)
      # append the phrase's entry from the unified list, then key the feature vector by cell + normalized phrase
      phrase_features[phrase].append(unified_list[norm_phrase])
      cell_phrase = "{0}{1}".format(att.replace('|', '_').replace(';', '_').replace(' ', '_').lower(), norm_phrase)
      phrase_feature_all[cell_phrase] = phrase_features[phrase]

  file_name = "{0}/{1}".format(base_dir, "cells.fea")
  save_features(file_name, phrase_feature_all)
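
Example #1 relies on agg_phrase_cnt for the corpus-wide counts, but its implementation is not shown here. A minimal sketch, assuming freq_data maps each document id to a {phrase: count} dict; that shape is an assumption inferred from the calls above, not confirmed by the source:

def agg_phrase_cnt(freq_data, doc_ids):
  # sum per-document phrase counts over the selected documents
  # (assumes freq_data[doc_id] is a dict of phrase -> count)
  total_cnt = {}
  for doc_id in doc_ids:
    for phrase, cnt in freq_data.get(doc_id, {}).items():
      total_cnt[phrase] = total_cnt.get(phrase, 0) + cnt
  return total_cnt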
Example #2
def extract_features(parsed_file, cube_file, freq_data_file, stop_word_file, freq_pattern_file, base_dir, total_docs):
  freq_patterns = load_freq_patterns(freq_pattern_file)
  freq_data = load_freq(freq_data_file)
  cubes = load_cube(cube_file)
  # add a pseudo-cell 'all' covering every document
  cubes['all'] = [i for i in range(total_docs)]

  #extract the features of phrases in each cube
  for att in cubes:
    selected_doc = cubes[att]
    feature_extractor = FeatureExtractor(parsed_file, selected_doc, freq_data, stop_word_file, freq_patterns)
    phrase_features = feature_extractor.extract_features()

    # one output file per cube cell; sanitize the cell name for use as a file name
    file_name = "{0}/{1}.fea".format(base_dir, att.replace('|', '_').replace(';', '_').replace(' ', '_'))
    save_features(file_name, phrase_features)
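
A hypothetical call to the Example #2 variant; the file paths and document count below are placeholders for illustration, not values from the source:

if __name__ == '__main__':
  # placeholder inputs; replace with the real parsed corpus, cube, and frequency files
  extract_features(
      parsed_file='data/corpus.parsed',
      cube_file='data/cube.txt',
      freq_data_file='data/freq_data.txt',
      stop_word_file='data/stopwords.txt',
      freq_pattern_file='data/freq_patterns.txt',
      base_dir='output',
      total_docs=1000)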