    sent_doc_path = output_path + "/sent_doc.json"

    logging.info("Saving tokenization information...")
    with open(pos_path, 'w') as file_out:
        json.dump(pos_sentences, file_out)
    with open(doc_sent_path, 'w') as file_out:
        json.dump(document_sentences, file_out)
    with open(sent_doc_path, 'w') as file_out:
        json.dump(sentence_documents, file_out)


def parse():
    """Handle all command line argument parsing.

    Returns the parsed args object from the parser
    """
    parser = argparse.ArgumentParser()
    parser = utility.add_common_parsing(parser)
    cmd_args = parser.parse_args()
    return cmd_args


if __name__ == "__main__":
    ARGS = parse()
    utility.init_logging(ARGS.log_path)
    input_path, output_path = utility.fix_paths(ARGS.experiment_path, ARGS.input_path, ARGS.output_path)
    tokenize(input_path, output_path, ARGS.count, ARGS.overwrite)
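
# --- Sketch (not part of the original script): how a downstream step might read
# back the sentence -> document mapping saved above. Only the "sent_doc.json"
# filename is visible in this file; load_sentence_documents is a hypothetical
# helper name introduced for illustration.
def load_sentence_documents(output_path):
    """Load the sentence -> document mapping written by tokenize()."""
    sent_doc_path = output_path + "/sent_doc.json"
    with open(sent_doc_path) as file_in:
        return json.load(file_in)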
import logging
import sys

import utility


def test():
    logging.info("Hello world from the test function")


if __name__ == "__main__":
    utility.init_logging(sys.argv[1])
    test()
    # ARGS = parse()
    # docify(ARGS.input_folder, ARGS.output_path, ARGS.count, ARGS.column, ARGS.overwrite)
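
# Example invocation (sketch; the module filename shown is an assumption):
#     python test_logging.py logs/test.log
# The first command line argument is the log file path passed to
# utility.init_logging before test() runs.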