def search(query, page=1, size=10):
    """Run a search for *query* and return one page of collected results.

    Args:
        query: Raw query string entered by the user.
        page: 1-based page number of the result slice to return.
        size: Number of results per page.

    Returns:
        Whatever ``collect`` produces for the requested slice of matching
        documents (presumably a rendered result set — confirm with caller).
    """
    # time.clock() was deprecated in Python 3.3 and removed in 3.8;
    # perf_counter() is the documented replacement for interval timing.
    # NOTE(review): if ``collect`` takes a second timestamp to compute
    # elapsed time, its clock source must be switched to match.
    lasttime = time.perf_counter()
    # Record the raw query for the autocomplete/suggestion index.
    suggestor.add_query(query)
    tokens = stop_and_stem_seq(parse(query, ''))
    result = get_doc_list(query, tokens)
    # Slice out the requested page; pass the total hit count and the
    # start time through so the caller can report totals and latency.
    result = collect(result[(page - 1) * size:page * size],
                     tokens, len(result), lasttime)
    return result
def search(query, page=1, size=10):
    """Search for *query* and return one page of collected results.

    Args:
        query: Raw user query string.
        page: 1-based page number to return.
        size: Results per page.

    Returns:
        The value produced by ``collect`` for the requested page slice
        (result shape is defined by ``collect`` — verify against caller).
    """
    # Bug fix: time.clock() was removed in Python 3.8; use
    # perf_counter() instead. NOTE(review): any paired timestamp taken
    # inside ``collect`` must use the same clock for elapsed-time math.
    lasttime = time.perf_counter()
    # Feed the query into the suggestion engine before searching.
    suggestor.add_query(query)
    tokens = stop_and_stem_seq(parse(query, ''))
    result = get_doc_list(query, tokens)
    # Hand ``collect`` only the requested page, plus the total match
    # count and the start timestamp for reporting.
    page_slice = result[(page - 1) * size:page * size]
    result = collect(page_slice, tokens, len(result), lasttime)
    return result
def run(self, command):
    """Parse *command*, echo the parse, then dispatch it to the API.

    Args:
        command: Raw command string to interpret.

    Side effects:
        Prints the parsed form and the API response to stdout.
    """
    parsed = analysis.parse(command)
    # Converted Python-2 ``print x`` statements to ``print(x)`` calls:
    # the rest of this file already uses the print() function, and the
    # statement form is a SyntaxError under Python 3. Single-argument
    # print() behaves identically on Python 2 as well.
    print(parsed)
    print(self._call_api(parsed))
# NOTE(review): this span is a whitespace-mangled fragment of a top-level
# script. It (1) loops reading error-category names from stdin until a
# blank entry, adding ``email`` to ``emails[error]`` (``error``, ``email``
# and ``emails`` are initialized before this view — confirm upstream);
# (2) zeroes per-address counters in ``num_emails``; (3) calls into the
# ``analysis`` module to populate/parse the datasets and rank errors and
# hourly timestamps; (4) prints an email summary and the error/hour
# rankings. The final ``for hour in frequent_to_unfrequent:`` is truncated
# — its body lies outside this view, so the code is left byte-identical
# rather than reformatted.
while error != "": error = input("Error " + email + " is subscribed to: ").upper() if error == "": continue if error in emails: emails[error].add(email) else: print("Invalid error") num_emails = {} for error in emails: for email in emails[error]: num_emails[email] = 0 timestamps = {} #populate datasets analysis.parse(logs, errors, emails, num_emails, timestamps) num_errors = analysis.num_errors(errors) worst_to_best = analysis.most_to_least(errors) frequent_to_unfrequent = analysis.most_to_least(timestamps) #output results of analyses if num_emails != {}: print("Email Summary:") for email in num_emails: print(email + ": " + str(num_emails[email]) + " emails sent") print("There are " + str(num_errors) + " unique errors in the log.") print("From most to least problematic, the different error categories are:") for error in worst_to_best: print(error[0] + ": " + str(error[1]) + " times") print( "From most to least common, the hours of the day when errors occured are:") for hour in frequent_to_unfrequent:
# NOTE(review): fragment of a CLI entry script, cut at both edges (it opens
# with a dangling ``)`` closing a call that starts outside this view, and
# contains an unpaired ``'''``). It parses sys.argv for ``-t`` (thorough
# mode), ``-r <float>`` (match ratio) and a ``*.csv`` input file, exiting
# with a hint on any parse failure, then runs a timestamped pipeline:
# parse the CSV, refresh/identify vendors, pull each vendor's products,
# and determine products via ``identify.determine_products``. The bare
# ``except:`` swallows all errors including SystemExit — flagged, but left
# byte-identical because the fragment cannot be reformatted safely.
) sys.exit() for i in range(1, len(sys.argv)): try: if sys.argv[i] == '-t': thorough = True if sys.argv[i] == '-r': ratio = float(sys.argv[i+1]) if sys.argv[i].endswith('.csv'): file = '../files/' + sys.argv[i] except: print("'python main.py help' to see a list of possible arguments") sys.exit() ''' print('[' + str(datetime.datetime.now().time()) + '] Initializing parsing') object_list = analysis.parse(file) print('\n[' + str(datetime.datetime.now().time()) + '] Updating vendor list') analysis.pull_vendors() print('[' + str(datetime.datetime.now().time()) + '] Identifying vendors') vendorlist = analysis.find_vendors(object_list) print('[' + str(datetime.datetime.now().time()) + '] Pulling vendor product lists') for vendor in vendorlist: analysis.pull_products(vendor) print('[' + str(datetime.datetime.now().time()) + '] Identifying products') object_list = identify.determine_products(object_list, ratio, thorough)