import json

# Project-local modules used by main(); assumed to sit alongside this script.
import scrapers
import citation_builders
import grapher
import helper

# parseArgs() and baseURL are expected to be defined elsewhere in this module.


def main():
    """Scrape or load cases, build citations, and draw the citation graph."""
    args = parseArgs()
    # See if scraping has been called
    if args.phase == 1:
        scrape = scrapers.VolScraper(args.vStart, args.vStop, baseURL)
        caseUrls = scrape.scrapeVolumes()
        # Grab cases
        cScraper = scrapers.CaseScraper(args.stopCase, caseUrls, args.output,
                                        args.emailsend, baseURL)
        cases = cScraper.getCases()
        print "Cases scraped"
    # or load from json
    else:
        try:
            with open(args.input, 'r') as fp:
                cases = json.load(fp)
            #print cases
            print "JSON loaded"
        except EnvironmentError:
            print "Select a valid load file."
            return  # nothing to process without a valid input file
    if args.phase < 3:
        CB = citation_builders.citations(cases, args.citeOutput)
        cites, metrics = CB.processText(True)
        #print cites
        print metrics
    else:
        cites = cases
    grapher.GraphBuilder(cites, args.graphOutput, args.format, baseURL).drawGraph()
    print "done"
    if args.emailsend:
        helper.emailSend('Your Script done', "ALL DONE")
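# parseArgs() is not shown in this section. Below is a minimal sketch of what it
# might look like using argparse: only the attribute names (phase, vStart, vStop,
# stopCase, input, output, citeOutput, graphOutput, format, emailsend) are taken
# from main() above; every flag name, type, default, and help string is an
# assumption, not the project's actual interface.
import argparse


def parseArgs():
    """Hypothetical argument parser covering the attributes main() reads."""
    parser = argparse.ArgumentParser(
        description="Scrape volumes, build citations, and draw a citation graph")
    parser.add_argument('--phase', type=int, default=1,
                        help='1 = scrape, 2 = build citations from JSON, 3 = graph only')
    parser.add_argument('--vStart', type=int, default=1, help='first volume to scrape')
    parser.add_argument('--vStop', type=int, default=1, help='last volume to scrape')
    parser.add_argument('--stopCase', default=None, help='case at which to stop scraping')
    parser.add_argument('--input', default='cases.json', help='JSON dump of previously scraped cases')
    parser.add_argument('--output', default='cases.json', help='file scraped cases are written to')
    parser.add_argument('--citeOutput', default='cites.txt', help='citation output file')
    parser.add_argument('--graphOutput', default='graph', help='graph output file')
    parser.add_argument('--format', default='gexf', help='graph output format')
    parser.add_argument('--emailsend', action='store_true',
                        help='send a notification email when the run finishes')
    return parser.parse_args()

# Typical invocation under the assumed flag names above:
#   python main.py --phase 1 --vStart 1 --vStop 5 --output cases.json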
@classmethod
def setUpClass(cls):
    # setUpClass must be a classmethod; build one shared citations fixture.
    cls._citation = citations([], 'outfile.txt')
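# A self-contained sketch of the test class the setUpClass above would belong to,
# assuming the standard unittest framework and that citations is imported by name
# from citation_builders (as the bare call above suggests). The class name, test
# method, and assertion are illustrative; only citations([], 'outfile.txt') and
# processText(True) -> (cites, metrics) come from the code in this file.
import unittest

from citation_builders import citations


class CitationsTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # Shared fixture: a citations builder with no cases and a dummy output file.
        cls._citation = citations([], 'outfile.txt')

    def test_process_text_on_empty_case_list(self):
        # Whether processText accepts an empty case list is an assumption here;
        # the check below only verifies that it returns the (cites, metrics) pair.
        cites, metrics = self._citation.processText(True)
        self.assertIsNotNone(cites)


if __name__ == '__main__':
    unittest.main()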