from flask import jsonify, request, abort

import fc  # crawler/analysis helpers (crawl, findDomain, analyseMain, getCorrectURL); module name assumed from the "fc" prefix used below


def get_crawl(path):
    # Crawl the given site (up to 100 pages) and return the analysis as JSON.
    path = "http://" + path
    domain = fc.findDomain(path)
    urls = fc.crawl(path, domain, 100)
    print("Length of URLs:", len(urls))
    out = fc.analyseMain(urls)
    return jsonify(output=out)
def create_crawl():
    # Crawl the URL posted in the form, limited to the requested number of pages.
    path = request.form.get("url")
    pages = int(request.form.get("pages"))
    path = fc.getCorrectURL(path)
    if path is None:
        abort(404)
    domain = fc.findDomain(path)
    urls = fc.crawl(path, domain, pages)
    # The crawler may return more URLs than requested, so trim the list.
    if len(urls) > pages:
        urls = urls[:pages]
    out = fc.analyseMain(urls)
    if len(out) == 0:
        # Nothing could be analysed: report a server-side failure.
        response = jsonify(output=out)
        response.status_code = 500
        return response
    return jsonify(output=out)
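
# ---------------------------------------------------------------------------
# Minimal sketch of how these view functions might be registered with a Flask
# app. The route URLs ("/api/crawl/<path:path>", "/api/crawl") and the app
# setup are assumptions for illustration, not the project's actual routing.
# ---------------------------------------------------------------------------
# from flask import Flask
#
# app = Flask(__name__)
# app.add_url_rule("/api/crawl/<path:path>", view_func=get_crawl, methods=["GET"])
# app.add_url_rule("/api/crawl", view_func=create_crawl, methods=["POST"])
#
# if __name__ == "__main__":
#     app.run(debug=True)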