def clean_url(self):
    """Validate the form's 'url' field by fetching the document it names.

    If a URL was supplied, downloads it to a local save file and records
    the relative path in cleaned_data['path'], also setting
    self.active = 'url'.

    Raises:
        forms.ValidationError: for file:// URLs, or on any fetch error.

    NOTE(review): this writes cleaned_data['path'], which is clean()-style
    behavior; a Django field-level clean_url() is normally expected to
    return the cleaned value -- confirm the intended hook.
    """
    cleaned_data = self.cleaned_data
    if cleaned_data.get('url'):
        self.active = 'url'
        url = cleaned_data.get('url')
        logger.debug("Going to pull from %r", url)
        pull_req = urllib2.Request(url)
        # BUG FIX: original tested `pull_req.get_type == "file"`, which
        # compares the bound method object to a string and is always
        # False, so file:// URLs were never rejected. Call the method.
        if pull_req.get_type() == "file":
            raise forms.ValidationError("Illegal URL")
        try:
            fh = urllib2.urlopen(pull_req)
            try:
                fd, savepath, relpath = mksavefile("url")
                # NOTE(review): `fd` returned by mksavefile is never
                # closed here -- presumably a mkstemp-style descriptor;
                # confirm whether mksavefile/stream_to_file owns it.
                util.stream_to_file(fh, savepath)
            finally:
                fh.close()  # was leaked on both success and failure
            cleaned_data['path'] = relpath
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.exception("Error fetching url %s", url)
            raise forms.ValidationError("Error fetching url")
def fetchurl():
    """Flask view: fetch the document named by the 'fetchurl' parameter.

    Pops 'fetchurl' out of the request values, downloads the document to
    a local save file, and redirects to 'run_frame' with the saved file's
    relative path. On a missing URL, an illegal file:// URL, or any fetch
    failure, flashes a message and redirects back to 'start', preserving
    the remaining request args.
    """
    args = dict(request.values.items())
    url = args.pop('fetchurl', None)
    if not url:
        flash('URL Required in URL Field')
        return redirect(url_for('start', **args))
    logger.debug("Going to pull from %r", url)
    pull_req = urllib2.Request(url)
    # BUG FIX: original tested `pull_req.get_type == "file"`, which
    # compares the bound method object to a string and is always False,
    # so file:// URLs slipped through. Call the method.
    if pull_req.get_type() == "file":
        flash("Illegal URL")
        return redirect(url_for('start', **args))
    try:
        fh = urllib2.urlopen(pull_req)
        try:
            fd, savepath, relpath = mksavefile("url")
            # NOTE(review): `fd` from mksavefile is never closed here --
            # confirm whether mksavefile/stream_to_file owns it.
            util.stream_to_file(fh, savepath)
        finally:
            fh.close()  # was leaked on both success and failure
        return redirect(url_for('run_frame', path=relpath))
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        logger.exception("Error fetching url %s", url)
        flash("Error fetching url")
        return redirect(url_for('start', **args))