Example no. 1
0
def query_error(err, prefix, prange):
	"""Errback for a prefix query (Twisted-style Failure handler).

	When the failure is a TooManyResults, narrow the search: replace
	*prefix* in *prange* with its 26 single-letter-suffixed variants and
	fetch those instead.  Any other failure is returned so it propagates
	down the errback chain.
	"""
	if not err.check(TooManyResults):
		# Not ours to handle — pass the Failure along.
		return err
	clrln()
	print('  Too many results for %s, suffixing...' % prefix)
	# Split the overly-broad prefix into narrower a-z suffixed prefixes.
	expanded = [prefix + letter for letter in atoz()]
	prange.remove(prefix)
	prange.extend(expanded)
	fetch(expanded)
Example no. 2
0
File: msudh.py Project: psev/MSU-DH
def retrieve(prefixes):
    """Fetch search results for each prefix and write one CSV row per uid.

    For every prefix: query the search endpoint, parse the uid list out of
    the returned HTML, then fetch each uid's detail page, write its row via
    the module-level csv_writer, and sleep between requests to throttle.
    A prefix that matches too many records is retried recursively with
    every single-letter suffix appended (prefix + 'a' .. prefix + 'z').

    Relies on module-level collaborators defined elsewhere in this file:
    base, uparser, dparser, csv_writer, csv_file, timetable, clrln,
    get_interval, atoz, TooManyResults.

    Raises SystemExit on network failure or keyboard interrupt.
    """
    for prefix in prefixes:
        try:
            clrln()
            print("Fetching: %s..." % prefix, end=" ")
            params = urlencode({"q": prefix}, doseq=True)
            # NOTE(review): .read() is the documented HTTPResponse API;
            # confirm .readall() exists in the targeted Python version.
            html = urlopen(base + params).readall().decode("ascii")
            uids = uparser.get_uids(html)
            uid_count = len(uids)
            print("found %d uids." % uid_count)
            count = 0
            for uid in uids:
                count += 1
                interval = get_interval(timetable)
                print(
                    "-- processing %d of %d / %.2f %% | %ss interval--\r"
                    % (count, uid_count, (count / uid_count) * 100, interval),
                    end="",
                )
                params = urlencode({"uid": uid}, doseq=True)
                html = urlopen(base + params).readall().decode("ascii")
                row = [uid]
                row += dparser.get_data(html)
                csv_writer.writerow(row)
                sleep(interval)  # throttle between per-uid requests
            csv_file.flush()  # persist this prefix's rows before moving on
        except TooManyResults:
            # Result set too large for one query: split this prefix into
            # 26 narrower suffixed prefixes and retry them recursively.
            # (Pass the new list directly — rebinding the iterated name,
            # as the original did, never affected the ongoing loop.)
            print("too many results, suffixing...")
            retrieve([prefix + x for x in atoz()])
        except URLError:
            raise SystemExit("Network unreachable.")
        except KeyboardInterrupt:
            clrln()
            raise SystemExit("Keyboard interrupt... aborting at %s." % prefix)
Example no. 3
0
File: msudh.py Project: psev/MSU-DH
                count += 1
                interval = get_interval(timetable)
                print(
                    "-- processing %d of %d / %.2f %% | %ss interval--\r"
                    % (count, uid_count, (count / uid_count) * 100, interval),
                    end="",
                )
                params = urlencode({"uid": uid}, doseq=True)
                html = urlopen(base + params).readall().decode("ascii")
                row = [uid]
                row += dparser.get_data(html)
                csv_writer.writerow(row)
                sleep(interval)
            csv_file.flush()
        except TooManyResults:
            print("too many results, suffixing...")
            prefixes = [prefix + x for x in atoz()]
            retrieve(prefixes)
        except URLError:
            raise SystemExit("Network unreachable.")
        except KeyboardInterrupt:
            del d
            clrln()
            raise SystemExit("Keyboard interrupt... aborting at %s." % prefix)


# Seed the crawl with every two-letter prefix combination (aa .. zz).
az = [first + second for first in atoz() for second in atoz()]

retrieve(az)
clrln()  # clear the status line before the script exits