def do():
    """Download the media referenced by *message*, verify its SHA-256
    checksum, record the file in the database, and push the resulting
    HTTP URL to the Jabber side.

    Runs in a worker thread and retries forever on any failure,
    sleeping 5 s between attempts so a persistent error does not spin
    the CPU.  Relies on enclosing-scope names: ``message``,
    ``filepath``, ``waTransportJid``, ``self``, ``logger``.
    """
    while True:
        try:
            req = urllib2.urlopen(message.getMediaUrl())
            try:
                hashsum = hashlib.sha256()
                with open(filepath, "wb") as f:
                    # stream in 4 KiB chunks, hashing as we go
                    while True:
                        chunk = req.read(4096)
                        if not chunk:
                            break
                        f.write(chunk)
                        hashsum.update(chunk)
            finally:
                # FIX: close the HTTP response even when the download
                # fails part-way; it was previously leaked on error.
                req.close()
            computed_hashsum = base64.b64encode(hashsum.digest())
            logger.debug("Downloaded file "+filepath+" with hashsum "+computed_hashsum)
            if message.fileHash != computed_hashsum:
                logger.info("hashsum mismatch: %s /= %s" % (message.fileHash, computed_hashsum))
                # force a retry through the except handler below
                raise Exception()
            else:
                db = get_database(self.config.database)
                dbfileid = db.save_path(filepath, message.getFrom(), message.getId())
                url = "http://%s:%s/%s" % (self.config.http_address, self.config.http_port, dbfileid)
                logger.debug("MediaDownloader url: "+url)
                # we are in a thread context and need locking
                with self.account.xmpp.lock:
                    self.account.sendToJabber(url, waTransportJid)
                self.account.markWAMessageAsReceived(msg = message)
                return
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt and made the thread unkillable.
            logger.exception("Unknown exception: ")
            # don't use too much cpu in case of endless loop
            time.sleep(5.0)
def score_by_capes(sectionIDs):
    """Score each section by averaging its CAPE evaluations.

    For every section ID, look up the course and instructor(s), fetch
    the matching CAPE records, and average the received grade (ignoring
    -1 sentinels), the recommend-professor rating, and the hours spent
    per week.  Sections with no time-spent data default to 100.0 so
    they sort as maximally expensive.

    :param sectionIDs: iterable of section identifiers
    :return: list of dicts with keys "section ID", "grade", "rating",
             "time spent"
    """
    database = get_database()
    sections_ratings = []
    for secID in sectionIDs:
        # resolve the section to a course and its instructor names
        response = getSectionByID(secID)
        subject_code = response['subjectCode']
        course_code = response['courseCode']
        instructors = [instructor['instructorName']
                       for section in response['sections']
                       for instructor in section['instructors']]
        # NOTE(review): fragile repr-based join kept byte-identical for
        # behavior; a name containing an apostrophe (O'Brien) makes repr
        # use double quotes, which leak into the result — TODO confirm
        # what get_capes_by_course_and_prof expects before cleaning up.
        instructor_name = str(instructors).replace('[', '')
        instructor_name = instructor_name.replace('\'', '')
        instructor_name = instructor_name.replace(']', '')
        capes = get_capes_by_course_and_prof(subject_code, course_code, instructor_name, database)
        grade_sum = 0
        grade_count = 0
        rating_sum = 0
        rating_count = 0
        time_spent_sum = 0
        time_spent_count = 0
        # FIX: iterate records directly and test key membership instead
        # of the range(len(...)) loop that scanned every key of every
        # record against three string comparisons.
        for cape in capes:
            if "received_grade" in cape and cape["received_grade"] != -1:
                # -1 marks "no grade reported"; exclude from the average
                grade_sum += cape["received_grade"]
                grade_count += 1
            if "recommend_professor" in cape:
                rating_sum += cape["recommend_professor"]
                rating_count += 1
            if "hours_per_week" in cape:
                time_spent_sum += cape["hours_per_week"]
                time_spent_count += 1
        grade_avg = grade_sum / grade_count if grade_count != 0 else 0.0
        rating_avg = rating_sum / rating_count if rating_count != 0 else 0.0
        # no data -> pessimistic 100.0 default, matching original
        time_spent_avg = (time_spent_sum / time_spent_count
                          if time_spent_count != 0 else 100.0)
        sections_ratings.append({
            "section ID": secID,
            "grade": grade_avg,
            "rating": rating_avg,
            "time spent": time_spent_avg,
        })
    close_database(database)
    return sections_ratings
def __init__(self, config):
    """Build the XMPP dispatcher and open its connection.

    Stores the config, opens the configured database, wires an
    XMLParser whose events target this object, registers every bound
    method carrying a ``callback`` attribute into ``self.mapping``,
    then creates a TCP socket and connects to the configured XMPP
    host/port.
    """
    asyncore.dispatcher_with_send.__init__(self)
    self.config = config
    self.database = get_database(self.config.database)
    self.parser = XMLParser(target = self)
    # register every method tagged (by a decorator elsewhere) with a
    # `callback` attribute, keyed by that attribute's value
    for name, method in inspect.getmembers(self, predicate=inspect.ismethod):
        if hasattr(method, "callback"):
            self.mapping[method.callback] = getattr(self, name)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    self.connect((self.config.xmpp_host, self.config.xmpp_port))
def __init__(self, accounts, config, sock):
    """Initialize the request handler with its accounts and database.

    :param accounts: account registry shared across requests
    :param config: application configuration (must expose .database)
    :param sock: the accepted client socket, handed to HttpRequest
    """
    HttpRequest.__init__(self, sock)
    self.accounts = accounts
    # FIX: `config` was accepted but never stored, so the next line's
    # self.config.database raised AttributeError (HttpRequest only
    # receives the socket — presumably it does not set self.config;
    # TODO confirm against the base class).
    self.config = config
    self.database = get_database(self.config.database)
import requests from bs4 import BeautifulSoup from Trade import Trade from DraftPick import DraftPick from Database import get_database URL_ID = 1 tradeList = [] ID = 0 BASE_YEAR = 2010 DB = get_database() indexId = 0 pickIndexId = 0 while BASE_YEAR != 2020: while True: URL = 'http://www.nhltradetracker.com/user/trade_list_by_season/' + str( BASE_YEAR) + ''.join( ["-", str(int(str(BASE_YEAR)[2:]) + 1), "/"]) + str(URL_ID) response = requests.get(URL) soup = BeautifulSoup(response.content, 'html.parser') findTradeItems = soup.findAll('td', width='75%') findDate = soup.findAll('td', width='20%') findTeams = soup.findAll('td', width='40%') if len(findDate) == 0: break teams = [] dates = []