def get_questions_since(self, date, method='activity'):
    """Fetch StackOverflow questions tagged with self.tags since *date*.

    Args:
        date: naive UTC datetime; lower bound of the query window.
        method: API sort order (e.g. 'activity').

    Returns:
        List of Question objects built from the API 'items' payload.
    """
    ts_from = calendar.timegm(date.utctimetuple())
    # BUG FIX: the upper bound must be a UTC timestamp. datetime.now()
    # returns local time, and utctimetuple() on a naive datetime performs
    # no conversion, so timegm() was skewed by the local UTC offset.
    ts_to = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
    url = BASE
    url += "&sort=%s" % (method)
    url += "&min=%d&max=%d" % (ts_from, ts_to)
    url += "&tagged=%s" % (";".join(self.tags))
    logger.debug("stackoverflow: getting %s" % url)
    resp = urllib2.urlopen(url).read()
    # The Stack Exchange API gzips every response body.
    resp = gzip.GzipFile(fileobj=StringIO(resp)).read()
    questions = json.loads(resp)['items']
    logger.info("stackoverflow: Getting questions for tags: %s" % (",".join(self.tags)))
    questions = map(Question, questions)
    return questions
def email_location():
    """Best-effort geolocation of the sender via Original-Received headers.

    Scans the global `message` for Original-Received headers, keeps the
    public (non-RFC-1918-looking) IPs, and geocodes the last candidate
    through freegeoip.

    Returns:
        ((latitude, longitude), 0) on success, or None when no usable IP
        exists or the lookup fails.
    """
    # message.get_all returns None when the header is absent; guard it.
    received = message.get_all('Original-Received') or []
    hops = [IP.findall(h) for h in received]
    # Keep the first IP of each hop, dropping private address ranges.
    ips = [hop[0] for hop in hops
           if hop and not hop[0].startswith("10.")
           and not hop[0].startswith("192.168")]
    if not ips:
        # BUG FIX: ips[-1] sat outside the try and raised IndexError when
        # every hop was private or the header was missing.
        return None
    likely = ips[-1]
    try:
        logger.info("geocoder: Getting location for %s" % (likely))
        url = "http://freegeoip.net/json/%s" % likely
        logger.debug("geocoder: Fetching %s" % (url))
        loc = json.loads(urllib2.urlopen(url).read())
        ll = float(loc['latitude']), float(loc['longitude'])
        if any(ll):
            return ll, 0
    except Exception:
        # Narrowed from a bare except: location is best-effort, but we no
        # longer swallow SystemExit/KeyboardInterrupt.
        pass
    return None
def coords(self):
    """Look up the answer owner's profile and geocode their location.

    Returns:
        Geocoded coordinates for the user's self-reported location, or
        None when the profile fetch or geocoding fails (no location set,
        network error, unparseable response).
    """
    geocoder = ggeocoder.Geocoder()
    user_id = self.answer.owner['user_id']
    url = "https://api.stackexchange.com/2.1/users/%s?site=stackoverflow"
    url %= (user_id)
    try:
        resp = urllib2.urlopen(url).read()
        # The Stack Exchange API gzips every response body.
        resp = gzip.GzipFile(fileobj=StringIO(resp)).read()
        user = json.loads(resp)['items'][0]
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        return None
    try:
        logger.info("geocoder: Getting coordinates for %s" % (user['location']))
        location = geocoder.geocode(user['location'])[0].coordinates
    except Exception:
        # The user may have no location field, or geocoding may fail.
        location = None
    return location
def get_users(self, token):
    """Collect every member login of the GitHub org, following pagination.

    Args:
        token: GitHub API access token (passed as a query parameter;
            NOTE(review): GitHub prefers the Authorization header).

    Returns:
        Result of self._get_github_accounts for the full login set.
    """
    url = "https://api.github.com/orgs/%s/members?access_token=%s" % (self.org, token)
    # BUG FIX: the accumulator must be created once, before the loop.
    # It was previously re-created on every page, so only the final
    # page's members survived pagination.
    all_members = set()
    while True:
        logger.debug("github: getting %s" % url)
        members_resp = urllib2.urlopen(url)
        members = json.loads(members_resp.read())
        for member in members:
            all_members.add(member['login'])
        # Pagination URLs arrive in the RFC 5988 Link header.
        links = LINKS.findall(members_resp.headers.get('Link', ''))
        links = {link[1]: link[0] for link in links}
        if 'next' in links:
            logger.debug("github: %s has too many users, requesting more" % (self.org))
            url = links['next']
        else:
            # Report the accumulated total, not just the last page's size.
            logger.info("github: Got %s users for %s" % (len(all_members), self.org))
            break
    return self._get_github_accounts(all_members, token)
def email_location():
    """Geolocate the message sender from its Original-Received headers.

    Extracts public IPs from the global `message`'s Original-Received
    headers and resolves the last one through the freegeoip service.

    Returns:
        ((latitude, longitude), 0) on success; None when no public IP is
        available or the remote lookup fails.
    """
    # get_all returns None for a missing header — normalize to a list.
    received = message.get_all('Original-Received') or []
    candidates = [IP.findall(header) for header in received]
    # Filter out hops without an IP and private-range addresses.
    ips = [c[0] for c in candidates
           if c and not c[0].startswith("10.")
           and not c[0].startswith("192.168")]
    if not ips:
        # BUG FIX: indexing ips[-1] outside the try raised IndexError
        # whenever no public IP survived the filter.
        return None
    likely = ips[-1]
    try:
        logger.info("geocoder: Getting location for %s" % (likely))
        url = "http://freegeoip.net/json/%s" % likely
        logger.debug("geocoder: Fetching %s" % (url))
        loc = json.loads(urllib2.urlopen(url).read())
        ll = float(loc['latitude']), float(loc['longitude'])
        if any(ll):
            return ll, 0
    except Exception:
        # Narrowed from a bare except; lookup stays best-effort.
        pass
    return None
def get_data(self, token, checkout_directory):
    """Fetch recent org issues and all public repos, then gather repo data.

    Args:
        token: GitHub API access token.
        checkout_directory: directory to place repo checkouts under; the
            literal string "temp" means "no fixed location" (GitRepo
            decides), same as passing a falsy value.

    Returns:
        Set containing Issue objects for issues updated in the last five
        days plus everything each GitRepo's get_data() contributes.
    """
    self.token = token
    data = set()
    if checkout_directory == "temp":
        checkout_directory = None

    def _pages(url, what):
        # Yield each decoded JSON page, following the RFC 5988 Link
        # header until there is no 'next' relation. Extracted because the
        # issue and repo loops were copy-pasted pagination code.
        while True:
            logger.debug("github: getting %s" % url)
            resp = urllib2.urlopen(url)
            yield json.loads(resp.read())
            links = LINKS.findall(resp.headers.get('Link', ''))
            links = {link[1]: link[0] for link in links}
            if 'next' in links:
                logger.debug("github: %s has too many %s, requesting more" % (self.org, what))
                url = links['next']
            else:
                logger.info("github: Got all %s for %s" % (what, self.org))
                break

    # Issues updated within the last five days.
    five_days_ago = (datetime.datetime.now() - datetime.timedelta(days=5)).date().isoformat()
    url = "https://api.github.com/orgs/%s/issues?access_token=%s&filter=all&since=%s" % (self.org, self.token, five_days_ago)
    for issues in _pages(url, "issues"):
        for issue in issues:
            data.add(Issue(issue))

    # All public repos of the org, wrapped as GitRepo helpers.
    url = "https://api.github.com/orgs/%s/repos?access_token=%s&type=public" % (self.org, self.token)
    self.repos = {}
    for org_repos in _pages(url, "repos"):
        for repo in org_repos:
            repo_name = repo['name']
            if checkout_directory:
                location = os.path.join(checkout_directory, repo_name)
            else:
                location = None
            logger.info("github: Getting changes for %s/%s" % (self.org, repo_name))
            self.repos[repo_name] = GitRepo(repo['git_url'], location=location)

    logger.debug("github: Got data for %d repos in %s" % (len(self.repos), self.org))
    for repo in self.repos.values():
        data |= repo.get_data()
    return data