def gen_host_updates(self, host):
    ''' Populate all updates '''
    # First let's delete them all
    host.update_set.all().delete()
    try:
        xml = parseString(host.get_fact_value('package_updates'))
    except Exception:
        # Fact is missing or is not valid XML; nothing to populate.
        return
    for update in xml.getElementsByTagName("package"):
        name = update.getAttribute("name")
        cv = update.getAttribute("current_version")
        nv = update.getAttribute("new_version")
        sn = update.getAttribute("source_name")
        org = update.getAttribute("origin")
        # Note: facts are forcefully stringified until puppet 3.7
        is_sec = (update.getAttribute("is_security") == "true")
        try:
            p = Package.objects.get(name=name)
        except Package.DoesNotExist:
            p = Package(name=name, sourcename=sn)
            p.save()
        u = Update(host=host, package=p, installedVersion=cv,
                   candidateVersion=nv, origin=org, is_security=is_sec)
        u.save()
def gen_host_updates(self, host):
    ''' Populate all updates '''
    # First let's delete them all
    host.update_set.all().delete()
    try:
        xml = parseString(host.get_fact_value('package_updates'))
    except Exception:
        # Fact is missing or is not valid XML; nothing to populate.
        return
    for update in xml.getElementsByTagName("package"):
        name = update.getAttribute("name")
        cv = update.getAttribute("current_version")
        nv = update.getAttribute("new_version")
        sn = update.getAttribute("source_name")
        org = update.getAttribute("origin")
        is_sec = (update.getAttribute("is_security") == "true")
        try:
            p = Package.objects.filter(name=name)[0]
        except IndexError:
            p = None
        if not p:
            p = Package(name=name, sourcename=sn)
            p.save()
        u = Update(host=host, package=p, installedVersion=cv,
                   candidateVersion=nv, origin=org, is_security=is_sec)
        u.save()
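The two gen_host_updates variants above parse a 'package_updates' fact as XML. A minimal sketch of the assumed fact format, inferred only from the getAttribute() calls (the real fact produced by the Puppet side may differ), is:

# Hypothetical sample of the 'package_updates' fact consumed by gen_host_updates().
# Element and attribute names come from the getAttribute() calls above; the
# concrete values are made up for illustration.
from xml.dom.minidom import parseString

sample_fact = """
<package_updates>
  <package name="openssl"
           current_version="1.1.1f-1ubuntu2"
           new_version="1.1.1f-1ubuntu2.20"
           source_name="openssl"
           origin="Ubuntu"
           is_security="true"/>
</package_updates>
"""

xml = parseString(sample_fact)
for pkg in xml.getElementsByTagName("package"):
    print(pkg.getAttribute("name"), pkg.getAttribute("is_security") == "true")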
def handle(self, *args, **options): """Create fake users and statuses for Mowdie.""" from faker import Faker import random from django.conf import settings from PyMarkovTextGenerator import Markov fake = Faker() textgen = Markov(prob=True, level=3) with open(settings.BASE_DIR + "/../john_carter.txt") as file: textgen.parse(file.read()) def update_text(): return textgen.generate( startf=lambda db: random.choice([x for x in db if x[0][0].isupper()]), endf=lambda s: len(s) > 120) Favorite.objects.all().delete() Update.objects.all().delete() User.objects.all().delete() users = [] for _ in range(20): user = User(username=fake.user_name(), email=fake.email()) user.set_password("password") user.save() Profile(user=user).save() users.append(user) user = User(username="******", email="*****@*****.**", is_staff=True, is_superuser=True) user.set_password("password") user.save() Profile(user=user).save() updates = [] for _ in range(100): update = Update(text=update_text(), posted_at=make_aware(fake.date_time_this_year()), user=random.choice(users)) update.save() updates.append(update) combos = random.sample([(user, update) for user in users for update in updates], 200) for user, update in combos: favorite = Favorite(user=user, update=update) favorite.save()
def handle_label(self, label, **options):
    args = label.split('--')
    message = args[0]
    severity = args[1]
    new_update = Update(
        message=message,
        severity=severity,
    )
    new_update.save()
    update_twitter_official(message)
    print("Update saved to database.")
def handle(self, *args, **options):
    # create a github handle
    gh = github.GitHub()

    # get the latest github update that's in the db
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    # if there are no github updates in the db, just add all of them
    # if there are updates in the db already, check the date of the most recent one.
    for r in gh.repos.forUser(GITHUB_USER):
        most_recent_branch = False
        most_recent_branch_ts = False
        most_recent_commit = False

        branches = gh.repos.branches(GITHUB_USER, r.name)
        for branchname, _ in branches.items():
            commits = gh.commits.forBranch('qmat', r.name, branchname)
            # ignore the branch if there are no commits
            if not commits:
                continue
            branch_ts = self.__interpret_date(commits[0].authored_date)
            # if it's the first check then take that one, otherwise compare
            if not most_recent_branch or branch_ts > most_recent_branch_ts:
                most_recent_branch = branchname
                most_recent_branch_ts = branch_ts
                most_recent_commit = commits[0]

        # we now have the most recent branch
        if branches and (not updates or most_recent_branch_ts > updates[0].timestamp):
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = most_recent_branch_ts
            new_update.author = most_recent_commit.author.name
            new_update.text = "Changed %s repo" % r.name
            new_update.save()
def create_updates_updates(self):
    updates = []
    packages = list(Package.objects.all())  # Fetch them all as an optimization
    hosts = list(Host.objects.all())  # Fetch them all
    for host in hosts:
        host_packages = list(set([
            random.choice(packages)
            for x in range(random.randint(0, len(packages)))
        ]))
        print('Creating %s package updates for host %s' % (len(host_packages), host))
        for package in host_packages:
            updates.append(Update(
                package=package,
                host=host,
                installedVersion=random.choice(['', '1.0-1', '1.5-2', '1.0-2', '4.2-1', '5.6']),
                candidateVersion=random.choice(['1.0-2', '1.5-3', '1.0-6', '4.3-1', '5.7']),
                source=Command.id_generator(5),
                origin=random.choice(['Debian', 'Ubuntu']),
                is_security=random.choice([True, False]),
            ))
    Update.objects.bulk_create(updates)
def handle(self, *args, **options): """Create fake users and statuses for Mowdie.""" from faker import Faker import random from django.conf import settings from PyMarkovTextGenerator import Markov fake = Faker() textgen = Markov(prob=True, level=3) with open(settings.BASE_DIR + "/../john_carter.txt") as file: textgen.parse(file.read()) def update_text(): return textgen.generate( startf=lambda db: random.choice([x for x in db if x[0][0].isupper()]), endf=lambda s: len(s) > 120 ) Favorite.objects.all().delete() Update.objects.all().delete() User.objects.all().delete() users = [] for _ in range(20): user = User(username=fake.user_name(), email=fake.email()) user.set_password("password") user.save() Profile(user=user).save() users.append(user) user = User(username="******", email="*****@*****.**", is_staff=True, is_superuser=True) user.set_password("password") user.save() Profile(user=user).save() updates = [] for _ in range(100): update = Update( text=update_text(), posted_at=make_aware(fake.date_time_this_year()), user=random.choice(users) ) update.save() updates.append(update) combos = random.sample([(user, update) for user in users for update in updates], 200) for user, update in combos: favorite = Favorite(user=user, update=update) favorite.save()
def handle(self, *args, **options):
    conn = imaplib.IMAP4_SSL('imap.gmail.com', 993)
    try:
        conn.login(GMAIL_USER, GMAIL_PASS)
    except Exception:
        print sys.exc_info()[1]
        sys.exit(1)

    conn.select('Inbox')  # Select inbox or default namespace
    retcode, messages = conn.search(None, '(UNSEEN)')
    if retcode == 'OK' and messages[0]:
        for message in messages[0].split(' '):
            print 'Processing :', message
            ret, mesginfo = conn.fetch(message, '(BODY[HEADER.FIELDS (SUBJECT FROM DATE)])')
            if ret == 'OK':
                # get the right bits out of the imap data
                new_update = Update()
                new_update.source = SOURCE
                fields = mesginfo[0][1].split('\r\n')
                for field in fields:
                    if field.startswith('Date: '):
                        new_update.timestamp = self.__interpret_date(field[len('Date: '):])
                    if field.startswith('From: '):
                        author = field[len('From: '):]
                        # filter out email address if a name was specified
                        bps = [author.find('<'), author.find('>')]
                        # do some sanity checking
                        if bps[0] < bps[1] and bps[0] >= 0 and bps[1] >= 0:
                            author = author[0:bps[0] - 1]
                        author = author.strip("\"' ")
                        new_update.author = author
                    if field.startswith('Subject: '):
                        new_update.text = field[len('Subject: '):]
                new_update.save()
        conn.store(messages[0].replace(' ', ','), '+FLAGS', 'SEEN')
    conn.close()
def latest_update():
    try:
        items = Update.objects.all().order_by('-updated')
        update = items[0]
    except IndexError:
        # No updates entered yet; fall back to a placeholder object.
        update = Update(
            name="Nothing Yet",
            description="You haven't entered any data here. Add something in the admin",
            author=User.objects.get(username='******'))
    return {'update': update}
def handle(self, *args, **options):
    articles = feedparser.parse(FEED_URL)['entries']

    # get the latest update timestamp that's in the db
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    for article in articles:
        timestamp = self.__interpret_date(article['updated'])
        if not updates or timestamp > updates[0].timestamp:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = timestamp
            new_update.author = article['author']
            new_update.text = article['title']
            new_update.save()
def handle(self, *args, **options):
    videos = json.loads(urllib2.urlopen(VIMEO_CHANNEL_API_URI).read())

    # get the latest vimeo update timestamp that's in the db
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    for video in videos:
        timestamp = self.__interpret_date(video['upload_date'])
        if not updates or timestamp > updates[0].timestamp:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = timestamp
            new_update.author = video['user_name']
            new_update.text = video['description']
            new_update.save()
def fetch_and_save_new_threads_content(thread_urls=None):
    """ Saves new thread updates into the database. """
    if not thread_urls:
        return
    for url in thread_urls:
        try:
            content = html.parse(url).getroot()
        except IOError:
            return
        id = url.split('&t=')[1]
        title = content.xpath('//div[@class="smallfont"]/strong')[0].text
        if title.lower().find('patch notes') == -1:
            continue
        body = etree.tostring(content.xpath('//div[starts-with(@id, "post_message_")]')[0],
                              with_tail=False)
        source = re.sub(r's=.*?&', '', url)
        update = Update(title=title, body=body, thread_id=id, source=source)
        update.save()
def create_from_template(request):
    username = None
    group = 'parent'
    if request.user.is_authenticated():
        username = request.user.username
        if request.user.has_perm('updates.add_update'):
            group = 'teacher'
    if request.POST:
        text = request.POST.get('template_1')
        update = Update(updateText=text)
        update.save()
        return HttpResponseRedirect('/updates/all')
    else:
        args = {}
        args.update(csrf(request))
        args['username'] = username
        args['group'] = group
        return render_to_response('create_from_template.html', args)
def handle(self, *args, **options):
    client = gdata.docs.service.DocsService()
    client.ClientLogin(GDOCS_USER, GDOCS_PASS)
    documents_feed = client.GetDocumentListFeed()

    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    for document_entry in documents_feed.entry:
        timestamp = self.__interpret_date(document_entry.updated.text)
        if not updates or timestamp > updates[0].timestamp:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = timestamp
            new_update.author = document_entry.lastModifiedBy.name.text
            new_update.text = document_entry.title.text
            new_update.save()
def handle(self, *args, **options):
    auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    auth.set_access_token(TWITTER_ACCESS_KEY, TWITTER_ACCESS_SECRET)
    api = tweepy.API(auth)

    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    # tweets = api.search('@QMATChandelier')
    tweets = api.home_timeline()
    for tweet in tweets:
        if not updates or tweet.created_at > updates[0].timestamp:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = tweet.created_at
            new_update.author = tweet.user.screen_name
            new_update.text = tweet.text
            new_update.save()
def handle(self, *args, **options):
    auth = tweepy.auth.OAuthHandler(TWITTER_USER, TWITTER_PASS)
    api = tweepy.API(auth)

    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')

    tweets = api.search('@QMATChandelier')
    for tweet in tweets:
        if not updates or tweet.created_at > updates[0].timestamp:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = tweet.created_at
            new_update.author = tweet.from_user
            new_update.text = tweet.text
            new_update.save()