def process(self, event):
    """Record a filesystem event as a Change against the tracked File.

    Lazily registers a File wrapper the first time a path is seen, then
    persists a Change carrying the current timestamp and the line delta.
    """
    path = event.src_path
    # First sighting of this path: start tracking it.
    if path not in self.files:
        self.files[path] = File(path)
    # Wall-clock time as a POSIX timestamp (second resolution).
    now = time.mktime(datetime.datetime.now().timetuple())
    tracked = self.files[path]
    change = Change()
    change.save(path=path, timestamp=now, line_delta=tracked.delta())
    tracked.add(change)
def get_change_object(change):
    """Retrieves a Change from a given change API object, getting user data
    from the API and saving the User if it doesn't already exist in the DB.
    """
    # Fast path: the change is already stored; look it up by its rcid.
    try:
        return Change.get(Change.change_id == change['rcid'])
    except DoesNotExist:
        pass
    # Not stored yet -- materialize a new row from the raw API fields.
    return Change.create(
        change_id=change['rcid'],
        change_type=change['type'],
        user=get_user_object(change['user']),
        timestamp=parse_iso_dt(change['timestamp']),
        page=get_page_object(change['pageid']),
        comment=change['comment'],
        size_diff=change['newlen'] - change['oldlen'])
def postChanges():
    """Create a Change row from the POSTed JSON body and return the full,
    serialized list of all Change rows as JSON."""
    payload = json.loads(request.data)
    record = Change(
        id=payload["id"],
        change_product=payload["change_product"],
        state=payload["state"],
        city=payload["city"],
        address=payload["address"],
        commune=payload["commune"])
    db.session.add(record)
    db.session.commit()
    # Respond with every stored change, serialized.
    return jsonify([item.serialize() for item in Change.query.all()])
def get_context_data(self, **kwargs):
    """Extend the base template context with this board, its changes, the
    user's boards, the caller's permission, and the navigation links."""
    context = super(BoardHandler, self).get_context_data(**kwargs)
    # The datastore id is needed in several places; compute it once.
    board_id = self.board.key().id()
    context['board'] = self.board
    context['board_id'] = board_id
    context['user_boards'] = Board.get_all_for_user(self.get_current_user())
    context['permission'] = self.permission
    context['changes'] = Change.all().ancestor(self.board)
    context['board_navigation'] = [
        (self.uri_for('board', identifier=board_id),
         '<i class="icon-table"></i> Board'),
        (self.uri_for('board-sharing', identifier=board_id),
         '<i class="icon-user"></i> Sharing'),
    ]
    return context
def get_recent_change_descs(num_changes=config.LayoutConfig.num_recent_changes):
    """Build HTML summaries of the most recent changes.

    Args:
        num_changes: maximum number of Change rows to describe.

    Returns:
        A list of dicts, newest first, each with:
            'summary': an HTML string describing the change, and
            'change': the underlying Change row.
    """
    changelog = []
    changes = (Change.select()
               .order_by(Change.timestamp.desc())
               .limit(num_changes))
    for change in changes:
        username = change.user.username
        pos_chars = 0
        neg_chars = 0
        new_pages = 0
        if change.size_diff >= 0:
            char_diff = ('<span class="plus-chars">+%s</span> chars'
                         % change.size_diff)
            pos_chars = change.size_diff
        else:
            char_diff = ('<span class="minus-chars">-%s</span> chars'
                         % -change.size_diff)
            neg_chars = change.size_diff
        # BUG FIX: previously `change_action` was only assigned for 'new' and
        # 'edit' change types, so any other type raised NameError when the
        # summary was formatted. Fall back to a generic verb instead.
        if change.change_type == 'new':
            change_action = 'created page'
            new_pages = 1
        elif change.change_type == 'edit':
            change_action = 'edited page'
        else:
            change_action = 'changed page'
        page_title = change.page.page_title
        page_url = '#'
        page_link = '<a href="%s">%s</a>' % (page_url, page_title)
        timeago = parse_datetime(change.timestamp)
        points = calc_user_score(pos_chars, neg_chars, new_pages)
        points_html = ('<span class="change-points">%s wikipoints</span>'
                       % points)
        # [Ckarpfinger] # [created page] # [What the f is git]
        # [2 days ago] # [(+31 chars)] # for [9001 wikipoints].
        summary_str = '<span class="username">%s</span> %s %s %s (%s) for %s.'
        summary = summary_str % (username, change_action, page_link, timeago,
                                 char_diff, points_html)
        changelog.append({'summary': summary, 'change': change})
    return changelog
def route_data():
    """Return chart-ready JSON: per-change add/delete series plus the
    current length of every tracked file.

    Each series point is {'x': timestamp, 'y': magnitude}; a change always
    contributes one point to BOTH series (the unaffected one gets y=0) so
    the two series stay aligned on the x axis.
    """
    adds = []
    deletes = []
    # TODO - The data should be stored in a more efficient way for querying
    # Walk all changes in chronological order. (Idiom fixes: dropped the
    # unused enumerate() index and the redundant list comprehension that
    # merely copied sorted()'s output.)
    for change in sorted(map(Change, Change.keys()), key=lambda c: c.timestamp):
        x = change.timestamp
        delta = change.line_delta
        if delta > 0:
            adds.append({'x': x, 'y': delta})
            deletes.append({'x': x, 'y': 0})
        elif delta < 0:
            adds.append({'x': x, 'y': 0})
            deletes.append({'x': x, 'y': abs(delta)})
        else:
            adds.append({'x': x, 'y': 0})
            deletes.append({'x': x, 'y': 0})
    # Map file id -> current length (dict comprehension instead of
    # dict() over a list of pairs).
    files = {f.id: f.length() for f in map(File, File.keys())}
    return jsonify({'add': adds, 'delete': deletes, 'files': files})
def get_recent_changes(num_changes=config.LayoutConfig.num_recent_changes):
    """Return a query for the newest `num_changes` Change rows, most
    recent first."""
    query = Change.select()
    query = query.order_by(Change.timestamp.desc())
    return query.limit(num_changes)
def scrape_mediawiki():
    """Scrape MediaWiki for new changes and save them to the database."""
    # Create DB tables if they don't already exist.
    User.create_table(fail_silently=True)
    Page.create_table(fail_silently=True)
    Change.create_table(fail_silently=True)
    # Parse changes from the MediaWiki API.
    print 'Parsing recent changes from MediaWiki API.'
    # Properties to request from the MediaWiki API.
    recent_changes_props = ['user', 'ids', 'title', 'comment', 'sizes',
                            'timestamp']
    # `rctype` specifies that we only want new page and edit page events.
    recent_changes_action = {'action': 'query',
                             'list': 'recentchanges',
                             'rctype': 'new|edit',
                             'rcprop': '|'.join(recent_changes_props)}
    # Raw API change dicts that are not yet in the DB, newest first.
    new_changes = []
    req_num = 1
    # Page through the `recentchanges` list until we hit data we already
    # have, or run out of pages.
    while True:
        print 'Request %s' % req_num
        # Make the API request.
        resp = request(recent_changes_action)
        # Parse the response data.
        resp_data = resp['query']['recentchanges']
        req_num += 1
        # `changes_exist` is a flag that lets us break out of the double loop.
        changes_exist = False
        for resp_item in resp_data:
            change_id = resp_item['rcid']
            try:
                # `Change.get` either returns a `Change` object if one exists
                # for `change_id` or raises a `DoesNotExist` exception if a
                # `Change` object does not exist.
                Change.get(Change.change_id == change_id)
                # If we've gotten this far, a `DoesNotExist` exception has not
                # been raised, so the `Change` object exists for `change_id`.
                # Set the breakout flag and break out of the inner loop.
                changes_exist = True
                break
            # We're expecting the exception if a `Change` object does not
            # already exist in the DB for the given change. Ignore it.
            except DoesNotExist:
                # Append the response data to the existing data.
                new_changes.append(resp_item)
        # Two conditions break the continuing `recentchanges` request loop:
        #
        # * Reaching changes that exist in the DB
        # * Reaching the end of data, indicated by receiving data that doesn't
        #   contain the `query-continue` key
        if changes_exist:
            print 'Reached changes that already exist in DB.'
            break
        if not 'query-continue' in resp:
            print 'No more query-continue; reached end of data.'
            break
        # The request loop hasn't been broken, and the data contains a
        # `query-continue` key. Use it to request the next page of data.
        next_start_point = resp['query-continue']['recentchanges']['rcstart']
        recent_changes_action['rcstart'] = next_start_point
        print ' query-continue: %s' % recent_changes_action['rcstart']
    # Done!
    print 'Done scraping.'
    # Add all User objects to the DB if they don't already exist.
    # (Trailing comma after print(...) is the Python 2 "no newline" idiom.)
    usernames = {change['user'] for change in new_changes}
    print('Verifying %s users...' % len(usernames)),
    for username in usernames:
        get_user_object(username)
    print 'done.'
    # Add all Page objects to the DB if they don't already exist.
    page_ids = {change['pageid'] for change in new_changes}
    print('Verifying %s pages...' % len(page_ids)),
    for page_id in page_ids:
        get_page_object(page_id)
    print 'done.'
    # Add all new Change objects to the DB.
    print('Verifying %s changes...' % len(new_changes)),
    for change in new_changes:
        get_change_object(change)
    print 'done.'
    print 'Finished!'
def UploadBundle(self, rpc_controller, req, done):
    """Receives an uploaded Git style bundle and queues it for processing.

    Validates the uploader's CLA status, reviewer/cc addresses, target
    project/branch, and any replaced change ids; on any failure sets the
    appropriate status code and invokes `done(rsp)` exactly once. On
    success stores a ReceivedBundle, uploads its first segment, and
    responds with the new bundle id.
    """
    rsp = UploadBundleResponse()

    # Can't get here if there isn't a current user.
    current_user = users.get_current_user()
    if not Account.get_account_for_user(current_user).cla_verified:
        rsp.status_code = UploadBundleResponse.UNAUTHORIZED_USER
        done(rsp)
        return

    # Validate that we have an Account for everyone in reviewers.
    invalid_reviewers = []
    reviewers = Account.get_accounts_for_emails([x for x in req.reviewers])
    for i in range(0, len(reviewers)):
        if not reviewers[i]:
            invalid_reviewers.append(req.reviewers[i])

    # Validate all of the email addresses.
    invalid_cc = []
    for e in req.cc:
        if not forms.fields.email_re.search(e):
            invalid_cc.append(e)

    # Return failure if any of that was bad.
    if invalid_reviewers or invalid_cc:
        rsp.status_code = UploadBundleResponse.UNKNOWN_EMAIL
        for e in invalid_reviewers:
            rsp.invalid_reviewers.append(e)
        for e in invalid_cc:
            rsp.invalid_cc.append(e)
        done(rsp)
        return

    reviewers = [x.user for x in reviewers]
    cc = [db.Email(x) for x in req.cc]

    # Resolve the destination project (dropping a trailing ".git").
    if not req.dest_project:
        rsp.status_code = UploadBundleResponse.UNKNOWN_PROJECT
        done(rsp)
        return
    proj = req.dest_project
    if proj.endswith(".git"):
        proj = proj[0 : len(proj) - 4]
    proj = Project.get_project_for_name(proj)
    if not proj:
        rsp.status_code = UploadBundleResponse.UNKNOWN_PROJECT
        done(rsp)
        return

    # Resolve the destination branch.
    if not req.dest_branch:
        rsp.status_code = UploadBundleResponse.UNKNOWN_BRANCH
        done(rsp)
        # BUG FIX: this branch previously fell through (no return) after
        # calling done(), continuing into the branch lookup and risking a
        # second done() call. Every other error branch returns here.
        return
    brch = Branch.get_branch_for_name(proj, req.dest_branch)
    if not brch:
        rsp.status_code = UploadBundleResponse.UNKNOWN_BRANCH
        done(rsp)
        return

    # Collect the ids of changes this bundle replaces, and verify each one
    # exists and is still open. (Loop variable renamed from `id`, which
    # shadowed the builtin.)
    replaces = list()
    ids_to_check = list()
    for p in req.replace:
        change_id = int(p.change_id)
        ids_to_check.append(change_id)
        replaces.append('%d %s' % (change_id, p.object_id))
    if ids_to_check:
        for change_id, c in zip(ids_to_check, Change.get_by_id(ids_to_check)):
            if not c:
                rsp.status_code = UploadBundleResponse.UNKNOWN_CHANGE
                done(rsp)
                return
            if c.closed:
                rsp.status_code = UploadBundleResponse.CHANGE_CLOSED
                done(rsp)
                return

    # Everything validated: persist the bundle record and its first segment.
    rb = git_models.ReceivedBundle(
        dest_project=proj,
        dest_branch=brch,
        owner=current_user,
        state=git_models.ReceivedBundle.STATE_UPLOADING,
        reviewers=reviewers,
        cc=cc,
        contained_objects=list(req.contained_object),
        replaces=replaces)
    rb.put()
    rsp.bundle_id = str(rb.key().id())
    self._store_segment(req, rsp, rb, 1)
    done(rsp)