Exemplo n.º 1
0
 def import_from(self, data):
     """Populate this event's fields from an exported ``data`` mapping."""
     super(ContentMixin, self).import_from(data)
     # Timestamps arrive as ISO 8601 strings and must be parsed back
     self.start_datetime = parse_isoformat(data['start_datetime'])
     self.end_datetime = parse_isoformat(data['end_datetime'])
     # The remaining fields copy over verbatim, in the original key order
     for key in ('timezone', 'location_name', 'location_address',
                 'mapmarker', 'capacity', 'allow_waitlisting', 'allow_maybe'):
         setattr(self, key, data[key])
Exemplo n.º 2
0
 def import_from(self, data):
     """Load this event's fields from the exported ``data`` dict."""
     super(ContentMixin, self).import_from(data)
     # ISO 8601 timestamps are converted back into datetime objects
     self.start_datetime = parse_isoformat(data['start_datetime'])
     self.end_datetime = parse_isoformat(data['end_datetime'])
     # Plain fields are copied across in one unpacking step
     (self.timezone, self.location_name, self.location_address,
      self.mapmarker, self.capacity, self.allow_waitlisting,
      self.allow_maybe) = (
         data['timezone'], data['location_name'], data['location_address'],
         data['mapmarker'], data['capacity'], data['allow_waitlisting'],
         data['allow_maybe'])
Exemplo n.º 3
0
 def import_from(self, data):
     """Restore this node's identity and metadata from an export dict."""
     # Required identity fields: missing keys raise KeyError, as before
     for key in ('uuid', 'name', 'title'):
         setattr(self, key, data[key])
     # 'author' is optional in exports; default to None when absent
     self.author = data.get('author')
     self.published_at = parse_isoformat(data['published_at'])
     self.properties = data['properties']
Exemplo n.º 4
0
 def import_from(self, data):
     """Set node identity and metadata fields from exported ``data``."""
     self.uuid, self.name, self.title = data['uuid'], data['name'], data['title']
     self.author = data.get('author')  # optional key; None when missing
     # published_at is serialized as an ISO 8601 string
     self.published_at = parse_isoformat(data['published_at'])
     self.properties = data['properties']
Exemplo n.º 5
0
 def import_from(self, data):
     """Import content, creating a new revision only when data is newer."""
     super(ContentMixin, self).import_from(data)
     latest = self.last_revision()
     if latest:
         if latest.updated_at >= parse_isoformat(data['revision_updated_at']):
             # Incoming data is older than or the same as our last revision
             return
     new_rev = self.revise()
     new_rev.title = data['title']
     new_rev.description = data['description']
     new_rev.content = data['content']
     new_rev.template = data['template']
Exemplo n.º 6
0
 def import_from(self, data):
     """Import content fields, revising only if the data is newer."""
     super(ContentMixin, self).import_from(data)
     prior = self.last_revision()
     # Skip the import when the incoming revision timestamp is not newer
     # than what we already have
     if prior and prior.updated_at >= parse_isoformat(
             data['revision_updated_at']):
         return
     revision = self.revise()
     for field in ('title', 'description', 'content', 'template'):
         setattr(revision, field, data[field])
Exemplo n.º 7
0
def folder_import(website, folder):
    """Handle the folder import form: accept an uploaded JSON export and
    merge its nodes into *folder*, creating missing nodes and updating
    existing ones.

    :param website: website that owns the folder (stashed on ``g``)
    :param folder: target folder receiving the imported nodes
    """
    g.website = website
    g.folder = folder
    import_count = 0
    create_count = 0
    form = ImportForm()
    internal_imports = []
    if form.validate_on_submit():
        # NOTE(review): ``use_decimal`` implies ``json`` is simplejson here —
        # confirm against the file's imports
        data = json.loads(request.files['import_file'].getvalue(),
                          use_decimal=True)
        for inode in data['nodes']:
            # Prefer the revision's timestamp when present, else the node's own
            mtime = parse_isoformat(
                inode.get('revision_updated_at', inode['updated_at']))
            node = Node.query.filter_by(folder=folder,
                                        uuid=inode['uuid']).first()
            if node is None:
                # New node: look up its type in the registry to construct it
                nreg = node_registry.get(inode['type'])
                if nreg is None:
                    flash(
                        "Could not import node of unknown type '%s'" %
                        inode['type'], "error")
                    continue
                node = nreg.model(folder=folder)
                # Attribute to the original author when known, else the importer
                user = User.query.filter_by(userid=inode['userid']).first()
                node.user = user or g.user
                db.session.add(node)
                create_count += 1
            else:
                # Optionally skip nodes that are not newer than the local copy
                if form.import_updated.data and mtime <= node.updated_at:
                    continue
            node.import_from(inode)
            internal_imports.append(inode)
            import_count += 1
        db.session.commit()
        # Second pass for internal data of nodes
        for inode in internal_imports:
            node = Node.query.filter_by(folder=folder,
                                        uuid=inode['uuid']).first()
            node.import_from_internal(inode)
        db.session.commit()
        flash(
            "%d nodes were imported and %d new nodes were created" %
            (import_count, create_count), "success")
        return render_redirect(url_for('folder',
                                       website=website.name,
                                       folder=folder.name),
                               code=303)
    return render_form(form=form,
                       title=u"Import to folder",
                       submit=u"Upload",
                       cancel_url=url_for('folder',
                                          website=website.name,
                                          folder=folder.name))
Exemplo n.º 8
0
def folder_import(website, folder):
    """Handle the folder import form: accept an uploaded JSON export and
    merge its nodes into *folder*, creating missing nodes and updating
    existing ones.

    :param website: website that owns the folder (stashed on ``g``)
    :param folder: target folder receiving the imported nodes
    """
    g.website = website
    g.folder = folder
    import_count = 0
    create_count = 0
    form = ImportForm()
    internal_imports = []
    if form.validate_on_submit():
        # NOTE(review): ``use_decimal`` implies ``json`` is simplejson here —
        # confirm against the file's imports
        data = json.loads(request.files["import_file"].getvalue(), use_decimal=True)
        for inode in data["nodes"]:
            # Prefer the revision's timestamp when present, else the node's own
            mtime = parse_isoformat(inode.get("revision_updated_at", inode["updated_at"]))
            node = Node.query.filter_by(folder=folder, uuid=inode["uuid"]).first()
            # no_autoflush keeps partially-populated nodes from being flushed
            # prematurely by the queries issued inside this block
            with db.session.no_autoflush:
                if node is None:
                    # New node: look up its type in the registry to construct it
                    nreg = node_registry.get(inode["type"])
                    if nreg is None:
                        flash("Could not import node of unknown type '%s'" % inode["type"], "error")
                        continue
                    node = nreg.model(folder=folder)
                    # Attribute to the original author when known, else the importer
                    user = User.query.filter_by(userid=inode["userid"]).first()
                    node.user = user or g.user
                    db.session.add(node)
                    create_count += 1
                else:
                    # Optionally skip nodes that are not newer than the local copy
                    if form.import_updated.data and mtime <= node.updated_at:
                        continue
                node.import_from(inode)
            internal_imports.append(inode)
            import_count += 1
        db.session.commit()
        # Second pass for internal data of nodes
        for inode in internal_imports:
            node = Node.query.filter_by(folder=folder, uuid=inode["uuid"]).first()
            node.import_from_internal(inode)
        db.session.commit()
        flash("%d nodes were imported and %d new nodes were created" % (import_count, create_count), "success")
        return render_redirect(url_for("folder", website=website.name, folder=folder.name), code=303)
    return render_form(
        form=form,
        title=u"Import to folder",
        submit=u"Upload",
        cancel_url=url_for("folder", website=website.name, folder=folder.name),
    )
Exemplo n.º 9
0
 def _data(self):
     """Return the funnel JSON feed, fetching it once and caching on self."""
     if not hasattr(self, '_data_cached'):
         try:
             response = requests.get(
                 'http://funnel.hasgeek.com/%s/json' % self.funnel_name)
             # requests changed .json from property to method; support both
             payload = response.json() if callable(response.json) else response.json
             sectionmap = {s['title']: s['name'] for s in payload['sections']}
             for proposal in payload['proposals']:
                 proposal['submitted'] = parse_isoformat(proposal['submitted'])
                 proposal['section_name'] = sectionmap.get(proposal['section'])
                 votes = proposal['votes']
                 # Positive vote counts get an explicit '+' prefix
                 proposal['votes'] = '+%d' % votes if votes > 0 else '%d' % votes
             self._data_cached = payload
         except ConnectionError:
             # Funnel unreachable: cache an empty skeleton so we don't retry
             self._data_cached = {'proposals': [], 'sections': [], 'space': {}}
     return self._data_cached
Exemplo n.º 10
0
 def _data(self):
     """Fetch, massage and cache the funnel space's JSON feed."""
     if hasattr(self, '_data_cached'):
         return self._data_cached
     try:
         resp = requests.get(
             'http://funnel.hasgeek.com/%s/json' % self.funnel_name)
         # Older requests exposed .json as a property; newer as a method
         feed = resp.json() if callable(resp.json) else resp.json
         names_by_title = dict(
             (section['title'], section['name'])
             for section in feed['sections'])
         for item in feed['proposals']:
             item['submitted'] = parse_isoformat(item['submitted'])
             item['section_name'] = names_by_title.get(item['section'])
             count = item['votes']
             # Prefix positive counts with '+'
             item['votes'] = ('+%d' if count > 0 else '%d') % count
         self._data_cached = feed
     except ConnectionError:
         # Fall back to an empty dataset when funnel is unreachable
         self._data_cached = {
             'proposals': [],
             'sections': [],
             'space': {},
         }
     return self._data_cached
Exemplo n.º 11
0
    def _sync(self):
        """Synchronise participants from DoAttend, yielding progress messages.

        Pulls the participant list for ``self.node.sourceid`` using
        ``self.node.api_key``, creates or updates local ``Participant``
        records, and deletes participants that disappeared upstream.
        """
        if self.node.source != 'doattend':
            yield "Unsupported data source, aborting.\n"
            return
        if not self.node.sourceid or not self.node.api_key:
            yield "Source event id and API key are required.\n"
            return
        # All good, start pulling data...
        data_url = 'http://doattend.com/api/events/%s/participants_list.json?api_key=%s' % (
            self.node.sourceid, self.node.api_key)
        yield "Receiving data from DoAttend..."
        r = requests.get(data_url)
        # FIX: was `data = r.json`, which under requests >= 1.0 binds the
        # json *method* instead of the parsed payload, breaking the
        # subscripting below. Support both old (property) and new (method) APIs.
        data = r.json() if callable(r.json) else r.json
        yield " OK\n"
        yield "Participant count: %d\n" % len(data['participants'])
        yield "Previously synced count: %d\n\n" % len(self.node.participants)

        by_ticket = {}
        local_tickets = set()
        upstream_tickets = set()
        unindexed = []  # NOTE(review): collected but currently unused
        for participant in self.node.participants:
            if participant.ticket is not None:
                by_ticket[participant.ticket] = participant
                local_tickets.add(participant.ticket)
            else:
                unindexed.append(participant)
        plist = data['participants']
        plist.reverse()  # DoAttend list is sorted by most-recent first
        for p in plist:
            upstream_tickets.add(p['Ticket_Number'])
            participant = by_ticket.get(p['Ticket_Number'])
            if participant is None:
                participant = Participant(participant_list=self.node)
                db.session.add(participant)
                participant.ticket = p['Ticket_Number'].strip()
                by_ticket[participant.ticket] = participant
                local_tickets.add(participant.ticket)
            # Normalize upstream values, stripping whitespace on strings only
            syncinfo = {
                'datetime': parse_isoformat(p['Date']),
                'fullname': p['Name'].strip() if isinstance(p['Name'], basestring) else p['Name'],
                'email': p['Email'].strip() if isinstance(p['Email'], basestring) else p['Email'],
                'ticket_type': p['Ticket_Name'].strip() if isinstance(p['Ticket_Name'], basestring) else p['Ticket_Name'],
            }
            pinfo = p.get('participant_information', [])
            if isinstance(pinfo, dict):
                pinfo = [pinfo]
            for keyval in pinfo:
                key = keyval['desc']
                value = keyval.get('info')
                if key == 'Job Title':
                    syncinfo['jobtitle'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'Company':
                    syncinfo['company'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'Twitter Handle':
                    syncinfo['twitter'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'City':
                    syncinfo['city'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'T-shirt size':
                    syncinfo['tshirt_size'] = value.split('-', 1)[0].strip() if isinstance(value, basestring) else value
            edited = False
            for key, value in syncinfo.items():
                if getattr(participant, key) != value:
                    setattr(participant, key, value)
                    # FIX: was `'key' == 'email'` — a literal-vs-literal
                    # comparison that is always False, so the linked user was
                    # never reset when the email address changed
                    if key == 'email':
                        participant.user = None
                    edited = True
            if edited:
                if participant.id is None:
                    yield "New participant (#%s): %s\n" % (participant.ticket, participant.fullname)
                else:
                    yield "Edited participant (#%s): %s\n" % (participant.ticket, participant.fullname)
        # Check for deleted participants
        removed_tickets = local_tickets - upstream_tickets
        for ticket in removed_tickets:
            participant = by_ticket.get(ticket)
            if participant:
                yield "Removed participant (#%s): %s\n" % (ticket, participant.fullname)
                db.session.delete(participant)
        db.session.commit()
        yield '\nAll done.'
Exemplo n.º 12
0
    def _sync(self):
        """Synchronise participants from DoAttend, yielding progress messages.

        Pulls the participant list for ``self.node.sourceid`` using
        ``self.node.api_key``, creates or updates local ``Participant``
        records, and deletes participants that disappeared upstream.
        """
        if self.node.source != 'doattend':
            yield "Unsupported data source, aborting.\n"
            return
        if not self.node.sourceid or not self.node.api_key:
            yield "Source event id and API key are required.\n"
            return
        # All good, start pulling data...
        data_url = 'http://doattend.com/api/events/%s/participants_list.json?api_key=%s' % (
            self.node.sourceid, self.node.api_key)
        yield "Receiving data from DoAttend..."
        r = requests.get(data_url)
        # Support both old (property) and new (method) requests .json APIs
        data = r.json() if callable(r.json) else r.json
        yield " OK\n"
        yield "Participant count: %d\n" % len(data['participants'])
        yield "Previously synced count: %d\n\n" % len(self.node.participants)

        by_ticket = {}
        local_tickets = set()
        upstream_tickets = set()
        unindexed = []  # NOTE(review): collected but currently unused
        for participant in self.node.participants:
            if participant.ticket is not None:
                by_ticket[participant.ticket] = participant
                local_tickets.add(participant.ticket)
            else:
                unindexed.append(participant)
        plist = data['participants']
        plist.reverse()  # DoAttend list is sorted by most-recent first
        for p in plist:
            upstream_tickets.add(p['Ticket_Number'])
            participant = by_ticket.get(p['Ticket_Number'])
            if participant is None:
                participant = Participant(participant_list=self.node)
                db.session.add(participant)
                participant.ticket = p['Ticket_Number'].strip()
                by_ticket[participant.ticket] = participant
                local_tickets.add(participant.ticket)
            # Normalize upstream values, stripping whitespace on strings only
            syncinfo = {
                'datetime': parse_isoformat(p['Date']),
                'fullname': p['Name'].strip() if isinstance(p['Name'], basestring) else p['Name'],
                'email': p['Email'].strip() if isinstance(p['Email'], basestring) else p['Email'],
                'ticket_type': p['Ticket_Name'].strip() if isinstance(p['Ticket_Name'], basestring) else p['Ticket_Name'],
            }
            pinfo = p.get('participant_information', [])
            if isinstance(pinfo, dict):
                pinfo = [pinfo]
            for keyval in pinfo:
                key = keyval['desc']
                value = keyval.get('info')
                if key == 'Job Title':
                    syncinfo['jobtitle'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'Company':
                    syncinfo['company'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'Twitter Handle':
                    syncinfo['twitter'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'City':
                    syncinfo['city'] = value.strip() if isinstance(value, basestring) else value
                elif key == 'T-shirt size':
                    syncinfo['tshirt_size'] = value.split('-', 1)[0].strip() if isinstance(value, basestring) else value
            edited = False
            for key, value in syncinfo.items():
                if getattr(participant, key) != value:
                    setattr(participant, key, value)
                    # FIX: was `'key' == 'email'` — a literal-vs-literal
                    # comparison that is always False, so the linked user was
                    # never reset when the email address changed
                    if key == 'email':
                        participant.user = None
                    edited = True
            if edited:
                if participant.id is None:
                    yield "New participant (#%s): %s\n" % (participant.ticket, participant.fullname)
                else:
                    yield "Edited participant (#%s): %s\n" % (participant.ticket, participant.fullname)
        # Check for deleted participants
        removed_tickets = local_tickets - upstream_tickets
        for ticket in removed_tickets:
            participant = by_ticket.get(ticket)
            if participant:
                yield "Removed participant (#%s): %s\n" % (ticket, participant.fullname)
                db.session.delete(participant)
        db.session.commit()
        yield '\nAll done.'