Example #1
def convert_cmdline():
    logging.basicConfig()
    parser = argparse.ArgumentParser(
        description='Convert an Open511 document to another format.')
    parser.add_argument('-f',
                        '--format',
                        type=str,
                        help='Target format: ' +
                        ', '.join(f.name for f in FORMATS_LIST))
    parser.add_argument(
        'source',
        metavar='DOC',
        type=str,
                        help='Document to convert: path, URL, or - to read from stdin')
    arguments = parser.parse_args()
    # load_path returns the parsed document and its source format ('xml' or 'json')
    obj, obj_type = load_path(arguments.source)
    if arguments.format:
        output_format = arguments.format
    else:
        # No explicit target format: default to the "other" format
        output_format = 'xml' if obj_type == 'json' else 'json'
    result = open511_convert(obj, output_format, serialize=True)
    stdout = sys.stdout
    if hasattr(stdout, 'detach'):
        # Python 3: detach() exposes the underlying binary buffer so the serialized bytes can be written
        stdout = stdout.detach()
    stdout.write(result)
    stdout.write(b"\n")
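The same conversion can be done without the command-line wrapper. A minimal sketch, assuming load_path and open511_convert behave as in the example above ('events.xml' is a hypothetical path):

obj, obj_type = load_path('events.xml')                 # parsed document plus its source format
result = open511_convert(obj, 'json', serialize=True)   # serialized output in the target format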
Example #2
def convert():
    doc_content = _load_document()
    # Python 2: make sure the document body is unicode before deserializing
    if not isinstance(doc_content, unicode):
        doc_content = doc_content.decode('utf8')
    doc, doc_format = deserialize(doc_content)

    # Target output format requested by the client (e.g. 'xml' or 'json')
    format = request.values['format']
    result = open511_convert(doc, format)
    format_info = FORMATS[format]
    return Response(result, mimetype=format_info.content_type)
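The view above is a thin wrapper around the library calls. A minimal sketch of the same flow outside Flask, assuming deserialize, open511_convert and FORMATS behave as they do in this example (doc_content stands in for an Open511 XML or JSON string):

doc, doc_format = deserialize(doc_content)       # parse the incoming document
result = open511_convert(doc, 'json')            # convert it to the requested format
mimetype = FORMATS['json'].content_type          # Content-Type to serve the result with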
Example #3
def convert_cmdline():
    logging.basicConfig()
    parser = argparse.ArgumentParser(description='Convert an Open511 document to another format.')
    parser.add_argument('-f', '--format', type=str,
        help='Target format: ' + ', '.join(f.name for f in FORMATS_LIST))
    parser.add_argument('source', metavar='DOC', type=str,
        help='Document to convert: path, URL, or - to read from stdin')
    arguments = parser.parse_args()
    obj, obj_type = load_path(arguments.source)
    if arguments.format:
        output_format = arguments.format
    else:
        output_format = 'xml' if obj_type == 'json' else 'json'
    result = open511_convert(obj, output_format, serialize=True)
    stdout = sys.stdout
    if hasattr(stdout, 'detach'):
        # Python 3: detach() exposes the underlying binary buffer so the serialized bytes can be written
        stdout = stdout.detach()
    stdout.write(result)
    stdout.write(b"\n")
Example #4
def sync_masas(doc, secret, author_url, feed_url, timezone=None):
    # Get the timezone
    if not timezone:
        jdoc = ensure_format(doc, 'json')
        jurisdiction_urls = set(e['jurisdiction_url'] for e in jdoc['events'])
        if len(jurisdiction_urls) == 0:
            logger.warning("No events provided")
            timezone = 'UTC'
        elif len(jurisdiction_urls) > 1:
            raise Exception("Not all events are from the same jurisdiction")
        else:
            jurisdiction_url = list(jurisdiction_urls)[0]
            timezone = get_jurisdiction_settings(jurisdiction_url)['timezone']

    # Convert the source doc to atom
    doc = open511_convert(doc, "atom", serialize=False, include_expires=True, default_timezone_name=timezone)
    _add_fingerprints(doc)
    entries = _feed_to_dict(doc)

    # Get the existing items on the hub
    auth_header = {'Authorization': "MASAS-Secret %s" % secret}
    push_headers = dict(auth_header)
    push_headers['Content-Type'] = 'application/atom+xml'
    resp = requests.get(feed_url, params={'author': author_url}, headers=auth_header)
    resp.raise_for_status()
    existing_entries = _feed_to_dict(etree.fromstring(resp.content))

    # Figure out what we need to do
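    # Diff the local entries against the hub by fingerprint:
    #   in both, same fingerprint      -> nothing to do
    #   in both, different fingerprint -> UPDATE (PUT to the existing entry's URL)
    #   only in the local document     -> CREATE (POST to the feed)
    #   only on the hub                -> DELETE (expire the hub entry via PUT)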
    tasks = []
    for entry_id, entry in entries.iteritems():
        task = {'entry': entry, 'entry_id': entry_id}
        if entry_id in existing_entries:
            existing = existing_entries[entry_id]
            if _get_fingerprint(entry) == _get_fingerprint(existing):
                logger.info("%s is up-to-date" % entry_id)
                # Already up-to-date
                continue
            else:
                # Already on MASAS, but we have a different version
                task.update(
                    action='UPDATE',
                    url=_get_url(existing)
                )
        else:
            task['action'] = 'CREATE'
        tasks.append(task)
    for entry_id, entry in existing_entries.iteritems():
        if entry_id not in entries:
            task = {'entry': entry, 'entry_id': entry_id}
            logger.info("%s is on MASAS, but not in provided file" % entry_id)
            task.update({
                'action': 'DELETE',
                'url': _get_url(entry)
            })
            tasks.append(task)

    def _resp_error(resp):
        # Return the response on success (2xx); otherwise log the error
        if str(resp.status_code)[0] == '2':
            return resp
        sys.stderr.write(resp.content)
        logger.error("Error %s on %s" % (resp.status_code, resp.url))

    # Perform queued tasks
    for task in tasks:
        if task['action'] == 'CREATE':
            logger.info("Creating %s" % task['entry_id'])
            _resp_error(requests.post(feed_url, headers=push_headers,
                                      data=etree.tostring(task['entry'], pretty_print=True)))
        elif task['action'] == 'UPDATE':
            logger.info("Updating %s" % task['entry_id'])
            _resp_error(requests.put(task['url'], headers=push_headers,
                                     data=etree.tostring(task['entry'], pretty_print=True)))
        elif task['action'] == 'DELETE':
            logger.info("Expiring %s" % task['entry_id'])
            entry = task['entry']
            _change_expires(entry)
            _resp_error(requests.put(task['url'], headers=push_headers, data=etree.tostring(entry, pretty_print=True)))
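A hypothetical invocation sketch (the document path, secret and MASAS URLs are placeholders, and load_path is assumed to behave as in the earlier command-line examples):

doc, doc_type = load_path('events.xml')
sync_masas(doc,
           secret='my-masas-secret',
           author_url='https://masas.example.org/hub/accounts/1',
           feed_url='https://masas.example.org/hub/feed')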