Example #1
import argparse
import logging

from pyhocon.converter import HOCONConverter

# LOG_FORMAT is a module-level constant in the original source; any logging
# format string works for standalone use.
LOG_FORMAT = '%(asctime)s %(levelname)s: %(message)s'


def main():  # pragma: no cover
    parser = argparse.ArgumentParser(description='pyhocon tool')
    parser.add_argument('-i', '--input', help='input file')
    parser.add_argument('-o', '--output', help='output file')
    parser.add_argument('-c', '--compact', action='store_true', default=False, help='compact format')
    parser.add_argument('-f', '--format', help='output format: json, properties, yaml or hocon', default='json')
    parser.add_argument('-n', '--indent', help='indentation step (default is 2)', default=2, type=int)
    parser.add_argument('-v', '--verbosity', action='count', default=0, help='increase output verbosity')
    args = parser.parse_args()

    # Python 2.6 support
    def null_handler():
        return logging.NullHandler() if hasattr(logging, 'NullHandler') else logging.FileHandler('/dev/null')

    logger = logging.getLogger()
    log_handler = logging.StreamHandler() if args.verbosity > 0 else null_handler()
    log_handler.setFormatter(logging.Formatter(LOG_FORMAT))
    logger.addHandler(log_handler)
    if args.verbosity == 1:
        logger.setLevel(logging.ERROR)
    elif args.verbosity == 2:
        logger.setLevel(logging.INFO)
    elif args.verbosity >= 3:
        logger.setLevel(logging.DEBUG)
    HOCONConverter.convert_from_file(args.input, args.output, args.format.lower(), args.indent, args.compact)
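The CLI above is a thin wrapper around convert_from_file; a minimal sketch of calling it directly from Python (the file names are placeholders):

from pyhocon.converter import HOCONConverter

# Convert a HOCON file to YAML in one call; 'app.conf' and 'app.yaml' are hypothetical paths.
HOCONConverter.convert_from_file('app.conf', 'app.yaml', output_format='yaml', indent=2, compact=False)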
Example #2
def _test_convert_from_file(self, input, expected_output, format):
    # Test helper: write the HOCON input to a temp file, convert it to the
    # requested format, and compare the result with the expected output line
    # by line, ignoring blank lines and surrounding whitespace.
    with tempfile.NamedTemporaryFile('w') as fdin:
        fdin.write(input)
        fdin.flush()
        with tempfile.NamedTemporaryFile('r') as fdout:
            HOCONConverter.convert_from_file(fdin.name, fdout.name, format)
            with open(fdout.name) as fdi:
                converted = fdi.read()
                assert [line.strip() for line in expected_output.split('\n') if line.strip()] \
                    == [line.strip() for line in converted.split('\n') if line.strip()]
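The same round trip can be sketched in memory with ConfigFactory and HOCONConverter.convert; the input and expected JSON below are illustrative and assume pyhocon's default two-space JSON rendering:

from pyhocon import ConfigFactory
from pyhocon.converter import HOCONConverter

# Parse an illustrative HOCON string, convert it, and compare line by line
# ignoring indentation and blank lines, as the helper above does.
config = ConfigFactory.parse_string('a { b = 1 }')
converted = HOCONConverter.convert(config, 'json', indent=2)
expected = '{\n  "a": {\n    "b": 1\n  }\n}'
assert [line.strip() for line in expected.split('\n') if line.strip()] \
    == [line.strip() for line in converted.split('\n') if line.strip()]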
Example #3
import requests, json, os
from elasticsearch import Elasticsearch
from pyhocon.converter import HOCONConverter

# Imports some default data into a local Elasticsearch index 'terms' available on port 9200.
# It first converts initial-data.conf to JSON, then loads each item of the resulting list into the index.
index_name = "terms"

template_file = 'initial-template.json'

input_file = 'initial-data.conf'
output_file = 'initial-data.json'

HOCONConverter.convert_from_file(input_file=input_file, output_file=output_file, output_format='json', indent=2, compact=False)

es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])

with open(output_file) as json_data:
    documents = json.load(json_data)[index_name]
    print(f"Loading {len(documents)} documents into Elasticsearch in index {index_name} using template {template_file}")
    with open(template_file) as template:
        es.indices.put_template(name="terms_template", body=template.read())
    for document in documents:
        es.index(index=index_name, body=document)
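The intermediate JSON file is not strictly needed; a sketch of the same load that converts in memory instead, assuming the same initial-data.conf layout with a top-level 'terms' list:

import json

from elasticsearch import Elasticsearch
from pyhocon import ConfigFactory
from pyhocon.converter import HOCONConverter

# Parse the HOCON source, render it to JSON in memory, and index the documents directly.
config = ConfigFactory.parse_file('initial-data.conf')
documents = json.loads(HOCONConverter.convert(config, 'json', indent=2))['terms']

es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])
for document in documents:
    es.index(index='terms', body=document)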