Beispiel #1
0
def main(workspace):
    """Load and group the CCGOIS indicator datasets.

    workspace: directory containing ccgois_indicators.json.
    Presumably `dc` is a CKAN/DataCatalog helper module — confirm against imports.
    """
    DATA_DIR = ffs.Path(workspace)
    # Close the metadata file deterministically; the original
    # json.load(open(...)) left the handle to the garbage collector.
    with open(DATA_DIR / 'ccgois_indicators.json') as f:
        datasets = json.load(f)
    dc.ensure_publisher('hscic')
    dc.ensure_group('ccgois')
    load_ccgois(datasets)
    group_ccgois(datasets)
def ensure_unify_datasets_exist():
    """
    Read the unify datasets to create from the CSV file.

    1. Check if they exist.
    2. If they don't, create 'em.
    3. There is no step 3.
    4. Profit

    Reads DATA_DIR/datasets.csv (DATA_DIR is a module global) and
    creates/updates a dataset per UNIFY2-sourced row.
    """
    dc.ensure_publisher('unify')
    unifyfile = DATA_DIR / 'datasets.csv'
    with unifyfile.csv(header=True) as csv:
        for row in csv:
            # Only rows sourced from UNIFY2 become datasets.
            if row.source == 'UNIFY2':
                dc.Dataset.create_or_update(
                    name=slugify(row.title).lower(),
                    title=row.title,
                    state='active',
                    private=row.public == 'N',  # 'N' => not public
                    license_id='ogl',
                    url='http://data.england.nhs.uk',
                    owner_org='unify',
                    resources=[]
                )
                # Parenthesised single-argument print is identical in
                # Python 2 and also valid Python 3.
                print(slugify(row.title).lower())
    return
Beispiel #3
0
def main(workspace):
    """Publish the CCGOIS indicators: load metadata, then load and group.

    workspace: directory containing ccgois_indicators.json.
    """
    DATA_DIR = ffs.Path(workspace)
    # Use a context manager so the JSON file handle is closed promptly
    # instead of being leaked until garbage collection.
    with open(DATA_DIR / 'ccgois_indicators.json') as f:
        datasets = json.load(f)
    dc.ensure_publisher('hscic')
    dc.ensure_group('ccgois')
    load_ccgois(datasets)
    group_ccgois(datasets)
Beispiel #4
0
def main(workspace):
    """Entry point for the PHE/PHOF publish run.

    Stores *workspace*/data in the module-global DATA_DIR for the
    loader helpers, registers the publisher and group, then loads
    the PHE datasets. Always returns 0.
    """
    global DATA_DIR
    data_path = ffs.Path(workspace) / 'data'
    DATA_DIR = data_path

    dc.ensure_publisher('phe')
    dc.ensure_group('phof')

    load_phe()
    # group_phe()  -- grouping step intentionally disabled
    return 0
def main(workspace):
    """Load every dataset described in *workspace*/metadata.json.

    workspace: directory holding metadata.json (a JSON list of
    dataset descriptions — schema defined by load_dataset).
    """
    DATA_DIR = ffs.Path(workspace)

    dc.ensure_publisher('hscic')

    # Close the metadata file deterministically rather than leaking
    # the handle until garbage collection.
    with open(os.path.join(DATA_DIR, "metadata.json"), "r") as f:
        datasets = json.load(f)
    for dataset in datasets:
        load_dataset(dataset, DATA_DIR)
Beispiel #6
0
def main(workspace):
    """Publish the QOF data found under *workspace*/data, then group it.

    DATA_DIR is a module global consumed by load_qof()/group_qof().
    Always returns 0.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'

    # Register the publisher and group before loading anything.
    dc.ensure_publisher('hscic')
    dc.ensure_group('qof')

    load_qof()
    group_qof()
    return 0
Beispiel #7
0
def main(workspace):
    """Run the PHE loader against *workspace*/data.

    Publishes under 'phe' into the 'phof' group; the data directory
    is recorded in the module global DATA_DIR. Returns 0.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'

    dc.ensure_publisher('phe')
    dc.ensure_group('phof')
    load_phe()
    # group_phe() is deliberately left switched off here.
    return 0
def load(workspace):
    """Create *workspace*/data and publish the HSCIC indicators and datasets.

    The freshly created directory is stored in the module global
    DATA_DIR for the publish helpers. Returns 0.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / "data"
    DATA_DIR.mkdir()

    dc.ensure_publisher('hscic')
    # Start both feeds from offset 0.
    publish_indicators(0)
    publish_datasets(0)
    return 0
Beispiel #9
0
def main(workspace):
    """Load the NHS England survey datasets and group those that load.

    workspace: directory whose 'data' subdirectory will hold the work;
    metadata.json there describes the datasets (JSON list).
    """
    DATA_DIR = ffs.Path(workspace) / 'data'
    DATA_DIR.mkdir()

    dc.ensure_publisher('nhs-england')
    dc.ensure_group('surveys')

    # Context manager closes metadata.json promptly; the original
    # leaked the handle until garbage collection.
    with open(os.path.join(DATA_DIR, "metadata.json"), "r") as f:
        datasets = json.load(f)
    for dataset in datasets:
        if load_dataset(dataset, DATA_DIR):
            groups(dataset)
Beispiel #10
0
def main(workspace):
    """Load the NHS England statistics datasets and group those that load.

    workspace: directory whose 'data' subdirectory will hold the work;
    metadata.json there describes the datasets (JSON list).
    """
    DATA_DIR = ffs.Path(workspace) / 'data'
    DATA_DIR.mkdir()

    dc.ensure_publisher('nhs-england')
    dc.ensure_group('statistics')

    # Close the metadata file deterministically instead of relying on
    # garbage collection as the original did.
    with open(os.path.join(DATA_DIR, "metadata.json"), "r") as f:
        datasets = json.load(f)
    for dataset in datasets:
        if load_statistic(dataset, DATA_DIR):
            groups(dataset)
Beispiel #11
0
def main(workspace):
    """Load and (when anything was written) group the NHSOF indicators.

    workspace: directory whose 'data' subdirectory holds
    nhsof_metadata_indicators.json (fetched via get_resource_file).
    Prints progress; prints a notice instead of grouping when
    load_nhsof reports nothing was created/processed.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'
    DATA_DIR.mkdir()

    datasets = json.load(get_resource_file(DATA_DIR / 'nhsof_metadata_indicators.json'))
    # Parenthesised single-argument print behaves identically under
    # Python 2 and is valid Python 3 (the bare statement form is not).
    print("Ensuring publisher")
    dc.ensure_publisher('hscic')
    print("Ensuring group")
    dc.ensure_group('nhsof')
    wrote = load_nhsof(datasets)
    if wrote:
        group_nhsof(datasets)
    else:
        print("Created/processed no datasets ...")
Beispiel #12
0
def main(workspace):
    """Publish the GP survey statistics in title-year order.

    Sorts the metadata.json entries by the last four characters of
    their title (presumably the year — confirm against the data)
    so older datasets are loaded first.
    """
    DATA_DIR = ffs.Path(workspace) / 'data'
    DATA_DIR.mkdir()

    dc.ensure_publisher('gp-survey')
    dc.ensure_group('surveys')

    def year_as_key(x):
        # e.g. "GP Patient Survey 2013" -> "2013"
        return x['title'][-4:]

    # Close the metadata file deterministically; the original
    # json.load(open(...)) leaked the handle.
    with open(os.path.join(DATA_DIR, "metadata.json"), "r") as f:
        datasets = json.load(f)
    datasets = sorted(datasets, key=year_as_key)
    for dataset in datasets:
        load_statistic(dataset, DATA_DIR)
        groups(dataset)
Beispiel #13
0
def main(workspace):
    """Load and group the NHSOF indicator datasets.

    workspace: directory whose 'data' subdirectory holds
    nhsof_metadata_indicators.json. Groups only when load_nhsof
    reports that something was written; otherwise prints a notice.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'
    DATA_DIR.mkdir()

    datasets = json.load(
        get_resource_file(DATA_DIR / 'nhsof_metadata_indicators.json'))
    # Parenthesised single-argument print works the same in Python 2
    # and is valid Python 3, unlike the bare print statement.
    print("Ensuring publisher")
    dc.ensure_publisher('hscic')
    print("Ensuring group")
    dc.ensure_group('nhsof')
    wrote = load_nhsof(datasets)
    if wrote:
        group_nhsof(datasets)
    else:
        print("Created/processed no datasets ...")
def ensure_unify_datasets_exist():
    """
    Read the unify datasets to create from the CSV file.

    1. Check if they exist.
    2. If they don't, create 'em.
    3. There is no step 3.
    4. Profit

    Reads DATA_DIR/datasets.csv (module-global DATA_DIR) and
    creates/updates one dataset per UNIFY2-sourced row.
    """
    dc.ensure_publisher('unify')
    unifyfile = DATA_DIR / 'datasets.csv'
    with unifyfile.csv(header=True) as csv:
        for row in csv:
            if row.source == 'UNIFY2':
                dc.Dataset.create_or_update(name=slugify(row.title).lower(),
                                            title=row.title,
                                            state='active',
                                            private=row.public == 'N',
                                            license_id='ogl',
                                            url='http://data.england.nhs.uk',
                                            owner_org='unify',
                                            resources=[])
                # Parenthesised single-argument print: identical output
                # in Python 2, valid syntax in Python 3.
                print(slugify(row.title).lower())
    return
Beispiel #15
0
def main():
    """Publish the ODS data under the 'hscic-ods' publisher.

    Returns 0 unconditionally.
    """
    dc.ensure_publisher('hscic-ods')
    publish_ods()
    return 0
Beispiel #16
0
def main():
    """Publish HSCIC indicators and datasets into the 'indicators' group.

    Returns 0 unconditionally.
    """
    dc.ensure_publisher('hscic')
    # Note: two-argument form (group, owner) unlike the single-argument
    # ensure_group calls elsewhere in this file.
    dc.ensure_group('indicators', 'hscic')

    publish_indicators()
    publish_datasets()
    return 0
def main(start_from=266):
    """Publish HSCIC indicators (from *start_from*) and all datasets.

    start_from: index passed to publish_indicators; default 266
    preserves the original hard-coded resume point. Returns 0.
    """
    dc.ensure_publisher('hscic')
    publish_indicators(start_from)
    publish_datasets()
    return 0
def main():
    """Ensure publisher+group exist, then publish indicators and datasets.

    Always returns 0.
    """
    dc.ensure_publisher('hscic')
    dc.ensure_group('indicators', 'hscic')  # (group, owning publisher)
    publish_indicators()
    publish_datasets()
    return 0
Beispiel #19
0
def main():
    """Download, then publish, the Choose and Book data.

    Returns 0 unconditionally.
    """
    dc.ensure_publisher('choose-and-book')

    download_choose_and_book()
    publish_choose_and_book()
    return 0
Beispiel #20
0
def main(workspace):
    """Load the 'pp' datasets from *workspace*/data under 'hscic'.

    Records the data directory in the module global DATA_DIR for
    load_pp(). Returns 0.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'

    dc.ensure_publisher('hscic')
    load_pp()
    return 0
Beispiel #21
0
def main(workspace):
    """Entry point: run the pp loader against *workspace*/data.

    DATA_DIR is a module global consumed by load_pp(); the 'hscic'
    publisher is ensured first. Always returns 0.
    """
    global DATA_DIR
    data_dir = ffs.Path(workspace) / 'data'
    DATA_DIR = data_dir
    dc.ensure_publisher('hscic')
    load_pp()
    return 0
def main():
    """Ensure the 'hscic-ods' publisher exists and publish the ODS data."""
    dc.ensure_publisher('hscic-ods')

    publish_ods()
    return 0
Beispiel #23
0
def main(start_from=266):
    """Publish HSCIC indicators starting at *start_from*, then datasets.

    start_from: offset handed to publish_indicators; the default of
    266 keeps the previously hard-coded behaviour, so main() is
    unchanged for existing callers. Returns 0.
    """
    dc.ensure_publisher('hscic')
    publish_indicators(start_from)
    publish_datasets()
    return 0