Example #1
import os

def update(archive_dir, dataset=None):
    """
    Download all sources into an archive directory. If dataset parameter
    is provided only sources for that dataset will be fetched (otherwise
    all source in the database will be fetched)
    """

    # Create archive directory if it doesn't exist
    if not os.path.isdir(archive_dir):
        os.makedirs(archive_dir)

    # If a dataset is given, limit to its sources; otherwise take all of them
    sources = Source.all() if dataset is None else dataset.sources

    # Update each source
    for source in sources:
        update_source(archive_dir, source)
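A minimal usage sketch, assuming update is importable alongside the project's Source model and update_source helper (names taken from the snippet above; the archive path and my_dataset are hypothetical):

# Hypothetical driver code for the function above.
update('/tmp/archive')                      # fetch every source in the database
update('/tmp/archive', dataset=my_dataset)  # my_dataset: any ORM object exposing a .sources relation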
Example #2
    def output_json(**args):
        """ Output JSON data  """
        outputfile = args.get('outputfile') or []

        if len(outputfile) != 1:
            print "You need to specify one and only one output file"
            return

        outputfile = outputfile[0]

        # Need to load in this order so the relations resolve:
        #   metadataorg
        #   dataorg
        #   source
        #   sourcefile (wrap up files)
        #   dataset

        outputobj = []

        for metadataorg in MetadataOrg.all().all():
            outputobj.append(metadataorg.to_json_dump())

        for dataorg in DataOrg.all().all():
            outputobj.append(dataorg.to_json_dump())

        for source in Source.all().all():
            outputobj.append(source.to_json_dump())

        for sourcefile in SourceFile.all().all():
            outputobj.append(sourcefile.to_json_dump())

        for dataset in Dataset.all().all():
            outputobj.append(dataset.to_json_dump())

        with open(outputfile, 'wb') as f:
            json.dump(outputobj, f)

        print "success"
        print "written to ", outputfile
Example #3
import os

def update(archive_dir):
    # Minimal variant of Example #1: archive every source in the database.
    if not os.path.isdir(archive_dir):
        os.makedirs(archive_dir)
    for source in Source.all():
        update_source(archive_dir, source)