Example #1
    def test_get_products(self):

        product = querydb.get_products(activated=True)
        logger.info("Active products: %s", product)
        for row in product:
            print(row)

        product = querydb.get_products(activated=False)
        logger.info("Non-active products: %s", product)
        for row in product:
            print(row)

        product = querydb.get_products(masked=False)
        logger.info("Not masked products: %s", product)
        for row in product:
            print(row)

        self.assertEqual(1, 1)
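The assertEqual(1, 1) at the end can never fail, so the test only checks that the queries do not raise. A minimal sketch of a variant with assertions on the returned values (hypothetical test name; it assumes the same querydb and logger imports as above):

    def test_get_products_returns_results(self):

        # Materialise the results so they can be counted and logged.
        active = list(querydb.get_products(activated=True))
        non_active = list(querydb.get_products(activated=False))
        not_masked = list(querydb.get_products(masked=False))
        logger.info("Active: %d, non-active: %d, not masked: %d",
                    len(active), len(non_active), len(not_masked))

        # The calls should return (possibly empty) sequences without raising.
        self.assertIsNotNone(active)
        self.assertIsNotNone(non_active)
        self.assertIsNotNone(not_masked)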
Example #2
    def test_all_products_to_json(self):
        def row2dict(row):
            d = {}
            for column_name in list(row.c.keys()):  # all_cols:
                d[column_name] = str(getattr(row, column_name))
            return d

        # get full distinct list of products (native only)
        db_products = querydb.get_products()
        try:
            db_products.__len__()
        except AttributeError:
            db_products = querydb.get_product_native(allrecs=True)
        self.assertTrue(len(db_products) > 0)
        products_dict_all = []
        # loop the products list
        for product in db_products:
            if python_version == 2:
                prod_dict = row2dict(product)
            if python_version == 3:
                prod_dict = product
            productcode = prod_dict['productcode']
            version = prod_dict['version']
            p = Product(product_code=productcode, version=version)

            # does the product have mapsets AND subproducts?
            all_prod_mapsets = p.mapsets
            all_prod_subproducts = p.subproducts
            if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
                prod_dict['productmapsets'] = []
                for mapset in all_prod_mapsets:
                    mapset_info = querydb.get_mapset(mapsetcode=mapset, allrecs=False)
                    mapset_dict = row2dict(mapset_info)
                    mapset_dict['mapsetdatasets'] = []
                    all_mapset_datasets = p.get_subproducts(mapset=mapset)
                    for subproductcode in all_mapset_datasets:
                        dataset_info = querydb.get_subproduct(productcode=productcode,
                                                              version=version,
                                                              subproductcode=subproductcode)

                        dataset_dict = row2dict(dataset_info)
                        dataset = p.get_dataset(mapset=mapset, sub_product_code=subproductcode)
                        completeness = dataset.get_dataset_normalized_info()
                        dataset_dict['datasetcompleteness'] = completeness

                        mapset_dict['mapsetdatasets'].append(dataset_dict)
                    prod_dict['productmapsets'].append(mapset_dict)
            products_dict_all.append(prod_dict)

        # See ES2-596
        self.assertEqual(len(db_products), len(products_dict_all))
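The method builds products_dict_all but never serialises it, although the test name refers to JSON. A minimal sketch of that final step (assuming products_dict_all from the loop above; default=str guards against values that are not directly serialisable):

        import json

        # Serialise the nested product/mapset/dataset structure and check it is non-empty.
        prod_json = json.dumps(products_dict_all, ensure_ascii=False,
                               sort_keys=True, indent=4, default=str)
        self.assertTrue(len(prod_json) > 0)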
Example #3
    def test_all_products_to_json(self):
        def row2dict(row):
            d = {}
            for column_name in row.c.keys():  # all_cols:
                d[column_name] = str(getattr(row, column_name))
            return d

        # get full distinct list of products (native only)
        db_products = querydb.get_products(echo=False)
        try:
            db_products.__len__()
        except AttributeError:
            db_products = querydb.get_product_native(allrecs=True, echo=False)
        self.assertTrue(len(db_products) > 0)
        products_dict_all = []
        # loop the products list
        for product in db_products:
            prod_dict = row2dict(product)
            productcode = prod_dict['productcode']
            version = prod_dict['version']
            p = Product(product_code=productcode, version=version)

            # does the product have mapsets AND subproducts?
            all_prod_mapsets = p.mapsets
            all_prod_subproducts = p.subproducts
            if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
                prod_dict['productmapsets'] = []
                for mapset in all_prod_mapsets:
                    mapset_info = querydb.get_mapset(mapsetcode=mapset, allrecs=False, echo=False)
                    mapset_dict = row2dict(mapset_info)
                    mapset_dict['mapsetdatasets'] = []
                    all_mapset_datasets = p.get_subproducts(mapset=mapset)
                    for subproductcode in all_mapset_datasets:
                        dataset_info = querydb.get_subproduct(productcode=productcode,
                                                              version=version,
                                                              subproductcode=subproductcode,
                                                              echo=False)

                        dataset_dict = row2dict(dataset_info)
                        dataset = p.get_dataset(mapset=mapset, sub_product_code=subproductcode)
                        completeness = dataset.get_dataset_normalized_info()
                        dataset_dict['datasetcompleteness'] = completeness

                        mapset_dict['mapsetdatasets'].append(dataset_dict)
                    prod_dict['productmapsets'].append(mapset_dict)
            products_dict_all.append(prod_dict)
        self.assertEqual(len(db_products), 31)
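The nested row2dict helper is duplicated across these tests (Example #4 below uses a shared functions.row2dict instead). A minimal sketch of a module-level converter (hypothetical name row_to_dict) that also accepts rows which are already dicts, as in the python_version == 3 branch of Example #2:

    def row_to_dict(row):
        # Rows that are already plain dicts pass through with stringified values.
        if isinstance(row, dict):
            return {key: str(value) for key, value in row.items()}
        # Attribute-style rows expose their column names through row.c.keys().
        return {name: str(getattr(row, name)) for name in row.c.keys()}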
Example #4
    def test_data_management(self):
        import json

        db_products = querydb.get_products(activated=True)

        if len(db_products) > 0:
            products_dict_all = []
            # loop the products list
            for row in db_products:
                prod_dict = functions.row2dict(row)
                productcode = prod_dict['productcode']
                version = prod_dict['version']

                p = Product(product_code=productcode, version=version)
                # print productcode
                # does the product have mapsets AND subproducts?
                all_prod_mapsets = p.mapsets
                all_prod_subproducts = p.subproducts
                if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0:
                    prod_dict['productmapsets'] = []
                    for mapset in all_prod_mapsets:
                        mapset_dict = []
                        # print mapset
                        mapset_info = querydb.get_mapset(mapsetcode=mapset,
                                                         allrecs=False)
                        # if mapset_info.__len__() > 0:
                        mapset_dict = functions.row2dict(mapset_info)
                        # else:
                        #   mapset_dict['mapsetcode'] = mapset
                        mapset_dict['mapsetdatasets'] = []
                        all_mapset_datasets = p.get_subproducts(mapset=mapset)
                        for subproductcode in all_mapset_datasets:
                            # print 'productcode: ' + productcode
                            # print 'version: ' + version
                            # print 'subproductcode: ' + subproductcode
                            dataset_info = querydb.get_subproduct(
                                productcode=productcode,
                                version=version,
                                subproductcode=subproductcode)
                            # print dataset_info
                            # dataset_info = querydb.db.product.get(productcode, version, subproductcode)
                            # dataset_dict = {}
                            if dataset_info is not None:
                                dataset_dict = functions.row2dict(dataset_info)
                                # dataset_dict = dataset_info.__dict__
                                # del dataset_dict['_labels']
                                if hasattr(dataset_info, 'frequency_id'):
                                    if dataset_info.frequency_id in ('e15minute', 'e30minute'):
                                        dataset_dict['nodisplay'] = 'no_minutes_display'
                                    # To be implemented in dataset.py
                                    elif dataset_info.frequency_id == 'e1year':
                                        dataset_dict['nodisplay'] = 'no_minutes_display'
                                    else:
                                        dataset = p.get_dataset(mapset=mapset,
                                                                sub_product_code=subproductcode)
                                        completeness = dataset.get_dataset_normalized_info()
                                        dataset_dict['datasetcompleteness'] = completeness
                                        dataset_dict['nodisplay'] = 'false'

                                    dataset_dict['mapsetcode'] = mapset_dict['mapsetcode']
                                    dataset_dict['mapset_descriptive_name'] = mapset_dict['descriptive_name']

                                    mapset_dict['mapsetdatasets'].append(dataset_dict)
                        prod_dict['productmapsets'].append(mapset_dict)
                products_dict_all.append(prod_dict)

            prod_json = json.dumps(products_dict_all,
                                   ensure_ascii=False,
                                   sort_keys=True,
                                   indent=4,
                                   separators=(', ', ': '))

            datamanagement_json = '{"success":"true", "total":'\
                                  + str(db_products.__len__())\
                                  + ',"products":'+prod_json+'}'

        else:
            datamanagement_json = '{"success":false, "error":"No data sets defined!"}'
Example #5
def push_data_ftp(dry_run=False,
                  user=None,
                  psw=None,
                  url=None,
                  trg_dir=None,
                  masked=True):

    #   Synchronizes data towards an FTP server (JRC only).
    #   Since the new srv-ies-ftp.jrc.it FTP server was set up, it replaces the bash script mirror_to_ftp.sh.
    #   Configuration:  it looks at all 'non-masked' products and pushes them.
    #                   For the mapsets, it finds what is in the filesystem and pushes only the 'largest' one.
    #   It uses a command like:
    #       lftp -e "mirror -RLe /data/processing/vgt-ndvi/sv2-pv2.1/SPOTV-Africa-1km/derived/10dmax-linearx2/
    #                            /narma/eStation_2.0/processing/vgt-ndvi/sv2-pv2.1/SPOTV-Africa-1km/derived/10dmax-linearx2/;exit"
    #                            -u <user>:<password> sftp://srv-ies-ftp.jrc.it >> /eStation2/log/push_data_ftp.log
    #

    spec_logger = log.my_logger('apps.es2system.push_data_ftp')

    try:
        from config import server_ftp
    except ImportError:
        logger.warning('Configuration file for ftp sync not found. Exiting.')
        return 1

    if user is None:
        user = server_ftp.server['user']
    if psw is None:
        psw = server_ftp.server['psw']
    if url is None:
        url = server_ftp.server['url']
    if trg_dir is None:
        trg_dir = server_ftp.server['data_dir']

    # Create an ad-hoc file for the lftp command output (beside the standard logger)
    logfile = es_constants.es2globals['log_dir'] + 'push_data_ftp.log'
    message = time.strftime(
        "%Y-%m-%d %H:%M") + ' INFO: Running the ftp sync now ... \n'

    logger.debug("Entering routine %s" % 'push_data_ftp')

    # Loop over 'not-masked' products
    products = querydb.get_products(masked=False)
    # products = products[21:23]            # test a subset
    for row in products:

        prod_dict = functions.row2dict(row)
        productcode = prod_dict['productcode']
        version = prod_dict['version']
        spec_logger.info('Working on product {}/{}'.format(
            productcode, version))

        # TEMP - For testing only
        # if productcode!='vgt-ndvi' or version !='sv2-pv2.2':
        #     continue

        # Check if it is in the list of 'exclusions' defined in ./config/server_ftp.py
        key = '{}/{}'.format(productcode, version)
        skip = False
        if key in server_ftp.exclusions:
            skip = True
            logger.debug('Do not sync for {}/{}'.format(productcode, version))

        p = Product(product_code=productcode, version=version)

        all_prod_mapsets = p.mapsets
        all_prod_subproducts = p.subproducts

        # Check there is at least one mapset and one subproduct
        if len(all_prod_mapsets) > 0 and len(all_prod_subproducts) > 0 and not skip:

            # In case of several mapsets, check if there is a 'larger' one
            if len(all_prod_mapsets) > 1:
                mapset_to_use = []
                for my_mapset in all_prod_mapsets:
                    mapset_info = querydb.get_mapset(mapsetcode=my_mapset,
                                                     allrecs=False)
                    if hasattr(mapset_info, "mapsetcode"):
                        my_mapobj = MapSet()
                        my_mapobj.assigndb(my_mapset)

                        larger_mapset = my_mapobj.get_larger_mapset()
                        if larger_mapset is not None:
                            if larger_mapset not in mapset_to_use:
                                mapset_to_use.append(larger_mapset)
                        else:
                            if my_mapset not in mapset_to_use:
                                mapset_to_use.append(my_mapset)
            else:
                mapset_to_use = all_prod_mapsets
            # Loop over existing mapset
            for mapset in mapset_to_use:
                all_mapset_datasets = p.get_subproducts(mapset=mapset)

                # Loop over existing subproducts
                for subproductcode in all_mapset_datasets:
                    # Get info - and ONLY for NOT masked products
                    dataset_info = querydb.get_subproduct(
                        productcode=productcode,
                        version=version,
                        subproductcode=subproductcode,
                        masked=masked)  # -> TRUE means only NOT masked sprods

                    if dataset_info is not None:
                        dataset_dict = functions.row2dict(dataset_info)
                        dataset_dict['mapsetcode'] = mapset

                        logger.debug('Working on {}/{}/{}/{}'.format(
                            productcode, version, mapset, subproductcode))

                        subdir = functions.set_path_sub_directory(
                            productcode, subproductcode,
                            dataset_dict['product_type'], version, mapset)
                        source = data_dir + subdir
                        target = trg_dir + subdir

                        # command = 'lftp -e "mirror -RLe {} {};exit" -u {}:{} {}"" >> {}'.format(source,target,user,psw,url,logfile)
                        command = 'lftp -e "mirror -RLe {} {};exit" -u {}:{} {}"" >> /dev/null'.format(
                            source, target, user, psw, url)
                        logger.debug("Executing %s" % command)
                        spec_logger.info(
                            'Working on mapset/subproduct {}/{} \n'.format(
                                mapset, subproductcode))

                        # return
                        try:
                            status = os.system(command)
                            if status:
                                logger.error("Error in executing %s" % command)
                                spec_logger.error("Error in executing %s" %
                                                  command)
                        except Exception:
                            logger.error('Error in executing command: {}'.format(command))
                            spec_logger.error('Error in executing command: {}'.format(command))
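The mirror step builds a shell command string and runs it with os.system, interpolating the credentials into the command line. A minimal sketch of the same step through subprocess.run (assuming Python 3 and the source, target, user, psw, url and subdir variables from the loop above):

    import subprocess

    # Pass the arguments as a list so no shell is involved and quoting
    # problems in the paths cannot break the command.
    lftp_script = 'mirror -RLe {} {}; exit'.format(source, target)
    result = subprocess.run(['lftp', '-e', lftp_script,
                             '-u', '{}:{}'.format(user, psw), url])
    if result.returncode != 0:
        logger.error('Error in mirroring %s', subdir)
        spec_logger.error('Error in mirroring %s', subdir)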