Example #1
0
def _insert_record(vname_map, nc_var, record, scalars_handled, count, linked_index):
    """ Inserts data into a netcdf variable

        Parameters:
            vname_map : dict
                A dict extracted from the database containing metadata on this
                particular variable.

            nc_var : NetCDFVariable
                The NetCDF variable object.
                
            record : BUFRRecordEntry
                The BUFR data from the bufr reader.

            scalars_handled : bool
                Defines whether variables that should be considered as
                constants / scalars are handled

            count : int
                Defines in which entry in the NetCDF running variable this
                data should be inserted. This should be a continuously
                increasing variable.

            linked_index : hashable
                Key into vname_map selecting the metadata entry (packability
                flags, type name, dimension length, fill value) for this
                variable.
                
    """

    try:

        # pack data  if possible
        # Packability flags from the DB metadata; stored as strings/numbers,
        # hence the int() coercion.  Both set -> record collapses to a single
        # value; only 1dim -> a single row; only 2dim -> a single column entry
        # (see the branches below).
        packable_1dim = int(vname_map[linked_index]['packable_1dim'])
        packable_2dim = int(vname_map[linked_index]['packable_2dim'])

        # Only whitelisted type names are accepted; anything else is logged
        # and the record silently skipped.
        var_type = vname_map[linked_index]['var_type']
        if var_type not in ['int', 'float', 'str', 
                'double', 'long']:
            logger.error("No valid type defined")
            return

        # Handle 32/64 numpy conversion
        if 'int' in var_type and not packable_2dim:
            var_type = 'int32'

        try:
            #
            # Handle cases where the data can be packed into a single value or
            # a single row or single column

            # Data packs to a single value
            if packable_2dim and packable_1dim:
                # Scalars are written once, at index 0; skip on later passes.
                if not scalars_handled:
                    data = bufr.pack_record(record)
                    try:
                        # eval() maps the whitelisted type-name string to the
                        # constructor.  NOTE(review): 'double' is not a Python
                        # builtin -- presumably bound elsewhere in this
                        # module; verify it resolves at eval() time.
                        nc_var[ 0 ] = eval(var_type)(data)
                    except OverflowError, overflow_error:  # Python 2 syntax
                        logger.exception("Unable to convert "+\
                                "value for %s in %s" %\
                                ( data, vname_map[linked_index]['netcdf_name']))
                        # Fall back to the variable's fill value on overflow.
                        nc_var[ 0 ] = vname_map[linked_index]\
                                ['netcdf__FillValue']

                return
            # Data packs to a single row
            elif packable_1dim:
                if not scalars_handled:
                    size = vname_map[ linked_index ]\
                            [ 'netcdf_dimension_length' ]
                    data = record.data

                    # Pad short records with the fill value; a record longer
                    # than the declared dimension is a hard error.
                    if data.shape[ 0 ] < size:
                        fillvalue = vname_map[ linked_index ]\
                                [ 'netcdf__FillValue' ]
                        data = bufr.pad_record(data, size, fillvalue)
                    elif data.shape[0] > size:
                        raise BUFR2NetCDFError("Size mismatch netcdf "+\
                                "variable expected size is"+\
                                " %d, data size is %d" % (size, 
                                    data.shape[0]) )
                    nc_var[:] = data
                return
            # Data packes to a single column
            elif packable_2dim:
                data = bufr.pack_record(record)
                try:
                    nc_var[ count ] = eval(var_type)(data)
                except OverflowError, overflow_error:  # Python 2 syntax
                    logger.exception("Unable to convert value for %s in %s" %\
                            ( data, vname_map[linked_index]['netcdf_name']))
                    # Overflow -> store the fill value for this row instead.
                    nc_var[ count ] = vname_map[linked_index]\
                            ['netcdf__FillValue']
Example #2
0
                            ['netcdf__FillValue']
                return

        except bufr.RecordPackError, pack_error:
            logger.exception("Unable to pack data for %s" %\
                    ( vname_map[linked_index]['netcdf_name'], ))

        
        
        # Handle data with varying lengths, padd data to fit size
        size = vname_map[ linked_index ][ 'netcdf_dimension_length' ]
        data = record.data
        
        if data.shape[ 0 ] < size:
            fillvalue = vname_map[ linked_index ][ 'netcdf__FillValue' ]
            data = bufr.pad_record(data, size, fillvalue)
        elif data.shape[0] > size:
            raise BUFR2NetCDFError("Size mismatch netcdf "+\
                    "variable expected size is %d, data size is %d" %\
                    (size, data.shape[0]) )
        
        nc_var[ count, : ] = data.astype(var_type)

    except ValueError, val_err:
        logger.exception("Unable to insert records %s" % (val_err, ))

def bufr2netcdf(instr_name, bufr_fn, nc_fn, dburl=None):
    """ Does the actual work in transforming the file

        Parameters:
            instr_name : str
                Instrument name -- presumably selects the variable-metadata
                set in the database; verify against callers.
            bufr_fn : str
                Path of the input BUFR file, opened via bufr.BUFRFile.
            nc_fn : str
                Path of the NetCDF output file -- TODO confirm.
            dburl : str or None
                Database URL; None presumably selects a default connection
                -- verify.
    """

    # Create file object and connect to database
    bfr = bufr.BUFRFile(bufr_fn)
Example #3
0
                            ( data, vname_map[linked_index]['netcdf_name']))
                    nc_var[ count ] = vname_map[linked_index]\
                            ['netcdf__FillValue']
                return

        except bufr.RecordPackError, pack_error:
            logger.exception("Unable to pack data for %s" %\
                    ( vname_map[linked_index]['netcdf_name'], ))

        # Handle data with varying lengths, padd data to fit size
        size = vname_map[linked_index]['netcdf_dimension_length']
        data = record.data

        if data.shape[0] < size:
            fillvalue = vname_map[linked_index]['netcdf__FillValue']
            data = bufr.pad_record(data, size, fillvalue)
        elif data.shape[0] > size:
            raise BUFR2NetCDFError("Size mismatch netcdf "+\
                    "variable expected size is %d, data size is %d" %\
                    (size, data.shape[0]) )

        nc_var[count, :] = data.astype(var_type)

    except ValueError, val_err:
        logger.exception("Unable to insert records %s" % (val_err, ))


def bufr2netcdf(instr_name, bufr_fn, nc_fn, dburl=None):
    """ Does the actual work in transforming the file

        Parameters:
            instr_name : str
                Instrument name -- presumably selects the variable-metadata
                set in the database; verify against callers.
            bufr_fn : str
                Path of the input BUFR file.
            nc_fn : str
                Path of the NetCDF output file -- TODO confirm.
            dburl : str or None
                Database URL; None presumably selects a default -- verify.
    """

    # Create file object and connect to database
Example #4
0
def _insert_record(vname_map, nc_var, record, scalars_handled, count,
                   linked_index):
    """ Inserts data into a netcdf variable

        Parameters:
            vname_map : dict
                A dict extracted from the database containing metadata on this
                particular variable.

            nc_var : NetCDFVariable
                The NetCDF variable object.
                
            record : BUFRRecordEntry
                The BUFR data from the bufr reader.

            scalars_handled : bool
                Defines whether variables that should be considered as
                constants / scalars are handled

            count : int
                Defines in which entry in the NetCDF running variable this
                data should be inserted. This should be a continuously
                increasing variable.

            linked_index : hashable
                Key into vname_map selecting the metadata entry (packability
                flags, type name, dimension length, fill value) for this
                variable.
                
    """

    try:

        # pack data  if possible
        # Packability flags from the DB metadata; stored as strings/numbers,
        # hence the int() coercion.  Both set -> record collapses to a single
        # value; only 1dim -> a single row; only 2dim -> a single column entry
        # (see the branches below).
        packable_1dim = int(vname_map[linked_index]['packable_1dim'])
        packable_2dim = int(vname_map[linked_index]['packable_2dim'])

        # Only whitelisted type names are accepted; anything else is logged
        # and the record silently skipped.
        var_type = vname_map[linked_index]['var_type']
        if var_type not in ['int', 'float', 'str', 'double', 'long']:
            logger.error("No valid type defined")
            return

        # Handle 32/64 numpy conversion
        if 'int' in var_type and not packable_2dim:
            var_type = 'int32'

        try:
            #
            # Handle cases where the data can be packed into a single value or
            # a single row or single column

            # Data packs to a single value
            if packable_2dim and packable_1dim:
                # Scalars are written once, at index 0; skip on later passes.
                if not scalars_handled:
                    data = bufr.pack_record(record)
                    try:
                        # eval() maps the whitelisted type-name string to the
                        # constructor.  NOTE(review): 'double' is not a Python
                        # builtin -- presumably bound elsewhere in this
                        # module; verify it resolves at eval() time.
                        nc_var[0] = eval(var_type)(data)
                    except OverflowError, overflow_error:  # Python 2 syntax
                        logger.exception("Unable to convert "+\
                                "value for %s in %s" %\
                                ( data, vname_map[linked_index]['netcdf_name']))
                        # Fall back to the variable's fill value on overflow.
                        nc_var[ 0 ] = vname_map[linked_index]\
                                ['netcdf__FillValue']

                return
            # Data packs to a single row
            elif packable_1dim:
                if not scalars_handled:
                    size = vname_map[ linked_index ]\
                            [ 'netcdf_dimension_length' ]
                    data = record.data

                    # Pad short records with the fill value; a record longer
                    # than the declared dimension is a hard error.
                    if data.shape[0] < size:
                        fillvalue = vname_map[ linked_index ]\
                                [ 'netcdf__FillValue' ]
                        data = bufr.pad_record(data, size, fillvalue)
                    elif data.shape[0] > size:
                        raise BUFR2NetCDFError("Size mismatch netcdf "+\
                                "variable expected size is"+\
                                " %d, data size is %d" % (size,
                                    data.shape[0]) )
                    nc_var[:] = data
                return
            # Data packes to a single column
            elif packable_2dim:
                data = bufr.pack_record(record)
                try:
                    nc_var[count] = eval(var_type)(data)
                except OverflowError, overflow_error:  # Python 2 syntax
                    logger.exception("Unable to convert value for %s in %s" %\
                            ( data, vname_map[linked_index]['netcdf_name']))
                    # Overflow -> store the fill value for this row instead.
                    nc_var[ count ] = vname_map[linked_index]\
                            ['netcdf__FillValue']
def _insert_record(vname_map, nc_var, record, scalars_handled, count):
    """ Insert record in netcdf variables

        Parameters:
            vname_map : dict
                Per-variable metadata keyed by record.index (packability
                flags, type name, dimension length, fill value).
            nc_var : NetCDF variable object being written to.
            record : BUFR record entry; provides .index and .data.
            scalars_handled : bool
                When true, scalar/constant variables are skipped here
                (presumably already written on an earlier pass -- verify).
            count : int
                Row index in the running NetCDF dimension for this record.
    """

    try:

        # pack data  if possible
        # Packability flags from the DB metadata; stored as strings/numbers,
        # hence the int() coercion.
        packable_1dim = int(vname_map[record.index]['packable_1dim'])
        packable_2dim = int(vname_map[record.index]['packable_2dim'])

        # Only whitelisted type names are accepted; anything else is reported
        # and the record silently skipped.
        var_type = vname_map[record.index]['var_type']
        if var_type not in ['int', 'float', 'str', 
                'double', 'long']:
            print "no valid type defined"
            return

        # Handle 32/64 numpy conversion
        if 'int' in var_type and not packable_2dim:
            var_type = 'int32'

        try:
            # Both flags set: record collapses to a single scalar value.
            if packable_2dim and packable_1dim:
                # Scalars are written once, at index 0; skip on later passes.
                if not scalars_handled:
                    data = bufr.pack_record(record)
                    try:
                        # eval() maps the whitelisted type-name string to the
                        # constructor.  NOTE(review): 'double' is not a Python
                        # builtin -- verify it resolves at eval() time.
                        nc_var[ 0 ] = eval(var_type)(data)
                    except OverflowError, overflow_error:  # Python 2 syntax
                        # Debug output to stdout, then store the fill value.
                        traceback.print_exc(file=sys.stdout)
                        print data
                        print nc_var
                        nc_var[ 0 ] = vname_map[record.index]\
                                ['netcdf__FillValue']
                return

            # Only 1dim: record packs to a single fixed-length row.
            elif packable_1dim:
                if not scalars_handled:
                    size = vname_map[ record.index ]\
                            [ 'netcdf_dimension_length' ]
                    data = record.data
                    
                    # Pad short records with the fill value; a record longer
                    # than the declared dimension is a hard error.
                    if data.shape[ 0 ] < size:
                        fillvalue = vname_map[ record.index ]\
                                [ 'netcdf__FillValue' ]
                        data = bufr.pad_record(data, size, fillvalue)
                    elif data.shape[0] > size:
                        raise BUFR2NetCDFError("Size mismatch netcdf variable"+\
                                " expected size is %d, data size is %d" %\
                    (size, data.shape[0]) )
                    nc_var[:] = data
                return

            # Only 2dim: record packs to a single entry in the running column.
            elif packable_2dim:
                data = bufr.pack_record(record)
                try:
                    nc_var[ count ] = eval(var_type)(data)
                except OverflowError, overflow_error:  # Python 2 syntax
                    # Debug output to stdout, then store the fill value.
                    traceback.print_exc(file=sys.stdout)
                    print data
                    print nc_var
                    nc_var[ count ] = vname_map[record.index]\
                            ['netcdf__FillValue']