Example #1
    def restore(self, fname, overwrite=False):
        # Determine the id of the outputfile
        if self.quantity == 'radiance':
            mech = ("mechanism = '" +
                    ", ".join(sorted([m for m in self.mechanism])) + "'")
            wave_ = sorted([w.value for w in self.wavelength])
            wave = ("wavelength = '" + ", ".join([str(w)
                                                  for w in wave_]) + "'")
        else:
            mech = 'mechanism is NULL'
            wave = 'wavelength is NULL'

        with database_connect() as con:
            idnum_ = pd.read_sql(
                f'''SELECT idnum
                                FROM outputfile
                                WHERE filename='{fname}' ''', con)
            oid = idnum_.idnum[0]

            result = pd.read_sql(
                f'''SELECT filename FROM modelimages
                    WHERE out_idnum = {oid} and
                          quantity = '{self.quantity}' and
                          origin = '{self.origin.object}' and
                          dims[1] = {self.dims[0]} and
                          dims[2] = {self.dims[1]} and
                          center[1] = {self.center[0].value} and
                          center[2] = {self.center[1].value} and
                          width[1] = {self.width[0].value} and
                          width[2] = {self.width[1].value} and
                          subobslongitude = {self.subobslongitude.value} and
                          subobslatitude = {self.subobslatitude.value} and
                          {mech} and
                          {wave}''', con)

        if (len(result) == 1) and overwrite:
            if os.path.exists(result.filename[0]):
                os.remove(result.filename[0])
            with database_connect() as con:
                cur = con.cursor()
                cur.execute(
                    '''DELETE FROM modelimages
                               WHERE filename = %s''', (result.filename[0], ))
            image, packets = None, None
        elif len(result) == 1:
            image, packets = pickle.load(open(result.filename[0], 'rb'))
        elif len(result) == 0:
            image, packets = None, None
        else:
            raise RuntimeError('ModelImage.restore',
                               'Should not be able to get here.')

        return image, packets
Example #2
    def insert(self):
        ids = self.search()
        if ids is None:
            if self.type == 'uniform':
                long = [l.value for l in self.longitude]
                lat = [l.value for l in self.latitude]
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO spatdist_uniform (
                                       exobase, longitude, latitude) VALUES (
                                       %s, %s::DOUBLE PRECISION[2],
                                       %s::DOUBLE PRECISION[2])''',
                        (self.exobase, long, lat))
            elif self.type == 'surface map':
                with database_connect() as con:
                    cur = con.cursor()
                    sslon = (None if self.subsolarlon is None else
                             self.subsolarlon.value)
                    cur.execute(
                        '''INSERT INTO spatdist_surfmap (
                                       exobase, mapfile, subsolarlon,
                                       coordinate_system) VALUES (
                                       %s, %s, %s, %s)''',
                        (self.exobase, self.mapfile, sslon,
                         self.coordinate_system))
            elif self.type == 'surface spot':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO spatdist_spot (
                                       exobase, longitude, latitude, sigma) VALUES (
                                       %s, %s, %s, %s)''',
                        (self.exobase, self.longitude.value,
                         self.latitude.value, self.sigma.value))
            elif self.type == 'fitted output':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO spatdist_fittedoutput (
                                   unfit_outid, query)
                                       VALUES (%s, %s)''',
                        (self.unfit_outid, self.query))
            else:
                raise InputError(
                    'SpatialDist.search()',
                    f'SpatialDist.type = {self.type} not allowed.')
            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #3
    def insert(self):
        # check to see if it is already there
        ids = self.search()
        if ids is None:
            if self.type == 'geometry with starttime':
                if self.objects is None:
                    objs = None
                else:
                    objs = [o.object for o in self.objects]

                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO geometry_with_time (
                                       planet, startpoint, objects,
                                       starttime) VALUES (%s, %s, %s, %s)''',
                        (self.planet.object, self.startpoint, objs, self.time))
            elif self.type == 'geometry without starttime':
                if self.objects is None:
                    objs = None
                else:
                    objs = [o.object for o in self.objects]

                subspt = [s.value for s in self.subsolarpoint]

                if self.phi is None:
                    phi = None
                else:
                    phi = [p.value for p in self.phi]

                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO geometry_without_time (
                                       planet, startpoint, objects, phi,
                                       subsolarpt, taa) VALUES (
                                       %s, %s, %s::SSObject[], %s,
                                       %s::DOUBLE PRECISION[2], %s)''',
                        (self.planet.object, self.startpoint, objs, phi,
                         subspt, self.taa.value))
            else:
                raise InputError('Geometry.search()',
                                 f'geometry.type = {self.type} not allowed.')

            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #4
    def search(self):
        params = [
            self.endtime.value, self.species, self.lifetime.value,
            self.outeredge, self.step_size, self.fitted
        ]

        if self.resolution is None:
            resol = 'resolution is NULL'
        else:
            resol = 'resolution = %s'
            params.append(self.resolution)

        query = f'''SELECT idnum
                    FROM options
                    WHERE endtime = %s and
                          species = %s and
                          lifetime = %s and
                          outer_edge = %s and
                          step_size = %s and
                          {resol} and
                          fitted = %s'''

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('Options.search()',
                                   'Duplicates in options table')
Example #5
    def search(self):
        if self.type == 'uniform':
            params = [
                self.exobase,
                [self.longitude[0].value, self.longitude[1].value],
                [self.latitude[0].value, self.latitude[1].value]
            ]
            query = '''SELECT idnum
                       FROM spatdist_uniform
                       WHERE exobase = %s and
                             longitude = %s::DOUBLE PRECISION[2] and
                             latitude = %s::DOUBLE PRECISION[2]'''
        elif self.type == 'surface map':
            params = [self.exobase, self.mapfile]
            if self.subsolarlon is None:
                sslon = 'subsolarlon is NULL'
            else:
                sslon = 'subsolarlon = %s'
                params.append(self.subsolarlon.value)

            params.append(self.coordinate_system)
            query = f'''SELECT idnum
                        FROM spatdist_surfmap
                        WHERE exobase = %s and
                              mapfile = %s and
                              {sslon} and
                              coordinate_system = %s'''
        elif self.type == 'surface spot':
            params = [
                self.exobase, self.longitude.value, self.latitude.value,
                self.sigma.value
            ]
            query = '''SELECT idnum
                       FROM spatdist_spot
                       WHERE exobase = %s and
                             longitude = %s and
                             latitude = %s and
                             sigma = %s'''
        elif self.type == 'fitted output':
            params = [self.unfit_outid, self.query]
            query = '''SELECT idnum
                       FROM spatdist_fittedoutput
                       WHERE unfit_outid = %s and
                             query = %s'''
        else:
            raise InputError('SpatialDist.__init__',
                             f'SpatialDist.type = {self.type} not defined.')

        with database_connect() as con:
            cur = con.cursor()
            # print(cur.mogrify(query, tuple(params)))
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('SpatialDist.search()',
                                   'Duplicates in spatial distribution table')
Example #6
    def insert(self):
        ids = self.search()
        if ids is None:
            if self.type == 'radial':
                assert 0, 'Should not be able to get here.'
            elif self.type == 'isotropic':
                alt = [a.value for a in self.altitude]
                az = [a.value for a in self.azimuth]
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO angdist_isotropic (
                                       altitude, azimuth) VALUES (
                                       %s::DOUBLE PRECISION[2],
                                       %s::DOUBLE PRECISION[2])''', (alt, az))
            else:
                raise InputError(
                    'AngularDist.search()',
                    f'angulardist.type = {self.type} not allowed.')

            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #7
    def search(self):
        if self.type == 'radial':
            return 0
        elif self.type == 'isotropic':
            params = [[a.value for a in self.altitude],
                      [a.value for a in self.azimuth]]
            query = '''SELECT idnum
                       FROM angdist_isotropic
                       WHERE altitude=%s::DOUBLE PRECISION[2] and
                             azimuth=%s::DOUBLE PRECISION[2]'''
        else:
            raise InputError('AngularDist.__init__',
                             f'AngularDist.type = {self.type} not defined.')

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('AngularDist.search()',
                                   'Duplicates in angular distribution table')
Example #8
    def search(self):
        if self.type == 'gaussian':
            params = [self.vprob.value, self.sigma.value]
            query = '''SELECT idnum
                       FROM speeddist_gaussian
                       WHERE vprob = %s and
                             sigma = %s'''
        elif self.type == 'sputtering':
            params = [self.alpha, self.beta, self.U.value]
            query = '''SELECT idnum
                       FROM speeddist_sputtering
                       WHERE alpha = %s and
                             beta = %s and
                             U = %s'''
        elif self.type == 'maxwellian':
            params = [self.temperature.value]
            query = '''SELECT idnum
                       FROM speeddist_maxwellian
                       WHERE temperature = %s'''
        elif self.type == 'flat':
            params = [self.vprob.value, self.delv.value]
            query = '''SELECT idnum
                       FROM speeddist_flat
                       WHERE vprob = %s and
                             delv = %s'''
        elif self.type == 'user defined':
            params = [self.vdistfile]
            query = '''SELECT idnum
                       FROM speeddist_user
                       WHERE vdistfile = %s'''
        elif self.type == 'fitted output':
            params = [self.unfit_outid, self.query]
            query = '''SELECT idnum
                           FROM speeddist_fittedoutput
                           WHERE unfit_outid = %s and
                                 query = %s'''
        else:
            raise InputError('SpeedDist.__init__',
                             f'SpeedDist.type = {self.type} not defined.')

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('SpeedDist.search()',
                                   'Duplicates in speed distribution table')
Example #9
    def insert(self):
        ids = self.search()
        if ids is None:
            if self.sticktype == 'constant':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO surface_int_constant (
                                       stickcoef, accomfactor) VALUES (
                                       %s, %s)''',
                        (self.stickcoef, self.accomfactor))
            elif self.sticktype == 'surface map':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO surface_int_map (
                                       mapfile, accomfactor) VALUES (
                                       %s, %s)''',
                        (self.stick_mapfile, self.accomfactor))
            elif self.sticktype == 'temperature dependent':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO surface_int_tempdependent (
                                       accomfactor, a) VALUES
                                       (%s, %s::DOUBLE PRECISION[3])''',
                        (self.accomfactor, self.A))
            else:
                raise InputError(
                    'SurfaceInteraction.search()',
                    f'surfaceinteraction.sticktype = {self.sticktype} not allowed.'
                )
            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #10
    def save(self, iteration_result):
        '''
        Insert the result of a LOS iteration into the database
        :param iteration_result: LOS result from a single outputfile
        :return: name of saved file
        '''
        if self.quantity == 'radiance':
            mech = ', '.join(sorted([m for m in self.mechanism]))
            wave_ = sorted([w.value for w in self.wavelength])
            wave = ', '.join([str(w) for w in wave_])
        else:
            mech = None
            wave = None

        tempname = f'temp_{str(random.randint(0, 1000000))}'

        if isinstance(iteration_result, FittedIterationResult):
            ufit_id = iteration_result.unfit_outid
        else:
            ufit_id = None

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(f'''INSERT into uvvsmodels (out_idnum, unfit_idnum,
                            quantity, query, dphi, mechanism, wavelength,
                            fitted, filename)
                            values (%s, %s, %s, %s, %s, %s, %s, %s, %s)''',
                        (iteration_result.out_idnum, ufit_id, self.quantity,
                         self.query, self.dphi, mech, wave, self.fitted,
                         tempname))

            # Determine the savefile name
            idnum_ = pd.read_sql(f'''SELECT idnum
                                     FROM uvvsmodels
                                     WHERE filename='{tempname}';''', con)
            assert len(idnum_) == 1
            idnum = int(idnum_.idnum[0])

            savefile = os.path.join(os.path.dirname(iteration_result.outputfile),
                                    f'model.{idnum}.pkl')
            print(f'Saving model result {savefile}')

            cur.execute(f'''UPDATE uvvsmodels
                            SET filename=%s
                            WHERE idnum=%s''', (savefile, idnum))

        with open(savefile, 'wb') as f:
            pickle.dump(iteration_result, f)

        return savefile
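
The save() methods in this example and the next rely on the same two-step trick: a row is first inserted with a throwaway temp_* filename, the database-assigned idnum is read back, and the row is then updated with the final model.{idnum}.pkl path. Below is a minimal, self-contained sketch of that pattern; it uses sqlite3 purely so it runs anywhere, and the table and column names are illustrative, not the nexoclom schema.

# Minimal sketch of the temp-name -> idnum -> rename pattern used above.
# sqlite3 is used only so the sketch runs without a PostgreSQL server; the
# table and column names are illustrative, not the real nexoclom schema.
import random
import sqlite3

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute('CREATE TABLE models (idnum INTEGER PRIMARY KEY, filename TEXT)')

# Step 1: insert a placeholder row so the database assigns an idnum
tempname = f'temp_{random.randint(0, 1000000)}'
cur.execute('INSERT INTO models (filename) VALUES (?)', (tempname,))

# Step 2: read back the idnum that was just assigned
idnum = cur.execute('SELECT idnum FROM models WHERE filename = ?',
                    (tempname,)).fetchone()[0]

# Step 3: rename the row to the final file name built from the idnum
savefile = f'/tmp/model.{idnum}.pkl'
cur.execute('UPDATE models SET filename = ? WHERE idnum = ?', (savefile, idnum))
con.commit()
print(savefile)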
Example #11
    def save(self, data_packets, surface):
        # Insert the model into the database
        # Save is on an outputfile basis
        if self.quantity == 'radiance':
            mech = ', '.join(sorted([m for m in self.mechanism]))
            wave_ = sorted([w.value for w in self.wavelength])
            wave = ', '.join([str(w) for w in wave_])
        else:
            mech = None
            wave = None

        # Save query with all white space removed and lowercase
        tempname = f'temp_{str(random.randint(0, 1000000))}'

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(
                f'''INSERT into uvvsmodels (out_idnum, quantity,
                            query, dphi, mechanism, wavelength,
                            fitted, filename)
                            values (%s, %s, %s, %s, %s, %s, %s, %s)''',
                (data_packets['out_idnum'], self.quantity, self.scdata.query,
                 self.dphi, mech, wave, self.fitted, tempname))

            # Determine the savefile name
            idnum_ = pd.read_sql(
                f'''SELECT idnum
                                     FROM uvvsmodels
                                     WHERE filename='{tempname}';''', con)
            assert len(idnum_) == 1
            idnum = int(idnum_.idnum[0])

            savefile = os.path.join(
                os.path.dirname(data_packets['outputfile']),
                f'model.{idnum}.pkl')

            cur.execute(
                f'''UPDATE uvvsmodels
                            SET filename=%s
                            WHERE idnum=%s''', (savefile, idnum))

        with open(savefile, 'wb') as f:
            pickle.dump(data_packets, f)
        if surface is None:
            savefile_surf = None
        else:
            savefile_surf = self.surfacefile_name(savefile)
            surface.to_parquet(savefile_surf)

        return savefile, savefile_surf
Example #12
    def insert(self):
        ids = self.search()
        if ids is None:
            if self.type == 'gaussian':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_gaussian (
                                       vprob, sigma) VALUES (%s, %s)''',
                        (self.vprob.value, self.sigma.value))
            elif self.type == 'sputtering':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_sputtering (
                                       alpha, beta, U) VALUES (%s, %s, %s)''',
                        (self.alpha, self.beta, self.U.value))
            elif self.type == 'maxwellian':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_maxwellian (
                                       temperature) VALUES (%s)''',
                        (self.temperature.value, ))
            elif self.type == 'flat':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_flat (
                                       vprob, delv) VALUES (%s, %s)''',
                        (self.vprob.value, self.delv.value))
            elif self.type == 'user defined':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_user (
                                       vdistfile) VALUES (%s)''',
                        (self.vdistfile, ))
            elif self.type == 'fitted output':
                with database_connect() as con:
                    cur = con.cursor()
                    cur.execute(
                        '''INSERT INTO speeddist_fittedoutput (
                                   unfit_outid, query)
                                   VALUES (%s, %s)''',
                        (self.unfit_outid, self.query))
            else:
                raise InputError('SpeedDist.search()',
                                 f'speeddist.type = {self.type} not allowed.')

            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #13
    def insert(self):
        ids = self.search()
        if ids is None:
            with database_connect() as con:
                cur = con.cursor()
                cur.execute(
                    '''INSERT INTO forces (
                                   gravity, radpres) VALUES (%s, %s)''',
                    (self.gravity, self.radpres))
            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #14
    def search(self):
        query = f'''SELECT idnum FROM forces
                    WHERE gravity = %s and
                          radpres = %s'''

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, (self.gravity, self.radpres))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('Forces.search()',
                                   'Duplicates in forces table')
Example #15
    def search(self):
        """
        :return: dictionary containing search results:
                 {outputfilename: (modelfile_id, modelfile_name, surface_filename) or None}
        """
        search_results = {}
        for outputfile in self.outputfiles:
            with database_connect() as con:
                # Determine the id of the outputfile
                idnum_ = pd.read_sql(
                    f'''SELECT idnum
                        FROM outputfile
                        WHERE filename='{outputfile}' ''', con)
                oid = idnum_.idnum[0]

                if self.quantity == 'radiance':
                    mech = ("mechanism = '" +
                            ", ".join(sorted([m
                                              for m in self.mechanism])) + "'")
                    wave_ = sorted([w.value for w in self.wavelength])
                    wave = ("wavelength = '" +
                            ", ".join([str(w) for w in wave_]) + "'")
                else:
                    mech = 'mechanism is NULL'
                    wave = 'wavelength is NULL'

                result = pd.read_sql(
                    f'''SELECT idnum, filename FROM uvvsmodels
                        WHERE out_idnum={oid} and
                              quantity = '{self.quantity}' and
                              query = '{self.scdata.query}' and
                              dphi = {self.dphi} and
                              {mech} and
                              {wave} and
                              fitted = {self.fitted}''', con)

                # Should only have one match per outputfile
                assert len(result) <= 1

                if len(result) == 0:
                    search_results[outputfile] = None
                else:
                    surffile = self.surfacefile_name(result.iloc[0, 1])
                    search_results[outputfile] = (result.iloc[0, 0],
                                                  result.iloc[0, 1], surffile)

        return search_results
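
A caller would typically walk the returned dictionary and treat None values as models that still need to be computed. The sketch below is a hedged illustration of that; `los` stands for an already-constructed object that owns this search() (its construction is not shown in these examples).

# Hedged usage sketch: 'los' is assumed to be an existing object exposing the
# search() above, which maps each outputfile to None or to a
# (modelfile_id, modelfile_name, surface_filename) tuple.
for outputfile, match in los.search().items():
    if match is None:
        print(f'{outputfile}: no saved model, needs to be computed')
    else:
        model_id, modelfile, surffile = match
        print(f'{outputfile}: model {model_id} already saved in {modelfile}')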
Example #16
    def search(self):
        """ Search the database for previous model runs with the same inputs.
        See :doc:`searchtolerances` for tolerances used in searches.
        
        **Parameters**
        
        No parameters.
        
        **Returns**

        * A list of database idnums for the matching saved outputs.

        * A list of filenames corresponding to the inputs.

        * Number of packets contained in those saved outputs.

        * Total modeled source rate.
        """
        geo_id = self.geometry.search()
        sint_id = self.surfaceinteraction.search()
        for_id = self.forces.search()
        spat_id = self.spatialdist.search()
        spd_id = self.speeddist.search()
        ang_id = self.angulardist.search()
        opt_id = self.options.search()

        if None in [geo_id, sint_id, for_id, spat_id, spd_id, ang_id, opt_id]:
            return [], [], 0., 0.
        else:
            query = f'''SELECT idnum, filename, npackets, totalsource
                        FROM outputfile
                        WHERE geo_type = '{self.geometry.type}' and
                              geo_id = {geo_id} and
                              sint_type = '{self.surfaceinteraction.sticktype}' and
                              sint_id = {sint_id} and
                              force_id = {for_id} and
                              spatdist_type = '{self.spatialdist.type}' and
                              spatdist_id = {spat_id} and
                              spddist_type = '{self.speeddist.type}' and
                              spddist_id = {spd_id} and
                              angdist_type = '{self.angulardist.type}' and
                              angdist_id = {ang_id} and
                              opt_id = {opt_id}'''
            with database_connect() as con:
                result = pd.read_sql(query, con)

            return (result.idnum.to_list(), result.filename.to_list(),
                    result.npackets.sum(), result.totalsource.sum())
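
The four return values are normally unpacked together, as delete_files() does further down with `idnum, filelist, _, _ = self.search()`. Below is a hedged sketch of caller-side use; `inputs` is assumed to be an already-built object exposing this search().

# Hedged usage sketch: 'inputs' is assumed to be an existing object that
# exposes the search() above.
idnums, filenames, npackets, totalsource = inputs.search()
if filenames:
    print(f'{len(filenames)} saved output(s), {npackets} packets, '
          f'total source = {totalsource}')
else:
    print('No matching outputs found; the model needs to be run.')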
Example #17
    def save(self, fname, image, packets):
        # Determine the id of the outputfile
        idnum = int(os.path.basename(fname).split('.')[0])

        # Insert the image into the database
        if self.quantity == 'radiance':
            mech = ', '.join(sorted([m for m in self.mechanism]))
            wave_ = sorted([w.value for w in self.wavelength])
            wave = ', '.join([str(w) for w in wave_])
        else:
            mech = None
            wave = None

        width = [w.value for w in self.width]
        center = [c.value for c in self.center]

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(
                f'''INSERT into modelimages (out_idnum, quantity,
                                origin, dims, center, width, subobslongitude,
                                subobslatitude, mechanism, wavelength,
                                filename)
                            VALUES (%s, %s, %s, %s::INT[2],
                                    %s::DOUBLE PRECISION[2],
                                    %s::DOUBLE PRECISION[2],
                                    %s, %s, %s, %s, 'temp')''',
                (idnum, self.quantity, self.origin.object, self.dims, center,
                 width, self.subobslongitude.value, self.subobslatitude.value,
                 mech, wave))

            idnum_ = pd.read_sql(
                f'''SELECT idnum
                    FROM modelimages
                    WHERE filename = 'temp';''', con)
            assert len(idnum_) == 1
            idnum = int(idnum_.idnum[0])

            savefile = os.path.join(os.path.dirname(fname),
                                    f'image.{idnum}.pkl')
            with open(savefile, 'wb') as f:
                pickle.dump((image, packets), f)
            cur.execute(
                f'''UPDATE modelimages
                    SET filename=%s
                    WHERE idnum = %s''', (savefile, idnum))
Example #18
    def insert(self):
        ids = self.search()
        if ids is None:
            with database_connect() as con:
                cur = con.cursor()
                cur.execute(
                    '''INSERT into options (endtime, species, lifetime,
                                   outer_edge, step_size, resolution, fitted) VALUES (
                                   %s, %s, %s, %s, %s, %s, %s)''',
                    (self.endtime.value, self.species, self.lifetime.value,
                     self.outeredge, self.step_size, self.resolution,
                     self.fitted))
            ids = self.search()
            assert ids is not None
        else:
            pass

        return ids
Example #19
    def search(self):
        if self.sticktype == 'constant':
            params = [self.stickcoef]
            if self.accomfactor is None:
                afactor = 'accomfactor is NULL'
            else:
                afactor = 'accomfactor = %s'
                params.append(self.accomfactor)

            query = f"""SELECT idnum
                        FROM surface_int_constant
                        WHERE stickcoef = %s and
                              {afactor}"""
        elif self.sticktype == 'temperature dependent':
            params = [self.accomfactor, self.A]
            query = f"""SELECT idnum
                        FROM surface_int_tempdependent
                        WHERE accomfactor = %s and
                              a = %s::DOUBLE PRECISION[3]"""
        elif self.sticktype == 'surface map':
            params = [self.stick_mapfile, self.accomfactor]
            query = f"""SELECT idnum
                        FROM surface_int_map
                        WHERE mapfile = %s and
                              accomfactor = %s"""
        else:
            raise InputError(
                'SurfaceInteraction.search()',
                f'surfaceinteraction.sticktype = {self.sticktype} not allowed.'
            )

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                raise RuntimeError('SurfaceInteraction.search()',
                                   'Duplicates in surface interaction table')
Example #20
    def delete_models(self):
        """Deletes any LOSResult models associated with this data and input
        This may never actually do anything. Overwrite=True will also
        erase the outputfiles (which erases any models that depend on them).
        Unless I put separate outputfile and modelfile delete switches,
        This shouldn't do anything"""

        search_results = self.search()
        if len(search_results) != 0:
            print('Warning: LOSResult.delete_models found something to delete')
            for _, search_result in search_results.items():
                if search_result is not None:
                    idnum, modelfile = search_result
                    with database_connect() as con:
                        cur = con.cursor()
                        cur.execute(
                            f'''DELETE from uvvsmodels
                                       WHERE idnum = %s''', (idnum, ))
                    if os.path.exists(modelfile):
                        os.remove(modelfile)
        else:
            pass
Example #21
def test_database_connect():
    # Test database connection
    with database_connect() as con:
        assert con.autocommit
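
Every example here opens the database through `with database_connect() as con:`, and this test only checks that the connection comes back in autocommit mode. The sketch below is one plausible shape for such a helper, assuming psycopg2 and a made-up database name; it is a guess at the pattern, not the package's actual implementation.

# Guessed-at sketch of a database_connect() helper consistent with the usage
# and the autocommit assertion above. psycopg2 and the connection parameters
# ('modeloutputs', port 5432) are assumptions, not the real configuration.
from contextlib import contextmanager
import psycopg2

@contextmanager
def database_connect(database='modeloutputs', port=5432):
    con = psycopg2.connect(database=database, port=port)
    con.autocommit = True   # matches the assert in the test above
    try:
        yield con
    finally:
        con.close()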
Example #22
    def _spectrum_process(self, spectrum, packets, tree, dist, i, ofile):
        x_sc = spectrum[xcols].values.astype(float)
        bore = spectrum[borecols].values.astype(float)

        dd = 30  # Furthest distance we need to look
        x_far = x_sc + bore * dd
        while np.linalg.norm(x_far) > self.oedge:
            dd -= 0.1
            x_far = x_sc + bore * dd

        t = [0.05]
        while t[-1] < dd:
            t.append(t[-1] + t[-1] * np.sin(self.dphi))
        t = np.array(t)
        Xbore = x_sc[np.newaxis, :] + bore[np.newaxis, :] * t[:, np.newaxis]

        wid = t * np.sin(self.dphi)
        ind = np.concatenate(tree.query_radius(Xbore, wid))
        ilocs = np.unique(ind).astype(int)
        indices = packets.iloc[ilocs].index
        subset = packets.loc[indices]

        xpr = subset[xcols] - x_sc[np.newaxis, :]
        rpr = np.sqrt(xpr['x'] * xpr['x'] + xpr['y'] * xpr['y'] +
                      xpr['z'] * xpr['z'])

        losrad = np.sum(xpr * bore[np.newaxis, :], axis=1)
        inview = rpr < dist

        if np.any(inview):
            used_packets = subset[inview].index.to_list()
            used_packets0 = subset.loc[inview, 'Index']

            Apix = np.pi * (rpr[inview] *
                            np.sin(self.dphi))**2 * (self.unit.to(u.cm))**2
            wtemp = subset.loc[inview, 'weight'] / Apix
            if self.quantity == 'radiance':
                # Determine if any packets are in shadow
                # Projection of packet onto LOS
                # Point along LOS the packet represents
                losrad_ = losrad[inview].values
                hit = (x_sc[np.newaxis, :] +
                       bore[np.newaxis, :] * losrad_[:, np.newaxis])
                rhohit = np.linalg.norm(hit[:, [0, 2]], axis=1)
                out_of_shadow = (rhohit > 1) | (hit[:, 1] < 0)
                wtemp *= out_of_shadow

                rad = wtemp.sum()
                # Save the weight information
                rat = spectrum.radiance / rad if rad > 0 else 0.

                params = []
                for U, w, f0, u0 in zip(
                        used_packets, wtemp.values,
                        packets.loc[used_packets,
                                    'frac0'].values, used_packets0.values):
                    params.append((self.scdata.query, ofile, int(i), int(U), w,
                                   float(f0), int(u0), rat))
                # params = list(zip(used_packets, wtemp.values,
                #                   packets.loc[used_packets, 'frac0'].values.astype('float'),
                #                   used_packets0.values.astype('float')))

                statement = '''INSERT into savedpackets (
                                   query, outputfile, specind, oint,
                                   weight, frac0, index0, ratio) VALUES (
                                   %s, %s, %s, %s, %s::DOUBLE PRECISION,
                                   %s::DOUBLE PRECISION, %s,
                                   %s::DOUBLE PRECISION)'''
                with database_connect() as con:
                    cur = con.cursor()
                    execute_batch(cur, statement, params)
                del params
            else:
                assert False, 'Other quantities not set up.'
        else:
            pass
Example #23
    def determine_source_from_data(self, modnum, weight_method='scaling'):
        modkey = f'model{modnum:02d}'
        maskkey = f'mask{modnum:02d}'
        mask = self.scdata.data[maskkey]

        self.outputfiles = self.scdata.model_info[modkey]['outputfiles']
        surface_source = pd.DataFrame(columns=[
            'outputfile', 'Index', 'longitude', 'latitude', 'velocity'
        ])
        nsteps, endtime = 0, 0
        index0 = {}
        for outputfile in self.outputfiles:
            # Restore the unfit output file
            output = Output.restore(outputfile)
            self.totalsource += output.totalsource
            endtime = output.inputs.options.endtime
            nsteps = output.nsteps

            with database_connect() as con:
                ind0 = pd.read_sql(
                    f'''SELECT DISTINCT index0 FROM savedpackets
                        WHERE query='{self.scdata.query}' and
                              outputfile='{outputfile}';''', con)
            index0[outputfile] = ind0.index0.to_list()
            del output

            # vel_ = np.sqrt(output.X0.vx**2 + output.X0.vy**2 +
            #                output.X0.vz**2) * output.inputs.geometry.planet.radius
            # surface_source = surface_source.append(pd.DataFrame(
            #     {'outputfile': [outputfile for _ in range(len(ind0))],
            #      'Index': ind0,
            #      'longitude': output.X0.loc[ind0, 'longitude'].values,
            #      'latitude': output.X0.loc[ind0, 'latitude'].values,
            #      'velocity': vel_.loc[ind0]}), ignore_index = True)

        if weight_method == 'scaling':
            # Determine the proper weightings
            outputfile_str = self.outputfiles.__str__().replace('[',
                                                                '(').replace(
                                                                    ']', ')')
            statement = f'''UPDATE savedpackets
                            SET scale_factor=%s::DOUBLE PRECISION
                            WHERE query='{self.scdata.query}' and
                                outputfile=%s and
                                index0=%s'''
            with database_connect() as con:
                cur = con.cursor()
                scale_factor = pd.read_sql(
                    f'''SELECT outputfile, index0, AVG(ratio) from savedpackets
                        WHERE query='{self.scdata.query}' and
                              outputfile in {outputfile_str}
                        GROUP BY outputfile, index0''', con)

                scale_factor_ind = scale_factor.set_index(
                    ['outputfile', 'index0'])
                params = list(
                    zip(scale_factor_ind['avg'].values,
                        [ind[0] for ind in scale_factor_ind.index],
                        [ind[1] for ind in scale_factor_ind.index]))
                execute_batch(cur, statement, params)
                # for ind, scfactor in scale_factor_ind.iterrows():
                # cur.execute(f'''UPDATE savedpackets
                #                 SET scale_factor=%s::DOUBLE PRECISION
                #                 WHERE query='{self.scdata.query}' and
                #                       outputfile=%s and
                #                       index0=%s''',
                #             (scfactor, ind[0], ind[1]))

                weight = pd.read_sql(
                    f'''SELECT outputfile, oint, index0, weight*scale_factor fit_weight
                        FROM savedpackets
                        WHERE query='{self.scdata.query}' and
                              outputfile in {outputfile_str};''', con)

            from IPython import embed
            embed()
            import sys
            sys.exit()

            # ind = pd.MultiIndex.from_arrays([weight.outputfile.to_list(),
            #                                  weight.index0.to_list()])
            # scale_factor_ = scale_factor_ind.loc[ind, 'avg'].reset_index()
            # fit_weight = weight.weight * scale_factor_['avg']

            # Determine surface weighting
            # surface_ind = surface_source.set_index(['outputfile', 'Index'])
            # surface_ind.loc[scale_factor_.index, 'weight'] = scale_factor_
            # surface_ind.dropna(inplace=True)
            # surface_source = surface_ind.reset_index()
        elif weight_method == 'leastsq':
            # This method produces too many scale_factors < 0
            assert 0

            # Solve linear equations to get best fit
            temp = self.data_packets.data.set_index('specind')
            temp = temp.loc[mask]
            temp.reset_index(inplace=True)

            W = np.zeros((len(data), len(self.modelfiles),
                          self.data_packets.data.Index0.max() + 1))

            spec_ind = [data.index.get_loc(i) for i in temp.specind.values]
            pack_ind = temp.index.to_list()

            outputfiles = {
                ofile: i
                for i, ofile in enumerate(self.modelfiles.keys())
            }
            oo = [outputfiles[ofile] for ofile in temp.outputfile]
            W[spec_ind, oo, temp.Index0.to_list()] = temp.weight

            W2 = W.squeeze(axis=1) / W.mean()
            # assert np.all(W == W2.reshape(W.shape))
            assert W.shape[1] == 1
            scale_factor_, r, rank, s = lin.lstsq(
                W2, data.loc[:, 'radiance'].values)
            scale_factor_ /= scale_factor_[scale_factor_ > 0].mean()
            self.data_packets.data['scale_factor'] = (
                scale_factor_[self.data_packets.data.Index0.to_list()])
            self.data_packets.data['fit_weight'] = (
                self.data_packets.data.weight *
                self.data_packets.data.scale_factor)

            surface_source['weight'] = scale_factor_[
                surface_source.Index.to_list()]

            scaled = pd.read_pickle('surface_source_scaled.pkl')

            from IPython import embed
            embed()
            import sys
            sys.exit()

            self.data_packets.data['scale_factor'] = (
                scale_factor_[self.data_packets.data['Index0'].to_list()])

            # Determine surface weighting

            surface_source.dropna(inplace=True)
            # surface_source = surface_source[surface_source.weight > 0]
        else:
            raise InputError('LOSResult.determine_source_from_data: '
                             'Not a valid weighting method')

        initial = set(
            zip(self.data_packets.data.outputfile.to_list(),
                self.data_packets.data.Index0.to_list(),
                self.data_packets.data.frac0.to_list(),
                self.data_packets.data.scale_factor.to_list()))
        frac0 = sum(x[2] * x[3] for x in initial)
        self.totalsource = frac0 * nsteps
        mod_rate = self.totalsource / endtime.value
        atoms_per_packet = 1e23 / mod_rate

        spec_group = self.data_packets.data.groupby('specind')
        fitrad_ = spec_group['fit_weight'].sum() * atoms_per_packet

        self.radiance.loc[fitrad_.index] = fitrad_
        self.radiance *= u.R

        # Create a new sourcemap
        source, xx, yy = np.histogram2d(surface_source['longitude'],
                                        surface_source['latitude'],
                                        weights=surface_source['weight'],
                                        range=[[0, 2 * np.pi],
                                               [-np.pi / 2, np.pi / 2]],
                                        bins=(NLONBINS, NLATBINS))
        source = source / np.cos(yy + (yy[1] - yy[0]) / 2.)[np.newaxis, :-1]
        source[:, [0, -1]] = 0

        v_source, v = np.histogram(surface_source['velocity'],
                                   bins=NVELBINS,
                                   range=[0, surface_source['velocity'].max()],
                                   weights=surface_source['weight'])
        v_source /= np.max(v_source)

        # packets available
        packets, _, _ = np.histogram2d(surface_source['longitude'],
                                       surface_source['latitude'],
                                       range=[[0, 2 * np.pi],
                                              [-np.pi / 2, np.pi / 2]],
                                       bins=(NLONBINS, NLATBINS))
        packets = packets / np.cos(yy + (yy[1] - yy[0]) / 2.)[np.newaxis, :-1]
        packets[:, [0, -1]] = 0
        v_packets, _ = np.histogram(
            surface_source['velocity'],
            bins=NVELBINS,
            range=[0, surface_source['velocity'].max()])
        v_packets = v_packets / np.max(v_packets)

        sourcemap = {
            'longitude': xx * u.rad,
            'latitude': yy * u.rad,
            'abundance': source,
            'p_available': packets,
            'velocity': v * u.km / u.s,
            'vdist': v_source,
            'v_available': v_packets,
            'coordinate_system': 'solar-fixed'
        }
        self.sourcemap = sourcemap
Example #24
    def simulate_data_from_inputs(self,
                                  inputs_,
                                  npackets,
                                  overwrite=False,
                                  packs_per_it=None):
        """Given a set of inputs, determine what the spacecraft should see.
        
        Parameters
        ==========
        inputs_
            A nexoclom Input object or the name of an inputs file
        """
        if isinstance(inputs_, str):
            self.inputs = Input(inputs_)
        elif isinstance(inputs_, Input):
            self.inputs = copy.deepcopy(inputs_)
        else:
            raise InputError('nexoclom.LOSResult', 'Problem with the inputs.')

        # TAA needs to match the data
        self.inputs.geometry.taa = self.scdata.taa
        self.unit = u.def_unit('R_' + self.inputs.geometry.planet.object,
                               self.inputs.geometry.planet.radius)

        # If using a planet-fixed source map, need to set subsolarlon
        if ((self.inputs.spatialdist.type == 'surface map') and
            (self.inputs.spatialdist.coordinate_system == 'planet-fixed')):
            self.inputs.spatialdist.subsolarlon = self.scdata.subslong.median(
            ) * u.rad
        else:
            pass

        # Run the model
        self.fitted = False
        self.inputs.run(npackets,
                        packs_per_it=packs_per_it,
                        overwrite=overwrite)
        self.search_for_outputs()

        dist_from_plan = self._data_setup()
        data = self.scdata.data

        for outputfile in self.outputfiles:
            with database_connect() as con:
                ct = pd.read_sql(
                    f'''SELECT count(*) FROM savedpackets
                        WHERE query='{self.scdata.query}' and
                              outputfile='{outputfile}';''', con)
            output = Output.restore(outputfile)
            self.totalsource += output.totalsource
            if ct.loc[0, 'count'] == 0:
                packets = copy.deepcopy(output.X)
                packets['frac0'] = output.X0.loc[packets['Index'],
                                                 'frac'].values
                packets['radvel_sun'] = (
                    packets['vy'] + output.vrplanet.to(self.unit / u.s).value)
                self.oedge = output.inputs.options.outeredge * 2

                # Will base shadow on line of sight, not the packets
                out_of_shadow = np.ones(len(packets))
                self.packet_weighting(packets, out_of_shadow, output.aplanet)

                # This sets limits on regions where packets might be
                tree = self._tree(packets[xcols].values)

                print(f'{data.shape[0]} spectra taken.')
                for i, spectrum in data.iterrows():
                    self._spectrum_process(spectrum, packets, tree,
                                           dist_from_plan[i], i, outputfile)

                    ind = data.index.get_loc(i)
                    if (ind % (len(data) // 10)) == 0:
                        print(f'Completed {ind+1} spectra')
                    else:
                        pass

            del output

        mod_rate = self.totalsource / self.inputs.options.endtime.value
        atoms_per_packet = 1e23 / mod_rate

        outputfile_str = self.outputfiles.__str__().replace('[', '(').replace(
            ']', ')')
        with database_connect() as con:
            rad_ = pd.read_sql(
                f'''SELECT specind, sum(weight) from savedpackets
                    WHERE query='{self.scdata.query}' and
                          outputfile in {outputfile_str}
                    GROUP BY specind''', con)
        self.radiance.loc[
            rad_.specind.to_list()] = rad_['sum'].values * atoms_per_packet
        self.radiance *= u.R
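
A hedged sketch of calling the method documented above: only the arguments shown in its signature are used, `los` stands for an already-constructed object (how it is built is not shown in these examples), and the inputs filename is hypothetical.

# Hedged usage sketch: 'los' is assumed to be an existing object exposing the
# method above; the inputs filename is hypothetical.
los.simulate_data_from_inputs('Na.surface_map.maxwellian.input',
                              npackets=1_000_000,
                              overwrite=False,
                              packs_per_it=100_000)
print(los.radiance)  # filled in by the call above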
Example #25
    def delete_files(self):
        """Delete output files and remove them from the database.

        **Parameters**

        filelist
            List of files to remove. This can be found with Inputs.search()

        **Returns**

        No outputs.

        """
        idnum, filelist, _, _ = self.search()
        with database_connect() as con:
            cur = con.cursor()

            for i, f in zip(idnum, filelist):
                # Remove from database and delete file
                print(f'Deleting outputfile {os.path.basename(f)}')
                cur.execute(
                    '''DELETE FROM outputfile
                               WHERE idnum = %s''', (i, ))
                if os.path.exists(f):
                    os.remove(f)

                # Delete any model images that depend on this output
                cur.execute(
                    '''SELECT idnum, filename FROM modelimages
                               WHERE out_idnum = %s''', (i, ))
                for mid, mfile in cur.fetchall():
                    print(f'Deleting model image {os.path.basename(mfile)}')
                    cur.execute(
                        '''DELETE from modelimages
                                   WHERE idnum = %s''', (mid, ))
                    if os.path.exists(mfile):
                        os.remove(mfile)

                # Delete any uvvsmodels that depend on this output
                cur.execute(
                    '''SELECT idnum, filename FROM uvvsmodels
                               WHERE out_idnum = %s''', (i, ))
                for mid, mfile in cur.fetchall():
                    print(
                        f'Deleting uvvsmodel result {os.path.basename(mfile)}')
                    cur.execute(
                        '''DELETE from uvvsmodels
                                   WHERE idnum = %s''', (mid, ))
                    if os.path.exists(mfile):
                        os.remove(mfile)

                # Remove the fitted uvvsmodels that depend on this output
                uvvsmods = pd.read_sql(
                    f'''SELECT filename FROM uvvsmodels
                        WHERE unfit_idnum={i}''', con)
                for _, uvvsmod in uvvsmods.iterrows():
                    print(f'Deleting fitted uvvsmodel result ' +
                          os.path.basename(uvvsmod.filename))
                    cur.execute(f'''DELETE FROM uvvsmodels
                                    WHERE filename='{uvvsmod.filename}';''')
                    if os.path.exists(uvvsmod.filename):
                        os.remove(uvvsmod.filename)

                # Delete any fitted outputs that depend on this output
                spatdist_id = pd.read_sql(
                    f'''SELECT idnum FROM spatdist_fittedoutput
                        WHERE unfit_outid = {i}''', con)
                for num in spatdist_id.idnum.to_list():
                    # Remove the outputfile
                    fitoutfile = pd.read_sql(
                        f'''SELECT idnum, filename FROM outputfile
                            WHERE spatdist_type = 'fitted output' and
                                  spatdist_id = {num}''', con)

                    for _, row in fitoutfile.iterrows():
                        print(f'Deleting fitted output ' +
                              os.path.basename(row.filename))
                        cur.execute(f'''DELETE FROM outputfile
                                        WHERE filename='{row.filename}';''')
                        if os.path.exists(row.filename):
                            os.remove(row.filename)

                        # Remove the modelimages
                        modims = pd.read_sql(
                            f'''SELECT filename FROM modelimages
                                WHERE out_idnum={row.idnum}''', con)
                        for _, modim in modims.iterrows():
                            print(f'Deleting model image ' +
                                  os.path.basename(modim.filename))
                            cur.execute(f'''DELETE FROM modelimages
                                            WHERE filename='{modim.filename}';'''
                                        )
                            if os.path.exists(modim.filename):
                                os.remove(modim.filename)
Example #26
    def search(self):
        if self.type == 'geometry with starttime':
            params = [self.planet.object, self.startpoint]
            if self.objects is None:
                objects = 'objects is NULL'
            else:
                objects = 'objects = %s'
                params.append([o.object for o in self.objects])

            params.append(self.time)

            query = f'''
                SELECT idnum
                FROM geometry_with_time
                WHERE planet = %s and
                      startpoint = %s and
                      {objects} and
                      starttime = %s'''
        elif self.type == 'geometry without starttime':
            params = [self.planet.object, self.startpoint]
            if self.objects is None:
                objects = 'objects is NULL'
            else:
                objects = 'objects = %s::SSObject[]'
                params.append([o.object for o in self.objects])

            if self.phi is None:
                phi = 'phi is NULL'
            else:
                phi = 'phi = %s'
                params.append([p.value for p in self.phi])

            params.append([s.value for s in self.subsolarpoint])
            params.append(self.taa.value - dtaa / 2.)
            params.append(self.taa.value + dtaa / 2.)

            query = f"""
                SELECT idnum, taa
                FROM geometry_without_time
                WHERE planet = %s and
                      startpoint = %s and
                      {objects} and
                      {phi} and
                      subsolarpt = %s::DOUBLE PRECISION[2] and
                      taa > %s and
                      taa < %s"""
        else:
            raise InputError('geometry.search()',
                             f'geometry.type = {self.type} not allowed.')

        with database_connect() as con:
            cur = con.cursor()
            cur.execute(query, tuple(params))

            if cur.rowcount == 0:
                return None
            elif cur.rowcount == 1:
                return cur.fetchone()[0]
            else:
                results = cur.fetchall()
                diff = [np.abs(row[1] - self.taa.value) for row in results]
                result = [
                    row[0] for row in results
                    if np.abs(row[1] - self.taa.value) == min(diff)
                ]
                if len(result) == 1:
                    return result[0]
                else:
                    raise RuntimeError('geometry.search()',
                                       'Duplicates in geometry table')