def write_catalog(self,
                      obsHistID,
                      out_dir=None,
                      fov=2,
                      status_dir=None,
                      pickup_file=None,
                      skip_tarball=False,
                      region_bounds=None):
        """
        Write the instance catalog for the specified obsHistID.

        Parameters
        ----------
        obsHistID: int
            ID of the desired visit.
        out_dir: str [None]
            Parent output directory.  It will be created if it doesn't
            already exist.  The InstanceCatalog files are written to the
            subdirectory 'out_dir/%.8d' % obsHistID.
        fov: float [2.]
            Field-of-view angular radius in degrees.  2 degrees will cover
            the LSST focal plane.
        status_dir: str [None]
            The directory in which to write the log file recording this job's
            progress.
        pickup_file: str [None]
            The path to an aborted log file (the file written to status_dir).
            This job will resume where that one left off, only simulating
            sub-catalogs that did not complete.
        skip_tarball: bool [False]
            Flag to skip making a tarball out of the instance catalog folder.
        region_bounds: (float, float, float, float) [None]
            Additional bounds on ra, dec to apply, specified by
            `(ra_min, ra_max, dec_min, dec_max)`.  If None, then no
            additional selection will be applied.
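
        Returns
        -------
        str or None
            The path to the status (log) file if status_dir was given,
            otherwise None.

        Examples
        --------
        Illustrative sketch only; assumes writer is an already-configured
        instance of this class, and the visit ID, directories, and region
        bounds below are placeholders:

            status_file = writer.write_catalog(230, out_dir='instcats',
                                               fov=2.0, status_dir='logs',
                                               region_bounds=(52.0, 58.0,
                                                              -31.0, -26.0))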
        """

        print('process %d doing %d' % (os.getpid(), obsHistID))

        if out_dir is None:
            raise RuntimeError("must specify out_dir")

        full_out_dir = os.path.join(out_dir, '%.8d' % obsHistID)
        tar_name = os.path.join(out_dir, '%.8d.tar' % obsHistID)

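        # If resuming an aborted run, scan that job's log and switch off any
        # object types whose sub-catalogs it already reported writing.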
        if pickup_file is not None and os.path.isfile(pickup_file):
            with open(pickup_file, 'r') as in_file:
                for line in in_file:
                    if 'wrote star' in line:
                        self.do_obj_type['stars'] = False
                    if 'wrote knot' in line:
                        self.do_obj_type['knots'] = False
                    if 'wrote bulge' in line:
                        self.do_obj_type['bulges'] = False
                    if 'wrote disk' in line:
                        self.do_obj_type['disks'] = False
                    if 'wrote galaxy catalogs with sprinkling' in line:
                        self.do_obj_type['sprinkled'] = False
                    if 'wrote lensing host' in line:
                        self.do_obj_type['hosts'] = False
                    if 'wrote SNe' in line:
                        self.do_obj_type['sne'] = False
                    if 'wrote agn' in line:
                        self.do_obj_type['agn'] = False

        if not os.path.exists(full_out_dir):
            os.makedirs(full_out_dir)

        has_status_file = False
        if status_dir is not None:
            if not os.path.exists(status_dir):
                os.makedirs(status_dir)
            status_file = os.path.join(status_dir,
                                       'job_log_%.8d.txt' % obsHistID)
            if os.path.exists(status_file):
                os.unlink(status_file)
            has_status_file = True

            with open(status_file, 'a') as out_file:
                out_file.write('writing %d\n' % (obsHistID))
                for kk in self.config_dict:
                    out_file.write('%s: %s\n' % (kk, self.config_dict[kk]))

        obs_md = get_obs_md(self.obs_gen, obsHistID, fov, dither=self.dither)

        if obs_md is None:
            return

        if region_bounds is not None:
            obs_md.radec_bounds = region_bounds

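        # Only simulate extragalactic variability for the band(s) observed
        # in this visit.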
        ExtraGalacticVariabilityModels.filters_to_simulate.clear()
        ExtraGalacticVariabilityModels.filters_to_simulate.extend(
            obs_md.bandpass)

        if has_status_file:
            with open(status_file, 'a') as out_file:
                out_file.write('got obs_md in %e hours\n' %
                               ((time.time() - self.t_start) / 3600.0))

        # Create the directory to which the GLSN spectra will be written.
        glsn_spectra_dir = str(os.path.join(full_out_dir, 'Dynamic'))
        os.makedirs(glsn_spectra_dir, exist_ok=True)

        if HAS_TWINKLES:
            # Route the GLSN spectrum files into the 'Dynamic' subdirectory.
            twinkles_spec_map.subdir_map['(^specFileGLSN)'] = 'Dynamic'

        phosim_cat_name = 'phosim_cat_%d.txt' % obsHistID
        star_name = 'star_cat_%d.txt' % obsHistID
        bright_star_name = 'bright_stars_%d.txt' % obsHistID
        gal_name = 'gal_cat_%d.txt' % obsHistID
        knots_name = 'knots_cat_%d.txt' % obsHistID
        # keep track of all of the object InstanceCatalogs that have been
        # written so that they can be listed with 'includeobj' entries in
        # the PhoSim catalog header
        written_catalog_names = []
        sprinkled_host_name = 'spr_hosts_%d.txt' % obsHistID

        if self.do_obj_type['stars']:
            star_cat = self.instcats.StarInstCat(self.star_db,
                                                 obs_metadata=obs_md)
            star_cat.min_mag = self.min_mag
            star_cat.photParams = self.phot_params
            star_cat.lsstBandpassDict = self.bp_dict
            star_cat.disable_proper_motion = not self.proper_motion

            bright_cat \
                = self.instcats.BrightStarInstCat(self.star_db, obs_metadata=obs_md,
                                                  cannot_be_null=['isBright'])
            bright_cat.min_mag = self.min_mag
            bright_cat.photParams = self.phot_params
            bright_cat.lsstBandpassDict = self.bp_dict

            cat_dict = {
                os.path.join(full_out_dir, star_name): star_cat,
                os.path.join(full_out_dir, bright_star_name): bright_cat
            }
            parallelCatalogWriter(cat_dict,
                                  chunk_size=50000,
                                  write_header=False)
            written_catalog_names.append(star_name)

            if has_status_file:
                with open(status_file, 'a') as out_file:
                    duration = (time.time() - self.t_start) / 3600.0
                    out_file.write('%d wrote star catalog after %.3e hrs\n' %
                                   (obsHistID, duration))

        if 'knots' in self.descqa_catalog and self.do_obj_type['knots']:
            knots_db = knotsDESCQAObject(self.descqa_catalog)
            knots_db.field_ra = self.protoDC2_ra
            knots_db.field_dec = self.protoDC2_dec
            cat = self.instcats.DESCQACat(knots_db,
                                          obs_metadata=obs_md,
                                          cannot_be_null=['hasKnots'])
            cat.sed_lookup_dir = self.sed_lookup_dir
            cat.photParams = self.phot_params
            cat.lsstBandpassDict = self.bp_dict
            cat.write_catalog(os.path.join(full_out_dir, knots_name),
                              chunk_size=5000,
                              write_header=False)
            written_catalog_names.append(knots_name)
            del cat
            del knots_db
            if has_status_file:
                with open(status_file, 'a') as out_file:
                    duration = (time.time() - self.t_start) / 3600.0
                    out_file.write('%d wrote knots catalog after %.3e hrs\n' %
                                   (obsHistID, duration))
        elif self.do_obj_type['knots']:
            # Creating empty knots component
            subprocess.check_call('cd %(full_out_dir)s; touch %(knots_name)s' %
                                  locals(),
                                  shell=True)

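        # Without the sprinkler, the bulge, disk, and AGN components are
        # written as separate catalogs; otherwise the Twinkles compound
        # catalog below writes them with the lensed systems sprinkled in.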
        if self.sprinkler is False:

            if self.do_obj_type['bulges']:
                bulge_db = bulgeDESCQAObject(self.descqa_catalog)
                bulge_db.field_ra = self.protoDC2_ra
                bulge_db.field_dec = self.protoDC2_dec
                cat = self.instcats.DESCQACat(
                    bulge_db,
                    obs_metadata=obs_md,
                    cannot_be_null=['hasBulge', 'magNorm'])
                cat_name = 'bulge_' + gal_name
                cat.sed_lookup_dir = self.sed_lookup_dir
                cat.lsstBandpassDict = self.bp_dict
                cat.photParams = self.phot_params
                cat.write_catalog(os.path.join(full_out_dir, cat_name),
                                  chunk_size=5000,
                                  write_header=False)
                written_catalog_names.append(cat_name)
                del cat
                del bulge_db

                if has_status_file:
                    with open(status_file, 'a') as out_file:
                        duration = (time.time() - self.t_start) / 3600.0
                        out_file.write(
                            '%d wrote bulge catalog after %.3e hrs\n' %
                            (obsHistID, duration))

            if self.do_obj_type['disks']:
                disk_db = diskDESCQAObject(self.descqa_catalog)
                disk_db.field_ra = self.protoDC2_ra
                disk_db.field_dec = self.protoDC2_dec
                cat = self.instcats.DESCQACat(
                    disk_db,
                    obs_metadata=obs_md,
                    cannot_be_null=['hasDisk', 'magNorm'])
                cat_name = 'disk_' + gal_name
                cat.sed_lookup_dir = self.sed_lookup_dir
                cat.lsstBandpassDict = self.bp_dict
                cat.photParams = self.phot_params
                cat.write_catalog(os.path.join(full_out_dir, cat_name),
                                  chunk_size=5000,
                                  write_header=False)
                written_catalog_names.append(cat_name)
                del cat
                del disk_db

                if has_status_file:
                    with open(status_file, 'a') as out_file:
                        duration = (time.time() - self.t_start) / 3600.0
                        out_file.write(
                            '%d wrote disk catalog after %.3e hrs\n' %
                            (obsHistID, duration))
            if self.do_obj_type['agn']:
                agn_db = agnDESCQAObject(self.descqa_catalog)
                agn_db._do_prefiltering = True
                agn_db.field_ra = self.protoDC2_ra
                agn_db.field_dec = self.protoDC2_dec
                agn_db.agn_params_db = self.agn_db_name
                cat = self.instcats.DESCQACat_Agn(agn_db, obs_metadata=obs_md)
                cat._agn_threads = self._agn_threads
                cat.lsstBandpassDict = self.bp_dict
                cat.photParams = self.phot_params
                cat_name = 'agn_' + gal_name
                cat.write_catalog(os.path.join(full_out_dir, cat_name),
                                  chunk_size=5000,
                                  write_header=False)
                written_catalog_names.append(cat_name)
                del cat
                del agn_db

                if has_status_file:
                    with open(status_file, 'a') as out_file:
                        duration = (time.time() - self.t_start) / 3600.0
                        out_file.write(
                            '%d wrote agn catalog after %.3e hrs\n' %
                            (obsHistID, duration))
        else:

            if not HAS_TWINKLES:
                raise RuntimeError(
                    "Cannot do_sprinkled; you have not imported "
                    "the Twinkles modules in sims_GCRCatSimInterface")

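            # SubCatalogMixin sub-classes write each component of the
            # compound catalog to a separate file whose name is
            # subcat_prefix + the parent catalog's file name.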
            class SprinkledBulgeCat(SubCatalogMixin,
                                    self.instcats.DESCQACat_Bulge):
                subcat_prefix = 'bulge_'

                # must set catalog_type so that the InstanceCatalog registry
                # treats each instantiation of these sprinkled classes as a
                # unique class (needed when generating InstanceCatalogs for
                # multiple obsHistIDs in one process)
                catalog_type = 'sprinkled_bulge_%d' % obs_md.OpsimMetaData[
                    'obsHistID']

            class SprinkledDiskCat(SubCatalogMixin,
                                   self.instcats.DESCQACat_Disk):
                subcat_prefix = 'disk_'
                catalog_type = 'sprinkled_disk_%d' % obs_md.OpsimMetaData[
                    'obsHistID']

            class SprinkledAgnCat(SubCatalogMixin,
                                  self.instcats.DESCQACat_Twinkles):
                subcat_prefix = 'agn_'
                catalog_type = 'sprinkled_agn_%d' % obs_md.OpsimMetaData[
                    'obsHistID']
                _agn_threads = self._agn_threads

            if self.do_obj_type['sprinkled']:
                self.compoundGalICList = [
                    SprinkledBulgeCat, SprinkledDiskCat, SprinkledAgnCat
                ]

                self.compoundGalDBList = [
                    bulgeDESCQAObject, diskDESCQAObject, agnDESCQAObject
                ]

                for db_class in self.compoundGalDBList:
                    db_class.yaml_file_name = self.descqa_catalog

                gal_cat = twinklesDESCQACompoundObject(
                    self.compoundGalICList,
                    self.compoundGalDBList,
                    obs_metadata=obs_md,
                    compoundDBclass=sprinklerDESCQACompoundObject,
                    field_ra=self.protoDC2_ra,
                    field_dec=self.protoDC2_dec,
                    agn_params_db=self.agn_db_name)

                gal_cat.sed_lookup_dir = self.sed_lookup_dir
                gal_cat.filter_on_healpix = True
                gal_cat.use_spec_map = twinkles_spec_map
                gal_cat.sed_dir = glsn_spectra_dir
                gal_cat.photParams = self.phot_params
                gal_cat.lsstBandpassDict = self.bp_dict

                written_catalog_names.append('bulge_' + gal_name)
                written_catalog_names.append('disk_' + gal_name)
                written_catalog_names.append('agn_' + gal_name)
                gal_cat.write_catalog(os.path.join(full_out_dir, gal_name),
                                      chunk_size=5000,
                                      write_header=False)
                if has_status_file:
                    with open(status_file, 'a') as out_file:
                        duration = (time.time() - self.t_start) / 3600.0
                        out_file.write(
                            '%d wrote galaxy catalogs with sprinkling after %.3e hrs\n'
                            % (obsHistID, duration))

            if self.do_obj_type['hosts']:
                host_cat = hostImage(obs_md.pointingRA, obs_md.pointingDec,
                                     fov)
                host_cat.write_host_cat(
                    os.path.join(self.host_image_dir, 'agn_lensed_bulges'),
                    os.path.join(self.host_data_dir,
                                 'cosmoDC2_v1.1.4_bulge_agn_host.csv'),
                    os.path.join(full_out_dir, sprinkled_host_name))
                host_cat.write_host_cat(
                    os.path.join(self.host_image_dir, 'agn_lensed_disks'),
                    os.path.join(self.host_data_dir,
                                 'cosmoDC2_v1.1.4_disk_agn_host.csv'),
                    os.path.join(full_out_dir, sprinkled_host_name),
                    append=True)
                host_cat.write_host_cat(
                    os.path.join(self.host_image_dir, 'sne_lensed_bulges'),
                    os.path.join(self.host_data_dir,
                                 'cosmoDC2_v1.1.4_bulge_sne_host.csv'),
                    os.path.join(full_out_dir, sprinkled_host_name),
                    append=True)
                host_cat.write_host_cat(
                    os.path.join(self.host_image_dir, 'sne_lensed_disks'),
                    os.path.join(self.host_data_dir,
                                 'cosmoDC2_v1.1.4_disk_sne_host.csv'),
                    os.path.join(full_out_dir, sprinkled_host_name),
                    append=True)

                written_catalog_names.append(sprinkled_host_name)

                if has_status_file:
                    with open(status_file, 'a') as out_file:
                        duration = (time.time() - self.t_start) / 3600.0
                        out_file.write(
                            '%d wrote lensing host catalog after %.3e hrs\n' %
                            (obsHistID, duration))

        # SN instance catalogs
        if self.sn_db_name is not None and self.do_obj_type['sne']:
            phosimcatalog = snphosimcat(self.sn_db_name,
                                        obs_metadata=obs_md,
                                        objectIDtype=42,
                                        sedRootDir=full_out_dir)

            phosimcatalog.photParams = self.phot_params
            phosimcatalog.lsstBandpassDict = self.bp_dict

            snOutFile = 'sne_cat_{}.txt'.format(obsHistID)
            phosimcatalog.write_catalog(os.path.join(full_out_dir, snOutFile),
                                        chunk_size=5000,
                                        write_header=False)

            written_catalog_names.append(snOutFile)

            if has_status_file:
                with open(status_file, 'a') as out_file:
                    duration = (time.time() - self.t_start) / 3600.0
                    out_file.write('%d wrote SNe catalog after %.3e hrs\n' %
                                   (obsHistID, duration))

        make_instcat_header(self.star_db,
                            obs_md,
                            os.path.join(full_out_dir, phosim_cat_name),
                            object_catalogs=written_catalog_names)

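        # The galaxy components are written to 'bulge_'/'disk_'/'agn_'
        # prefixed files, so any un-prefixed gal_cat file left behind by the
        # compound catalog should be empty; verify that and remove it.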
        if os.path.exists(os.path.join(full_out_dir, gal_name)):
            full_name = os.path.join(full_out_dir, gal_name)
            with open(full_name, 'r') as in_file:
                gal_lines = in_file.readlines()
                if len(gal_lines) > 0:
                    raise RuntimeError(
                        "%d lines in\n%s\nThat file should be empty" %
                        (len(gal_lines), full_name))
            os.unlink(full_name)

        # gzip the object files.
        gzip_process_list = []
        for orig_name in written_catalog_names:
            full_name = os.path.join(full_out_dir, orig_name)
            if not os.path.exists(full_name):
                continue
            p = subprocess.Popen(args=['gzip', '-f', full_name])
            gzip_process_list.append(p)

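            # Throttle: once gzip_threads processes are in flight, wait for
            # them to finish before launching more.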
            if len(gzip_process_list) >= self.gzip_threads:
                for p in gzip_process_list:
                    p.wait()
                gzip_process_list = []

        for p in gzip_process_list:
            p.wait()

        if not skip_tarball:
            if has_status_file:
                with open(status_file, 'a') as out_file:
                    out_file.write("%d tarring\n" % obsHistID)
            # Tar up the per-visit directory, then remove the original files.
            p = subprocess.Popen(args=['tar', '-C', out_dir, '-cf', tar_name,
                                       '%.8d' % obsHistID])
            p.wait()
            p = subprocess.Popen(args=['rm', '-rf', full_out_dir])
            p.wait()

        if has_status_file:
            with open(status_file, 'a') as out_file:
                duration = (time.time() - self.t_start) / 3600.0
                out_file.write('%d all done -- took %.3e hrs\n' %
                               (obsHistID, duration))

        print("all done with %d" % obsHistID)
        if has_status_file:
            return status_file
        return None
    def write_catalog(self, obsHistID, out_dir='.', fov=2):
        """
        Write the instance catalog for the specified obsHistID.

        Parameters
        ----------
        obsHistID: int
            ID of the desired visit.
        out_dir: str ['.']
            Output directory.  It will be created if it doesn't already exist.
        fov: float [2.]
            Field-of-view angular radius in degrees.  2 degrees will cover
            the LSST focal plane.
        """
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)

        obs_md = get_obs_md(self.obs_gen, obsHistID, fov, dither=self.dither)
        # Add directory for writing the GLSN spectra to
        glsn_spectra_dir = str(os.path.join(out_dir, 'Dynamic'))
        twinkles_spec_map.subdir_map['(^specFileGLSN)'] = 'Dynamic'
        # Ensure that the directory for GLSN spectra is created
        os.makedirs(glsn_spectra_dir, exist_ok=True)

        cat_name = 'phosim_cat_%d.txt' % obsHistID
        star_name = 'star_cat_%d.txt' % obsHistID
        bright_star_name = 'bright_stars_%d.txt' % obsHistID
        gal_name = 'gal_cat_%d.txt' % obsHistID
        knots_name = 'knots_cat_%d.txt' % obsHistID
        #agn_name = 'agn_cat_%d.txt' % obsHistID

        # SN Data
        snDataDir = os.path.join(getPackageDir('sims_GCRCatSimInterface'),
                                 'data')
        sncsv_hostless_uDDF = 'uDDF_hostlessSN_trimmed.csv'
        sncsv_hostless_pDC2 = 'MainSurvey_hostlessSN_trimmed.csv'
        sncsv_hostless_pDC2hz = 'MainSurvey_hostlessSN_highz_trimmed.csv'
        sncsv_hosted_uDDF = 'uDDFHostedSNPositions_trimmed.csv'
        sncsv_hosted_pDC2 = 'MainSurveyHostedSNPositions_trimmed.csv'

        snpopcsvs = list(
            os.path.join(snDataDir, n) for n in [
                sncsv_hostless_uDDF, sncsv_hostless_pDC2,
                sncsv_hostless_pDC2hz, sncsv_hosted_uDDF, sncsv_hosted_pDC2
            ])

        # Note: str.strip('_trimmed') would treat the argument as a set of
        # characters, so remove the suffix explicitly instead.
        names = [os.path.splitext(os.path.basename(snpop))[0]
                 .replace('_trimmed', '') for snpop in snpopcsvs]
        object_catalogs = [star_name, gal_name] + \
                          ['{}_cat_{}.txt'.format(x, obsHistID) for x in names]

        make_instcat_header(self.star_db,
                            obs_md,
                            os.path.join(out_dir, cat_name),
                            imsim_catalog=self.imsim_catalog,
                            object_catalogs=object_catalogs)

        star_cat = self.instcats.StarInstCat(self.star_db, obs_metadata=obs_md)
        star_cat.min_mag = self.min_mag
        star_cat.photParams = self.phot_params
        star_cat.lsstBandpassDict = self.bp_dict
        star_cat.disable_proper_motion = not self.proper_motion

        bright_cat \
            = self.instcats.BrightStarInstCat(self.star_db, obs_metadata=obs_md,
                                              cannot_be_null=['isBright'])
        bright_cat.min_mag = self.min_mag
        bright_cat.photParams = self.phot_params
        bright_cat.lsstBandpassDict = self.bp_dict

        cat_dict = {
            os.path.join(out_dir, star_name): star_cat,
            os.path.join(out_dir, bright_star_name): bright_cat
        }
        parallelCatalogWriter(cat_dict, chunk_size=100000, write_header=False)

        # TODO: Find a better way of checking for catalog type
        if 'knots' in self.descqa_catalog:
            knots_db = knotsDESCQAObject(self.descqa_catalog)
            knots_db.field_ra = self.protoDC2_ra
            knots_db.field_dec = self.protoDC2_dec
            cat = self.instcats.DESCQACat(knots_db,
                                          obs_metadata=obs_md,
                                          cannot_be_null=['hasKnots'])
            cat.photParams = self.phot_params
            cat.lsstBandpassDict = self.bp_dict
            cat.write_catalog(os.path.join(out_dir, knots_name),
                              chunk_size=100000,
                              write_header=False)
        else:
            # Creating empty knots component
            subprocess.check_call('cd %(out_dir)s; touch %(knots_name)s' %
                                  locals(),
                                  shell=True)

        if self.sprinkler is False:

            bulge_db = bulgeDESCQAObject(self.descqa_catalog)
            bulge_db.field_ra = self.protoDC2_ra
            bulge_db.field_dec = self.protoDC2_dec
            cat = self.instcats.DESCQACat(bulge_db,
                                          obs_metadata=obs_md,
                                          cannot_be_null=['hasBulge'])
            # Set the photometric parameters before writing; assigning them
            # after write_catalog would have no effect.
            cat.photParams = self.phot_params
            cat.lsstBandpassDict = self.bp_dict
            cat.write_catalog(os.path.join(out_dir, gal_name),
                              chunk_size=100000,
                              write_header=False)

            disk_db = diskDESCQAObject(self.descqa_catalog)
            disk_db.field_ra = self.protoDC2_ra
            disk_db.field_dec = self.protoDC2_dec
            cat = self.instcats.DESCQACat(disk_db,
                                          obs_metadata=obs_md,
                                          cannot_be_null=['hasDisk'])
            cat.photParams = self.phot_params
            cat.lsstBandpassDict = self.bp_dict
            cat.write_catalog(os.path.join(out_dir, gal_name),
                              chunk_size=100000,
                              write_mode='a',
                              write_header=False)

            agn_db = agnDESCQAObject(self.descqa_catalog)
            agn_db.field_ra = self.protoDC2_ra
            agn_db.field_dec = self.protoDC2_dec
            agn_db.agn_params_db = self.agn_db_name
            cat = self.instcats.DESCQACat_Agn(agn_db, obs_metadata=obs_md)
            cat.photParams = self.phot_params
            cat.lsstBandpassDict = self.bp_dict
            cat.write_catalog(os.path.join(out_dir, gal_name),
                              chunk_size=100000,
                              write_mode='a',
                              write_header=False)
        else:

            self.compoundGalICList = [
                self.instcats.DESCQACat_Bulge, self.instcats.DESCQACat_Disk,
                self.instcats.DESCQACat_Twinkles
            ]
            self.compoundGalDBList = [
                bulgeDESCQAObject, diskDESCQAObject, agnDESCQAObject
            ]

            gal_cat = twinklesDESCQACompoundObject(
                self.compoundGalICList,
                self.compoundGalDBList,
                obs_metadata=obs_md,
                compoundDBclass=sprinklerDESCQACompoundObject,
                field_ra=self.protoDC2_ra,
                field_dec=self.protoDC2_dec,
                agn_params_db=self.agn_db_name)

            gal_cat.use_spec_map = twinkles_spec_map
            gal_cat.sed_dir = glsn_spectra_dir
            gal_cat.photParams = self.phot_params
            gal_cat.lsstBandpassDict = self.bp_dict

            gal_cat.write_catalog(os.path.join(out_dir, gal_name),
                                  chunk_size=100000,
                                  write_header=False)

        # SN instance catalogs
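        # Each SN population gets its own output catalog and a unique
        # objectIDtype (42, 43, ...) so its objects stay distinguishable.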
        for i, snpop in enumerate(snpopcsvs):
            phosimcatalog = snphosimcat(snpop,
                                        tableName=names[i],
                                        sedRootDir=out_dir,
                                        obs_metadata=obs_md,
                                        objectIDtype=i + 42)
            phosimcatalog.photParams = self.phot_params
            phosimcatalog.lsstBandpassDict = self.bp_dict

            snOutFile = names[i] + '_cat_{}.txt'.format(obsHistID)
            print('writing out catalog ', snOutFile)
            phosimcatalog.write_catalog(os.path.join(out_dir, snOutFile),
                                        chunk_size=10000,
                                        write_header=False)

        if self.imsim_catalog:

            imsim_cat = 'imsim_cat_%i.txt' % obsHistID
            command = ('cd %(out_dir)s; cat %(cat_name)s %(star_name)s '
                       '%(gal_name)s %(knots_name)s > %(imsim_cat)s'
                       % locals())
            subprocess.check_call(command, shell=True)

        # gzip the object files.
        for orig_name in object_catalogs:
            full_name = os.path.join(out_dir, orig_name)
            with open(full_name, 'rb') as input_file:
                with gzip.open(full_name + '.gz', 'wb') as output_file:
                    output_file.writelines(input_file)
            os.unlink(full_name)
Example no. 4
    def test_parallel_writing_chunk_size(self):
        """
        Test that parallelCatalogWriter gets the right columns in it
        when chunk_size is not None (this is a repeat of test_parallel_writing)
        """

        db = DbClass()

        class_dict = {os.path.join(self.scratch_dir, 'par_test1.txt'): ParallelCatClass1(db),
                      os.path.join(self.scratch_dir, 'par_test2.txt'): ParallelCatClass2(db),
                      os.path.join(self.scratch_dir, 'par_test3.txt'): ParallelCatClass3(db)}

        for file_name in class_dict:
            if os.path.exists(file_name):
                os.unlink(file_name)

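        # parallelCatalogWriter fills all three output files from the same
        # DbClass instance, processing the query in chunks of 7 rows.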
        parallelCatalogWriter(class_dict, chunk_size=7)

        dtype = np.dtype([('id', int), ('test', int), ('ii', int)])
        data1 = np.genfromtxt(os.path.join(self.scratch_dir, 'par_test1.txt'), dtype=dtype, delimiter=',')
        data2 = np.genfromtxt(os.path.join(self.scratch_dir, 'par_test2.txt'), dtype=dtype, delimiter=',')
        data3 = np.genfromtxt(os.path.join(self.scratch_dir, 'par_test3.txt'), dtype=dtype, delimiter=',')

        # verify that the contents of the catalogs fit with the constraints in cannot_be_null
        self.assertEqual(len(np.where(data1['ii']%2 == 0)[0]), 0)
        self.assertEqual(len(np.where(data2['id']%2 == 0)[0]), 0)
        self.assertEqual(len(np.where(data3['id']%5 != 0)[0]), 0)

        # verify that the added value columns came out to the correct value
        np.testing.assert_array_equal(data1['id']**2, data1['test'])
        np.testing.assert_array_equal(data2['id']**3, data2['test'])
        np.testing.assert_array_equal(data3['id']**4, data3['test'])

        # now verify that all of the rows that were excluded from our catalogs
        # really should have been excluded

        control_cat = ControlCatalog(db)
        iterator = control_cat.iter_catalog()
        ct = 0
        ct_in_1 = 0
        ct_in_2 = 0
        ct_in_3 = 0
        for control_data in iterator:
            ct += 1

            if control_data[1] % 2 == 0:
                self.assertNotIn(control_data[0], data1['id'])
            else:
                ct_in_1 += 1
                self.assertIn(control_data[0], data1['id'])
                dex = np.where(data1['id'] == control_data[0])[0][0]
                self.assertEqual(control_data[1], data1['ii'][dex])

            if control_data[0] % 2 == 0:
                self.assertNotIn(control_data[0], data2['id'])
            else:
                ct_in_2 += 1
                self.assertIn(control_data[0], data2['id'])
                dex = np.where(data2['id'] == control_data[0])[0][0]
                self.assertEqual(control_data[1], data2['ii'][dex])

            if control_data[0] % 5 != 0:
                self.assertNotIn(control_data[0], data3['id'])
            else:
                ct_in_3 += 1
                self.assertIn(control_data[0], data3['id'])
                dex = np.where(data3['id'] == control_data[0])[0][0]
                self.assertEqual(control_data[1], data3['ii'][dex])

        self.assertEqual(ct_in_1, len(data1['id']))
        self.assertEqual(ct_in_2, len(data2['id']))
        self.assertEqual(ct_in_3, len(data3['id']))
        self.assertEqual(ct, 100)

        for file_name in class_dict:
            if os.path.exists(file_name):
                os.unlink(file_name)
    def write_catalog(self, obsHistID, out_dir='.', fov=2):
        """
        Write the instance catalog for the specified obsHistID.

        Parameters
        ----------
        obsHistID: int
            ID of the desired visit.
        out_dir: str ['.']
            Output directory.  It will be created if it doesn't already exist.
        fov: float [2.]
            Field-of-view angular radius in degrees.  2 degrees will cover
            the LSST focal plane.
        """
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)

        obs_md = get_obs_md(self.obs_gen, obsHistID, fov, dither=self.dither)

        cat_name = 'phosim_cat_%d.txt' % obsHistID
        star_name = 'star_cat_%d.txt' % obsHistID
        bright_star_name = 'bright_stars_%d.txt' % obsHistID
        gal_name = 'gal_cat_%d.txt' % obsHistID
        #agn_name = 'agn_cat_%d.txt' % obsHistID

        make_instcat_header(self.star_db, obs_md,
                            os.path.join(out_dir, cat_name),
                            imsim_catalog=self.imsim_catalog,
                            object_catalogs=(star_name, gal_name))

        star_cat = self.instcats.StarInstCat(self.star_db, obs_metadata=obs_md,
                                             cannot_be_null=['inProtoDc2'])
        star_cat.min_mag = self.min_mag
        star_cat.disable_proper_motion = not self.proper_motion

        bright_cat \
            = self.instcats.BrightStarInstCat(self.star_db, obs_metadata=obs_md,
                                              cannot_be_null=['isBright'])
        bright_cat.min_mag = self.min_mag

        cat_dict = {os.path.join(out_dir, star_name): star_cat,
                    os.path.join(out_dir, bright_star_name): bright_cat}
        parallelCatalogWriter(cat_dict, chunk_size=100000, write_header=False)

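        # Bulge and disk components are appended to the same galaxy catalog
        # file; the second call uses write_mode='a' so it does not overwrite
        # the first.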
        cat = self.instcats.DESCQACat(bulgeDESCQAObject(self.descqa_catalog),
                                      obs_metadata=obs_md,
                                      cannot_be_null=['hasBulge'])
        cat.write_catalog(os.path.join(out_dir, gal_name), chunk_size=100000,
                          write_header=False)

        cat = self.instcats.DESCQACat(diskDESCQAObject(self.descqa_catalog),
                                      obs_metadata=obs_md,
                                      cannot_be_null=['hasDisk'])
        cat.write_catalog(os.path.join(out_dir, gal_name), chunk_size=100000,
                          write_mode='a', write_header=False)

        if self.imsim_catalog:
            imsim_cat = 'imsim_cat_%i.txt' % obsHistID
            command = 'cd %(out_dir)s; cat %(cat_name)s %(star_name)s %(gal_name)s > %(imsim_cat)s' % locals()
            subprocess.check_call(command, shell=True)

        # gzip the object files.
        for orig_name in (star_name, gal_name):
            full_name = os.path.join(out_dir, orig_name)
            with open(full_name, 'rb') as input_file:
                with gzip.open(full_name+'.gz', 'wb') as output_file:
                    output_file.writelines(input_file)
            os.unlink(full_name)
Example no. 6
            output.write('includeobj %s.gz\n' % agn_name)

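        # Write the star and bright-star catalogs in parallel, then append
        # Sersic bulge and disk components to the galaxy catalog and write
        # the AGN point sources to their own catalog.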
        star_cat = MaskedPhoSimCatalogPoint(star_db, obs_metadata=obs)
        star_cat.phoSimHeaderMap = phosim_header_map
        bright_cat = BrightStarCatalog(star_db,
                                       obs_metadata=obs,
                                       cannot_be_null=['isBright'])
        star_cat.min_mag = args.min_mag
        bright_cat.min_mag = args.min_mag

        from lsst.sims.catalogs.definitions import parallelCatalogWriter
        cat_dict = {}
        cat_dict[os.path.join(out_dir, star_name)] = star_cat
        cat_dict[os.path.join(out_dir,
                              'bright_stars_%d.txt' % obshistid)] = bright_cat
        parallelCatalogWriter(cat_dict, chunk_size=100000, write_header=False)

        cat = PhoSimCatalogSersic2D(bulge_db, obs_metadata=obs)
        cat.write_catalog(os.path.join(out_dir, gal_name),
                          write_header=False,
                          chunk_size=100000)
        cat = PhoSimCatalogSersic2D(disk_db, obs_metadata=obs)
        cat.write_catalog(os.path.join(out_dir, gal_name),
                          write_header=False,
                          write_mode='a',
                          chunk_size=100000)

        cat = PhoSimCatalogZPoint(agn_db, obs_metadata=obs)
        cat.write_catalog(os.path.join(out_dir, agn_name),
                          write_header=False,
                          chunk_size=100000)
Example no. 7
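    # Add the Opsim seeing columns to the PhoSim header map, then write
    # PhoSim and truth catalogs for stars and galaxies in parallel.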
    phosim_header_map = copy.deepcopy(DefaultPhoSimHeaderMap)
    phosim_header_map['rawSeeing'] = ('rawSeeing', None)
    phosim_header_map['FWHMeff'] = ('FWHMeff', None)
    phosim_header_map['FWHMgeom'] = ('FWHMgeom', None)

    phosim_cat = StarPhoSimCatalog(star_db, obs_metadata=obs)
    phosim_cat.phoSimHeaderMap = phosim_header_map

    truth_cat = StellarTruthCatalog(star_db, obs_metadata=obs)

    cat_dict = {
        'catalogs/phosim_stars.txt': phosim_cat,
        'catalogs/truth_stars.txt': truth_cat
    }

    parallelCatalogWriter(cat_dict, chunk_size=10000)

    print('\n\ndone with stars\n\n')

    obs.boundLength = 0.5

    truth_cat = GalaxyTruthCatalog(galaxy_db, obs_metadata=obs)
    phosim_cat = GalaxyPhoSimCatalog(galaxy_db, obs_metadata=obs)
    phosim_cat.phoSimHeaderMap = phosim_header_map

    cat_dict = {
        'catalogs/phosim_galaxies.txt': phosim_cat,
        'catalogs/truth_galaxies.txt': truth_cat
    }

    parallelCatalogWriter(cat_dict, chunk_size=10000)