Example #1
    def __init__(self, working_directory, output_directory,
                 dry_run=False, debug=False):
        self.dry_run = dry_run

        logger.info("Input directory set to: %s" % working_directory)
        logger.info("Output directory set to: %s" % output_directory)

        working_context = context.get_context(working_directory)
        output_context = context.get_context(output_directory)

        if dry_run and working_context.is_remote():
            sys.stdout.write("A dry run can only be done on local files.\n")
            sys.exit(0)

        if output_context.is_remote():
            sys.stdout.write("The output directory must be local.\n")
            sys.exit(0)

        image_manager = self._create_image_manager()

        progress_manager = working_context.get_progress_manager()
        builder = self._create_workunit_builder(working_context,
                                                output_context,
                                                progress_manager)

        workunit_provider = WorkUnitProvider(self.input_suffix,
                                             working_context,
                                             progress_manager, builder,
                                             randomize=self.should_randomize_workunits)

        prefetching_workunit_provider = PreFetchingWorkUnitProvider(workunit_provider,
                                                                    config.read("PREFETCH.NUMBER"),
                                                                    image_manager)

        if working_context.is_remote():
            synchronization_manager = SynchronizationManager(working_context)
        else:
            synchronization_manager = None

        model = TransAckValidationModel(prefetching_workunit_provider,
                                        image_manager,
                                        synchronization_manager)
        logger.debug("Created model.")

        view = self._create_view(model, debug=debug)

        logger.debug("Created view.")
        model.start_work()

        self.model = model
        self.view = view
        self.controller = view.controller

        self.controller.display_current_image()

        if not synchronization_manager:
            self.view.disable_sync_menu()

        self.view.show()
Example #2
def fix_tags_on_cands_missing_reals(user_id, vos_dir, property):
    "At the moment this just checks for a single user's missing reals. Easy to generalise it to all users."
    con = context.get_context(vos_dir)
    user_progress = []
    listing = con.get_listing(tasks.get_suffix('reals'))
    mpc_listing = con.get_listing('mpc')
    for filename in listing:
        if not filename.startswith('fk'):
            user = storage.get_property(con.get_full_path(filename), property)
            if (user is not None):
                # and (user == user_id):  # modify 'and' to generalise to all users with work in this directory
                #realsfile = filename.replace('cands', 'reals')
                #if not con.exists(realsfile):
                #    print filename, 'no reals file', realsfile

                # go through the listing of .mpc files and see if any match this reals.astrom
                is_present = False
                for mpcfile in [f for f in mpc_listing if not f.startswith('fk')]:
                    if mpcfile.startswith(filename):
                        print(filename, user, 'exists!', mpcfile)
                        is_present = True

                if not is_present:
                    user_progress.append(filename)
                    print(filename, user, 'no mpc file')
                    storage.set_property(con.get_full_path(filename), property, None)

    print('Fixed files:', len(user_progress))

    return
Example #3
    def load_objects(self, directory_name=None):
        """Load the targets from a file

        """

        for name in Neptune:
            self.kbos[name] = Neptune[name]

        if directory_name is not None:

            working_context = context.get_context(directory_name)

            for filename in working_context.get_listing('ast'):
                fhandle = working_context.open(filename)
                observations = []
                lines = fhandle.read().split('\n')
                fhandle.close()
                for line in lines:
                    if len(line) > 0 and not line.startswith(
                            '#'
                    ):  # skip the comments, don't care about them here
                        observations.append(mpc.Observation.from_string(line))
                # strip the '.ast' suffix; rstrip('.ast') would strip a character set, not the suffix
                name = filename[:-len('.ast')] if filename.endswith('.ast') else filename  # observations[0].provisional_name
                try:
                    this_orbit = orbfit.Orbfit(observations)
                    self.kbos[name] = this_orbit
                except Exception as e:
                    logging.error("Failed loading: {}".format(name))
                    logging.error(str(e))

        self.doplot()
Example #4
def fix_tags_on_cands_missing_reals(user_id, vos_dir, property):
    "At the moment this just checks for a single user's missing reals. Easy to generalise it to all users."
    con = context.get_context(vos_dir)
    user_progress = []
    listing = con.get_listing(tasks.get_suffix('reals'))
    mpc_listing = con.get_listing('mpc')
    for filename in listing:
        if not filename.startswith('fk'):
            user = storage.get_property(con.get_full_path(filename), property)
            if (user is not None):
                # and (user == user_id):  # modify 'and' to generalise to all users with work in this directory
                #realsfile = filename.replace('cands', 'reals')
                #if not con.exists(realsfile):
                #    print filename, 'no reals file', realsfile

                # go through the listing of .mpc files and see if any match this reals.astrom
                is_present = False
                for mpcfile in [
                        f for f in mpc_listing if not f.startswith('fk')
                ]:
                    if mpcfile.startswith(filename):
                        print(filename, user, 'exists!', mpcfile)
                        is_present = True

                if not is_present:
                    user_progress.append(filename)
                    print(filename, user, 'no mpc file')
                    storage.set_property(con.get_full_path(filename), property,
                                         None)

    print('Fixed files:', len(user_progress))

    return
Example #5
def scan_for_miscreant_files(directory, quarantine_directory, file_of_miscreants):
    reals_fks = []
    con = context.get_context(directory)
    listing = con.listdir()

    with open(file_of_miscreants, 'r') as infile:
        for line in infile.readlines():
            if line.split()[5] != '0':
                reals_fks.append(line.split()[3] + '_p' + line.split()[4])
            if line.split()[6] != '0':
                reals_fks.append('fk_' + line.split()[3] + '_s' + line.split()[4])

    for fileID in reals_fks:
        # remove any .measure3.reals.astrom if present
        reals_file = fileID + '.measure3.reals.astrom'
        try:
            if con.exists(reals_file):  # this isn't catching the error message, no obvious reason why
                # anything interesting about this file?
                print('Removing:', reals_file, con.get_file_size(reals_file),
                      'done: %s' % storage.get_property(con.get_full_path(reals_file), DONE_PROPERTY),
                      'locked: %s' % storage.get_property(con.get_full_path(reals_file), LOCK_PROPERTY))
                # con.remove(reals_file)
        except:
            continue

        # obtain all files remaining in the directory listing that contain this chip, just not the removed reals.astrom
        fileID_files = [n for n in listing if
                        n.split('.')[0] == fileID and '.measure3.reals.astrom' not in n]
        print(fileID)
        print(fileID_files)
        # move all remaining matching files to quarantine
        for fn in fileID_files:
            print(directory + fn, '>', quarantine_directory + fn)
            con.rename(directory + fn, quarantine_directory + fn)
Example #6
def ensure_cands_have_matching_reals(directory):
    con = context.get_context(directory)
    listing = con.get_listing(tasks.get_suffix('cands'))

    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), DONE_PROPERTY)
        if user is not None:
            reals_file = filename.replace('cands', 'reals')
            if not con.exists(reals_file):
                print('.cands.astrom has no matching reals.astrom!', filename, 'done by', user)

    return
Example #7
def what_locks_remain(directory):
    con = context.get_context(directory)
    listing = con.get_listing(tasks.get_suffix('cands'))
    user_progress = collections.defaultdict(int)

    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), LOCK_PROPERTY)
        if user is not None:
            user_progress[user] += 1
            print(filename, 'lock_holder=', user)
            storage.set_property(con.get_full_path(filename), LOCK_PROPERTY, None)

    for user, num_locked in user_progress.items():
        print("  %s: %d" % (user, num_locked))

    return
Example #8
def what_locks_remain(directory):
    con = context.get_context(directory)
    listing = con.get_listing(tasks.get_suffix('cands'))
    user_progress = collections.defaultdict(int)

    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), LOCK_PROPERTY)
        if user is not None:
            user_progress[user] += 1
            print(filename, 'lock_holder=', user)
            storage.set_property(con.get_full_path(filename), LOCK_PROPERTY,
                                 None)

    for user, num_locked in user_progress.items():
        print("  %s: %d" % (user, num_locked))

    return
Example #9
def print_progress_stats(task, directory):
    con = context.get_context(directory)

    user_progress = collections.defaultdict(int)
    listing = con.get_listing(tasks.get_suffix(task))
    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), DONE_PROPERTY)
        if user is not None:
            user_progress[user] += 1

    total_processed = sum(user_progress.values())

    print "%s: %s: %d of %d processed." % (
        directory, task, total_processed, len(listing))
    print "---"

    for user, num_processed in user_progress.items():
        print("  %s: %d" % (user, num_processed))
Example #10
def print_progress_stats(task, directory):
    con = context.get_context(directory)

    user_progress = collections.defaultdict(int)
    listing = con.get_listing(tasks.get_suffix(task))
    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), DONE_PROPERTY)
        if user is not None:
            user_progress[user] += 1

    total_processed = sum(user_progress.values())
    total_todo = len(listing)

    print(datetime.datetime.now())
    print("%s: %s: %d of %d processed (%2.1f%%)." % (
        directory, task, total_processed, total_todo, (float(total_processed) / float(total_todo)) * 100.))
    print("---")

    for user, num_processed in user_progress.items():
        print("  %s: %d" % (user, num_processed))
Example #11
def print_progress_stats(task, directory):
    con = context.get_context(directory)

    user_progress = collections.defaultdict(int)
    listing = con.get_listing(tasks.get_suffix(task))
    for filename in listing:
#        if filename.__contains__('p'):  # HACK FOR CHECKING P FILES ONLY
        user = storage.get_property(con.get_full_path(filename), DONE_PROPERTY)
        if user is not None:
            user_progress[user] += 1

    total_processed = sum(user_progress.values())
    total_todo = len([l for l in listing])  # if l.__contains__('p')])

    print(datetime.datetime.now())
    print("%s: %s: %d of %d processed (%2.1f%%)." % (
        directory, task, total_processed, total_todo, (float(total_processed) / float(total_todo)) * 100.))
    print("---")

    for user, num_processed in user_progress.items():
        print("  %s: %d" % (user, num_processed))
Example #12
def print_progress_stats(task, directory):
    con = context.get_context(directory)

    user_progress = collections.defaultdict(int)
    listing = con.get_listing(tasks.get_suffix(task))
    for filename in listing:
        #        if filename.__contains__('p'):  # HACK FOR CHECKING P FILES ONLY
        user = storage.get_property(con.get_full_path(filename), DONE_PROPERTY)
        if user is not None:
            user_progress[user] += 1

    total_processed = sum(user_progress.values())
    total_todo = len([l for l in listing])  # if l.__contains__('p')])

    print(datetime.datetime.now())
    print("%s: %s: %d of %d processed (%2.1f%%)." %
          (directory, task, total_processed, total_todo,
           (float(total_processed) / float(total_todo)) * 100.))
    print("---")

    for user, num_processed in user_progress.items():
        print("  %s: %d" % (user, num_processed))
Example #13
    def __init__(self,
                 working_directory,
                 output_directory,
                 dry_run=False,
                 debug=False,
                 name_filter=None,
                 user_id=None):
        self.dry_run = dry_run
        self.user_id = user_id
        logger.info("Input directory set to: %s" % working_directory)
        logger.info("Output directory set to: %s" % output_directory)

        working_context = context.get_context(working_directory,
                                              userid=self.user_id)
        output_context = context.get_context(output_directory,
                                             userid=self.user_id)

        if dry_run and working_context.is_remote():
            sys.stdout.write("A dry run can only be done on local files.\n")
            sys.exit(0)

        if output_context.is_remote():
            sys.stdout.write("The output directory must be local.\n")
            sys.exit(0)

        image_manager = self._create_image_manager()

        progress_manager = working_context.get_progress_manager()
        builder = self._create_workunit_builder(working_context,
                                                output_context,
                                                progress_manager)

        workunit_provider = WorkUnitProvider(
            self.input_suffix,
            working_context,
            progress_manager,
            builder,
            randomize=self.should_randomize_workunits,
            name_filter=name_filter)

        prefetching_workunit_provider = PreFetchingWorkUnitProvider(
            workunit_provider, config.read("PREFETCH.NUMBER"), image_manager)

        if working_context.is_remote():
            synchronization_manager = SynchronizationManager(working_context,
                                                             sync_enabled=True)
        else:
            synchronization_manager = None

        model = TransAckValidationModel(prefetching_workunit_provider,
                                        image_manager, synchronization_manager)
        logger.debug("Created model.")

        view = self._create_view(model, debug=debug)

        logger.debug("Created view.")
        model.start_work()

        self.model = model
        self.view = view
        self.controller = view.controller

        self.controller.display_current_image()

        if not synchronization_manager:
            self.view.disable_sync_menu()

        self.view.show()
Example #14
    description = 'Given a block ID (e.g. o5d), report which objects in that block have unmeasured lines of astrometry, ' \
                  'as found by SSOIS from the present arc of each object in the block.'
    epilog = '''
    For all objects in the database, check if they have images taken by OSSOS on which they are predicted to fall
    but on which their astrometry/photometry have not yet been measured to generate a recorded MPC line.
    Output a list of objects to work on, and a list of images that have failed processing for any reason.
    '''
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    parser.add_argument('block',
                        help="The three-digit OSSOS designation for the set of TNOs of interest.",
                        )
    args = parser.parse_args()

    outfile = 'need_to_measure_{}.txt'.format(args.block)

    working_context = context.get_context(parameters.REAL_KBO_AST_DIR)
    files = working_context.get_listing('ast')
    files = [f for f in files if f.startswith(args.block)]

    with open(outfile, 'a') as ofile:
        ofile.write('Examining {} object files.\n'.format(len(files)))

    for fn in files:
        with open(outfile, 'a') as ofile:
            ofile.write('{}\n'.format(fn))
        parser = ssos.TracksParser(skip_previous=True)
        tracks_data = parser.parse(parameters.REAL_KBO_AST_DIR + fn)

        if len(tracks_data.observations) > 1:  # it is set to always return the discovery image
            with open(outfile, 'a') as ofile:
                ofile.write('{} unmeasured observations!\n'.format(len(tracks_data.observations) - 1))
Example #15
            if not storage.exists(vo_reals):  # shouldn't possibly be there but let's just make sure
                storage.copy(fname, vo_reals)
                storage.set_property(mv_file, 'done', user_id)  # set the .cands.astrom #done tag to the user ID.
                uploaded_count += 1
        else:
            print(fn, wasdone)

    print('Added unique files:', uploaded_count)

    return


def fix_tags_on_cands_missing_reals(user_id, vos_dir, property):
    "At the moment this just checks for a single user's missing reals. Easy to generalise it to all users: modify the
    and"
    con = context.get_context(vos_dir)
    user_progress = []
    listing = con.get_listing(tasks.get_suffix('cands'))
    for filename in listing:
        user = storage.get_property(con.get_full_path(filename), property)
        if (user is not None) and (
            user == user_id):  # modify here to generalise to all users with work in this directory
            user_progress.append(filename)
            realsfile = filename.replace('cands', 'reals')
            if not storage.exists(con.get_full_path(realsfile)):
                print(filename, 'no reals file', realsfile)
                storage.set_property(con.get_full_path(filename), property, None)

    print('Fixed files:', len(user_progress))

    return
Example #16
    parser.add_argument(
        '--ast-dir',
        help="Name of the directory holding the astrometric files (.ast files)",
        default=parameters.REAL_KBO_AST_DIR,
    )
    parser.add_argument(
        '-x',
        '--check-mkpsf',
        help="Check in mkpsf has run.",
        action="store_true",
        default=False,
    )
    args = parser.parse_args()

    outfile = 'need_to_measure_{}.txt'.format(args.block)

    working_context = context.get_context(args.ast_dir)
    files = working_context.get_listing('ast')
    files = [f for f in files if f.startswith(args.block)]
    parser = ssos.TracksParser(skip_previous=True)

    with open(outfile, 'w') as ofile:
        ofile.write('Examining {} object files.\n'.format(len(files)))
        for fn in files:
            print(fn)
            ofile.write('{}\n'.format(fn))
            obj = parsers.TNO(None,
                              ast_filename=os.path.join(
                                  os.path.join(args.ast_dir, fn)))
            discovery_frames = []
            for original_observation in obj.orbit.observations:
                if original_observation.discovery:
                        help="The three-digit OSSOS designation for the set of TNOs of interest.",
                        )
    parser.add_argument('-a', '--ast-dir',
                        help="Name of the directory holding the astrometric files (.ast files)",
                        default=parameters.REAL_KBO_AST_DIR,
                        )
    parser.add_argument('-x', '--check-mkpsf',
                        help="Check in mkpsf has run.",
                        action="store_true",
                        default=False,
                        )
    args = parser.parse_args()

    outfile = 'need_to_measure_{}.txt'.format(args.block)

    working_context = context.get_context(args.ast_dir)
    files = working_context.get_listing('ast')
    files = [f for f in files if f.startswith(args.block)]
    parser = ssos.TracksParser(skip_previous=True)

    with open(outfile, 'w') as ofile:
        ofile.write('Examining {} object files.\n'.format(len(files)))
        for fn in files:
            print(fn)
            ofile.write('{}\n'.format(fn))
            obj = parsers.TNO(None, ast_filename=os.path.join(os.path.join(args.ast_dir, fn)))
            discovery_frames = []
            for original_observation in obj.orbit.observations:
                if original_observation.discovery:
                    try:
                        discovery_frames.append(original_observation.comment.frame)