Example #1
    def select_roi(self, roi=None):
        if roi is None:
            roi = tracking.select_roi(self.frame)
        self.roi = roi

        # ROI for saving must be at least 65x65, otherwise the buffer might be too small
        roisave = self.roi.copy()
        nx, ny = roisave['x2'] - roisave['x1'], roisave['y2'] - roisave['y1']
        if nx < 64:
            roisave['x1'] -= floor((64 - nx) / 2)
            nx = 64
            roisave['x2'] = roisave['x1'] + nx
        if ny < 64:
            roisave['y1'] -= floor((64 - ny) / 2)
            ny = 64
            roisave['y2'] = roisave['y1'] + ny

        self.roisave = roisave
        self.nxsave, self.nysave = nx, ny
        self.roi_selected = True

        # Re-init tracker if necessary
        if self.dotrack:
            self.eye = tracking.resize_roi(self.frame, self.roi)
            self.tracker = tracking.Tracker(self.eye)
Example #2
    def __init__(self):
        Thread.__init__(self)
        self.running = True
        self.q_camera_read_green = Queue.Queue()
        self.q_camera_read_blue = Queue.Queue()
        self.q_camera_read_red = Queue.Queue()

        self.tracker = tracking.Tracker(
            self.q_camera_read_green, self.q_camera_read_blue,
            self.q_camera_read_red, "pi"
        )  # "C:\\Users\\Sander\\Downloads\\ball-tracking\\ball_tracking_example.mp4")
Example #3
    def toggletrack(self):
        b = self.buttons['track']
        if b.isChecked() and not self.roi_selected:
            b.setChecked(False)
            QMessageBox.warning(None, "EYETRACK", "Select ROI first")
            return
        self.dotrack = b.isChecked()
        if self.dotrack:
            b.setStyleSheet("color: blue; font-weight: bold")
            self.tracker = tracking.Tracker(self.eye)
        else:
            b.setStyleSheet("")
            self.tracker = []
            cv2.destroyWindow('preproc')
            cv2.destroyWindow('controls')
Example #4
    def __init__(self, screen_x0, beamline, n_streaker, streaker_offset, delta_gap, tracker_kwargs, profile=None, recon_kwargs=None, charge=None, subtract_median=False, noise_cut=0.1, slice_factor=1, offset_explore=30e-6, ref_slice_dict=None, ref_y=None):
        self.delta_gap = delta_gap
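        # Apply the gap correction only to the streaker selected by n_streaker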
        recon_kwargs['delta_gap'] = [0, 0]
        recon_kwargs['delta_gap'][n_streaker] = delta_gap
        self.screen_x0 = screen_x0
        self.beamline = beamline
        self.n_streaker = n_streaker
        self.streaker = config.streaker_names[beamline][n_streaker]
        self.streaker_offset = streaker_offset
        self.charge = charge
        self.profile = profile
        self.recon_kwargs = recon_kwargs
        self.subtract_median = subtract_median
        self.noise_cut = noise_cut
        self.slice_factor = slice_factor
        self.offset_explore = offset_explore
        self.ref_slice_dict = ref_slice_dict
        self.ref_y = ref_y

        self.tracker = tracking.Tracker(**tracker_kwargs)
        self.do_recon_plot = False
        self.beam_offsets = None
        self.index_median = None
Example #5
    def initializeShared(self, gameState, agent):
        "Initializes the shared data structured for the agents"
        if not self.init:
            # Build the board and main particle filter
            self.init = True
            self.board.initialize(gameState)
            self.particleFilter.initialize(gameState, self.board.getLegal())

            # Build the ghosts and add to particle filter
            oppIndex = agent.getOpponents(gameState)
            for g in oppIndex:
                ghost = agents.StrategicGhost(g, self, 0.5)
                ghost.registerInitialState(gameState)
                ghost.strategy = strategy.Random()
                ghost.tracker = tracking.GhostTracker(self.particleFilter,
                                                      gameState, ghost)
                self.opponents.append(ghost)
                self.particleFilter.addGhostAgent(ghost)

        # Create the marginal particle filter for the agent
        agent.tracker = tracking.Tracker(self.particleFilter, gameState, agent)

        # Set the agent's strategy.
        if agent.index in map(lambda x: x.index, self.team):
            # Build strategy
            current = getattr(strategy, self.strategies.pop(),
                              strategy.ContestOffensive)
            current = current()

            # If we want look ahead, then wrap the strategy in a negamax
            depth = self.depths.pop()
            if depth:
                current = strategy.Negamax(current, depth)

            # Set the agent's strategy
            agent.strategy = current
Example #6
ms.closeall()

hostname = gethostname()
if hostname == 'desktop':
    data_dir = '/storage/data_2021-05-19/'
elif hostname == 'pc11292.psi.ch':
    data_dir = '/sf/data/measurements/2021/05/19/'
elif hostname == 'pubuntu':
    data_dir = '/mnt/data/data_2021-05-19/'
data_dir1 = data_dir.replace('19', '18')

tracker_kwargs = config.get_default_tracker_settings()
tracker_kwargs['len_screen'] = 1000
tracker_kwargs['n_particles'] = int(50e3)
recon_kwargs = config.get_default_gauss_recon_settings()
tracker = tracking.Tracker(**tracker_kwargs)

#data_file = data_dir+'2021_05_19-14_49_38_Lasing_False_SARBD02-DSCR050.h5'
#data_file = data_dir + '2021_05_19-14_24_05_Calibration_SARUN18-UDCP020.h5'
data_file = data_dir1 + '2021_05_18-23_07_20_Calibration_SARUN18-UDCP020.h5'
data_dict = h5_storage.loadH5Recursive(data_file)

raw_data = data_dict['raw_data']
meta_data = data_dict['meta_data']

offset_index = -1

gaps = [10e-3, 9.94e-3]
streaker_offset = 372e-6
beam_offsets = [0, -(meta_data['offsets'][offset_index] - streaker_offset)]
Example #7
            ratio=6,
            method='nearest',
            clobber=True,
        )

    for path in [
            os.path.join(SCRATCH_BASE, subj, 'images'),
            os.path.join(SCRATCH_BASE, subj, 'images', 'reg'),
            dataset.get_log_folder(subj=subj)
    ]:
        try:
            os.mkdir(path)
        except OSError:
            # directory may already exist
            pass

    ### Generate script file and SGE qsub file
    tracker = tracking.Tracker(pb.Command.all_commands,
                               pb.Dataset.all_datasets)
    tracker.compute_dependencies()

    ###
    # NIPYPE_ROOT = '/data/scratch/rameshvs/sandbox/nipype_regpipe'
    # wf = tracker.make_nipype_pipeline(NIPYPE_ROOT)
    log_folder = dataset.get_log_folder(subj=subj)
    pb.Command.generate_code_from_datasets([dataset, atlas],
                                           log_folder,
                                           subj,
                                           sge=True,
                                           wait_time=0,
                                           tracker=tracker)
Example #8
image0 = dict_['Image'][-1][0].T
#meas_screen = get_screen_from_image(image, invert=True)
meas_screen0 = get_screen_from_image(image0, invert=True)

timestamp = get_timestamp(file_)

blmeas_1 = dirname1 + '129833611_bunch_length_meas.h5'
energy_eV = 4491892915.7690735
profile_meas = tracking.profile_from_blmeas(blmeas_1,
                                            tt_halfrange,
                                            charge,
                                            energy_eV,
                                            subtract_min=True)

tracker = tracking.Tracker(archiver_dir + 'archiver_api_data/2020-10-03.h5',
                           timestamp, struct_lengths, n_particles,
                           n_emittances, screen_bins, screen_cutoff, smoothen,
                           profile_cutoff, len_profile)
forward_dict = tracker.matrix_forward(profile_meas, [10e-3, 10e-3], [0., 0.])
forward_dict_ele = tracker.elegant_forward(profile_meas, [10e-3, 10e-3],
                                           [0., 0.])
beam_forward = forward_dict['beam0_at_screen']

image_sim, xedges, yedges = np.histogram2d(beam_forward[0],
                                           beam_forward[2],
                                           bins=(200, 100),
                                           normed=True)

fig = ms.figure('Compare images')
subplot = ms.subplot_factory(2, 2)
sp_ctr = 1
Example #9
def main():

    if DEBUG:
        files = glob.glob("../20190401/0000/*")
        for f in files:
            os.remove(f)

    total_time = 0
    total_frames = 0

    # TODO Change to pipe from ImageDecrypt
    # Read from file to import video
    # cap = cv2.VideoCapture("../../test_plates/test_video6.mp4")
    # Something for writing out the video, codec related probably
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    # Set up output file
    out = cv2.VideoWriter('../20190401/0000/output.avi', fourcc, 30,
                          (3840, 2160))

    track = tracking.Tracker()
    # Counter for number of frames
    j = 0
    # Return boolean to ensure reading in frames
    ret = True

    d_counter = dcounter.DCounter()

    k = 0
    # Loops through while video is reading in
    # Initialize buffer object
    buff = buffer.Buffer()
    buff.encrypt_path = "../20190401/0000/"
    # Read in frames
    print("Read in frames...")
    s = server.Server()
    buff = buffer.Buffer()
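    # Accept a client connection, receive 300 frames per batch, then run
    # plate tracking and encryption on the buffered frames and save them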
    while True:
        print("Waiting for client")
        client, addr = s.sock.accept()
        print("Client connected from " + str(addr))
        s.handshake(client)
        for i in range(300):

            print("Receiving frame " + str(i))
            #s.recv_msg(client, buff)
            s.pickle_recv(client, buff)

            print("Writing frame " + str(i))
            # buff.frames.append(frame_new)

            client.sendall("halo".encode())
        client.close()

        # for i in range(len(buff.encrypted_frames)):
        #     decoded = buff.encrypted_frames[i]
        #     decoded_frame = cv2.imdecode(numpy.frombuffer(decoded, numpy.uint8), -1)
        #     outname = "decoded_" + str(i + 1) + ".jpg"
        #     try:
        #         print("Decoded_frame size: " + str(len(decoded_frame)))
        #         cv2.imwrite(outname, decoded_frame)
        #         buff.frames.append(decoded_frame)
        #     except TypeError:
        #         print("Errored")
        # print("Writing picture to file...")
        # frame = buff.encrypted_frames[0]
        # print(frame)
        # cv2.imwrite("unencrypted.jpg", frame)
        # print("Decrypting frame " + str(i + 1))
        # s.decryptframes(buff, i)

        d_counter.max = 1
        total_frames += i
        print("Finding Plates...")
        track.frame_counter = 0
        start = time.time()
        track.start(buff, d_counter)
        for i in range(1, len(buff.frames)):
            track.update(buff, d_counter)
        buff.frame_num = j
        total_time += (time.time() - start)

        print("Encrypt License Plates...")
        processing.clear_plate_area(buff)
        j += 300
        print("Saving Buffer...")
        save.save_frame(buff, out)
        buff.frames.clear()

    # cap.release()  # the VideoCapture 'cap' is commented out above
    out.release()

    print("Average Time Per Frame: " + str(total_time / total_frames))
Example #10
quad_wake = False
bp_smoothen = 1e-15
invert_offset = True
magnet_file = archiver_dir + '2021-03-16.h5'
timestamp = elegant_matrix.get_timestamp(2021, 3, 16, 20, 14, 10)
sig_t_range = np.arange(20, 50.01, 5) * 1e-15
n_streaker = 1
compensate_negative_screen = False

tracker = tracking.Tracker(
    magnet_file,
    timestamp,
    struct_lengths,
    n_particles,
    n_emittances,
    screen_bins,
    screen_cutoff,
    smoothen,
    profile_cutoff,
    len_profile,
    quad_wake=quad_wake,
    bp_smoothen=bp_smoothen,
    compensate_negative_screen=compensate_negative_screen)

streaker_offset = 0.00037758839957521145

meas_screen = misc.image_to_screen(image_off, x_axis, True, x_offset=0)
meas_screen.cutoff2(5e-2)
meas_screen.crop()
meas_screen.reshape(len_profile)

#ms.figure('Obtain x-t')
Example #11
n_particles = int(100e3)
self_consistent = True


offset_errors_s1 = (np.arange(-20, 20.01, 5)+0)*1e-6
total_diff_list = []

if hostname == 'desktop':
    magnet_file = '/storage/Philipp_data_folder/archiver_api_data/2020-07-26.h5'
    bl_meas_file = '/storage/data_2020-02-03/Bunch_length_meas_2020-02-03_15-59-13.h5'
else:
    magnet_file = '/afs/psi.ch/intranet/SF/Beamdynamics/Philipp/data/archiver_api_data/2020-07-26.h5'
    bl_meas_file = '/sf/data/measurements/2020/02/03/Bunch_length_meas_2020-02-03_15-59-13.h5'


tracker = tracking.Tracker(magnet_file, timestamp, struct_lengths, energy_eV='file', n_emittances=n_emittances, screen_bins=screen_bins, n_particles=n_particles, smoothen=smoothen, profile_cutoff=profile_cutoff, screen_cutoff=screen_cutoff, len_screen=len_profile)
energy_eV = tracker.energy_eV

profile_meas = tracking.profile_from_blmeas(bl_meas_file, tt_halfrange, charge, energy_eV, subtract_min=False)
profile_meas.center()

meas_screen_dict = {}

ms.figure('Compare offset errors')
sp_ctr = 1
subplot = ms.subplot_factory(2,3)

sp_profile0 = subplot(sp_ctr, title='Beam profiles', xlabel='t [fs]', ylabel='Current (arb. units)')
sp_ctr += 1

sp_profile0.plot(profile_meas.time*1e15, profile_meas.current/profile_meas.integral, label='Real / %i' % (profile_meas.gaussfit.sigma*1e15), color='black', lw=3)
Example #12
def main(argv):
    ########################
    ### Argument parsing ###
    ########################
    USAGE = '%s <subj> <smoothness regularization> <field regularization> <out folder> [<subj list>]' % argv[0]

    if len(argv) not in [5, 6]:
        print(USAGE)
        raise ValueError

    subj = argv[1]
    # Regularization parameters for ANTS
    regularization = float(argv[2])
    regularization2 = float(argv[3])

    # where the data lives
    data_subfolder = argv[4]
    if 'site00' in data_subfolder:
        site = 'site00'
    else:
        site = 'unknown'

    if len(argv) == 6:
        subject_list = open(argv[5]).read().split()
        mode = 'server'
    else:
        mode = 'execute'
    #############################
    ### Set up atlas and data ###
    #############################

    BASE = os.path.join(PROCESSING_ROOT, data_subfolder)
    #BASE = os.path.join(DATA_ROOT, 'processed_datasets', data_subfolder)
    #SCRATCH_BASE = os.path.join(SCRATCH_ROOT, 'processed_datasets', data_subfolder)
    #SCRATCH_BASE = BASE
    #BASE = os.path.join('/data/vision/scratch/polina/users/rameshvs', data_subfolder)
    ## Atlas

    #atlas = pb.Dataset(ATLAS_BASE, 'buckner61{feature}{extension}', None)
    #atlas = pb.Dataset(ATLAS_BASE, 'flair_template{extension}', None)
    #atlas = pb.Dataset(ATLAS_BASE, 'flairTemplateInBuckner_sigma{kernel}{extension}', None)
    atlas = pb.Dataset(ATLAS_BASE,
                       'flairTemplateInBuckner_sigma{kernel}{extension}', None)
    buckner = pb.Dataset(ATLAS_BASE, 'buckner61{feature}{extension}', None)
    ## Subject data
    dataset = pb.Dataset(
        BASE,
        # How are the inputs to the pipeline stored?
        os.path.join(
            BASE, '{subj}/original/{modality}_1/{subj}_{modality}_{feature}'),
        # How should intermediate files be stored?
        #os.path.join(BASE, '{subj}/images/{subj}_{modality}_{feature}{modifiers}'),
        os.path.join(BASE,
                     '{subj}/images/{subj}_{modality}_{feature}{modifiers}'),
        log_template=os.path.join(BASE, '{subj}/logs/'),
    )

    #dataset.add_mandatory_input(modality='t1', feature='raw')
    #dataset.add_mandatory_input(modality='flair', feature='img')
    dataset.add_mandatory_input(modality='flair', feature='raw')
    dataset.get_original(subj=subj, modality='t1', feature='raw')

    if mode == 'server':
        tracking.run_server(subject_list, dataset)
        raise ValueError
    else:
        pass
    #############################
    ### Registration pipeline ###
    #############################

    ###
    flair_input = dataset.get_original(subj=subj,
                                       modality='flair',
                                       feature='raw')
    dwi_input = dataset.get_original(subj=subj, modality='dwi', feature='raw')
    if True:  #site in ['site00', 'site13', 'site18']:
        modifiers = '_prep_pad'
        first_step = pb.PyPadCommand(
            "Pad flair",
            #cmdName=os.path.join(cwd, 'strip_header.py'),
            input=flair_input,
            output=dataset.get(subj=subj,
                               modality='flair',
                               feature='img',
                               modifiers=modifiers),
            out_mask=dataset.get(subj=subj,
                                 modality='flair',
                                 feature='img',
                                 modifiers=modifiers + '_mask_seg'),
            clobber=True,
        )

    else:
        raise NotImplementedError

    mask = dataset.get(subj=subj,
                       modality='flair',
                       feature='img',
                       modifiers=modifiers + '_brainmask')
    robex = pb.RobexCommand("Brain extraction with ROBEX",
                            input=dataset.get(subj=subj,
                                              modality='flair',
                                              feature='img',
                                              modifiers=modifiers),
                            output=dataset.get(subj=subj,
                                               modality='flair',
                                               feature='img',
                                               modifiers=modifiers + '_robex'),
                            out_mask=mask)

    masker = pb.NiiToolsMaskCommand(
        "Apply mask from robex",
        input=dataset.get(subj=subj,
                          modality='flair',
                          feature='img',
                          modifiers=modifiers),
        mask=mask,
        output=dataset.get(subj=subj,
                           modality='flair',
                           feature='img',
                           modifiers=modifiers + '_brain'),
    )

    modifiers += '_brain'
    # intensity_corr = pb.MCCMatchWMCommand(
    #         "Intensity correction for flair image",
    #         inFile=dataset.get(subj=subj, modality='flair', feature='img', modifiers=modifiers),
    #         maskFile=mask,
    #         intensity=FLAIR_INTENSITY,
    #         output=dataset.get(subj=subj, modality='flair', feature='img', modifiers=modifiers + '_matchwm'))
    intensity_corr = pb.NiiToolsMatchIntensityCommand(
        "Intensity correction for flair image",
        inFile=dataset.get(subj=subj,
                           modality='flair',
                           feature='img',
                           modifiers=modifiers),
        maskFile=mask,
        intensity=FLAIR_INTENSITY,
        output=dataset.get(subj=subj,
                           modality='flair',
                           feature='img',
                           modifiers=modifiers + '_matchwm'),
    )

    modifiers += '_matchwm'

    subj_final_img = dataset.get(subj=subj,
                                 modality='flair',
                                 feature='img',
                                 modifiers=modifiers)

    dwi_mask = dataset.get(subj=subj,
                           modality='flair',
                           feature='mask',
                           modifiers='_from_flair')
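    # Registration, label warping and threshold segmentation for each label-blurring kernel width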
    for sigma in [8]:
        atlas_img = atlas.get_original(kernel=sigma)
        basic_threshold_segmentation_wmh = dataset.get(
            subj=subj,
            modality='flair',
            feature='wmh_raw_threshold_seg',
            modifiers='')
        basic_threshold_segmentation_stroke = dataset.get(
            subj=subj,
            modality='flair',
            feature='wmh_raw_threshold_seg',
            modifiers='')
        multimodal_registration = pb.ANTSCommand(
            "Rigidly register DWI to FLAIR",
            moving=dwi_input,
            fixed=subj_final_img,
            output_folder=os.path.join(dataset.get_folder(subj=subj), 'reg'),
            metric='MI',
            radiusBins=32,
            mask=mask,
            method='rigid',
        )
        pb.ANTSWarpCommand.make_from_registration(
            "Warp mask to DWI",
            moving=mask,
            reference=dwi_input,
            output_filename=dwi_mask,
            registration=multimodal_registration,
            inversion='forward')

        ###### Final atlas -> subject registration
        forward_reg = pb.ANTSCommand(
            "Register label-blurred flair atlas with sigma %d to subject" %
            sigma,
            moving=atlas_img,
            fixed=subj_final_img,
            output_folder=os.path.join(dataset.get_folder(subj=subj), 'reg'),
            metric='CC',
            radiusBins=4,
            mask=mask,
            regularization='Gauss[%0.3f,%0.3f]' %
            (regularization, regularization2),
            method='201x201x201',
        )

        pb.ANTSWarpCommand.make_from_registration(
            "Warp subject image to atlas space using sigma %d warp" % sigma,
            moving=subj_final_img,
            reference=atlas_img,
            output_filename=dataset.get(subj=subj,
                                        modality='flair',
                                        feature='img',
                                        modifiers='_in_atlas'),
            registration=forward_reg,
            inversion='inverse',
        )

        label_warp = pb.ANTSWarpCommand.make_from_registration(
            "Warp atlas labels to subject space using sigma %d warp" % sigma,
            moving=buckner.get_original(feature='_seg'),
            reference=subj_final_img,
            registration=forward_reg,
            useNN=True,
        )
        dwi_seg = dataset.get(subj=subj,
                              modality='dwi',
                              feature='seg',
                              modifiers='')
        dwi_is_dwi = dataset.get(subj=subj,
                                 modality='dwi',
                                 feature='verified',
                                 modifiers='',
                                 extension='.txt')
        label_warp_dwi = pb.ANTSWarpCommand.make_from_registration_sequence(
            "Warp atlas labels to dwi",
            moving=buckner.get_original(feature='_seg'),
            reference=dwi_input,
            output_filename=dwi_seg,
            reg_sequence=[forward_reg, multimodal_registration],
            inversion_sequence=['forward', 'inverse'],
            useNN=True,
        )
        pb.PyFunctionCommand("Verify ventricles greater than white matter",
                             function="flairpipe.check_fluid_attenuation",
                             args=[dwi_input, dwi_seg, dwi_is_dwi],
                             output_positions=[2])

        dwi_matchwm = dataset.get(subj=subj,
                                  modality='dwi',
                                  feature='img',
                                  modifiers='_matchwm')
        intensity_corr_dwi = pb.NiiToolsMatchIntensityCommand(
            "Intensity correction for DWI image",
            inFile=dwi_input,
            maskFile=dwi_mask,
            intensity=DWI_INTENSITY,
            output=dwi_matchwm,
        )
        pb.ANTSWarpCommand.make_from_registration(
            "Warp atlas image to subject space using sigma %d warp" % sigma,
            moving=atlas_img,
            reference=subj_final_img,
            output_filename=dataset.get(subj=subj,
                                        modality='atlas',
                                        feature='img',
                                        modifiers='_in_subject'),
            registration=forward_reg)

        # threshold_segmentation_dwi = pb.NiiToolsMaskedThresholdCommand(
        #         "Threshold segmentation for stroke",
        #         infile=dwi_matchwm,
        #         threshold=STROKE_THRESHOLD,
        #         output=basic_threshold_segmentation_stroke,
        #         label=dwi_seg,
        #         direction='greater',
        #         labels=[2,41],
        #         )

        # threshold_segmentation_dwi_count = pb.NiiToolsMaskedThresholdCountCommand(
        #         "Threshold segmentation for stroke computation",
        #         infile=dwi_matchwm,
        #         threshold=STROKE_THRESHOLD,
        #         output=dataset.get(subj=subj, modality='other', feature='stroke_raw_threshold_seg', modifiers='', extension='.txt'),
        #         label=dwi_seg,
        #         direction='greater',
        #         units='mm',
        #         labels=[2,41],
        #         )
        threshold_segmentation = pb.NiiToolsMaskedThresholdCommand(
            "Threshold segmentation",
            infile=intensity_corr.outfiles[0],
            threshold=WMH_THRESHOLD,
            output=basic_threshold_segmentation_wmh,
            label=label_warp.outfiles[0],
            direction='greater',
            labels=[2, 41],
        )

        threshold_segmentation_count = pb.NiiToolsMaskedThresholdCountCommand(
            "Threshold segmentation computation",
            infile=intensity_corr.outfiles[0],
            threshold=WMH_THRESHOLD,
            output=dataset.get(subj=subj,
                               modality='other',
                               feature='wmh_raw_threshold_seg',
                               modifiers='',
                               extension='.txt'),
            label=label_warp.outfiles[0],
            direction='greater',
            units='mm',
            labels=[2, 41],
        )

        threshold_seg_to_atlas = pb.ANTSWarpCommand.make_from_registration(
            "Warp threshold segmentation to atlas space",
            moving=basic_threshold_segmentation_wmh,
            reference=atlas_img,
            registration=forward_reg,
            output_filename=dataset.get(subj=subj,
                                        modality='wmh_threshold_seg',
                                        feature='in_atlas',
                                        modifiers=''),
            inversion='inverse')

        filename = os.path.basename(label_warp.outfiles[0]).split('.')[0]
        #subj_png_filename = dataset.get(subj=subj, modality='other', feature=filename, modifiers='', extension='.png')
        subj_png_filename = dataset.get(subj=subj,
                                        modality='other',
                                        feature='buckner_labels',
                                        modifiers='',
                                        extension='.png')
        pb.PyFunctionCommand(
            "Generate flair with buckner label overlay",
            "tools.better_overlay",
            [
                subj_final_img, label_warp.outfiles[0],
                [15, 17, 19, 20, 21, 22, 23, 25], subj_png_filename
            ],
            output_positions=[3],
        )

    for path in [
            os.path.join(BASE, subj, 'images'),
            os.path.join(BASE, subj, 'images', 'reg'),
            dataset.get_log_folder(subj=subj)
    ]:
        try:
            os.mkdir(path)
        except OSError:
            # directory may already exist
            pass

    ### Generate script file and SGE qsub file
    tracker = tracking.Tracker(pb.Command.all_commands,
                               pb.Dataset.all_datasets)
    tracker.compute_dependencies()

    ###
    # NIPYPE_ROOT = '/data/scratch/rameshvs/sandbox/nipype_regpipe'
    # wf = tracker.make_nipype_pipeline(NIPYPE_ROOT)
    log_folder = dataset.get_log_folder(subj=subj)
    # pb.Command.generate_code_from_datasets([dataset, atlas], log_folder, subj, sge=False,
    #         wait_time=0, tracker=tracker)
    site = 'site' + data_subfolder.split('site')[-1][:2]
    subjects_file = '/data/vision/polina/projects/stroke/work/subject_lists/sites/%s.txt' % site
    subjects = open(subjects_file).read().split()[1:]
    aggregate_json = '/data/vision/polina/projects/stroke/work/rameshvs/site_pipeline_data_v2/%s.json' % site
    if not os.path.exists(aggregate_json):
        tracker.gather_multi_subject_jsons(subjects, subj, aggregate_json)
    import json
    tracking.run_server(subjects, dataset, json.load(open(aggregate_json)))
Example #13
    def get_tracker(self, magnet_dict=None):
        tracker_kwargs = self.get_tracker_kwargs(magnet_dict)
        tracker = tracking.Tracker(**tracker_kwargs)
        return tracker
Example #14
    if nysave<65:
        roisave['y1'] = roi['y1'] - floor((65-nysave)/2)
        nysave = 65
        roisave['y2'] = roisave['y1'] + nysave

    # Create movie
    fourcc = cv2.VideoWriter_fourcc(*'i420')
    out = cv2.VideoWriter(outname+'.avi', fourcc, 60.0, (nysave, nxsave))

    # Time vector
    timevector = []


# INIT TRACKER
if dotrack:
    T = tracking.Tracker(eye)

# PROCESS MOVIE
while film.isOpened():
    ret, frame = film.read()
    if not ret:
        if dorec:
            if not idle:
                idle = True
                print 'some idle time after processing',nprocessed,'frames'
                processed = 0
            sleep(.001)
            continue
        else:
            break
Example #15
                              title='Screen distribution',
                              xlabel='x [mm]',
                              ylabel='Intensity (arb. units)')
        sp_ctr += 1

        screen0.plot_standard(sp_forward2,
                              label='Measured',
                              lw=3,
                              color='black')

        tracker = tracking.Tracker(magnet_file,
                                   timestamp,
                                   struct_lengths,
                                   n_particles,
                                   n_emittances,
                                   screen_bins,
                                   screen_cutoff,
                                   smoothen,
                                   profile_cutoff,
                                   len_profile,
                                   bp_smoothen=bp_smoothen)

        emittance = 1000e-9
        delta_offset_arr = np.array([0, 50, 80]) * 1e-6
        legend_title = r'$\Delta$ offset'
        #for emittance in emittance_arr:
        for delta_offset in delta_offset_arr:
            label = '%i nm' % (emittance * 1e9)
            label = '%i um' % (delta_offset * 1e6)
            beam_offsets2 = [beam_offsets[0], beam_offsets[1] + delta_offset]
            tracker.n_emittances = [emittance, n_emittances[1]]
Example #16
            [True],
            [20e3],
            [.5e-15, 1e-15, 2e-15],
        )):

    label = 'bp_smoothen %.1f' % (bp_smoothen * 1e15)
    n_particles = int(n_particles)

    tracker = tracking.Tracker(
        magnet_file,
        timestamp,
        struct_lengths,
        energy_eV='file',
        n_emittances=n_emittances,
        screen_bins=screen_bins,
        n_particles=n_particles,
        smoothen=smoothen,
        profile_cutoff=profile_cutoff,
        screen_cutoff=screen_cutoff,
        len_screen=len_profile,
        forward_method=forward_method,
        compensate_negative_screen=compensate_negative_screen,
        quad_wake=quad_wake,
        bp_smoothen=bp_smoothen)
    energy_eV = tracker.energy_eV

    if forward_method == 'matrix':
        forward_fun = tracker.matrix_forward
    elif forward_method == 'elegant':
        forward_fun = tracker.elegant_forward

    profile_meas = tracking.profile_from_blmeas(bl_meas_file,
Example #17
gap2_correcting_summand = 0 #-3e-6
sig_t_range = np.arange(20, 40.01, 5)*1e-15
gaps = [10e-3, 10e-3]
subtract_min = True
fit_emittance = True
archiver_dir = '/afs/psi.ch/intranet/SF/Beamdynamics/Philipp/data/'
magnet_file = archiver_dir + 'archiver_api_data/2021-03-16.h5'
timestamp = elegant_matrix.get_timestamp(2021, 3, 16, 20, 41, 39)
beam_offsets = [-0.0, -0.004724]

median_screen = misc.image_to_screen(image_on.image, x_axis, False, x0)
median_screen.cutoff2(3e-2)
median_screen.crop()
median_screen.reshape(len_profile)

tracker = tracking.Tracker(magnet_file=magnet_file, timestamp=timestamp, n_particles=n_particles, n_emittances=n_emittances, screen_bins=screen_bins, screen_cutoff=screen_cutoff, smoothen=smoothen, profile_cutoff=profile_cutoff, len_screen=len_profile, bp_smoothen=bp_smoothen, quad_wake=False)

energy_eV = tracker.energy_eV
blmeas = data_dir+'113876237_bunch_length_meas.h5'

profile_meas = iap.profile_from_blmeas(blmeas, 200e-15, charge, energy_eV)
profile_meas.cutoff(5e-2)
profile_meas.reshape(len_profile)
profile_meas.crop()
tt_range = (profile_meas.time.max() - profile_meas.time.min())

profile_gauss = iap.get_gaussian_profile(38e-15, tt_range, len_profile, charge, energy_eV)



bp_back = tracker.track_backward2(median_screen, profile_gauss, gaps, beam_offsets, 1)
Example #18
def reconstruct_current(data_file_or_dict, n_streaker, beamline, tracker_kwargs_or_tracker, rec_mode, kwargs_recon, screen_x0, streaker_centers, blmeas_file=None, plot_handles=None, do_plot=True):

    if type(tracker_kwargs_or_tracker) is dict:
        tracker = tracking.Tracker(**tracker_kwargs_or_tracker)
    elif type(tracker_kwargs_or_tracker) is tracking.Tracker:
        tracker = tracker_kwargs_or_tracker
    else:
        raise ValueError(type(tracker_kwargs_or_tracker))

    if type(data_file_or_dict) is dict:
        screen_data = data_file_or_dict
    else:
        screen_data = h5_storage.loadH5Recursive(data_file_or_dict)

    if 'meta_data' in screen_data:
        meta_data = screen_data['meta_data']
    elif 'meta_data_begin' in screen_data:
        meta_data = screen_data['meta_data_begin']
    else:
        print(screen_data.keys())
        raise ValueError

    if 'pyscan_result' in screen_data:
        pyscan_data = screen_data['pyscan_result']
    else:
        pyscan_data = screen_data

    x_axis = pyscan_data['x_axis_m']
    projx = pyscan_data['image'].sum(axis=-2)
    if rec_mode == 'Median':
        median_projx = misc.get_median(projx)
        proj_list = [median_projx]
    elif rec_mode == 'All':
        proj_list = projx

    tracker.set_simulator(meta_data)

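    # The screen x axis may be stored in descending order; flip it here (and each projection below) so it is increasing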
    if x_axis[1] < x_axis[0]:
        revert = True
        x_axis = x_axis[::-1]
    else:
        revert = False

    output_dicts = []
    for proj in proj_list:
        if revert:
            proj = proj[::-1]

        meas_screen = tracking.ScreenDistribution(x_axis, proj)
        kwargs_recon['meas_screen'] = meas_screen

        #print('Analysing reconstruction')
        kwargs = copy.deepcopy(kwargs_recon)

        gaps, streaker_offsets = get_gap_and_offset(meta_data, beamline)

        kwargs['meas_screen']._xx = kwargs['meas_screen']._xx - screen_x0
        kwargs['beam_offsets'] = -(streaker_offsets - streaker_centers)
        kwargs['gaps'] = gaps
        kwargs['meas_screen'].cutoff2(tracker.screen_cutoff)
        kwargs['meas_screen'].crop()
        kwargs['meas_screen'].reshape(tracker.len_screen)
        kwargs['n_streaker'] = n_streaker

        # Only allow one streaker at the moment
        for n in (0,1):
            if n != kwargs['n_streaker']:
                kwargs['beam_offsets'][n] = 0

        gauss_dict = current_profile_rec_gauss(tracker, kwargs, plot_handles, blmeas_file, do_plot=do_plot)
        output_dict = {
                'input': {
                    'data_file_or_dict': data_file_or_dict,
                    'n_streaker': n_streaker,
                    'beamline': beamline,
                    'tracker_kwargs': tracker_kwargs_or_tracker,
                    'rec_mode': rec_mode,
                    'kwargs_recon': kwargs_recon,
                    'screen_x0': screen_x0,
                    'streaker_centers': streaker_centers,
                    'blmeas_file': blmeas_file,
                    },
                'gauss_dict': gauss_dict,
                }
        output_dicts.append(output_dict)

    if rec_mode == 'Median':
        return output_dict
    elif rec_mode == 'All':
        return output_dicts
    else:
        print(rec_mode)
Example #19
charge = 200e-12
timestamp = elegant_matrix.get_timestamp(2020, 7, 26, 17, 49, 0)
backtrack_cutoff = 0.05
len_profile = 1e3
struct_lengths = [1., 1.]

if hostname == 'desktop':
    magnet_file = '/storage/Philipp_data_folder/archiver_api_data/2020-07-26.h5'
    bl_meas_file = '/storage/data_2020-02-03/Bunch_length_meas_2020-02-03_15-59-13.h5'
else:
    magnet_file = '/afs/psi.ch/intranet/SF/Beamdynamics/Philipp/data/archiver_api_data/2020-07-26.h5'
    bl_meas_file = '/sf/data/measurements/2020/02/03/Bunch_length_meas_2020-02-03_15-59-13.h5'

tracker = tracking.Tracker(magnet_file,
                           timestamp,
                           charge,
                           struct_lengths,
                           energy_eV='file')
energy_eV = tracker.energy_eV

profile_meas = tracking.profile_from_blmeas(bl_meas_file,
                                            tt_halfrange,
                                            energy_eV,
                                            subtract_min=False)

profile_back = tracker.forward_and_back(profile_meas, profile_meas, gaps,
                                        beam_offsets, 0)

#track_dict_forward = tracker.elegant_forward(profile_meas, gaps, beam_offsets, [1, 1])
#track_dict_forward0 = tracker.elegant_forward(profile_meas, gaps, [0,0], [1, 1])
#
Example #20
def main():
    fig_paper = ms.figure('Comparison plots')
    subplot = ms.subplot_factory(2, 2)
    sp_ctr_paper = 1

    #images0 = dict0['projx'][-1]
    #x_axis = dict0['x_axis']*1e-6

    #if np.diff(x_axis)[0] < 0:
    #    x_axis = x_axis[::-1]
    #    invert_x = True
    #else:
    #    invert_x = False

    process_dict = {
        'Long': {
            'filename': file38,
            'main_dict': dict38,
            'proj0': dict0['projx'][-1],
            'x_axis0': dict0['x_axis'] * 1e-6,
            'n_offset': None,
            'filename0': file0,
            'blmeas': blmeas38,
            'flipx': False,
        },
        'Medium': {
            'filename': file25,
            'main_dict': dict25,
            'proj0': dict25['projx'][7],
            'x_axis0': dict25['x_axis'] * 1e-6,
            'n_offset': 0,
            'filename0': file25,
            'blmeas': blmeas25,
            'flipx': False,
        },
    }

    for main_label, p_dict in process_dict.items():
        #if main_label != 'Medium':
        #    continue

        projx0 = p_dict['proj0']
        x_axis0 = p_dict['x_axis0']
        if np.diff(x_axis0)[0] < 0:
            x_axis0 = x_axis0[::-1]
            invert_x0 = True
        else:
            invert_x0 = False

        all_mean = []
        for proj in projx0:
            screen = get_screen_from_proj(proj, x_axis0, invert_x0)
            xx, yy = screen._xx, screen._yy
            gf = gaussfit.GaussFit(xx, yy)
            all_mean.append(gf.mean)

        mean0 = np.mean(all_mean)

        timestamp0 = misc.get_timestamp(os.path.basename(p_dict['filename0']))
        tracker0 = tracking.Tracker(
            archiver_dir + 'archiver_api_data/2020-10-03.h5', timestamp0,
            struct_lengths, n_particles, n_emittances, screen_bins,
            screen_cutoff, smoothen, profile_cutoff, len_profile)

        bp_test = tracking.get_gaussian_profile(40e-15, tt_halfrange,
                                                len_profile, charge,
                                                tracker0.energy_eV)
        screen_sim = tracker0.matrix_forward(bp_test, [10e-3, 10e-3],
                                             [0, 0])['screen']
        all_emittances = []
        for proj in projx0:
            screen_meas = get_screen_from_proj(proj, x_axis0, invert_x0)
            emittance_fit = misc.fit_nat_beamsize(screen_meas, screen_sim,
                                                  n_emittances[0])
            all_emittances.append(emittance_fit)

        new_emittance = np.mean(all_emittances)
        print(main_label, 'Emittance [nm]', new_emittance * 1e9)
        n_emittances[0] = new_emittance

        dict_ = p_dict['main_dict']
        file_ = p_dict['filename']
        x_axis = dict_['x_axis'] * 1e-6
        y_axis = dict_['y_axis'] * 1e-6
        n_offset = p_dict['n_offset']

        if np.diff(x_axis)[0] < 0:
            x_axis = x_axis[::-1]
            invert_x = True
        else:
            invert_x = False

        if np.diff(y_axis)[0] < 0:
            y_axis = y_axis[::-1]
            invert_y = True
        else:
            invert_y = False

        timestamp = misc.get_timestamp(os.path.basename(file_))
        tracker = tracking.Tracker(
            archiver_dir + 'archiver_api_data/2020-10-03.h5', timestamp,
            struct_lengths, n_particles, n_emittances, screen_bins,
            screen_cutoff, smoothen, profile_cutoff, len_profile)

        blmeas = p_dict['blmeas']
        flip_measured = p_dict['flipx']
        profile_meas = tracking.profile_from_blmeas(blmeas,
                                                    tt_halfrange,
                                                    charge,
                                                    tracker.energy_eV,
                                                    subtract_min=True)
        profile_meas.reshape(len_profile)
        profile_meas2 = tracking.profile_from_blmeas(blmeas,
                                                     tt_halfrange,
                                                     charge,
                                                     tracker.energy_eV,
                                                     subtract_min=True,
                                                     zero_crossing=2)
        profile_meas2.reshape(len_profile)
        if flip_measured:
            profile_meas.flipx()
        else:
            profile_meas2.flipx()

        profile_meas.cutoff(1e-2)
        profile_meas2.cutoff(1e-2)

        beam_offsets = [0., -(dict_['value'] * 1e-3 - mean_struct2)]
        distance_um = (gaps[n_streaker] / 2. - beam_offsets[n_streaker]) * 1e6
        if n_offset is not None:
            distance_um = distance_um[n_offset]
            beam_offsets = [beam_offsets[0], beam_offsets[1][n_offset]]

        tdc_screen1 = tracker.matrix_forward(profile_meas, gaps,
                                             beam_offsets)['screen']
        tdc_screen2 = tracker.matrix_forward(profile_meas, gaps,
                                             beam_offsets)['screen']

        plt.figure(fig_paper.number)
        sp_profile_comp = subplot(sp_ctr_paper,
                                  title=main_label,
                                  xlabel='t [fs]',
                                  ylabel='Intensity (arb. units)')
        sp_ctr_paper += 1
        profile_meas.plot_standard(sp_profile_comp,
                                   norm=True,
                                   color='black',
                                   label='TDC',
                                   center='Right')

        ny, nx = 2, 4
        subplot = ms.subplot_factory(ny, nx)
        sp_ctr = np.inf

        all_profiles, all_screens = [], []

        if n_offset is None:
            projections = dict_['projx']
        else:
            projections = dict_['projx'][n_offset]

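        # Reconstruct a profile from each measured projection and compare it to the TDC measurement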
        for n_image in range(len(projections)):
            screen = get_screen_from_proj(projections[n_image], x_axis,
                                          invert_x)
            screen.crop()
            screen._xx = screen._xx - mean0

            gauss_dict = tracker.find_best_gauss(
                sig_t_range,
                tt_halfrange,
                screen,
                gaps,
                beam_offsets,
                n_streaker,
                charge,
                self_consistent=self_consistent)
            best_screen = gauss_dict['reconstructed_screen']
            best_screen.cutoff(1e-3)
            best_screen.crop()
            best_profile = gauss_dict['reconstructed_profile']
            if n_image == 0:
                screen00 = screen
                bp00 = best_profile
                best_screen00 = best_screen
            best_gauss = gauss_dict['best_gauss']

            if sp_ctr > (ny * nx):
                ms.figure('All reconstructions Distance %i %s' %
                          (distance_um, main_label))
                sp_ctr = 1

            if n_image % 2 == 0:
                sp_profile = subplot(sp_ctr, title='Reconstructions')
                sp_ctr += 1
                sp_screen = subplot(sp_ctr, title='Screens')
                sp_ctr += 1
                profile_meas.plot_standard(sp_profile,
                                           color='black',
                                           label='Measured',
                                           norm=True,
                                           center='Right')
                tdc_screen1.plot_standard(sp_screen, color='black')

            color = screen.plot_standard(sp_screen,
                                         label=n_image)[0].get_color()
            best_screen.plot_standard(sp_screen, color=color, ls='--')
            best_profile.plot_standard(sp_profile,
                                       label=n_image,
                                       norm=True,
                                       center='Right')
            sp_profile.legend()
            sp_screen.legend()

            all_profiles.append(best_profile)

        # Averaging the reconstructed profiles
        all_profiles_time, all_profiles_current = [], []
        for profile in all_profiles:
            profile.shift('Right')
            #all_profiles_time.append(profile.time - profile.time[np.argmax(profile.current)])
            all_profiles_time.append(profile.time)
        new_time = np.linspace(min(x.min() for x in all_profiles_time),
                               max(x.max() for x in all_profiles_time),
                               len_profile)
        for tt, profile in zip(all_profiles_time, all_profiles):
            new_current = np.interp(new_time,
                                    tt,
                                    profile.current,
                                    left=0,
                                    right=0)
            new_current *= charge / new_current.sum()
            all_profiles_current.append(new_current)
        all_profiles_current = np.array(all_profiles_current)
        mean_profile = np.mean(all_profiles_current, axis=0)
        std_profile = np.std(all_profiles_current, axis=0)
        average_profile = tracking.BeamProfile(new_time, mean_profile,
                                               tracker.energy_eV, charge)
        average_profile.plot_standard(sp_profile_comp,
                                      label='Reconstructed',
                                      norm=True,
                                      center='Right')

        ms.figure('Test averaging %s' % main_label)
        sp = plt.subplot(1, 1, 1)
        for yy in all_profiles_current:
            sp.plot(new_time, yy / np.trapz(yy, new_time), lw=0.5)

        to_plot = [
            ('Average', new_time, mean_profile, 'black', 3),
            ('+1 STD', new_time, mean_profile + std_profile, 'black', 1),
            ('-1 STD', new_time, mean_profile - std_profile, 'black', 1),
        ]

        integral = np.trapz(mean_profile, new_time)
        for pm, ctr, color in [(profile_meas, 1, 'red'),
                               (profile_meas2, 2, 'green')]:
            #factor = integral/np.trapz(pm.current, pm.time)
            #t_meas = pm.time-pm.time[np.argmax(pm.current)]
            i_meas = np.interp(new_time, pm.time, pm.current)
            bp = tracking.BeamProfile(new_time,
                                      i_meas,
                                      energy_eV=tracker.energy_eV,
                                      charge=charge)
            bp.shift('Right')

            to_plot.append(('TDC %i' % ctr, bp.time, bp.current, color, 3))

        for label, tt, profile, color, lw in to_plot:
            gf = gaussfit.GaussFit(tt, profile)
            width_fs = gf.sigma * 1e15
            if label is None:
                label = ''
            label = (label + ' %i fs' % width_fs).strip()
            factor = np.trapz(profile, tt)
            sp.plot(tt, profile / factor, color=color, lw=lw, label=label)

        sp.legend(title=r'Gaussian fit $\sigma$')

    plt.show()
Example #21
            xx, yy = screen._xx, screen._yy
            gf = gaussfit.GaussFit(xx, yy)
            all_mean.append(gf.mean)

        mean0 = np.mean(all_mean)
        print('%s: Mean0: %.3e mm' % (main_label, mean0 * 1e3))

    if fit_emittance and main_label != 'Short':

        timestamp0 = misc.get_timestamp(os.path.basename(p_dict['filename0']))
        tracker0 = tracking.Tracker(magnet_file,
                                    timestamp0,
                                    struct_lengths,
                                    n_particles,
                                    n_emittances,
                                    screen_bins,
                                    screen_cutoff,
                                    smoothen,
                                    profile_cutoff,
                                    len_profile,
                                    quad_wake=quad_wake)

        bp_test = tracking.get_gaussian_profile(40e-15, tt_halfrange,
                                                len_profile, charge,
                                                tracker0.energy_eV)
        screen_sim = tracker0.matrix_forward(bp_test, [10e-3, 10e-3],
                                             [0, 0])['screen']
        all_emittances = []
        all_beamsizes = []
        for proj in projx0:
            screen_meas = get_screen_from_proj(proj, x_axis0, invert_x0)
Example #22
                        data_dir1 +
                        '2021_05_18-23_32_12_Calibration_SARUN18-UDCP020.h5')
#streaker_calib_files = (data_dir1+'2021_05_19-00_13_25_Calibration_SARUN18-UDCP020.h5', data_dir1+'2021_05_19-00_24_47_Calibration_SARUN18-UDCP020.h5')

strong_streak_file = data_dir1 + '2021_05_18-23_43_39_Lasing_False_SARBD02-DSCR050.h5'
weak_streak_file = data_dir2 + '2021_05_19-14_14_22_Calibration_SARUN18-UDCP020.h5'

meta_data_strong = h5_storage.loadH5Recursive(
    strong_streak_file)['meta_data_begin']
meta_data_weak = h5_storage.loadH5Recursive(
    weak_streak_file)['raw_data']['meta_data_begin']

n_streaker = 1
charge = 200e-12

tracker = tracking.Tracker(**config.get_default_tracker_settings())
sc = streaker_calibration.StreakerCalibration(
    'Aramis',
    n_streaker,
    10e-3,
    fit_gap=True,
    fit_order=False,
    proj_cutoff=tracker.screen_cutoff)
for scf in streaker_calib_files:
    sc.add_file(scf)

streaker_offset = sc.fit_type('centroid')['streaker_offset']
tracker.set_simulator(sc.meta_data)
gauss_kwargs = config.get_default_gauss_recon_settings()

if calibrate_gap: