def compute_segment_lists(seglistdict, time_slides, minimum_gap, timing_params, full_segments = True, verbose = False):
	"""
	Return a copy of seglistdict restricted to the segments that will
	actually be analyzed.

	seglistdict is a glue.segments.segmentlistdict mapping instrument
	name to a list of science segments.  time_slides is the iterable of
	time-slide offset dictionaries used to determine which segments are
	coincident.  minimum_gap is the largest gap (in seconds) between
	jobs that will be filled by extending the jobs.  timing_params
	provides the PSD length/shift parameters used to quantize job
	durations.

	If full_segments is True the input segments are used as-is (apart
	from the removal of too-short segments);  otherwise the segments
	are trimmed to the parts that are coincident under the time slides,
	with boundaries adjusted to integer PSD counts.

	NOTE:  the input seglistdict is not modified;  a copy is returned.
	"""
	if verbose:
		print("constructing segment list ...", file = sys.stderr)
	# operate on a copy so the caller's dictionary is untouched
	seglistdict = seglistdict.copy()
	if not full_segments:
		# cull too-short single-instrument segments from the input
		# segmentlist dictionary;  this can significantly increase
		# the speed of the get_coincident_segmentlistdict()
		# function when the input segmentlists have had many data
		# quality holes poked out of them
		power.remove_too_short_segments(seglistdict, timing_params)

		# extract the segments that are coincident under the time
		# slides
		new = cafe.get_coincident_segmentlistdict(seglistdict, time_slides)

		# adjust surviving segment lengths up to the next integer
		# number of PSDs.  NOTE(review):  the "new &= seglistdict"
		# statements below mutate the segmentlists of new in place
		# while new.values() is being iterated;  this relies on
		# segmentlistdict.__iand__ modifying each contained list
		# in place rather than rebinding it — confirm against the
		# glue.segments implementation before restructuring.
		for seglist in new.values():
			# Try Adjusting Upper Bounds:

			# count the number of PSDs in each segment
			psds = [power.psds_from_job_length(timing_params, float(abs(seg))) for seg in seglist]

			# round up to the nearest integer.
			psds = [int(math.ceil(max(n, 1.0))) for n in psds]

			# compute the duration of each job
			durations = [power.job_length_from_psds(timing_params, n) for n in psds]

			# update segment list:  keep each segment's start,
			# extend its end to the quantized duration
			for i, seg in enumerate(seglist):
				seglist[i] = segments.segment(seg[0], seg[0] + durations[i])

			# and take intersection with original segments to
			# not exceed original bounds
			new &= seglistdict

			# Try Adjusting Lower Bounds:

			# count the number of PSDs in each segment
			psds = [power.psds_from_job_length(timing_params, float(abs(seg))) for seg in seglist]

			# round up to the nearest integer.
			psds = [int(math.ceil(max(n, 1.0))) for n in psds]

			# compute the duration of each job
			durations = [power.job_length_from_psds(timing_params, n) for n in psds]

			# update segment list:  keep each segment's end,
			# extend its start back to the quantized duration
			for i, seg in enumerate(seglist):
				seglist[i] = segments.segment(seg[1] - durations[i], seg[1])

			# and take intersection with original segments to
			# not exceed original bounds
			new &= seglistdict

		# try to fill gaps between jobs
		new.protract(minimum_gap / 2).contract(minimum_gap / 2)

		# and take intersection with original segments to not
		# exceed original bounds
		seglistdict &= new

	# remove segments that are too short
	power.remove_too_short_segments(seglistdict, timing_params)

	# done
	return seglistdict
# NOTE(review):  this is a duplicate definition of compute_segment_lists;
# it shadows the earlier one at import time.  The two copies are
# behaviorally identical — one of them should be deleted.
def compute_segment_lists(seglistdict, time_slides, minimum_gap, timing_params, full_segments=True, verbose=False):
	"""
	Return a copy of seglistdict restricted to the segments that will
	actually be analyzed.

	seglistdict is a glue.segments.segmentlistdict mapping instrument
	name to a list of science segments.  time_slides is the iterable of
	time-slide offset dictionaries used to determine which segments are
	coincident.  minimum_gap is the largest gap (in seconds) between
	jobs that will be filled by extending the jobs.  timing_params
	provides the PSD length/shift parameters used to quantize job
	durations.

	If full_segments is True the input segments are used as-is (apart
	from the removal of too-short segments);  otherwise the segments
	are trimmed to the parts that are coincident under the time slides,
	with boundaries adjusted to integer PSD counts.

	NOTE:  the input seglistdict is not modified;  a copy is returned.
	"""
	if verbose:
		print("constructing segment list ...", file=sys.stderr)
	# operate on a copy so the caller's dictionary is untouched
	seglistdict = seglistdict.copy()
	if not full_segments:
		# cull too-short single-instrument segments from the input
		# segmentlist dictionary;  this can significantly increase
		# the speed of the get_coincident_segmentlistdict()
		# function when the input segmentlists have had many data
		# quality holes poked out of them
		power.remove_too_short_segments(seglistdict, timing_params)

		# extract the segments that are coincident under the time
		# slides
		new = cafe.get_coincident_segmentlistdict(seglistdict, time_slides)

		# adjust surviving segment lengths up to the next integer
		# number of PSDs.  NOTE(review):  the "new &= seglistdict"
		# statements below mutate the segmentlists of new in place
		# while new.values() is being iterated;  this relies on
		# segmentlistdict.__iand__ modifying each contained list
		# in place rather than rebinding it — confirm against the
		# glue.segments implementation before restructuring.
		for seglist in new.values():
			# Try Adjusting Upper Bounds:

			# count the number of PSDs in each segment
			psds = [
				power.psds_from_job_length(timing_params, float(abs(seg)))
				for seg in seglist
			]

			# round up to the nearest integer.
			psds = [int(math.ceil(max(n, 1.0))) for n in psds]

			# compute the duration of each job
			durations = [
				power.job_length_from_psds(timing_params, n) for n in psds
			]

			# update segment list:  keep each segment's start,
			# extend its end to the quantized duration
			for i, seg in enumerate(seglist):
				seglist[i] = segments.segment(seg[0], seg[0] + durations[i])

			# and take intersection with original segments to
			# not exceed original bounds
			new &= seglistdict

			# Try Adjusting Lower Bounds:

			# count the number of PSDs in each segment
			psds = [
				power.psds_from_job_length(timing_params, float(abs(seg)))
				for seg in seglist
			]

			# round up to the nearest integer.
			psds = [int(math.ceil(max(n, 1.0))) for n in psds]

			# compute the duration of each job
			durations = [
				power.job_length_from_psds(timing_params, n) for n in psds
			]

			# update segment list:  keep each segment's end,
			# extend its start back to the quantized duration
			for i, seg in enumerate(seglist):
				seglist[i] = segments.segment(seg[1] - durations[i], seg[1])

			# and take intersection with original segments to
			# not exceed original bounds
			new &= seglistdict

		# try to fill gaps between jobs
		new.protract(minimum_gap / 2).contract(minimum_gap / 2)

		# and take intersection with original segments to not
		# exceed original bounds
		seglistdict &= new

	# remove segments that are too short
	power.remove_too_short_segments(seglistdict, timing_params)

	# done
	return seglistdict