Example #1
import optparse

# source is a project-local module providing Source and GetFileContents;
# PathRewriter is defined elsewhere in the same script (see the sketch below).
import source


def main():
    parser = optparse.OptionParser(description=__doc__)
    parser.add_option(
        '-w',
        '--rewrite_prefix',
        action='append',
        default=[],
        dest='prefix_map',
        metavar='SPEC',
        help=('Two path prefixes, separated by a colon, '
              'specifying that a file whose (relative) path '
              'name starts with the first prefix should have '
              'that prefix replaced by the second prefix to '
              'form a path relative to the output directory. '
              'The resulting path is used in the deps mapping '
              'file path to a list of provided and required '
              'namespaces.'))
    parser.add_option('-o',
                      '--output_file',
                      action='store',
                      default=None,
                      metavar='SPEC',
                      help=('Where to output the generated deps file.'))
    options, args = parser.parse_args()

    path_rewriter = PathRewriter(options.prefix_map)

    # Write the generated deps file.
    with open(options.output_file, 'w') as output:
        for path in args:
            js_deps = source.Source(source.GetFileContents(path))
            path = path_rewriter.RewritePath(path)
            line = 'goog.addDependency(\'%s\', %s, %s);\n' % (
                path, sorted(js_deps.provides), sorted(js_deps.requires))
            output.write(line)
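
The PathRewriter class referenced above is not part of this excerpt. Given the SPEC format described in the --rewrite_prefix help text, a minimal sketch (assumed, not the original implementation) could look like:

class PathRewriter(object):
    """Rewrites path prefixes according to 'from:to' SPEC strings (sketch)."""

    def __init__(self, specs):
        # Each SPEC holds two path prefixes separated by a colon.
        self._pairs = [spec.split(':', 1) for spec in specs]

    def RewritePath(self, path):
        # Replace the first matching prefix; leave unmatched paths alone.
        for from_prefix, to_prefix in self._pairs:
            if path.startswith(from_prefix):
                return to_prefix + path[len(from_prefix):]
        return path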
Example #2
def _GetRelativePathToSourceDict(root, prefix=''):
    """Scans a top root directory for .js sources.

  Args:
    root: str, Root directory.
    prefix: str, Prefix for returned paths.

  Returns:
    dict, A map of relative paths (with prefix, if given), to source.Source
      objects.
  """
    # Remember and restore the cwd when we're done. We work from the root so
    # that paths are relative from the root.
    start_wd = os.getcwd()
    os.chdir(root)

    path_to_source = {}
    for path in treescan.ScanTreeForJsFiles('.'):
        prefixed_path = _NormalizePathSeparators(os.path.join(prefix, path))
        path_to_source[prefixed_path] = source.Source(
            source.GetFileContents(path))

    os.chdir(start_wd)

    return path_to_source
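
For illustration, assuming a hypothetical root closure/goog containing dom/dom.js:

# Hypothetical layout: closure/goog/dom/dom.js exists on disk.
mapping = _GetRelativePathToSourceDict('closure/goog', prefix='goog')
# mapping['goog/dom/dom.js'] is a source.Source built from that file's contents.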
Example #3
  def testSourceScan(self):
    test_source = source.Source(_TEST_SOURCE)

    self.assertEqual(set(['foo', 'foo.test']),
                     test_source.provides)
    self.assertEqual(set(['goog.dom', 'goog.events.EventType']),
                     test_source.requires)
Example #4
  def testSourceScanBase(self):
    test_source = source.Source(_TEST_BASE_SOURCE)

    self.assertEqual(set(['goog']),
                     test_source.provides)
    self.assertEqual(set(), test_source.requires)
    self.assertFalse(test_source.is_goog_module)
Example #5
  def testGoogStatementsInComments(self):
    test_source = source.Source(_TEST_COMMENT_SOURCE)

    self.assertEqual(set(['foo']),
                     test_source.provides)
    self.assertEqual(set(['goog.events.EventType']),
                     test_source.requires)
    self.assertFalse(test_source.is_goog_module)
Example #6
  def testSourceScanGoogModule(self):
    test_source = source.Source(_TEST_MODULE_SOURCE)

    self.assertEqual(set(['foo']),
                     test_source.provides)
    self.assertEqual(set(['bar']),
                     test_source.requires)
    self.assertTrue(test_source.is_goog_module)
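
The _TEST_MODULE_SOURCE fixture is not shown in this excerpt; a minimal input consistent with the assertions above (illustrative only) would be:

_TEST_MODULE_SOURCE = """
goog.module('foo');

var bar = goog.require('bar');
"""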
Example #7
import logging
import os
import sys

# source is project-local; _GetOptionsParser, _GetPair, MakeDepsFile and
# _GetRelativePathToSourceDict are defined elsewhere in the same script.
import source


def main():
    """CLI frontend to MakeDepsFile."""
    logging.basicConfig(format=(sys.argv[0] + ': %(message)s'),
                        level=logging.INFO)
    options, args = _GetOptionsParser().parse_args()

    logging.warning(
        'This utility is deprecated! See '
        'https://github.com/google/closure-library/wiki/Migrating-off-Closure-Python-Scripts'
        ' for more details.')

    path_to_source = {}

    # Roots without prefixes
    for root in options.roots:
        path_to_source.update(_GetRelativePathToSourceDict(root))

    # Roots with prefixes
    for root_and_prefix in options.roots_with_prefix:
        root, prefix = _GetPair(root_and_prefix)
        path_to_source.update(_GetRelativePathToSourceDict(root,
                                                           prefix=prefix))

    # Source paths
    for path in args:
        path_to_source[path] = source.Source(source.GetFileContents(path))

    # Source paths with alternate deps paths
    for path_with_depspath in options.paths_with_depspath:
        srcpath, depspath = _GetPair(path_with_depspath)
        path_to_source[depspath] = source.Source(
            source.GetFileContents(srcpath))

    # Make our output pipe.
    if options.output_file:
        out = open(options.output_file, 'w')
    else:
        out = sys.stdout

    out.write(('// This file was autogenerated by %s.\n' %
               os.path.basename(__file__)))
    out.write('// Please do not edit.\n')

    out.write(MakeDepsFile(path_to_source))
Example #8
    def __init__(self, lanes, lines):

        self.current_time = 0
        self.warmup_time = 0
        self.delta_t = 0.125
        self.last_300 = []
        self.last_300_private = []

        self.people_source = source.Source(0.2)
        self.car_source = source.Source(1)

        self.warmup = True
        self.init_target_functions(lines)
        self.init_lanes(lanes)
        self.init_queues()
        self.init_elements(lines)

        self.listener = None

        self.warmup_buses = set()
Example #9
    def __init__(self, position, exit_road, blocks_before_turn, speed,
                 people_carried):
        self.position = position
        self.delay_time = 0
        self.start_time = 0
        self.exit_road = exit_road
        self.blocks_before_turn = blocks_before_turn
        self.speed = speed
        self.people_carried = people_carried
        self.state = ADVANCE
        self.change_lane = source.Source(1)
        self.last_delta = 0
Example #11
import numpy as np

import utils  # project-local: getModSym, cplxRandn, hermitian, slice


class Receiver:
    def __init__(self, nRx, modOrd, N0):
        self.nRx = nRx
        self.N0 = N0
        self.modOrd = modOrd
        self.modSyms = utils.getModSym(modOrd)

    def step(self, rxSym, H=None, method='MMSE'):
        noise = utils.cplxRandn(rxSym.shape, self.N0)
        rx = rxSym + noise

        if method == 'MMSE':
            assert H is not None
            # Linear MMSE filter: M = H^H (H H^H + N0 I)^-1
            M = np.dot(utils.hermitian(H),
                       np.linalg.inv(np.dot(H, utils.hermitian(H)) +
                                     np.eye(H.shape[0]) * self.N0))
            est = np.dot(M, rx)
            return utils.slice(est, self.modSyms)

if __name__ == '__main__':
    rcv = Receiver(3, 2, 1e-7)
    import source
    import chan
    src = source.Source(2, 2)
    chan = chan.Channel(2, 3)
    txSyms = src.step()
    rxSyms = chan.step(txSyms)
    demapped = rcv.step(rxSyms, chan.H, 'MMSE')
    print("Tx symbols:")
    print(txSyms)
    print("demapped symbols:")
    print(demapped)
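
The filter M built in step is the standard linear MMSE equalizer M = H^H (H H^H + N0 I)^-1. A self-contained shape check in plain numpy, with dimensions assumed from the __main__ block above (2 transmit streams, 3 receive antennas):

import numpy as np

nTx, nRx, N0 = 2, 3, 1e-7
# Random complex channel matrix, nRx x nTx.
H = (np.random.randn(nRx, nTx) + 1j * np.random.randn(nRx, nTx)) / np.sqrt(2)
# MMSE filter: M = H^H (H H^H + N0 I)^-1
M = np.dot(H.conj().T, np.linalg.inv(np.dot(H, H.conj().T) + N0 * np.eye(nRx)))
print(M.shape)  # (2, 3): maps an nRx-dim received vector to nTx stream estimates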
Example #12
# -*- coding: utf-8 -*-

import sys

import api
import xbmc
import source

if __name__ == '__main__':
    cache_path = xbmc.translatePath('special://temp')
    src = source.Source(cache_path)
    api.AddonMain(int(sys.argv[1]), src)
Example #13
    def find_sources(self):
        """Algorithm to detect sources for this beam"""
        # generate results directory
        results_dir = "{0}/{1}/{2}/beam{3}".\
          format(self.options["results_filepath"],
                 self.options["field"],
                 self.options["date"],
                 self.beam_num)
        if not os.path.isdir(results_dir):
            os.makedirs(results_dir)
        
        # Stokes averaged over time
        if self.options["verbose"]:
            print("Log: Averaging Stokes over time.")
        I_data = np.zeros(self.options["num_channels"])
        Q_data = np.zeros(self.options["num_channels"])
        U_data = np.zeros(self.options["num_channels"])
        V_data = np.zeros(self.options["num_channels"])
        for i in xrange(self.options["num_channels"]):
            if self.channels[i].error: continue
            I_data[i], Q_data[i], U_data[i], V_data[i] = \
              self.channels[i].average()
        if self.options["file_verbose"]:
            np.savez(results_dir+"/time_avg",
                     channel=range(self.options["num_channels"]),
                     I_data = I_data,
                     Q_data = Q_data,
                     U_data = U_data,
                     V_data = V_data)
            chans = range(0,self.options["num_channels"])
            plt.stokes_plot(chans, "Channel", I_data, Q_data, U_data,
                            V_data, results_dir+"/time_avg.png")
            
        # convolve time-averaged data to detect RFI
        if self.options["verbose"]:
            print("Log: Performing RFI detection convolution.")
        con = np.zeros(2*self.options["rfi_con_width"] + 1)
        con[0] = -0.5
        con[self.options["rfi_con_width"]] = 1.0
        con[-1] = -0.5
        I_data = np.convolve(I_data,con,mode="same")
        Q_data = np.convolve(Q_data,con,mode="same")
        U_data = np.convolve(U_data,con,mode="same")
        V_data = np.convolve(V_data,con,mode="same")
        if self.options["file_verbose"]:
            np.savez(results_dir+"/rfi_conv_time_avg",
                     channel=range(self.options["num_channels"]),
                     I_data = I_data,
                     Q_data = Q_data,
                     U_data = U_data,
                     V_data = V_data)
            chans = range(0,self.options["num_channels"])
            plt.stokes_plot(chans, "Channel", I_data, Q_data, U_data,
                            V_data,
                            results_dir+"/rfi_conv_time_avg.png")

        # eliminate edge channels
        if self.options["verbose"]:
            print("Log: eliminating edge channels.")
        for i in xrange(self.options["edge_buff_chan"]):
            self.channels[i].error=True
            self.channels[-1-i].error=True
        # determine the minimum mean and std dev in our intervals
        interval_width = self.options["num_channels"]/\
          self.options["num_intervals"]
        for data in [I_data, Q_data, U_data, V_data]:
            means = np.array([np.nanmean(data[i:i+interval_width])
                              for i in
                              range(0,self.options["num_channels"],
                                    interval_width)])
            stddevs = np.array([np.nanstd(data[i:i+interval_width])
                                for i in
                                range(0,self.options["num_channels"],
                                      interval_width)])
            min_ind = stddevs[stddevs.nonzero()].argmin()
            min_stddev = stddevs[min_ind]
            min_mean = means[min_ind]
            # determine bad channels and flag them
            bad_chans = np.where(np.abs(data) > min_mean +\
                                 self.options["rfi_mask"]*min_stddev)[0]
            if self.options["verbose"]:
                print("Log: eliminated {0} channels with RFI".\
                      format(len(bad_chans)))
            for c in bad_chans:
                self.channels[c].error = True

        # recompute Stokes averaged over time
        if self.options["verbose"]:
            print("Log: Recomputing average Stokes over time.")
        I_data = np.zeros(self.options["num_channels"])
        Q_data = np.zeros(self.options["num_channels"])
        U_data = np.zeros(self.options["num_channels"])
        V_data = np.zeros(self.options["num_channels"])
        for i in xrange(self.options["num_channels"]):
            if self.channels[i].error: continue
            I_data[i], Q_data[i], U_data[i], V_data[i] = \
              self.channels[i].average()
        if self.options["file_verbose"]:
            np.savez(results_dir+"/clean_time_avg",
                     channel=range(self.options["num_channels"]),
                     I_data = I_data,
                     Q_data = Q_data,
                     U_data = U_data,
                     V_data = V_data)
            chans = range(0,self.options["num_channels"])
            plt.stokes_plot(chans, "Channel", I_data, Q_data, U_data,
                            V_data,
                            results_dir+"/clean_time_avg.png")
            
        # now, detect sources in each bin as well as over the full
        # bandpass
        num_bins = int(self.options["band_width"]/
                       self.options["bin_width"])+1
        chans_per_bin = int(self.options["num_channels"]/num_bins)
        for b in range(num_bins+1):
            if b == num_bins:
                b = 999
                start_chan = 0
                end_chan = self.options["num_channels"]
            else:
                start_chan = b*chans_per_bin
                end_chan = (b+1)*chans_per_bin
                if end_chan > self.options["num_channels"]:
                    end_chan = self.options["num_channels"]
            # check if we're already outside edge buffer
            if (end_chan < self.options["edge_buff_chan"] or
                start_chan > (self.options["num_channels"] -
                              self.options["edge_buff_chan"])):
                continue
            if self.options["verbose"]:
                print("Log: Analyzing bin {0}".format(b))
            # results directory for this bin
            bin_results_dir = results_dir+"/bin{0:03d}".format(b)
            if not os.path.isdir(bin_results_dir):
                os.makedirs(bin_results_dir)
            # Average over channels
            if self.options["verbose"]:
                print("Log: Averaging Stokes over channels in this bin")
            I_data = np.zeros(self.channels[0].num_points)
            Q_data = np.zeros(self.channels[0].num_points)
            U_data = np.zeros(self.channels[0].num_points)
            V_data = np.zeros(self.channels[0].num_points)
            num_good_points = 0.
            for c in xrange(start_chan,end_chan):
                if self.channels[c].error: continue
                I_data, Q_data, U_data, V_data = \
                  self.channels[c].add_points(I_data,Q_data,U_data,
                                              V_data)
                num_good_points += 1.
            if num_good_points == 0.: continue
            I_data /= num_good_points
            Q_data /= num_good_points
            U_data /= num_good_points
            V_data /= num_good_points
            if self.options["verbose"]:
                print("Log: Correcting coordinates.")
            RA, DEC, AST = get_coordinates(self.beam_num,
                                           **self.options)
            if self.options["file_verbose"]:
                np.savez(bin_results_dir+"/chan_avg",
                         chan_range=[start_chan,end_chan],
                         RA = RA,
                         DEC = DEC,
                         AST = AST,
                         I_data = I_data,
                         Q_data = Q_data,
                         U_data = U_data,
                         V_data = V_data)
                plt.stokes_plot(AST, "AST", I_data, Q_data, U_data,
                                V_data,
                                bin_results_dir+"/chan_avg.png")

            # smooth data
            if self.options["verbose"]:
                print("Log: Performing smoothing convolution.")
            angle = np.arange(2*self.options["smooth_con_width"]+1)
            angle = angle*10.*np.pi/(2.*self.options["smooth_con_width"])
            angle = angle - 5.*np.pi
            con = np.sin(angle)/angle
            con[self.options["smooth_con_width"]] = 1.
            I_data = np.convolve(I_data,con,mode="same")/np.sum(con)
            Q_data = np.convolve(Q_data,con,mode="same")/np.sum(con)
            U_data = np.convolve(U_data,con,mode="same")/np.sum(con)
            V_data = np.convolve(V_data,con,mode="same")/np.sum(con)
            # chop off edges after convolution
            if self.options["verbose"]:
                print("Log: Chopping off edges after convolution")
            RA = RA[self.options["edge_buff_time"]:
                    -self.options["edge_buff_time"]]
            DEC = DEC[self.options["edge_buff_time"]:
                      -self.options["edge_buff_time"]]
            AST = AST[self.options["edge_buff_time"]:
                      -self.options["edge_buff_time"]]
            I_data = I_data[self.options["edge_buff_time"]:
                            -self.options["edge_buff_time"]]
            Q_data = Q_data[self.options["edge_buff_time"]:
                            -self.options["edge_buff_time"]]
            U_data = U_data[self.options["edge_buff_time"]:
                            -self.options["edge_buff_time"]]
            V_data = V_data[self.options["edge_buff_time"]:
                            -self.options["edge_buff_time"]]
            if self.options["file_verbose"]:
                np.savez(bin_results_dir+"/smooth_chan_avg",
                         chan_range=[start_chan,end_chan],
                         RA = RA,
                         DEC = DEC,
                         AST = AST,
                         I_data = I_data,
                         Q_data = Q_data,
                         U_data = U_data,
                         V_data = V_data)
                plt.stokes_plot(AST, "AST", I_data, Q_data, U_data,
                                V_data,
                                bin_results_dir+"/smooth_chan_avg.png")
            
            # convolve for source detection
            if self.options["verbose"]:
                print("Log: Performing source detection convolution.")
            con = np.zeros(2*self.options["source_con_width"]+1)
            con[0] = -0.25
            con[self.options["source_con_width"]/2] = -0.25
            con[self.options["source_con_width"]] = 1.0
            con[3*self.options["source_con_width"]/2] = -0.25
            con[-1] = -0.25
            I_data_source = np.convolve(I_data,con,mode="same")
            if self.options["file_verbose"]:
                np.savez(bin_results_dir+"/source_chan_avg",
                         chan_range=[start_chan,end_chan],
                         RA = RA,
                         DEC = DEC,
                         AST = AST,
                         I_data = I_data_source)
                plt.single_stokes(AST, "AST", I_data_source,
                                  "Stokes I (K)",
                                  bin_results_dir+
                                  "/source_chan_avg.png")
            if self.options["verbose"]:
                print("Log: Locating sources.")
            source_points = np.where(I_data_source >
                                     self.options["source_mask"]*
                                     self.options["sigma"])[0]
            # storage for sources
            sources = []
            # i is the starting point for this source
            i=0
            while i < len(source_points):
                # j is the ending point for this source
                j = i+1
                # as long as [j] = [j-1] + 1, still on same source
                while (j < len(source_points) and
                       source_points[j] == source_points[j-1] + 1):
                    j += 1
                # get the necessary data for this source
                # first find max
                this_I_data = I_data_source[source_points[i]:
                                            source_points[j-1]]
                if len(this_I_data) == 0:
                    i = j
                    continue
                # max point in I_data_source array for this source
                max_point = this_I_data.argmax() + source_points[i]
                # now, get coords and data for fitting
                time_end = False
                base1_start = (max_point-
                               self.options["num_source_points"]-
                               self.options["point_sep"]-
                               self.options["num_outer_points"])
                base1_end = base1_start+self.options["num_outer_points"]
                source_start = max_point-self.options["num_source_points"]
                source_end = max_point+self.options["num_source_points"]+1
                base2_start = (max_point+1+
                               self.options["num_source_points"]+
                               self.options["point_sep"])
                base2_end = base2_start+self.options["num_outer_points"]
                # flag sources whose fitting window runs past the start or
                # end of the observation, before clamping the indices
                if base1_start < 0 or base2_end > len(AST):
                    time_end = True
                # clamp all indices to the valid range
                base1_start = max(base1_start, 0)
                base1_end = max(base1_end, 0)
                source_start = max(source_start, 0)
                source_end = min(source_end, len(AST))
                base2_start = min(base2_start, len(AST))
                base2_end = min(base2_end, len(AST))
                this_RA = RA[base1_start:base1_end]
                this_RA = np.append(this_RA,RA[source_start:source_end])
                this_RA = np.append(this_RA,RA[base2_start:base2_end])
                this_DEC = DEC[base1_start:base1_end]
                this_DEC = np.append(this_DEC,DEC[source_start:source_end])
                this_DEC = np.append(this_DEC,DEC[base2_start:base2_end])
                this_AST = AST[base1_start:base1_end]
                this_AST = np.append(this_AST,AST[source_start:source_end])
                this_AST = np.append(this_AST,AST[base2_start:base2_end])
                this_I_data = I_data[base1_start:base1_end]
                this_I_data = np.append(this_I_data,I_data[source_start:source_end])
                this_I_data = np.append(this_I_data,I_data[base2_start:base2_end])
                this_Q_data = Q_data[base1_start:base1_end]
                this_Q_data = np.append(this_Q_data,Q_data[source_start:source_end])
                this_Q_data = np.append(this_Q_data,Q_data[base2_start:base2_end])
                this_U_data = U_data[base1_start:base1_end]
                this_U_data = np.append(this_U_data,U_data[source_start:source_end])
                this_U_data = np.append(this_U_data,U_data[base2_start:base2_end])
                this_V_data = V_data[base1_start:base1_end]
                this_V_data = np.append(this_V_data,V_data[source_start:source_end])
                this_V_data = np.append(this_V_data,V_data[base2_start:base2_end])
                # sstart = max_point-25
                # send = max_point+25
                # if sstart < 0:
                #     time_end = True
                #     sstart = 0
                # if send > len(AST):
                #     time_end = True
                #     send = len(AST)
                # this_RA = RA[sstart:send]
                # this_DEC = DEC[sstart:send]
                # this_AST = AST[sstart:send]
                # this_I_data = I_data[sstart:send]
                # this_Q_data = Q_data[sstart:send]
                # this_U_data = U_data[sstart:send]
                # this_V_data = V_data[sstart:send]
                # check dec scan to see if we change direction
                # across source
                dec_end = False
                for k in range(len(this_DEC)-1):
                    if (np.sign(this_DEC[k+1]-this_DEC[k]) !=
                        np.sign(this_DEC[1]-this_DEC[0])):
                        dec_end = True
                        break
                # now, add it
                sources.append(source.Source(this_RA, this_DEC, this_AST,
                                             this_I_data, this_Q_data,
                                             this_U_data, this_V_data,
                                             time_end, dec_end))
                # start next search from where this one left off
                i = j
            if self.options["verbose"]:
                print("Log: Found {0} sources.".format(len(sources)))
                print("Log: Fitting good sources.")
            good_sources = []
            bad_sources = []
            for s in range(len(sources)):
                if sources[s].time_end or sources[s].dec_end:
                    bad_sources.append(s)
                else:
                    plt_filename = bin_results_dir+"/source{0:03d}".format(s)
                    sources[s].fit(plt_filename, **self.options)
                    if sources[s].good_fit:
                        good_sources.append(s)
                    else:
                        bad_sources.append(s)
            if self.options["verbose"]:
                print("Log: Fit {0} good sources.".format(len(good_sources)))
                print("Log: Found {0} bad sources.".format(len(bad_sources)))
            if self.options["file_verbose"]:
                with open(bin_results_dir+"/good_sources.txt","w") as f:
                    f.write("# SourceNum centerRA centerDEC peakI widthDEC\n")
                    f.write("# --------- deg      deg       K     deg\n")
                    for s in good_sources:
                        f.write("{0:03d} {1:.3f} {2:.3f} {3:.3f} {4:.3f}\n".\
                                format(s,sources[s].center_RA,
                                       sources[s].center_DEC,
                                       sources[s].center_I,
                                       sources[s].fit_p[2]))
                with open(bin_results_dir+"/bad_sources.txt","w") as f:
                    f.write("# SourceNum Reasons\n")
                    for s in bad_sources:
                        reasons = ""
                        if sources[s].dec_end:
                            reasons = reasons+"DecChange,"
                        if sources[s].time_end:
                            reasons = reasons+"EndOfScan,"
                        if not sources[s].good_fit:
                            reasons = reasons+"BadFit,"
                        f.write("{0:03d} {1}\n".format(s,reasons))
            np.savez(bin_results_dir+"/sources",
                     sources=sources)
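
The RFI kernel built above ([-0.5, 0, ..., 1, ..., 0, -0.5]) is a second-difference filter at spacing rfi_con_width: a flat baseline sums to zero while a narrow spike stands out. A tiny self-contained demo on synthetic data (width assumed):

import numpy as np

rfi_con_width = 3  # assumed for the demo
con = np.zeros(2 * rfi_con_width + 1)
con[0] = -0.5
con[rfi_con_width] = 1.0
con[-1] = -0.5

data = np.ones(16)   # flat bandpass
data[8] = 10.0       # one RFI spike
filtered = np.convolve(data, con, mode="same")
# filtered is ~0 on the flat baseline and large near the spike
# (the array edges also deviate because the kernel runs off the ends).
print(filtered.round(2))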
Example #14
import threading

import numpy as np

# geometry, simulation and source are project-local modules; the excerpt
# begins mid-script, so an empty scene list is assumed here.
import geometry
import simulation
import source

scene = []

scene.append(
    geometry.Box(geometry.Point(-1000, -1000, -1500),
                 geometry.Point(1000, 1000, -1600),
                 0,
                 geometry.REFLECTOR,
                 logging=True))
scene.append(
    geometry.Box(geometry.Point(-1000, -1000, 1500),
                 geometry.Point(1000, 1000, 1600),
                 0,
                 geometry.RECEIVER,
                 logging=True))

sources = []
sources.append(
    source.Source(np.array([0, 0, 1450]), np.array([1]), np.array([0, 1, -1])))

if __name__ == '__main__':
    print("Main")
    iterations = 1000
    threads = 6
    timeout = 40
    tracker = simulation.Tracker(scene, sources, iterations)
    pool = [
        threading.Thread(target=simulation.operator,
                         args=(
                             tracker,
                             scene,
                             timeout,
                         )) for i in range(threads)
    ]
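    # The excerpt ends with the worker pool constructed but idle; presumably
    # the script continues by starting and joining the threads, along these
    # lines (a sketch, not part of the original):
    for t in pool:
        t.start()
    for t in pool:
        t.join(timeout)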
Example #15
import source
from datetime import datetime, timedelta
import time
import sys

GBP_USD = source.Source('GBP_USD')

# Creates table if not on DB
GBP_USD.create_table('S5')

timenow = datetime.utcnow() + timedelta(hours=1)
GBP_USD.pull_to_table(
    datetime.strftime(timenow - timedelta(hours=4), '%Y-%m-%d %H:%M:%S'),
    datetime.strftime(timenow + timedelta(minutes=2), '%Y-%m-%d %H:%M:%S'),
    'S5')
print('Initialised')
time.sleep(3)

while True:

    try:
        # Change for Daylight Saving Time
        timenow = datetime.utcnow() + timedelta(hours=1)

        GBP_USD.pull_to_table(
            datetime.strftime(timenow - timedelta(minutes=5),
                              '%Y-%m-%d %H:%M:%S'),
            datetime.strftime(timenow + timedelta(minutes=2),
                              '%Y-%m-%d %H:%M:%S'), 'S5')

        sys.stdout.flush()

    except Exception as exc:
        # The excerpt is truncated mid-loop; a handler along these lines is
        # assumed so the try block is complete.
        print('Error while pulling data: {0}'.format(exc))

    time.sleep(5)  # polling interval assumed
Example #16
  def testSourceScanModuleAlias(self):
    test_source = source.Source(_TEST_MODULE_ALIAS_SOURCE)

    self.assertEqual(set(['goog.dom', 'goog.events']), test_source.requires)
    self.assertTrue(test_source.is_goog_module)
Example #17
    def _produce_source_of_waves(self):
        """
        Create a source of propagating waves.
        :return: Source
        """
        return source.Source(self._source)
Example #18
if checkerInstallation is None:
    die( "Could not find checker installation. Please specify it in the config file (checkerInstallation=...) or via the --checkerInstallation option" )

# Apply abspath to each entry to get absolute paths:
testcases = map( abspath, utils.get_enumerated_config_option( config, 'testcase' ) )
if len( testcases ) == 0:
    die( "No testcases specified. Please specify at least one test case in the configuration" )

vms = map( abspath, utils.get_enumerated_config_option( config, 'vm' ) )
if len( vms ) == 0:
    die( "No VMs specified. Please specify at least one VM in the configuration" )
    
installers = args[1:]

if len( installers ) > 0:
    source = source.Source()  # note: rebinds the module name 'source' to an instance
    for i in installers:
        source.addDummy( 5, i )
else:
    source = ftpsource.FtpSource()
    if options.since:
        try:
            sdt = datetime.datetime.strptime( options.since, '%Y-%m-%d' )
        except ValueError:
            sdt = datetime.datetime.strptime( options.since, '%Y-%m-%d-%H-%M' )
        source.setStartDate( sdt )
    found = True
    nextsec = 0
    while found:
        sec = "Source{0}".format( nextsec )
        nextsec += 1
Example #19
  def MakeSource():
    source.Source(_TEST_BAD_BASE_SOURCE)
Example #20
    def getVisitMatchesBySensor(self, matchDatabase, matchVisit, dataIdRegex):
        """ Get a dict of all Catalog Sources matching dataId, but
        within another Science_Ccd_Exposure's polygon"""

        # If the dataIdEntry is identical to an earlier query, we must already have all the data
        dataIdStr = self._dataIdToString(
            dataIdRegex,
            defineFully=True)  # E.g. visit862826551-snap.*-raft.*-sensor.*

        if matchDatabase in self.visitMatchQueryCache:
            if matchVisit in self.visitMatchQueryCache[matchDatabase]:
                vmqCache = self.visitMatchQueryCache[matchDatabase][matchVisit]

                if dataIdStr in vmqCache:
                    vmCache = self.visitMatchCache[matchDatabase][matchVisit]
                    vmDict = {}
                    for key, ss in vmCache.items():
                        if re.search(dataIdStr, key):
                            vmDict[key] = ss
                    return vmDict

        # Load each of the dataIds
        dataIdList = self.getDataIdsFromRegex(dataIdRegex)

        # Set up the outputs
        calib = self.getCalibBySensor(dataIdRegex)
        vmDict = {}
        for k in calib.keys():
            vmDict[k] = []

        for dataIdEntry in dataIdList:
            visit, raft, sensor = dataIdEntry['visit'], dataIdEntry[
                'raft'], dataIdEntry['sensor']
            dataIdEntryStr = self._dataIdToString(
                dataIdEntry,
                defineFully=True)  # E.g. visit862826551-snap0-raft30-sensor20

            haveAllKeys = True
            sqlDataId = []
            for keyNames in [['visit', 'sce.visit'], ['raft', 'sce.raftName'],
                             ['sensor', 'sce.ccdName']]:
                key, sqlName = keyNames
                if key in dataIdEntry:
                    sqlDataId.append(
                        self._sqlLikeEqual(sqlName, dataIdEntry[key]))
                else:
                    haveAllKeys = False
            sqlDataId = " and ".join(sqlDataId)

            # Poly comes from our own database
            sql1  = 'SELECT poly FROM '+self.sceTable+' as sce ' \
                 + 'WHERE %s ' % (sqlDataId) \
                 + 'INTO @poly;'

            sql2 = 'CALL scisql.scisql_s2CPolyRegion(@poly, 20);'

            # Selection of source matches from the comparison database
            self.verifyDataIdKeys(dataIdRegex.keys(), raiseOnFailure=True)
            setMethods = [
                "set" + x for x in qaDataUtils.getSourceSetAccessors()
            ]
            selectList = ["s." + x for x in qaDataUtils.getSourceSetDbNames()]
            selectStr = ", ".join(selectList)
            sql3  = 'SELECT sce.visit, sce.raftName, sce.ccdName, sce.filterName, ' \
                + ' sce.fluxMag0, sce.fluxMag0Sigma,'                               \
                + '   CASE WHEN sce.filterId = 0 THEN sro.uMag' \
                + '        WHEN sce.filterId = 1 THEN sro.gMag' \
                + '        WHEN sce.filterId = 2 THEN sro.rMag' \
                + '        WHEN sce.filterId = 3 THEN sro.iMag' \
                + '        WHEN sce.filterId = 4 THEN sro.zMag' \
                + '        WHEN sce.filterId = 5 THEN sro.yMag' \
                + '   END as mag,'                              \
                + ' sro.ra, sro.decl, sro.isStar, sro.refObjectId,' \
                + selectStr \
                + ' FROM %s.' % (matchDatabase) + self.sTable + ' AS s USE INDEX FOR JOIN(IDX_htmId20)' \
                + ' INNER JOIN %s.' % (matchDatabase) + self.sceTable + ' AS sce ' \
                + ' ON (s.'+self.sceId+' = sce.'+self.sceId+') AND (sce.visit = %s)' % (matchVisit) \
                + '   INNER JOIN %s.' % (matchDatabase) + self.romTable + ' AS rsm ON (s.'+self.sId+' = rsm.'+self.sId+')' \
                + '   INNER JOIN %s.RefObject AS sro ON (sro.refObjectId = rsm.refObjectId)'  % (matchDatabase) \
                + '   INNER JOIN scisql.Region AS reg ON (s.htmId20 BETWEEN reg.htmMin AND reg.htmMax) ' \
                + 'WHERE scisql_s2PtInCPoly(s.ra, s.decl, @poly) = 1;'

            #if not re.search("\%", sql1) and haveAllKeys:
            #    dataIdCopy = copy.copy(dataIdEntry)
            #    dataIdCopy['snap'] = "0"
            #    key = self._dataIdToString(dataIdCopy)
            #    if self.visitMatchCache.has_key(key):
            #        vmDict[key] = self.visitMatchCache[key]
            #        continue

            self.printStartLoad("Loading DatasetMatches for: " +
                                dataIdEntryStr + "...")
            self.dbInterface.execute(sql1)
            self.dbInterface.execute(sql2)
            results = self.dbInterface.execute(sql3)

            self.printMidLoad("Found %d matches..." % (len(results)))

            for row in results:
                s = pqaSource.Source()
                qaDataUtils.setSourceBlobsNone(s)
                sref = pqaSource.RefSource()
                qaDataUtils.setSourceBlobsNone(sref)

                nValues = 11
                (mvisit, mraft, mccd, mfilt, fmag0, fmag0Err, mag, ra, dec,
                 isStar, refObjId) = row[:nValues]
                filt = afwImage.Filter(mfilt, True)

                sref.setId(refObjId)
                sref.setRa(ra)
                sref.setDec(dec)
                flux = 10**(-mag / 2.5)
                sref.setPsfFlux(flux)
                sref.setApFlux(flux)
                sref.setModelFlux(flux)
                sref.setInstFlux(flux)

                for i, value in enumerate(row[nValues:]):
                    method = getattr(s, setMethods[i])
                    if value is not None:
                        method(value)

                for sss in [s, sref]:
                    if isStar == 1:
                        sss.setFlagForDetection(sss.getFlagForDetection()
                                                | pqaSource.STAR)
                    else:
                        sss.setFlagForDetection(sss.getFlagForDetection()
                                                & ~pqaSource.STAR)

                # fluxes
                s.setPsfFlux(s.getPsfFlux() / fmag0)
                s.setApFlux(s.getApFlux() / fmag0)
                s.setModelFlux(s.getModelFlux() / fmag0)
                s.setInstFlux(s.getInstFlux() / fmag0)

                # flux errors
                psfFluxErr = qaDataUtils.calibFluxError(
                    s.getPsfFlux(), s.getPsfFluxErr(), fmag0, fmag0Err)
                s.setPsfFluxErr(psfFluxErr)

                apFluxErr = qaDataUtils.calibFluxError(s.getApFlux(),
                                                       s.getApFluxErr(), fmag0,
                                                       fmag0Err)
                s.setApFluxErr(apFluxErr)

                modFluxErr = qaDataUtils.calibFluxError(
                    s.getModelFlux(), s.getModelFluxErr(), fmag0, fmag0Err)
                s.setModelFluxErr(modFluxErr)

                instFluxErr = qaDataUtils.calibFluxError(
                    s.getInstFlux(), s.getInstFluxErr(), fmag0, fmag0Err)
                s.setInstFluxErr(instFluxErr)

                vm = vmDict[dataIdEntryStr]
                vm.append([sref, s, filt])

            self.printStopLoad()

        # cache it
        if matchDatabase not in self.visitMatchQueryCache:
            self.visitMatchQueryCache[matchDatabase] = {}
            self.visitMatchCache[matchDatabase] = {}

        if matchVisit not in self.visitMatchQueryCache[matchDatabase]:
            self.visitMatchQueryCache[matchDatabase][matchVisit] = {}
            self.visitMatchCache[matchDatabase][matchVisit] = {}

        self.visitMatchQueryCache[matchDatabase][matchVisit][dataIdStr] = True
        for k, ss in vmDict.items():
            self.visitMatchCache[matchDatabase][matchVisit][k] = ss

        return vmDict
Example #21
    def __init__(self):
        self.login = Login.Login()
        self.source = source.Source()
Example #22
import logging
import sys

# Project-local modules; _GetOptionsParser, _PathSource, _GetInputByPath and
# _GetClosureBaseFile are defined elsewhere in the same script.
import depstree
import jscompiler
import treescan


def main():
    logging.basicConfig(format=(sys.argv[0] + ': %(message)s'),
                        level=logging.INFO)
    options, args = _GetOptionsParser().parse_args()

    # Make our output pipe.
    if options.output_file:
        out = open(options.output_file, 'w')
    else:
        out = sys.stdout

    sources = set()

    logging.info('Scanning paths...')
    for path in options.roots:
        for js_path in treescan.ScanTreeForJsFiles(path):
            sources.add(_PathSource(js_path))

    # Add scripts specified on the command line.
    for path in args:
        sources.add(_PathSource(path))

    logging.info('%s sources scanned.', len(sources))

    # Though deps output doesn't need to query the tree, we still build it
    # to validate dependencies.
    logging.info('Building dependency tree...')
    tree = depstree.DepsTree(sources)

    input_namespaces = set()
    inputs = options.inputs or []
    for input_path in inputs:
        js_input = _GetInputByPath(input_path, sources)
        if not js_input:
            logging.error('No source matched input %s', input_path)
            sys.exit(1)
        input_namespaces.update(js_input.provides)

    input_namespaces.update(options.namespaces)

    if not input_namespaces:
        logging.error('No namespaces found. At least one namespace must be '
                      'specified with the --namespace or --input flags.')
        sys.exit(2)

    # The Closure Library base file must go first.
    base = _GetClosureBaseFile(sources)
    deps = [base] + tree.GetDependencies(input_namespaces)

    output_mode = options.output_mode
    if output_mode == 'list':
        out.writelines([js_source.GetPath() + '\n' for js_source in deps])
    elif output_mode == 'script':
        out.writelines([js_source.GetSource() for js_source in deps])
    elif output_mode == 'compiled':

        # Make sure a .jar is specified.
        if not options.compiler_jar:
            logging.error(
                '--compiler_jar flag must be specified if --output is '
                '"compiled"')
            sys.exit(2)

        compiled_source = jscompiler.Compile(
            options.compiler_jar, [js_source.GetPath() for js_source in deps],
            options.compiler_flags)

        if compiled_source is None:
            logging.error('JavaScript compilation failed.')
            sys.exit(1)
        else:
            logging.info('JavaScript compilation succeeded.')
            out.write(compiled_source)

    else:
        logging.error('Invalid value for --output flag.')
        sys.exit(2)
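
A typical invocation, assuming conventional spellings for the flags referenced above (the actual parser lives in _GetOptionsParser, which is not shown, and the script name here is hypothetical):

# python build.py --root=closure/ --namespace=myapp.Main \
#     --output_mode=script --output_file=app.js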