Example #1
    def OnExportSurface(self, pubsub_evt):
        filename, filetype = pubsub_evt.data
        if filetype in (const.FILETYPE_STL,
                        const.FILETYPE_VTP,
                        const.FILETYPE_PLY,
                        const.FILETYPE_STL_ASCII):

            # First we identify all surfaces that are selected
            # (if any)
            proj = prj.Project()
            polydata_list = []

            for index in proj.surface_dict:
                surface = proj.surface_dict[index]
                if surface.is_shown:
                    polydata_list.append(surface.polydata)

            if len(polydata_list) == 0:
                utl.debug("oops - no polydata")
                return
            elif len(polydata_list) == 1:
                polydata = polydata_list[0]
            else:
                polydata = pu.Merge(polydata_list)

            # Having a polydata that represents all surfaces
            # selected, we write it, according to filetype
            if filetype == const.FILETYPE_STL:
                writer = vtk.vtkSTLWriter()
                writer.SetFileTypeToBinary()
            elif filetype == const.FILETYPE_STL_ASCII:
                writer = vtk.vtkSTLWriter()
                writer.SetFileTypeToASCII()
            elif filetype == const.FILETYPE_VTP:
                writer = vtk.vtkXMLPolyDataWriter()
            #elif filetype == const.FILETYPE_IV:
            #    writer = vtk.vtkIVWriter()
            elif filetype == const.FILETYPE_PLY:
                writer = vtk.vtkPLYWriter()
                writer.SetFileTypeToASCII()
                writer.SetColorModeToOff()
                #writer.SetDataByteOrderToLittleEndian()
                #writer.SetColorModeToUniformCellColor()
                #writer.SetColor(255, 0, 0)

            if filetype in (const.FILETYPE_STL, const.FILETYPE_PLY):
                # Invert normals
                normals = vtk.vtkPolyDataNormals()
                normals.SetInputData(polydata)
                normals.SetFeatureAngle(80)
                normals.AutoOrientNormalsOn()
                #  normals.GetOutput().ReleaseDataFlagOn()
                normals.UpdateInformation()
                normals.Update()
                polydata = normals.GetOutput()

            filename = filename.encode(wx.GetDefaultPyEncoding())
            writer.SetFileName(filename)
            writer.SetInputData(polydata)
            writer.Write()
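
For reference, the same writer pattern reduced to a self-contained sketch, assuming a standard VTK Python build (a vtkSphereSource stands in for the project's surfaces, and the output path is made up):

import vtk

sphere = vtk.vtkSphereSource()
sphere.Update()

# Re-orient normals before writing, as the export code above does for STL/PLY
normals = vtk.vtkPolyDataNormals()
normals.SetInputData(sphere.GetOutput())
normals.SetFeatureAngle(80)
normals.AutoOrientNormalsOn()
normals.Update()

writer = vtk.vtkSTLWriter()
writer.SetFileTypeToBinary()
writer.SetFileName("sphere.stl")  # hypothetical output path
writer.SetInputData(normals.GetOutput())
writer.Write()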
Example #2
	def _update(self):
		if not g.screen_map:
			utils.error(".init() not run -- what are you playing at?")

		pygame.event.pump()
		for event in pygame.event.get():
			if event.type == pygame.QUIT:
				utils.debug("quit signal caught")
				self.running = False
				return

			self.inputmanager.handle(event)

		if self.inputmanager.check_key(pygame.K_q):
			self.running = False
			return

		if self.inputmanager.check_key_single(pygame.K_m):
			g.screen = g.screen_map["title"]
			self.force_update = True
			return

		if self.inputmanager.check_key_single(pygame.K_r):
			g.deaths += 1
			self.force_update = True
			return

		#if self.inputmanager.check_key_single(pygame.K_n):
			#if isinstance(self.screen, core.World):
				#g.screen = g.screen_map.get(self.screen.index+1, g.screen_map["win"])
				#self.force_update = True
				#return

		self.screen.update(self.inputmanager)
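
A bare-bones version of the same pump-and-poll event loop, runnable on its own (the window size is arbitrary; assumes pygame is installed):

import pygame

pygame.init()
screen = pygame.display.set_mode((320, 240))  # arbitrary size
running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
pygame.quit()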
Example #3
def parse_this_user(user_dict):
    """
    """
    debug("Going to parse html for user {}".format(user_dict['username']))
    about_node = parse_about(user_dict['about'])

    return about_node
Example #4
def run_test_sigterm():
  utils.zk_wipe()
  utils.run_vtctl('CreateKeyspace -force test_keyspace')

  # create the database so vttablets start, as it is serving
  tablet_62344.create_db('vt_test_keyspace')

  tablet_62344.init_tablet('master', 'test_keyspace', '0', start=True)

  # start a 'vtctl Sleep' command in the background
  sp = utils.run_bg(utils.vtroot+'/bin/vtctl -logfile=/dev/null Sleep %s 60s' %
                    tablet_62344.tablet_alias,
                    stdout=PIPE, stderr=PIPE)

  # wait for it to start, and let's kill it
  time.sleep(2.0)
  utils.run(['pkill', 'vtaction'])
  out, err = sp.communicate()

  # check the vtctl command got the right remote error back
  if "vtaction interrupted by signal" not in err:
    raise utils.TestError("cannot find expected output in error:", err)
  utils.debug("vtaction was interrupted correctly:\n" + err)

  tablet_62344.kill_vttablet()
Example #5
def main(args=sys.argv[1:]):

  # parse command line options
  parser = optparse.OptionParser()
  parser.add_option('-d', '--debug', dest='debug',
                    action='store_true', default=False,
                    help="enable debug")
  parser.add_option('-n', '--noisy', dest='noisy',
                    action='store_true', default=False,
                    help="enable noisy output")
  parser.add_option('-s', '--screen', dest='screen',
                    action='store_true', default=False,
                    help="set screen")
  parser.add_option('--amo', dest='amo',
                    action='store_true', default=False,
                    help="set AMO")
  options, args = parser.parse_args(args)

  # set variables
  if options.debug:
    print 'setting debug'
    utils.setdebug(1)
  if options.noisy:
    utils.setnoisy(1)

  # Read in each config file and run the tests on it.
  for arg in args:
    utils.debug("running test file " + arg)
    test_file(arg, options.screen, options.amo)
Example #6
def setup_tablets():
  # Start up a master mysql and vttablet
  utils.debug("Setting up tablets")
  utils.run_vtctl('CreateKeyspace test_keyspace')
  master_tablet.init_tablet('master', 'test_keyspace', '0')
  utils.run_vtctl('RebuildShardGraph test_keyspace/0')
  utils.run_vtctl('RebuildKeyspaceGraph test_keyspace')
  utils.validate_topology()

  setup_schema()
  replica_tablet.create_db('vt_test_keyspace')
  master_tablet.start_vttablet(memcache=True)

  replica_tablet.init_tablet('idle', 'test_keyspace', start=True, memcache=True)
  snapshot_dir = os.path.join(utils.vtdataroot, 'snapshot')
  utils.run("mkdir -p " + snapshot_dir)
  utils.run("chmod +w " + snapshot_dir)
  utils.run_vtctl('Clone -force %s %s' %
                  (master_tablet.tablet_alias, replica_tablet.tablet_alias))

  utils.run_vtctl('Ping test_nj-0000062344')
  utils.run_vtctl('SetReadWrite ' + master_tablet.tablet_alias)
  utils.check_db_read_write(62344)

  utils.validate_topology()
  utils.run_vtctl('Ping test_nj-0000062345')
  utils.run_vtctl('ChangeSlaveType test_nj-0000062345 replica')
Example #7
    def buildDir(self):
        utils.debug("EmergeBase.buildDir() called", 2)
        builddir = os.path.join(self.workDir(), self.workDirPattern())
        if self.subinfo.options.unpack.unpackIntoBuildDir and self.subinfo.hasTargetSourcePath():
            builddir = os.path.join(builddir, self.subinfo.targetSourcePath())
        utils.debug("package builddir is: %s" % builddir, 2)
        return self.__adjustPath(builddir)
Example #8
def check_port(name, port):
    debug('Checking for "{0}" command', name)
    for i in e('${PATH}').split(':'):
        if os.path.exists(e('${i}/${name}')):
            return

    error('Command {0} not found. Please run "pkg install {1}" or install from ports', name, port)
Example #9
    def createProfile(self, profile_path, browser_config):
        # Create the new profile
        temp_dir, profile_dir = self._ffsetup.CreateTempProfileDir(profile_path,
                                                     browser_config['preferences'],
                                                     browser_config['extensions'])
        utils.debug("created profile")
        return profile_dir, temp_dir
Example #10
    def cleanupProcesses(self, process_name, child_process, browser_wait):
        #kill any remaining browser processes
        #returns string of which process_names were terminated and with what signal

        # if we are running this against the metro browser, we currently use
        # metrotestharness.exe as a way to launch the metro browser process.
        # Talos thinks this harness is the browser, see:
        # http://hg.mozilla.org/build/talos/file/8c5f2725fbdd/talos/run_tests.py#l249
        # We must inform talos about the sub process, the metro browser itself,
        # that is spawned from metrotestharness. The metro browser proc is
        # given the same name as the non metro equivalent: 'firefox.exe'
        if process_name == "metrotestharness" and \
                "firefox" not in self.extra_prog:
            self.extra_prog.append("firefox")

        processes_to_kill = filter(lambda n: n, ([process_name, child_process] +
                                                 self.extra_prog))
        utils.debug("Terminating: %s", ", ".join(str(p) for p in processes_to_kill))
        terminate_result = self.TerminateAllProcesses(browser_wait, *processes_to_kill)
        #check if anything is left behind
        if self.checkAllProcesses(process_name, child_process):
            #this is for windows machines.  when attempting to send kill messages to win processes the OS
            # always gives the process a chance to close cleanly before terminating it, this takes longer
            # and we need to give it a little extra time to complete
            time.sleep(browser_wait)
            processes = self.checkAllProcesses(process_name, child_process)
            if processes:
                raise talosError("failed to cleanup processes: %s" % processes)

        return terminate_result
Example #11
    def filter_plugin(self, plug_results, setobj, changed=False):
        '''
        Apply specified filter to plugin results.
        '''
        # If there is no filter defined, return as is.
        if self.filterp is None:
            return plug_results

        filtered_plug_results = []

        # get type definitions for this memobj type
        typedefs = setobj.get_field_typedefs()
        # if filterp (e.g., 'pid') name is in our typedefs
        if self.filterp_name in typedefs.keys():
            debug("passed keys: %s" % typedefs.keys())
            # for each memoobj
            for elem in plug_results:
                filter_passed = None
                # if changed, these are tuples of the before and after elements
                if changed:
                    filter_passed = (self.__filter_passed(elem[0], typedefs) or self.__filter_passed(elem[1], typedefs))    
                else:    
                    filter_passed = self.__filter_passed(elem, typedefs) 

                debug("filter passed %s" % filter_passed)
                if filter_passed:
                    filtered_plug_results.append(elem)
            
        return filtered_plug_results
Example #12
    def request_shell(self, data):
        self._request_type = "shell"
        utils.debug("entering shell")

        self._session_tunnel = _base.SessionTunnel(self, "")

        return session.SSHSession.request_shell(self, data)
Example #13
    def found_tag(self, decoder, something, taglist):
        """
        Called when the decoder reads a tag.
        """
        debug("found_tags:", self.sound_file.filename_for_display)
        for k in taglist.keys():
            if "image" not in k:
                debug("\t%s=%s" % (k, taglist[k]))
            if isinstance(taglist[k], gst.Date):
                taglist["year"] = taglist[k].year
                taglist["date"] = "%04d-%02d-%02d" % (taglist[k].year, taglist[k].month, taglist[k].day)
        tag_whitelist = (
            "artist",
            "album",
            "title",
            "track-number",
            "track-count",
            "genre",
            "date",
            "year",
            "timestamp",
            "disc-number",
            "disc-count",
        )
        tags = {}
        for k in taglist.keys():
            if k in tag_whitelist:
                tags[k] = taglist[k]

        self.sound_file.tags.update(tags)
        self.query_duration()
Example #14
def __import__(module):  # pylint: disable=W0622
    utils.debug("module to import: %s" % module, 2)
    if not os.path.isfile(module):
        try:
            return __builtin__.__import__(module)
        except ImportError as e:
            utils.warning("import failed for module %s: %s" % (module, e.message))
            return None
    else:
        sys.path.append(os.path.dirname(module))
        modulename = os.path.basename(module).replace(".py", "")

        suff_index = None
        for suff in imp.get_suffixes():
            if suff[0] == ".py":
                suff_index = suff
                break

        if suff_index is None:
            utils.die("no .py suffix found")

        with open(module) as fileHdl:
            try:
                return imp.load_module(modulename.replace(".", "_"), fileHdl, module, suff_index)
            except ImportError as e:
                utils.warning("import failed for file %s: %s" % (module, e.message))
                return None
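
The imp module used above was removed in Python 3.12; a rough importlib-based equivalent of the file-import branch might look like this (a sketch, not a drop-in replacement; import_from_path is an invented helper name):

import importlib.util
import os

def import_from_path(module_path):
    # Derive a module name from the file name, as the code above does
    name = os.path.basename(module_path).replace(".py", "").replace(".", "_")
    spec = importlib.util.spec_from_file_location(name, module_path)
    if spec is None or spec.loader is None:
        return None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module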
Example #15
  def create_db(self, name):
    self.mquery('', 'drop database if exists %s' % name)
    while self.has_db(name):
      utils.debug("%s sleeping while waiting for database drop: %s" % (self.tablet_alias, name))
      time.sleep(0.3)
      self.mquery('', 'drop database if exists %s' % name)
    self.mquery('', 'create database %s' % name)
Example #16
def autostart():
    """
    Starts the cleaning service.
    """
    cleaner = Cleaner()

    service_sleep = 4  # Lower than 4 causes too much stress on resource limited systems such as RPi
    ticker = 0
    delayed_completed = False

    while not cleaner.monitor.abortRequested():
        if get_setting(service_enabled):
            scan_interval_ticker = get_setting(scan_interval) * 60 / service_sleep
            delayed_start_ticker = get_setting(delayed_start) * 60 / service_sleep

            if delayed_completed and ticker >= scan_interval_ticker:
                results, _ = cleaner.clean_all()
                notify(results)
                ticker = 0
            elif not delayed_completed and ticker >= delayed_start_ticker:
                delayed_completed = True
                results, _ = cleaner.clean_all()
                notify(results)
                ticker = 0

            cleaner.monitor.waitForAbort(service_sleep)
            ticker += 1
        else:
            cleaner.monitor.waitForAbort(service_sleep)

    debug(u"Abort requested. Terminating.")
    return
Example #17
    def run(self):
        if self.arduino is None:
            return
        debug("DroidControlThread: Thread started")
        self.execute_commands()
        self.arduino.close()
        debug("DroidControlThread: Thread stopped")
Example #18
def autostart():
    """
    Starts the cleaning service.
    """
    cleaner = Cleaner()

    service_sleep = 10
    ticker = 0
    delayed_completed = False

    while not xbmc.abortRequested:
        if get_setting(service_enabled):
            scan_interval_ticker = get_setting(scan_interval) * 60 / service_sleep
            delayed_start_ticker = get_setting(delayed_start) * 60 / service_sleep

            if delayed_completed and ticker >= scan_interval_ticker:
                results, exit_status = cleaner.cleanup()
                if results and exit_status == 0:
                    notify(results)
                ticker = 0
            elif not delayed_completed and ticker >= delayed_start_ticker:
                delayed_completed = True
                results, exit_status = cleaner.cleanup()
                if results and exit_status == 0:
                    notify(results)
                ticker = 0

            xbmc.sleep(service_sleep * 1000)
            ticker += 1
        else:
            xbmc.sleep(service_sleep * 1000)

    debug("Abort requested. Terminating.")
    return
Example #19
    def _display_previews(self):
        initial = self.displayed_position * NCOLS
        final = initial + NUM_PREVIEWS

        if len(self.files) < final:
            for i in xrange(final-len(self.files)):
                try:
                    self.previews[-i-1].Hide()
                except IndexError:
                    utils.debug("doesn't exist!")
            self.nhidden_last_display = final-len(self.files)
        else:
            if self.nhidden_last_display:
                for i in xrange(self.nhidden_last_display):
                    try:
                        self.previews[-i-1].Show()
                    except IndexError:
                        utils.debug("doesn't exist!")
                self.nhidden_last_display = 0

        for f, p in zip(self.files[initial:final], self.previews):
            p.SetDicomToPreview(f)
            if f.selected:
                self.selected_panel = p
            #p.interactor.Render()

        for f, p in zip(self.files[initial:final], self.previews):
            p.Show()
Example #20
    def Output(self, Ground1, Ground2, Ground3):
        #self.Groundings = self.output
        ground_1 = None
        ground_2 = None
        ground_3 = None
        
        if self.Grounds.isGround(Ground1) and self.isVariable(Ground1):
            ground_1 = self.Grounds.getGrounding(Ground1)
        elif not self.isVariable(Ground1):
            ground_1 = Ground1
            
        if self.Grounds.isGround(Ground2) and self.isVariable(Ground2):
            ground_2 = self.Grounds.getGrounding(Ground2)
        elif not self.isVariable(Ground2):
            ground_2 = Ground2
            
        if self.Grounds.isGround(Ground3) and self.isVariable(Ground3):
            ground_3 = self.Grounds.getGrounding(Ground3)
        elif not self.isVariable(Ground3):
            ground_3 = Ground3

        
        if ground_1 and ground_2 and ground_3:
            debug([ground_1, ground_2, ground_3], prefix="triple")
            
            self.hypergraph.add_edge(ground_2, ground_3,
                                     edge_data=[ground_1],
                                     edge_type='triple', with_merge=False)
Example #21
    def found_tag(self, decoder, something, taglist):
        """
        Called when the decoder reads a tag.
        """
        debug('found_tags:', self.sound_file.filename_for_display)
        for k in taglist.keys():
            if 'image' not in k:
                debug('\t%s=%s' % (k, taglist[k]))
            if isinstance(taglist[k], gst.Date):
                taglist['year'] = taglist[k].year
                taglist['date'] = '%04d-%02d-%02d' % (taglist[k].year,
                                    taglist[k].month, taglist[k].day)
        tag_whitelist = (
            'artist',
            'album',
            'title',
            'track-number',
            'track-count',
            'genre',
            'date',
            'year',
            'timestamp',
            'disc-number',
            'disc-count',
        )
        tags = {}
        for k in taglist.keys():
            if k in tag_whitelist:
                tags[k] = taglist[k]

        self.sound_file.tags.update(tags)
        self.query_duration()
Example #22
def compress_textures(filenames, dest):
    utils.pg_init((10, 10))
    data = []

    for name in filenames:
        try:
            textures = PackedTextureGroup(name)
        except FileNotFoundError:
            continue
        image = textures.pack.image
        rawImageStr = pg.image.tostring(image, 'RGB')
        compressedImageStr = gzip.compress(rawImageStr)
        utils.debug('compressed size: {}MB. ratio: {}'.format(
            len(compressedImageStr) / 1024 ** 2,
            len(compressedImageStr) / len(rawImageStr)
        ))
        textureMetas = []
        for p, (id, t) in zip(textures.pack.poses, textures.iter_all()):
            textureMetas.append((
                id, t.xoff, t.yoff, p[0], p[1], t.image.get_width(), t.image.get_height(),
            ))
        metaItem = {
            'name': textures.name,
            'size': image.get_size(),
            'format': 'RGB',
            'image': compressedImageStr,
            'textureMetas': textureMetas,
        }
        data.append(metaItem)
    with open(dest, 'wb') as outfile:
        pickle.dump(data, outfile, -1)
Example #23
    def get(self, id, fail=0):
        id //= 2
        if id not in self.textures:
            utils.debug('{}: id: {} not in range {}'.format(
                self.name, id, (0, len(self.textures) - 1)))
            return None
        return self.textures[id]
Example #24
    def on_message(self, bus, message):
        t = message.type
        import gst

        if t == gst.MESSAGE_ERROR:
            error, _ = message.parse_error()
            self.eos = True
            self.on_error(error)
            self.done()
        elif gst.pbutils.is_missing_plugin_message(message):
            global user_canceled_codec_installation
            detail = gst.pbutils.missing_plugin_message_get_installer_detail(message)
            debug("missing plugin:", detail.split("|")[3], self.sound_file.uri)
            self.pipeline.set_state(gst.STATE_NULL)
            if gst.pbutils.install_plugins_installation_in_progress():
                while gst.pbutils.install_plugins_installation_in_progress():
                    gtk_sleep(0.1)
                self.restart()
                return
            if user_canceled_codec_installation:
                self.error = "Plugin installation cancelled"
                debug(self.error)
                self.done()
                return
            ctx = gst.pbutils.InstallPluginsContext()
            gst.pbutils.install_plugins_async([detail], ctx, self.install_plugin_cb)

        elif t == gst.MESSAGE_EOS:
            self.eos = True
            self.done()

        elif t == gst.MESSAGE_TAG:
            self.found_tag(self, "", message.parse_tag())
        return True
Example #25
    def play(self):
        if not self.parsed:
            command = " ! ".join(self.command)
            debug("launching: '%s'" % command)
            try:
                self.pipeline = gst.parse_launch(command)
                bus = self.pipeline.get_bus()
                assert not self.connected_signals
                self.connected_signals = []
                for name, signal, callback in self.signals:
                    if name:
                        element = self.pipeline.get_by_name(name)
                    else:
                        element = bus
                    sid = element.connect(signal, callback)
                    self.connected_signals.append((element, sid))

                self.parsed = True

            except gobject.GError, e:
                show_error("GStreamer error when creating pipeline", str(e))
                self.error = str(e)
                self.eos = True
                self.done()
                return

            bus.add_signal_watch()
            watch_id = bus.connect("message", self.on_message)
            self.watch_id = watch_id
Example #26
def getDependencies(category, package, version, runtimeOnly=False):
    """returns the dependencies of this package as list of strings:
    category/package"""
    if not os.path.isfile(getFilename(category, package, version)):
        utils.die("package name %s/%s-%s unknown" % (category, package, version))

    package, subpackage = getSubPackage(category, package)
    print "getDependencies:", package, subpackage
    if subpackage:
        utils.debug(
            "solving package %s/%s/%s-%s %s"
            % (category, subpackage, package, version, getFilename(category, package, version)),
            2,
        )
    else:
        utils.debug(
            "solving package %s/%s-%s %s" % (category, package, version, getFilename(category, package, version)), 2
        )
    mod = __import__(getFilename(category, package, version))

    deps = []
    if hasattr(mod, "subinfo"):
        info = mod.subinfo()
        depDict = info.hardDependencies
        depDict.update(info.dependencies)
        depDict.update(info.runtimeDependencies)
        if not runtimeOnly:
            depDict.update(info.buildDependencies)

        for line in depDict.keys():
            (category, package) = line.split("/")
            version = PortageInstance.getNewestVersion(category, package)
            deps.append([category, package, version, depDict[line]])

    return deps
Example #27
    def notify(self, command):
        debug('Received command %s' % repr(command))
        if command[:2] == 'AT':
            atcommand = command[2:].strip()
            if atcommand == 'Z':
                self.doReset()
            elif atcommand == 'I':
                self.write_raw(self.ID)
            elif atcommand == 'E0':
                self.setEcho(0)
            elif atcommand == 'E1':
                self.setEcho(1)
            elif atcommand == 'H0':
                self.setHeader(0)
            elif atcommand == 'H1':
                self.setHeader(1)
            else:
                self.unknown()
        else:
            if self.checkHexadecimal(command):
                if self.status == self.IDLE:
                    self.doInit()
                    self.ok()
                    self.write_crlf()
                svc = strhex_to_int(command[:2])
                pid = strhex_to_int(command[2:])
                self.notifySensor(svc, pid)
            else:
                self.error()
        self.write_end()
Example #28
def remInstalled(category, package, version, buildtype=""):
    """ deprecated, use InstallDB.installdb.remInstalled() instead """
    utils.debug("remInstalled called", 2)
    if buildtype != "":
        fileName = "installed-" + buildtype
    else:
        fileName = "installed"
    utils.debug("removing package %s - %s from %s" % (package, version, fileName), 2)
    dbFileName = os.path.join(utils.etcDir(), fileName)
    tmpdbfile = os.path.join(utils.etcDir(), "TMPinstalled")
    found = False
    if os.path.exists(dbFileName):
        with open(dbFileName, "rb") as dbFile:
            with open(tmpdbfile, "wb") as tfile:
                for line in dbFile:
                    ## \todo the category should not be part of the search string
                    ## because otherwise it is not possible to unmerge package using
                    ## the same name but living in different categories
                    if not line.startswith("%s/%s" % (category, package)):
                        tfile.write(line)
                    else:
                        found = True
        os.remove(dbFileName)
        os.rename(tmpdbfile, dbFileName)
    return found
Example #29
    def __no_cvs_check_user_override(self):
        """Return True iff pre-commit-checks are turned off by user override...

        ... via the ~/.no_cvs_check file.

        This function also performs all necessary debug traces, warnings,
        etc.
        """
        no_cvs_check_fullpath = expanduser('~/.no_cvs_check')
        # Make sure the tilde expansion worked.  Since we are only using
        # "~" rather than "~username", the expansion should really never
        # fail...
        assert (not no_cvs_check_fullpath.startswith('~'))

        if not isfile(no_cvs_check_fullpath):
            return False

        # The no_cvs_check file exists.  Verify its age.
        age = time.time() - getmtime(no_cvs_check_fullpath)
        one_day_in_seconds = 24 * 60 * 60

        if (age > one_day_in_seconds):
            warn('%s is too old and will be ignored.' % no_cvs_check_fullpath)
            return False

        debug('%s found - pre-commit checks disabled' % no_cvs_check_fullpath)
        syslog('Pre-commit checks disabled for %(rev)s on %(repo)s by user'
               ' %(user)s using %(no_cvs_check_fullpath)s'
               % {'rev': self.new_rev,
                  'repo': self.email_info.project_name,
                  'user': get_user_name(),
                  'no_cvs_check_fullpath': no_cvs_check_fullpath,
                  })
        return True
Example #30
    def on_task_finished(self, task):
        task.sound_file.progress = 1.0

        if task.error:
            debug("error in task, skipping rename:", task.output_filename)
            if vfs_exists(task.output_filename):
                vfs_unlink(task.output_filename)
            self.errors.append(task.error)
            self.error_count += 1
            return

        duration = task.get_duration()
        if duration:
            self.duration_processed += duration

        # rename temporary file
        newname = self.window.prefs.generate_filename(task.sound_file)
        log(beautify_uri(task.output_filename), "->", beautify_uri(newname))

        # safe mode. generate a filename until we find a free one
        p, e = os.path.splitext(newname)
        p = p.replace("%", "%%")
        p = p + " (%d)" + e
        i = 1
        while vfs_exists(newname):
            newname = p % i
            i += 1

        task.error = vfs_rename(task.output_filename, newname)
        if task.error:
            self.errors.append(task.error)
            self.error_count += 1
Example #31
def main(args):
    if args.debug:
        debug()

    if args.cuda:
        if "CUDA_VISIBLE_DEVICES" not in os.environ:
            os.environ["CUDA_VISIBLE_DEVICES"] = '0'
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
    else:
        config = tf.ConfigProto(device_count={'GPU': 0})
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    np.random.seed(args.seed)
    tf.set_random_seed(args.seed)

    data_class = MyLM
    wordvec_class = TencentChinese
    logger.info("模型侧加载数据")
    if args.cache:
        if not os.path.isdir(args.cache_dir):
            os.mkdir(args.cache_dir)
        data = try_cache(data_class, {
            "file_id": args.datapath,
            "max_sent_length": args.max_sent_length
        }, args.cache_dir)
        vocab = data.vocab_list
        logger.info("加载词向量")
        embed = try_cache(
            lambda wv, ez, vl: wordvec_class(wv).load_matrix(ez, vl),
            (args.wv_path, args.embedding_size, vocab), args.cache_dir,
            wordvec_class.__name__)
    else:
        data = data_class(file_id=args.datapath,
                          max_sent_length=args.max_sent_length)
        logger.info("定义并加载词向量文件")
        wv = wordvec_class(args.wv_path)
        vocab = data.vocab_list
        embed = wv.load_matrix(args.embedding_size, vocab)

    embed = np.array(embed, dtype=np.float32)
    if not os.path.isdir(args.output_dir):
        os.mkdir(args.output_dir)

    with tf.Session(config=config) as sess:
        model = create_model(sess, data, args, embed)
        if args.mode == "train":
            logger.info("开始训练...")
            model.train_process(sess, data, args)
        else:
            logger.info("开始测试...")
            model.test_process(sess, data, args)
Example #32
def plot_signals(symbol, period=default_periods, refresh=False, start_date=config.start_date, end_date=config.end_date):
    """Plots the macd buy/sell signals for the given symbol, saves this data in a .csv file, and plots this data. Only uses the first and last periods
    The MACD is a lagging trend indicator.

    Parameters:
        symbol : str
        period : int or list of int, optional
            Must contain 3 values. First value is signal line, second is fast line, third is slow line.
        refresh : bool, optional
        start_date : date, optional
        end_date : date, optional

    Returns:
        figure, axes
            A figure and axes containing the MACD signals for the given symbol
    """

    if len(period) != 3:
        raise ValueError("MACD requires 3 periods")

    generate_signals(symbol, period=period, refresh=refresh, start_date=start_date, end_date=end_date)
    fig, ax = plot_macd(symbol, period=period, refresh=refresh, start_date=start_date, end_date=end_date)
    df = pd.read_csv(utils.get_file_path(config.ta_data_path, table_filename, symbol=symbol), index_col="Date", parse_dates=["Date"])[start_date:end_date]

    macd_column_name = "MACD" + str(period[1]) + "-" + str(period[2])
    signal_column_name = get_signal_name(period=period)

    buy_signals = df.loc[df[signal_column_name] == ta.buy_signal]
    ax[0].scatter(buy_signals.index, df.loc[df.index.isin(buy_signals.index)]["Close"], label=ta.buy_signal, color=ta.signal_colors[ta.buy_signal], marker=ta.signal_markers[ta.buy_signal], s=config.scatter_size, alpha=config.scatter_alpha)
    ax[1].scatter(buy_signals.index, df.loc[df.index.isin(buy_signals.index)][macd_column_name], label=ta.buy_signal, color=ta.signal_colors[ta.buy_signal], marker=ta.signal_markers[ta.buy_signal], s=config.scatter_size, alpha=config.scatter_alpha)

    sell_signals = df.loc[df[signal_column_name] == ta.sell_signal]
    ax[0].scatter(sell_signals.index, df.loc[df.index.isin(sell_signals.index)]["Close"], label=ta.sell_signal, color=ta.signal_colors[ta.sell_signal], marker=ta.signal_markers[ta.sell_signal], s=config.scatter_size, alpha=config.scatter_alpha)
    ax[1].scatter(sell_signals.index, df.loc[df.index.isin(sell_signals.index)][macd_column_name], label=ta.sell_signal, color=ta.signal_colors[ta.sell_signal], marker=ta.signal_markers[ta.sell_signal], s=config.scatter_size, alpha=config.scatter_alpha)

    soft_buy_signals = df.loc[df[signal_column_name] == ta.soft_buy_signal]
    ax[0].scatter(soft_buy_signals.index, df.loc[df.index.isin(soft_buy_signals.index)]["Close"], label=ta.soft_buy_signal, color=ta.signal_colors[ta.soft_buy_signal], marker=ta.signal_markers[ta.soft_buy_signal], s=config.scatter_size, alpha=config.scatter_alpha)
    ax[1].scatter(soft_buy_signals.index, df.loc[df.index.isin(soft_buy_signals.index)][macd_column_name], label=ta.soft_buy_signal, color=ta.signal_colors[ta.soft_buy_signal], marker=ta.signal_markers[ta.soft_buy_signal], s=config.scatter_size, alpha=config.scatter_alpha)

    soft_sell_signals = df.loc[df[signal_column_name] == ta.soft_sell_signal]
    ax[0].scatter(soft_sell_signals.index, df.loc[df.index.isin(soft_sell_signals.index)]["Close"], label=ta.soft_sell_signal, color=ta.signal_colors[ta.soft_sell_signal], marker=ta.signal_markers[ta.soft_sell_signal], s=config.scatter_size, alpha=config.scatter_alpha)
    ax[1].scatter(soft_sell_signals.index, df.loc[df.index.isin(soft_sell_signals.index)][macd_column_name], label=ta.soft_sell_signal, color=ta.signal_colors[ta.soft_sell_signal], marker=ta.signal_markers[ta.soft_sell_signal], s=config.scatter_size, alpha=config.scatter_alpha)

    utils.prettify_ax(ax[0], title=symbol + " Price", start_date=start_date, end_date=end_date)
    utils.prettify_ax(ax[1], title=symbol + " " + signal_column_name, center=True, start_date=start_date, end_date=end_date)

    utils.prettify_fig(fig)
    fig.savefig(utils.get_file_path(config.ta_graphs_path, get_signal_name(period) + graph_filename, symbol=symbol))
    utils.debug(fig)

    return fig, ax
Example #33
    def _initialize(self):
        # record terminal settings to restore upon exit
        self._term_settings = termios.tcgetattr(sys.stdin)
        # set up keyboard I/O for cup switches
        tty.setcbreak(sys.stdin.fileno())
        if self.fadecandy.can_connect():
            debug('connected to %s' % self.address)
        else:
            debug("ERROR: Fadecandy server not answering at %s!" %
                  self.address)
            update_matrix(command="MSG", value="MSG,FC_7890")
            play_sound(config['sounds']['fadecandy_offline'], wait=True)
            return False
Example #34
    def create_table(self, conn, setobj):
        '''
        Create a new db table for the specified memobj type.

        @conn: a db connection object
        @setobj: a setobj for the memobj type
        '''
        command = "create table %s (" % (self.get_table_name(setobj))
        for elem in setobj.get_child().fields.keys():
            command += "%s text," % elem
        command = command.rstrip(",") + ")"
        debug(command)
        conn.execute(command)
Example #35
    def run(self):
        sift = cv2.SIFT(self.config.get('points'), self.config.get('levels'))
        start_time = time()
        keypoints, descriptors = sift.detectAndCompute(self.image, None)
        debug("SIFT time: {} seconds.".format(time() - start_time))
        self.results = {
            'img': self.image,
            'ext': self.extension,
            'kp': keypoints,
            'desc': descriptors
        }
        image = cv2.drawKeypoints(self.image, keypoints)
        save_image(image, self.name, self.extension)
Example #36
def download_sp500():
    """Generates two csv files, one containing the full S&P500 table from Wikipedia, and the other containing only the symbols
    """

    table = pd.read_html(
        "https://en.wikipedia.org/wiki/List_of_S%26P_500_companies")
    df = table[0]
    df.to_csv(sp500_full_table_path, index=False)
    df[symbol_column_name] = df[symbol_column_name].str.replace(".", "-")
    utils.debug(df)
    df.to_csv(sp500_symbols_table_path,
              columns=[symbol_column_name],
              index=False)
Example #37
    def query_duration(self):
        """
        Ask for the duration of the current pipeline.
        """
        try:
            if not self.sound_file.duration and self.pipeline:
                self.sound_file.duration = self.pipeline.query_duration(
                    gst.FORMAT_TIME)[0] / gst.SECOND
                debug('got file duration:', self.sound_file.duration)
                if self.sound_file.duration < 0:
                    self.sound_file.duration = None
        except gst.QueryError:
            self.sound_file.duration = None
Example #38
def buildworld():
    info('Building world from ${{OS_ROOT}}')
    info('Log file: {0}', worldlog)
    debug('World make.conf: {0}', makeconfbuild)

    sh("env -u DEBUG -u MAKEFLAGS MAKEOBJDIRPREFIX=${OBJDIR}",
       "make",
       "-j {0}".format(makejobs),
       "-C ${OS_ROOT}",
       "__MAKE_CONF={0}".format(makeconfbuild),
       "NO_CLEAN=YES",
       "buildworld",
       log=worldlog)
Example #39
def run_test_ddl():
  global GLOBAL_MASTER_START_POSITION
  start_position = GLOBAL_MASTER_START_POSITION
  utils.debug("run_test_ddl: starting @ %s" % start_position)
  master_conn = _get_master_stream_conn()
  master_conn.dial()
  binlog_pos, data, err = master_conn.stream_start(start_position)
  if err:
    raise utils.TestError("Update stream returned error '%s'" % err)

  if data['Sql'] != create_vt_insert_test.replace('\n', ''):
    raise utils.TestError("Test Failed: DDL %s didn't match the original %s" % (data['Sql'], create_vt_insert_test))
  utils.debug("Test DDL: PASS")
Example #40
    def init(self, key):
        if key in self.param_dict:
            sensor_type = self.get_type(key)
            sensor = globals()['add_' + sensor_type](
                self.world, self.blueprint, self.vehicle,
                self.param_dict[key]['transform'])
            sensor.listen(lambda data: self.param_dict[key]['callback'](data))
            self.sensor_dict[key] = sensor
            debug(info=key + ' successfully initialized !',
                  info_type='success')
        else:
            debug(info='Unknown sensor ' + str(key), info_type='error')
            return None
Example #41
    def _check_zk_output(self, cmd, expected):
        # directly for sanity
        out, err = utils.run(utils.vtroot + '/bin/zk ' + cmd, trap_output=True)
        self.assertEqualNormalized(out, expected,
                                   'unexpected direct zk output')

        # using zkocc
        out, err = utils.run(utils.vtroot +
                             '/bin/zk --zk.zkocc-addr=localhost:%u %s' %
                             (utils.zkocc_port_base, cmd),
                             trap_output=True)
        self.assertEqualNormalized(out, expected, 'unexpected zk zkocc output')
        utils.debug("Matched: " + out)
Example #42
def post_chunk(results_server, results_link, id, filename):
    tmpf = open(filename, "r")
    file_data = tmpf.read()
    try:
        ret = post_file.post_multipart(results_server, results_link,
                                       [("key", "value")],
                                       [("filename", filename, file_data)])
    except:
        print "FAIL: error in post data"
        sys.exit(0)
    links = process_Request(ret)
    utils.debug(id + ": sent results")
    return links
Example #43
def start():
    global TESTTHREAD
    try:
        if TESTTHREAD and isinstance(TESTTHREAD,
                                     TestThread) and TESTTHREAD.isAlive():
            if not TESTTHREAD._TestThread__running.isSet():
                TESTTHREAD._TestThread__running.set()
                TESTTHREAD.start()
        else:
            TESTTHREAD = TestThread('TESTTHREAD')
            TESTTHREAD.start()
    except:
        utils.debug(utils.debug_line(), "testthreadclass")
Example #44
    def __insert_into_table(self, conn, memobj, setobj):
        '''
        Insert a single memobj into a db.
        '''
        fields = tuple(
            [memobj.fields[field] for field in memobj.fields.keys()])
        qms = "?"
        for x in xrange(len(fields) - 1):
            qms += ",?"
        cmd = 'insert into %s values(%s)' % (self.get_table_name(setobj), qms)
        debug("cmd: %s\nfields: %s\ntypes: %s" %
              (cmd, fields, str([type(x) for x in fields])))
        conn.execute(cmd, fields)
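
The "?" placeholders built above are the sqlite3 DB-API paramstyle; the same parameterized-insert pattern as a standalone sketch (the table and column names here are invented for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table proc (pid text, name text, path text)")

fields = ("123", "init", "/sbin/init")
qms = ",".join("?" * len(fields))          # -> "?,?,?"
conn.execute("insert into proc values(%s)" % qms, fields)
print(conn.execute("select * from proc").fetchall())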
Example #45
def main(args):
    if args.debug:
        debug()

    if args.cuda:
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
    else:
        config = tf.ConfigProto(device_count={'GPU': 0})
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    data_class = MultiTurnDialog.load_class(args.dataset)
    wordvec_class = WordVector.load_class(args.wvclass)
    if wordvec_class is None:
        wordvec_class = Glove
    if args.cache:
        data = try_cache(data_class, (args.datapath, ), args.cache_dir)
        vocab = data.frequent_vocab_list
        embed = try_cache(
            lambda wv, ez, vl: wordvec_class(wv).load_matrix(ez, vl),
            (args.wvpath, args.word_embedding_size, vocab), args.cache_dir,
            wordvec_class.__name__)
        word2vec = try_cache(
            lambda wv, ez, vl: wordvec_class(wv).load_dict(vl),
            (args.wvpath, args.word_embedding_size, vocab), args.cache_dir,
            wordvec_class.__name__)
    else:
        data = data_class(
            args.datapath,
            min_frequent_vocab_times=args.min_frequent_vocab_times,
            max_sent_length=args.max_sent_length,
            max_turn_length=args.max_turn_length)
        wv = wordvec_class(args.wvpath)
        vocab = data.frequent_vocab_list  #dim:9508
        embed = wv.load_matrix(args.word_embedding_size, vocab)
        word2vec = wv.load_dict(vocab)

    embed = np.array(embed, dtype=np.float32)
    with tf.Session(config=config) as sess:
        model = create_model(sess, data, args, embed)
        if args.mode == "train":
            model.train_process(sess, data, args)
        else:
            multi_ref_res = model.test_multi_ref(sess, data, word2vec, args)
            test_res = model.test_process(sess, data, args)
            test_res.update(multi_ref_res)

            for key, val in test_res.items():
                if isinstance(val, bytes):
                    test_res[key] = str(val)
            json.dump(test_res, open("./result.json", "w"))
Example #46
def main(args, load_exclude_set, restoreCallback):
    logging.basicConfig(
        filename=0,
        level=logging.DEBUG,
        format='%(asctime)s %(filename)s[line:%(lineno)d] %(message)s',
        datefmt='%H:%M:%S')

    if args.debug:
        debug()
    logging.info(json.dumps(args, indent=2))

    cuda_init(0, args.cuda)

    volatile = Storage()
    volatile.load_exclude_set = load_exclude_set
    volatile.restoreCallback = restoreCallback

    data_class = BERTLanguageProcessingBase.load_class('BERT' + args.dataset)
    data_arg = Storage()
    data_arg.file_id = args.datapath
    data_arg.bert_vocab_name = args.bert_vocab
    wordvec_class = WordVector.load_class(args.wvclass)
    if wordvec_class is None:
        wordvec_class = Glove

    def load_dataset(data_arg, wvpath, embedding_size):
        wv = wordvec_class(wvpath)
        dm = data_class(**data_arg)
        return dm, wv.load_matrix(embedding_size, dm.vocab_list)

    if args.cache:
        dm, volatile.wordvec = try_cache(
            load_dataset, (data_arg, args.wvpath, args.embedding_size),
            args.cache_dir, data_class.__name__ + "_" + wordvec_class.__name__)
    else:
        dm, volatile.wordvec = load_dataset(data_arg, args.wvpath,
                                            args.embedding_size)

    volatile.dm = dm

    param = Storage()
    param.args = args
    param.volatile = volatile

    model = Seq2seq(param)
    if args.mode == "train":
        model.train_process()
    elif args.mode == "test":
        model.test_process()
    else:
        raise ValueError("Unknown mode")
Example #47
def sma(symbol,
        period,
        refresh=False,
        start_date=config.start_date,
        end_date=config.end_date):
    """Calculates the simple moving agerage for the given symbol, saves this data in a .csv file, and returns this data
    The SMA is a lagging trend indicator.

    Parameters:
        symbol : str
        period : int
        refresh : bool, optional
        start_date : date, optional
        end_date : date, optional

    Returns:
        dataframe
            A dataframe containing the simple moving average for the given symbol
    """

    if not utils.refresh(utils.get_file_path(
            config.ta_data_path, table_filename, symbol=symbol),
                         refresh=refresh):
        df = pd.read_csv(utils.get_file_path(config.ta_data_path,
                                             table_filename,
                                             symbol=symbol),
                         index_col="Date",
                         parse_dates=["Date"])[start_date:end_date]
    else:
        if utils.refresh(utils.get_file_path(config.prices_data_path,
                                             prices.price_table_filename,
                                             symbol=symbol),
                         refresh=refresh):
            prices.download_data_from_yahoo(symbol,
                                            start_date=start_date,
                                            end_date=end_date)
        df = pd.read_csv(utils.get_file_path(config.prices_data_path,
                                             prices.price_table_filename,
                                             symbol=symbol),
                         usecols=["Date", "Close"],
                         index_col="Date",
                         parse_dates=["Date"])[start_date:end_date]

    if ("SMA" + str(period)) not in df.columns:
        df["SMA" + str(period)] = df["Close"].rolling(period).mean()
        utils.debug(df["SMA" + str(period)])
        df.to_csv(
            utils.get_file_path(config.ta_data_path,
                                table_filename,
                                symbol=symbol))
    return df["SMA" + str(period)]
Example #48
    def __check_commit_p(self, commit):
        """Return True if checks on the commit should be done; False if not.

        The purpose of this routine is to centralize the logic being used
        to determine whether a given commit should be subject to the various
        checks we apply to new commits, or not.

        commit: A CommitInfo object.
        """
        if commit.pre_existing_p:
            # This commit already exists in the repository, so we should
            # normally not check it. Otherwise, we could run the risk of
            # failing a check for a commit which was fine before but no
            # longer follows more recent policies. This would cause problems
            # when trying to create new references, for instance.
            #
            # Also, if we started checking pre-existing commits, this could
            # add up very quickly in situation where new branches are created
            # from branches that already have many commits.

            if is_null_rev(self.old_rev):
                # It is possible that the user may have requested that all
                # new commits in our reference be checked (see below) but,
                # since this is a new branch, we ignore that option for
                # pre-existing commits (otherwise, the same commits would be
                # perpetually re-checked each time a new branch is created).
                return False

            elif (
                self.search_config_option_list("hooks.force-precommit-checks")
                is not None
            ):
                # The user explicitly requested that all new commits on
                # this reference must always be checked.
                return True

            return False

        if commit.is_revert():
            # We have decided that revert commits should not be subject
            # to any check (QB08-047). This allows users to quickly revert
            # a commit if need be, without having to worry about bumping
            # into any check of any kind.
            debug(
                "revert commit detected,"
                " all checks disabled for this commit: %s" % commit.rev
            )
            return False

        # All other commits should be checked.
        return True
Example #49
    def __init__(self, command_queue, lock):
        threading.Thread.__init__(self)
        self.command_queue = command_queue
        self.lock = lock
        self.running = True
        self.arduino = None
        try:
            self.arduino = serial.Serial(port=config.SERIAL_NAME,
                                         baudrate=config.SERIAL_BAUD_RATE,
                                         write_timeout=config.SERIAL_TIMEOUT)
        except serial.SerialException:
            debug("DroidControlThread: Unable to find serial device")
            debug("DroidControlThread: Thread will immediately exit")
            self.stop()
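
A minimal sketch of the same guarded port open with pyserial (the port name and settings here are placeholders):

import serial

try:
    arduino = serial.Serial(port="/dev/ttyUSB0",  # placeholder device
                            baudrate=9600,
                            write_timeout=1)
except serial.SerialException:
    arduino = None  # device absent; the thread above would stop here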
Example #50
    def do_diffs(self):
        '''
        Return the differences between two dbs.

        @return: list of (list of changed memobjs, list of new memobjs, plugin 
            name)
        '''
        # we can only compare tables that exist in both dbs
        diff_tables = self.db_ops.get_tables(self.diff)
        debug("DB1: %s" % diff_tables)
        db_tables = self.db_ops.get_tables(self.db)
        debug("DB2: %s" % db_tables)
        debug("PLUGIN: %s" % plugin)

        # Can only compare memobjs for tables which exist in both dbs
        compareable = set.intersection(set(diff_tables), set(db_tables))
        compareable = [
            x for x in compareable if x.split("_")[0] in self.plugins
        ]
        debug("Can't compare %s" %
              str(set.symmetric_difference(set(diff_tables), set(db_tables))))

        res = []
        for table in compareable:
            changed, new, setobj = self.do_diff(table)
            res.append((changed, new, table.split("_")[0]))
        return res
Example #51
def _run_test_vtctl_clone(server_mode):
  if server_mode:
    clone_flags = '-server-mode'
  else:
    clone_flags = ''
  utils.zk_wipe()

  # Start up a master mysql and vttablet
  utils.run_vtctl('CreateKeyspace -force snapshot_test')

  tablet_62344.init_tablet('master', 'snapshot_test', '0')
  utils.run_vtctl('RebuildShardGraph snapshot_test/0')
  utils.validate_topology()

  tablet_62344.populate('vt_snapshot_test', create_vt_insert_test,
                        populate_vt_insert_test)
  tablet_62344.start_vttablet()

  tablet_62044.create_db('vt_snapshot_test')
  tablet_62044.init_tablet('idle', start=True)

  # small test to make sure the directory validation works
  snapshot_dir = os.path.join(utils.vtdataroot, 'snapshot')
  utils.run("rm -rf %s" % snapshot_dir)
  utils.run("mkdir -p %s" % snapshot_dir)
  utils.run("chmod -w %s" % snapshot_dir)
  out, err = utils.run(utils.vtroot+'/bin/vtctl -logfile=/dev/null Clone -force %s %s %s' %
                       (clone_flags, tablet_62344.tablet_alias,
                        tablet_62044.tablet_alias),
                       trap_output=True, raise_on_error=False)
  if "Cannot validate snapshot directory" not in err:
    raise utils.TestError("expected validation error", err)
  if "Un-reserved test_nj-0000062044" not in err:
    raise utils.TestError("expected Un-reserved", err)
  utils.debug("Failed Clone output: " + err)
  utils.run("chmod +w %s" % snapshot_dir)

  call(["touch", "/tmp/vtSimulateFetchFailures"])
  utils.run_vtctl('Clone -force %s %s %s' %
                  (clone_flags, tablet_62344.tablet_alias,
                   tablet_62044.tablet_alias))

  utils.pause("look at logs!")
  tablet_62044.assert_table_count('vt_snapshot_test', 'vt_insert_test', 4)
  tablet_62344.assert_table_count('vt_snapshot_test', 'vt_insert_test', 4)

  utils.validate_topology()

  tablet_62344.kill_vttablet()
  tablet_62044.kill_vttablet()
Example #52
    def __post(self, session, url, payload, retry_interval=6, login=False):
        while True:
            try:
                response = session.post(url, payload, timeout=self.__timeout)
                if response.status_code > 400:
                    self.__sleep()
                    time.sleep(retry_interval)
                    continue
                else:
                    self.__wake()

                if not login:
                    if "hashAlg" in response.text:
                        info("Session killed, renewing!")
                        self.__session = self.__new_session()
                else:
                    return response
                return response
            except requests.exceptions.HTTPError:
                debug("HTTPError posting {}. Retrying in {}s".format(
                    url, retry_interval))
            except requests.exceptions.ConnectionError:
                debug("ConnectionError posting {}. Retrying in {}s".format(
                    url, retry_interval))
            except requests.exceptions.Timeout:
                debug("Timeout posting {}. Retrying in {}s".format(
                    url, retry_interval))
            except requests.exceptions.RequestException as err:
                debug("None-specific RequestException posting {}, "
                      "{}. Retrying in {}s".format(url, str(err),
                                                   retry_interval))

            time.sleep(retry_interval)
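
The method above is essentially retry-on-exception around session.post; reduced to its skeleton (function name, URL, and payload are placeholders):

import time
import requests

def post_with_retry(session, url, payload, retry_interval=6, timeout=30):
    while True:
        try:
            response = session.post(url, payload, timeout=timeout)
            if response.status_code > 400:
                # server-side trouble: back off and try again
                time.sleep(retry_interval)
                continue
            return response
        except requests.exceptions.RequestException as err:
            print("request failed: %s; retrying in %ss" % (err, retry_interval))
        time.sleep(retry_interval)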
Example #53
    def get_project(self):
        debug('[Get project]')
        projects = self.jira.projects()
        projects_count = len(projects)
        if projects_count == 1:
            return projects[0]
        elif projects_count < 1:
            return None
        else:
            project_menu = [project.name for project in projects]
            entered_project = yield ('Choose your project:', [project_menu])
            for project in projects:
                if project.name == entered_project.text:
                    return project
Example #54
    def validate(self):
        """Raise InvalidUpdate if the update is invalid.

        This method verifies that the reference update itself is valid
        (by calling the validate_ref_update method), and then verifies
        that all the new commits introduced by this update passes
        style-checking.  Otherwise, raise InvalidUpdate.
        """
        debug('validate_ref_update (%s, %s, %s)' %
              (self.ref_name, self.old_rev, self.new_rev))
        self.__reject_frozen_ref_update()
        self.validate_ref_update()
        self.__check_max_commit_emails()
        self.pre_commit_checks()
Example #55
    def RemoveActor(self, pubsub_evt):
        utils.debug("RemoveActor")
        actor = pubsub_evt.data
        ren = self.ren
        ren.RemoveActor(actor)
        self.interactor.Render()
        self._to_show_ball -= 1
        self._check_and_set_ball_visibility()
	
        #=====aji==================================================================
        try:
            Publisher.sendMessage('Del actor volume')
        except:
            pass
Example #56
    def _game_loop(self):
        self._start_ticks = pygame.time.get_ticks()

        while True:
            self.elapsed_seconds = (pygame.time.get_ticks() -
                                    self._start_ticks) / 1000
            self._last_time_check = time_check(self.game_length,
                                               self.elapsed_seconds,
                                               self._last_time_check)
            """ TIME'S UP - GAME OVER """
            if self._last_time_check == 0:
                debug("OUT OF TIME")
                play_sound(config['sounds']['you_lose'])
                self._game_over()
                break
            """ Check for & record cup switch hits, or if (Q) pressed, which returns a True """
            if self.cups.apply_cup_hits():
                debug("QUIT")
                self._game_over()
                break
            """ See if all 10 cups have been hit == WINNER """
            if self.cups.count_cups() == 10:
                debug("WINNER")
                play_music(config['music']['you_win'])
                play_sound(config['sounds']['win_cheering'])
                pygame.mixer.music.fadeout(8000)
                self._game_over()
                break

        log_entry = (
            f"{self.player.player_initials} threw {self.cups.balls_thrown} balls and hit {self.cups.count_cups()} cups in {int(self.elapsed_seconds)} seconds"
        )
        debug(log_entry)
        game_log(log_entry)
Example #57
def test_calibrate_and_transform():
    directory = CAMERA_CALIBRATION_DIR
    filenames = glob.glob(directory + '/*.jpg')
    # filenames = ['camera_cal/calibration2.jpg', 'camera_cal/calibration1.jpg', 'camera_cal/calibration3.jpg']

    camera = Camera(filenames)
    mtx, dist = camera.load_or_calibrate_camera()
    for filename in filenames:
        debug(filename)
        img = mpimg.imread(filename)
        warped, M, Minv = camera.corners_unwarp(img, filename,
                                                CHESSBOARD_SQUARES[0],
                                                CHESSBOARD_SQUARES[1], mtx,
                                                dist)
Example #58
def plot_percentage_gains(symbol,
                          refresh=False,
                          start_date=config.start_date,
                          end_date=config.end_date):
    """Plots a graph of the percentage gains for the given symbol

    Parameters:
        symbol : str
        refresh : bool, optional
        start_date : date, optional
        end_date : date, optional

    Returns:
        figure, axes
            A subplot containing the percentage gains for the given symbol
    """

    if isinstance(symbol, str):
        symbol = [symbol]
    symbol.sort()

    fig, ax = plt.subplots(figsize=config.figsize)
    for s in symbol:
        if utils.refresh(utils.get_file_path(config.prices_data_path,
                                             price_table_filename,
                                             symbol=s),
                         refresh=refresh):
            download_data_from_yahoo(s,
                                     start_date=start_date,
                                     end_date=end_date)
        df = pd.read_csv(utils.get_file_path(config.prices_data_path,
                                             price_table_filename,
                                             symbol=s),
                         index_col="Date",
                         parse_dates=["Date"])[start_date:end_date]

        ax.plot(df.index, df["Close"] / df["Close"][0], label=s + " Price")

    utils.prettify_ax(ax,
                      title="".join(str(s) for s in symbol) + "Price",
                      start_date=start_date,
                      end_date=end_date)

    utils.prettify_fig(fig)
    fig.savefig(
        utils.get_file_path(config.prices_graphs_path,
                            price_graph_filename,
                            symbol=",".join(str(s) for s in symbol)))
    utils.debug(fig)
    return fig, ax
Example #59
    def reduce_eqns(self, var_set, eqns, b):
        set_reduction(eqns, b)

        for i, key in enumerate(eqns):
            if b[i] == 0:
                for cell in eqns[key]:
                    if cell not in self.kb:
                        # self.kb[cell] = None
                        self.fringe.append(cell)
            elif len(eqns[key]) == b[i]:
                for cell in eqns[key]:
                    if cell not in self.kb:
                        self.flag(*cell)
                        debug(f'Mine Flagged at {cell}')
Example #60
    def get_next_configurable_chain(self, run_pool, completed_chain_names):
        """Get the next runnable chain wrt the current run pool and the available completed chain outputs"""
        chain = self.chains[run_pool.pop(0)]
        required_input_chains = chain.get_required_finished_chains()
        # check if the chain requires inputs from other chains
        if required_input_chains:
            undefined_inputs = [x for x in required_input_chains if x not in self.chains]
            if undefined_inputs:
                error(f"Chain {chain.get_name()} requires an input from the non-existing chain(s): {undefined_inputs}")
            if not chain.ready(completed_chain_names):
                run_pool.append(chain.get_name())
                debug("Postponing chain {}".format(chain.get_name()))
                return None, None
        return chain, required_input_chains
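
The postpone-until-ready behavior shown here amounts to a simple scheduling loop; a toy, self-contained sketch with invented chain names and dependencies:

# "B" depends on "A"; popping "B" first forces one postponement
run_pool = ["B", "A"]
deps = {"A": [], "B": ["A"]}
completed = []
while run_pool:
    name = run_pool.pop(0)
    if all(d in completed for d in deps[name]):
        completed.append(name)   # "run" the chain
    else:
        run_pool.append(name)    # postpone, as in the method above
print(completed)                 # -> ['A', 'B']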