Example #1
 def doesAssetAlreadyExistInScene(self, assetName):
     debug(app = self, method = 'doesAssetAlreadyExistInScene', message = 'assetName...\n%s' % assetName, verbose = False)
     assetExists = False
     for each in cmds.ls(type = 'transform'):
         if assetName in each:
             assetExists = True
     return assetExists
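
The check above treats an asset as present whenever its name is a substring of any transform, so an asset called ball would also match ballProxy. A minimal sketch of a stricter exact-name variant, assuming Maya's cmds module is available (compare Example #17, which relies on cmds.ls in the same way):

import maya.cmds as cmds

def assetExistsExactly(assetName):
    # cmds.ls() returns an empty list when no node matches the exact name
    return bool(cmds.ls(assetName, type='transform'))
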
Example #2
    def get_default(self, name, default, force=False):
        # Temporary name fix for when we're given a setting name with the prefix
        # In the future, this test will be gone and no setting will have
        # the 'lldb.' prefix
        if not name.startswith(self.__prefix):
            # Final code should be:
            name = self.__prefix + name
        else:
            debug(debugAny, 'Setting name has lldb prefix: %s' % name)
            import traceback
            traceback.print_stack()

        if not force and name in self.__values:
            return self.__values[name]

        setting = default

        if sublime.active_window() and sublime.active_window().active_view():
            setting = sublime.active_window().active_view().settings().get(name, default)

        if setting is default:
            setting = self.__settings.get(name, default)

        # Cache the setting value and setup a listener
        self.__values[name] = setting
        if name not in self.__settings_keys:
            self.__settings_keys.append(name)
            listener = self.create_listener(name)
            self.__settings.add_on_change(name, listener.on_change)

        debug(debugSettings, 'setting %s: %s' % (name, repr(setting)))
        return setting
Example #3
    def get_haar_points(self, haarCascade, method=co.cv.CV_HAAR_DO_CANNY_PRUNING):
        """
        Search for points matching the haarcascade selected.

        Arguments:
        - self: The main object pointer.
        - haarCascade: The selected cascade.
        - method: The search method to use. DEFAULT: co.cv.CV_HAAR_DO_CANNY_PRUNING.

        Returns a list with the matches.
        """

        cascade = co.cv.cvLoadHaarClassifierCascade( haarCascade, self.imgSize )

        if not cascade:
            debug.exception( "ocvfw", "The Haar Classifier Cascade load failed" )

        co.cv.cvResize( self.img, self.small_img, co.cv.CV_INTER_LINEAR )

        co.cv.cvClearMemStorage( self.storage )

        points = co.cv.cvHaarDetectObjects( self.small_img, cascade, self.storage, 1.2, 2, method, co.cv.cvSize(20, 20) )

        if points:
            matches = [ [ co.cv.cvPoint( int(r.x*self.imageScale), int(r.y*self.imageScale)), \
                          co.cv.cvPoint( int((r.x+r.width)*self.imageScale), int((r.y+r.height)*self.imageScale) )] \
                          for r in points]
            debug.debug( "ocvfw", "cmGetHaarPoints: detected some matches" )
            return matches
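
This example (and Example #5 below) uses the legacy SWIG-style OpenCV bindings. A rough equivalent with the modern cv2 API is sketched here; the function name, the grayscale conversion, and the scale factor are illustrative assumptions rather than part of the original module:

import cv2

def get_haar_points_cv2(image_bgr, cascade_path, scale=1.2, min_neighbors=2):
    # cv2 reports a failed load through CascadeClassifier.empty()
    cascade = cv2.CascadeClassifier(cascade_path)
    if cascade.empty():
        raise IOError("The Haar Classifier Cascade load failed")
    gray = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2GRAY)
    rects = cascade.detectMultiScale(gray, scaleFactor=scale,
                                     minNeighbors=min_neighbors, minSize=(20, 20))
    # Return [top-left, bottom-right] point pairs, mirroring the original return value
    return [[(x, y), (x + w, y + h)] for (x, y, w, h) in rects]
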
Example #4
 def getUserFilmVotes(self, filmwebID, userData):
     
     self.userData = userData
 
     if self.login() == False:
         return False
     
     # fetch the user's rating
     debug.debug('Zalogowano')
     vote_array = {}
     api_method = 'getUserFilmVotes [null, null]\n'.encode('string_escape')
     string = self.sendRequest(api_method, 'get')
     matches = re.findall('\[([0-9]+),[^,]+,([0-9]+),', string)
     if len(matches) > 0:
         for m in matches:
             vote_array[m[0]] = m[1]
         
     # fetch the film title
     api_method = 'getFilmsInfoShort [[' + filmwebID + ']]\n'.encode('string_escape')
     string = self.sendRequest(api_method, 'get')
     matches = re.findall('\[\["([^"]+)",', string)
     if len(matches) > 0:
         title = matches[0]
     else:
         title = ''
         
     if filmwebID in vote_array.keys():
         return [title, vote_array[filmwebID]]
     else:
         return [title, '0']
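
The vote and title lookups both hinge on re.findall over the raw API response. A tiny stand-alone illustration of the first pattern; the sample string is made up purely to show the shape the regex expects:

import re

sample = '[123,"x",8,...],[456,"y",7,...]'   # hypothetical response fragment
votes = dict(re.findall(r'\[([0-9]+),[^,]+,([0-9]+),', sample))
print(votes)   # {'123': '8', '456': '7'}
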
Example #5
    def get_haar_roi_points(self, haarCascade, rect, origSize=(0, 0), method=co.cv.CV_HAAR_DO_CANNY_PRUNING):
        """
        Search for points matching the haarcascade selected.

        Arguments:
        - self: The main object pointer.
        - haarCascade: The selected cascade.
        - method: The search method to use. DEFAULT: co.cv.CV_HAAR_DO_CANNY_PRUNING.

        Returns a list with the matches.
        """

        cascade = co.cv.cvLoadHaarClassifierCascade( haarCascade, self.imgSize )

        if not cascade:
            debug.exception( "ocvfw", "The Haar Classifier Cascade load failed" )

        co.cv.cvClearMemStorage(self.storage)

        imageROI = co.cv.cvGetSubRect(self.img, rect)

        if cascade:
            points = co.cv.cvHaarDetectObjects( imageROI, cascade, self.storage,
                                    1.2, 2, method, co.cv.cvSize(20,20) )
        else:
            debug.exception( "ocvfw", "The Haar Classifier Cascade load Failed (ROI)" )

        if points:
            matches = [ [ co.cv.cvPoint( int(r.x+origSize[0]), int(r.y+origSize[1])), \
                          co.cv.cvPoint( int(r.x+r.width+origSize[0]), int(r.y+r.height+origSize[1] ))] \
                          for r in points]

            debug.debug( "ocvfw", "cmGetHaarROIPoints: detected some matches" )
            return matches
Example #6
	def get(self):
		"""Internal use only: not a public interface"""

		attempts = 2
		result = ""

		if self.port is not None:
			while 1:
				c = self.port.read(1)

				# if nothing was received
				if len(c) == 0:

					if(attempts <= 0):
						break

					debug("get() found nothing")
					
					attempts -= 1
					continue

				# skip carriage returns
				if c == '\r':
					continue

				# end on chevron
				if c == ">":
					break;
				else: # whatever is left must be part of the response
					result = result + c
		else:
			debug("NO self.port!", True)

		return result
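
The loop above interleaves the retry counter, the carriage-return filter, and the prompt detection. A compact sketch of the same idea, restricted to the port.read(1) call the example already uses (the helper name and the attempt count are illustrative):

def read_until_prompt(port, attempts=2):
    # Collect characters until the '>' prompt, skipping carriage returns
    # and giving up after `attempts` empty reads.
    result = ""
    while attempts > 0:
        c = port.read(1)
        if len(c) == 0:
            attempts -= 1
            continue
        if c == '\r':
            continue
        if c == '>':
            break
        result += c
    return result
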
Example #7
    def set_lkpoint(self, point):
        """
        Set a point to track using the Lucas-Kanade (LK) method.

        Arguments:
        - self: The main object pointer.
        - point: A co.cv.cvPoint Point.
        """

        cvPoint = co.cv.cvPoint( point.x, point.y )

        self.img_lkpoints["current"] = [ co.cv.cvPointTo32f ( cvPoint ) ]

        if self.img_lkpoints["current"]:
            co.cv.cvFindCornerSubPix (
                self.grey,
                self.img_lkpoints["current"],
                co.cv.cvSize (20, 20), co.cv.cvSize (-1, -1),
                co.cv.cvTermCriteria (co.cv.CV_TERMCRIT_ITER | co.cv.CV_TERMCRIT_EPS, 20, 0.03))

            point.set_opencv( cvPoint )
            self.img_lkpoints["points"].append(point)

            setattr(point.parent, point.label, point)

            if len(self.img_lkpoints["last"]) > 0:
                self.img_lkpoints["last"].append( self.img_lkpoints["current"][0] )

            debug.debug( "ocvfw", "cmSetLKPoints: New LK Point Added" )
        else:
            self.img_lkpoints["current"] = []
Example #8
def maintainMembers():
    """
    use the membersrefresh operation to maintain
    the universe of known nodes.
    """
    global universe
    
    debug("Running maintain members.", 
        info=True, threshold=1)

    possibledead = set(universe.keys())

    membership.membersRefresh()

    # Add in new nodes.
    tempUniverse = membership.getCurrentMemberDict()
    for uid in tempUniverse:
        if uid not in universe and not me.getMe().__eq__(tempUniverse[uid]):
            universe[uid] = nodes.ExternalNode.fromBase(tempUniverse[uid])
        if uid in possibledead:
            possibledead.remove(uid)

    # Remove dead nodes
    for dead in possibledead:
        deadNode(dead)

    # should I add me in here? not sure.
    universe[me.getMe().getUid()] = me.getMe()

    debug("has a universe of size: " + str(len(universe)), info=True)
Example #9
    def _publish_gpu_for_item(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
        """
        Export a gpu cache for the specified item and publish it  to Shotgun.
        """
        group_name = item["name"].strip("|")
        debug(app = None, method = '_publish_gpu_for_item', message = 'group_name: %s' % group_name, verbose = False)
        tank_type = output["tank_type"]
        publish_template = output["publish_template"]        
                
        # get the current scene path and extract fields from it using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]

        # update fields with the group name:
        fields["grp_name"] = group_name

        ## create the publish path by applying the fields with the publish template:
        publish_path = publish_template.apply_fields(fields)
        #'@asset_root/publish/gpu/{name}[_{grp_name}].v{version}.abc'
        gpuFileName = os.path.splitext(publish_path)[0].split('\\')[-1] 
        fileDir = '/'.join(publish_path.split('\\')[0:-1])
        debug(app = None, method = '_publish_gpu_for_item', message = 'gpuFileName: %s' % gpuFileName, verbose = False)
        
        ## Now fix the shaders
        shd.fixDGForGPU()
        
        if cmds.objExists('CORE_ARCHIVES_hrc'):
            cmds.setAttr('CORE_ARCHIVES_hrc.visibility', 0)
        
        if cmds.objExists('ROOT_ARCHIVES_DNT_hrc'):
            cmds.setAttr('ROOT_ARCHIVES_DNT_hrc.visibility', 0)
                                       
        ## build and execute the gpu cache export command for this item:
        try:
            print '====================='
            print 'Exporting gpu now to %s\%s' % (fileDir, gpuFileName)
            #PUT THE FILE EXPORT COMMAND HERE
            cmds.select(clear = True)
            for geo in cmds.listRelatives(group_name, children = True):
                if 'geo_hrc' in geo:
                    geoGroup = str(group_name)
                    debug(app = None, method = '_publish_gpu_for_item', message = 'geoGroup: %s' % geoGroup, verbose = False)
                
            cmds.select(geoGroup)
            
            debug(app = None, method = '_publish_gpu_for_item', message = 'geoGroup: %s' % geoGroup, verbose = False)
            debug(app = None, method = '_publish_gpu_for_item', message = "gpuCache -startTime 1 -endTime 1 -optimize -optimizationThreshold 40000 -directory \"%s\" -fileName %s %s;" % (fileDir, gpuFileName, geoGroup), verbose = False)
            
            mel.eval("gpuCache -startTime 1 -endTime 1 -optimize -optimizationThreshold 40000 -directory \"%s\" -fileName %s %s;" % (fileDir, gpuFileName, geoGroup))

            print 'Finished gpu export...'
            print '====================='
            
            if cmds.objExists('dgSHD'):            
                ## Now reconnect the FileIn nodes
                for key, var in filesDict.items():
                    cmds.connectAttr('%s.outColor' % key, '%s.color' % var)
        except Exception, e:
            raise TankError("Failed to export gpu cache file")
Example #10
def setSpeakLanguage( nNumLang = getDefaultSpeakLanguage(), proxyTts = False ):
    "change the tts speak language"
    print( "SetSpeakLanguage to: %d" % nNumLang );
    if( not proxyTts ):
        proxyTts = naoqitools.myGetProxy( "ALTextToSpeech" );
    if( not proxyTts ):
        debug.debug( "ERR: setSpeakLanguage: can't connect to tts" );
        return;

    try:
        if( nNumLang == constants.LANG_FR ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceFrench" );
        elif ( nNumLang == constants.LANG_EN ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceEnglish" );
        elif ( nNumLang == constants.LANG_SP ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceSpanish" );
        elif ( nNumLang == constants.LANG_IT ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceItalian" );
        elif ( nNumLang == constants.LANG_GE ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceGerman" );
        elif ( nNumLang == constants.LANG_CH ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceChinese" );
        elif ( nNumLang == constants.LANG_PO ):
            proxyTts.loadVoicePreference( "NaoOfficialVoicePolish" );
        elif ( nNumLang == constants.LANG_KO ):
            proxyTts.loadVoicePreference( "NaoOfficialVoiceKorean" );            
        else:
            proxyTts.loadVoicePreference( "NaoOfficialVoiceEnglish" );
    except:
        print( "ERR: setSpeakLanguage: loadVoicePreference error" );
Example #11
 def connectionLost(self, reason):
     """
     Callback called when this connection is lost
     """
     super.connectionLost()
     debug("Connection lost for reason: " + str(reason), info=True)
     _parentNode.destroyTCPConnection()
Example #12
    def onStateChange(self, uri, state, code, reason):
        debug("Call state: %s %s %s" % (uri, state, code))
        self.uri = uri
        self.callerIDLabel.setText(self.uri)
        if state == "CALLING" or state == "CONNECTING":
            self.show_error(_("Calling..."))
            self.dialButton.hide()
            self.hangupButton.show()

        if state == "CONFIRMED":
            self.seconds = 0
            self.callerIDLabel.setText(self.uri)
            self.timerLabel.setText(u"0:00")
            self.setWindowTitle(_("Call in process"))
            self.startTimer()
            self.show_call()

        if state == "DISCONNCTD":
            self.setWindowTitle(_("Telesk"))
            self.timerLabel.setText(u"0:00")
            if code == 503:
                self.show_error(_("Not available"))
            if code == 404:
                self.show_error(_("Not found"))
            if code == 486:
                self.show_error(_("Busy here"))
            if code == 603:
                self.show_error(_("Decline"))

            self.errortimer.start()
            self.connect(self.errortimer, QtCore.SIGNAL("timeout()"), self.show_dialer)
Example #13
 def _load_cache_data(self):
     cache = self.get_cache_path()
     debug(self, "Load: " + cache)
     if not os.path.exists(cache):
         return None
     mtime = None
     try:
         mtime = os.path.getmtime(cache)
     except:
         warn(self, "Cannot stat cache file: " + cache)
     refresh = self.get_cache_refresh()
     if refresh and refresh != -1:
         diff = time.time() - mtime
         if diff > refresh:
             debug(self, "Refreshing cache")
             self.hook_pre_refresh()
             return None
     data = None
     with open(cache, 'rb') as f:
         try:
             data = pickle.load(f)
         except:
             warn(self, "Pickle can't load data from file")
             return None
     return data
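
The loader combines three concerns: an existence check, a staleness check against the file's mtime, and unpickling. A stdlib-only sketch of the same pattern with the class plumbing removed (the function and parameter names are made up):

import os
import time
import pickle

def load_cache(path, max_age_seconds=None):
    """Return the unpickled cache, or None if it is missing, stale, or unreadable."""
    if not os.path.exists(path):
        return None
    if max_age_seconds and max_age_seconds != -1:
        if time.time() - os.path.getmtime(path) > max_age_seconds:
            return None
    try:
        with open(path, 'rb') as f:
            return pickle.load(f)
    except Exception:
        return None
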
Example #14
def create(source, i, width, height, q):
    
    if (source[:5] == 'image'):
        output = ''
        jsonGet = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "Textures.GetTextures", "params": {  "properties":["url", "cachedurl"], "filter": {"field": "url", "operator": "is", "value":"' + source + '"}}, "id": 1}')
        jsonGet = unicode(jsonGet, 'utf-8', errors='ignore')
        jsonGetResponse = json.loads(jsonGet)
        
        if 'result' in jsonGetResponse and 'textures' in jsonGetResponse['result']:
            for t in jsonGetResponse['result']['textures']:
                if 'cachedurl' in t and t['cachedurl'] != '':
                    file = __thumbpath__ + t['cachedurl'].replace('\\', '/')
                   
                    # resize image
                    try:
                        image = Image.open(file)
                        if image.mode != 'RGB':
                            image = image.convert('RGB')
                        h = image.size[1]
                        if h > 10:
                            if (h > height):
                                image.load()
                                image = image.resize((width, height), Image.ANTIALIAS)
                            image_bin = cStringIO.StringIO()
                            image.save(image_bin, 'JPEG', quality=int(q))
                            output = image_bin.getvalue()
                            image_bin.close()
                    
                    except Exception as Error:
                        debug.debug(str(file))
                        debug.debug(str(Error))
    
    return output
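
The resize step above relies on the Python 2 cStringIO module and the Image.ANTIALIAS constant, which newer Pillow releases have dropped. A small sketch of the same resize-and-re-encode step using io.BytesIO and LANCZOS (Pillow's current name for the same filter); the helper name is illustrative:

import io
from PIL import Image

def reencode_jpeg(path, width, height, quality=80):
    # Open, normalise to RGB, downscale if taller than the target, re-encode as JPEG
    image = Image.open(path)
    if image.mode != 'RGB':
        image = image.convert('RGB')
    if image.size[1] > height:
        image = image.resize((width, height), Image.LANCZOS)
    buf = io.BytesIO()
    image.save(buf, 'JPEG', quality=int(quality))
    return buf.getvalue()
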
    
Example #15
def displayText(room,textlist,enqueue=False):
	#debug.warn("Displaying campaign text "+str(textlist))
	if VS.isserver():
		return
	if room==-1:
		debug.error("Room is -1!!!")
	import Base
	room=Base.GetCurRoom()
	func=Base.MessageToRoom
	if enqueue:
		func=Base.EnqueueMessageToRoom
	if type(textlist) is str:
		if textlist!='':
			debug.debug('*** Base.message('+textlist+')')
			func(room,textline(textlist))
	else:
		if textlist and len(textlist):
			debug.debug('*** Base.message('+str(textlist[0])+')')
			#Base.MessageToRoom(room,str(textlist[0]))
			stri=''
			if enqueue:
				for x in textlist:
					func(room,textline(x))
			else:
				for x in textlist:
					stri+=textline(x)+"\n"
				func(room,stri);
Example #16
    def untile(self):
        debug('Untiling %s' % (self))
        for c in self.store.masters + self.store.slaves:
            c.restore()

        self.tiling = False
        xpybutil.conn.flush()
Example #17
 def doesAssetAlreadyExistInScene(self, assetName):
     debug(app = self, method = 'doesAssetAlreadyExistInScene', message = 'assetName...\n%s' % assetName, verbose = False)
     assetExists = False
     if cmds.ls(assetName) != []:
         assetExists = True
     
     return assetExists
Example #18
    def __init__(self, wid):
        self.wid = wid

        self.name = ewmh.get_wm_name(self.wid).reply() or 'N/A'
        debug('Connecting to %s' % self)

        window.listen(self.wid, 'PropertyChange', 'FocusChange')
        event.connect('PropertyNotify', self.wid, self.cb_property_notify)
        event.connect('FocusIn', self.wid, self.cb_focus_in)
        event.connect('FocusOut', self.wid, self.cb_focus_out)

        # This connects to the parent window (decorations)
        # We get all resize AND move events... might be too much
        self.parentid = window.get_parent_window(self.wid)
        window.listen(self.parentid, 'StructureNotify')
        event.connect('ConfigureNotify', self.parentid, 
                      self.cb_configure_notify)

        # A window should only be floating if that is default
        self.floating = config.floats_default

        # Not currently in a "moving" state
        self.moving = False

        # Load some data
        self.desk = ewmh.get_wm_desktop(self.wid).reply()

        # Add it to this desktop's tilers
        tile.update_client_add(self)

        # First cut at saving client geometry
        self.save()
Example #19
 def remove(self):
     tile.update_client_removal(self)
     debug('Disconnecting from %s' % self)
     event.disconnect('ConfigureNotify', self.parentid)
     event.disconnect('PropertyNotify', self.wid)
     event.disconnect('FocusIn', self.wid)
     event.disconnect('FocusOut', self.wid)
Example #20
 def __getitem__(self, key):
     if isinstance(key, int):
         return self.modes[key]
     elif isinstance(key, basestring):
         return self.__dict__[key]
     else:
         debug("OBD commands can only be retrieved by PID value or dict name", True)
Example #21
    def compute(self, _data):
        r = Response(_data)
        debug("command returned: %s" % _data)
        if "SEARCHING..." in _data:
            _data = _data[12:]

        # strips spaces, and removes [\n\r\t]
        _data = "".join(_data.split())

        if (len(_data) > 0) and ("NODATA" not in _data) and isHex(_data):
            # the first 4 chars are codes from the ELM (we don't need those)
            _data = _data[4:]

            # constrain number of bytes in response
            if (self.bytes > 0): # zero bytes means flexible response
                constrainHex(_data, self.bytes)

            # store the decoded value in the response object
            r.set(self.decode(_data))

        else:
            # not a parseable response
            debug("return data could not be decoded")

        return r
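
The "".join(_data.split()) idiom does most of the cleanup: str.split() with no argument splits on any run of whitespace, including \r, \n and \t, so rejoining removes all of it in one pass. A one-line illustration with a made-up adapter reply:

raw = "41 0C 1A F8 \r\n"            # hypothetical ELM-style reply
print("".join(raw.split()))          # 410C1AF8
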
Example #22
def ForEachSys (functio):
    debug.debug("Getting reachable systems...")
    systems = AllSystems()
    debug.debug("done")
    for sys in systems:
        functio(sys)
    return len(systems)
Example #23
 def get( self, strPathBoxName ):
     try:
         nNbrFrame = self.animations_FrameNumber[strPathBoxName];
     except:
         nNbrFrame = 0;
     debug.debug( "INF: choregraphetools.FrameNumber.get( '%s' ): %d" % ( strPathBoxName, nNbrFrame ) );
     return nNbrFrame;
Example #24
def _setBaseFluidAttrs(fluid):
    #set required attributes
    attrs = ['ty', 'sx', 'sy', 'sz']

    ## Lock attrs
    for eachAttr in attrs:
        cmds.setAttr ("%s.%s" % (fluid[0], eachAttr), lock = True)
        cmds.setAttr ("%s.%s" % (fluid[0], eachAttr), keyable = False , channelBox = False)

    #scale fluid container
    attrs = ['scaleX', 'scaleY', 'scaleZ', 'rx']
    for each in attrs:
        if each != 'rx':
            var = 0.26
        else:
            var = -90
        try:
            cmds.setAttr ("%s.%s" % (fluid[0], each), var)
        except RuntimeError:
            cmds.setAttr ("%s.%s" % (fluid[0], each), lock = False)
            cmds.setAttr ("%s.%s" % (fluid[0], each), var)
    try:
        cmds.setAttr ("%sShape.gravity" % fluid[0], 1)
    except RuntimeError:
        pass

    debug(None, method = '_setBaseFluidAttrs', message = 'Successfully set attrs for %s:' % fluid , verbose = False)
Example #25
    def openTCPConnection(self, bindAddress=None):
        """
        Open a new TCP connection from the local node to this node.
        """
        debug("Trying to a new connection to [ " + self.getShortUid() + " ]", info=True)
        connector = connections.openConnection(self.getIp(), self.getPort())

        if not bindAddress:
            bindAddress = (me.getMe().getIp(), config.getNextSendPort())
            # bindAddress = twisted.internet.address.IPv4Address(
            #    'TCP', me.getMe().getIp(), config.SEND_PORT)
            # config.getNextSendPort()
        """
        c = connections.HissTCPClientConnection.fromPrimitives(
            self,
            self.getIp(), 
            self.getPort(), 
            bindAddress,
            connector)
        """
        """
        c = connections.HissTCPClientConnection.fromPrimitives(
            self,
            self.getIp(), 
            self.getPort(), 
            None,
            connector)
        self._tcpConnection = c
        """
        c = connections.HissConnection.fromPrimitives(self, self.getIp(), self.getPort(), None, connector)
        self._tcpConnection = c
Example #26
def main():
    debug("FUN TIME!")
    
    stdin = sys.stdin.readlines()

    threads = 5
    pause = .1
    slots = [None] * threads
    for line in stdin:
        user, password = line.strip().split(";")[:2]
        user = user.lower()

        for discover in discovers:
            passed = False
            tries = 0
            while not passed:
                tries += 1
                for pos in xrange(threads):
                    if slots[pos] is None or not slots[pos].is_alive():
                        slots[pos] = discover(user, password)
                        passed = True
                        break
                time.sleep(pause)
            pause *= tries ** 0.2 * .9
            if tries > 10:
                pause = pause * 10 + 0.001

    for slot in slots:
        if slot is not None:
            slot.get_result()

    debug(">>> EOF !!")
Example #27
def run_test(build, machine, test, path):
    # Open files for raw output from the victim and log data from the test
    raw_file_name = os.path.join(path, RAW_FILE_NAME)
    debug.verbose('open %s for raw output' % raw_file_name)
    raw_file = open(raw_file_name, 'w')

    # run the test, dumping the output to the raw file as we go
    try:
        debug.verbose('harness: setup test')
        test.setup(build, machine, path)
        debug.verbose('harness: run test')
        starttime = datetime.datetime.now()
        for out in test.run(build, machine, path):
            # timedelta for the time this line was emitted from the start of the run
            timestamp = datetime.datetime.now() - starttime
            # format as string, discarding sub-second precision
            timestr = str(timestamp).split('.', 1)[0]
            # filter output line of control characters
            filtered_out = filter(lambda c: c in string.printable, out.rstrip())
            # debug filtered output along with timestamp
            debug.debug('[%s] %s' % (timestr, filtered_out))
            # log full raw line (without timestamp) to output file
            raw_file.write(out)
        debug.verbose('harness: output complete')
    except KeyboardInterrupt:
        # let the user know that we are on our way out
        debug.error('Interrupted! Performing cleanup...')
        raise
    finally:
        raw_file.close()
        debug.verbose('harness: cleanup test')
        test.cleanup(machine)
Example #28
def myPCall( proxy, strFuncName, args ):
  try:
    listArgs = [ proxy, strFuncName, args ];
    thread.start_new_thread( LaunchCall, (listArgs,) );
    return;
  except BaseException, err:
    debug.debug( "MyPCall: Exception catched: %s" % err );
Example #29
def ReadBaseNameList(faction):
    bnl=[]
    debug.debug('reading base names %s', faction)
    filename = 'universe/fgnames/'+faction+'.txt'
    try:
        f = open (filename,'r')
        bnl = f.readlines()
        f.close()
    except:
        try:
            f = open ('../'+filename,'r')
            bnl = f.readlines()
            f.close()
        except:
            try:
                f = open ('../universe/names.txt','r')
                bnl = f.readlines()
                f.close()
            except:
                try:
                    f = open ('universe/names.txt','r')
                    bnl = f.readlines()
                    f.close()
                except:
                    global genericalphabet
                    bnl=genericalphabet
    for i in xrange(len(bnl)):
        bnl[i]=bnl[i].rstrip()#.decode('utf8','ignore')
    import vsrandom
    vsrandom.shuffle(bnl)
    return bnl
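
The four nested try/except blocks implement a fallback chain over candidate files. The same search order reads more directly as a loop; a sketch that keeps a default name list as the last resort, just as the original falls back to genericalphabet:

def read_base_name_list(faction, default_names=()):
    candidates = [
        'universe/fgnames/' + faction + '.txt',
        '../universe/fgnames/' + faction + '.txt',
        '../universe/names.txt',
        'universe/names.txt',
    ]
    for filename in candidates:
        try:
            with open(filename, 'r') as f:
                return [line.rstrip() for line in f]
        except IOError:
            continue
    return list(default_names)
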
Example #30
	def stop(self):
		if self.thread is not None:
			debug("Stopping async thread...")
			self.running = False
			self.thread.join()
			self.thread = None
			debug("Async thread stopped")
Example #31
def ReloadUniverse():
    global genUniverse, hasUniverse
    if cp >= 0:
        ShowProgress.activateProgressScreen("loading", force=True)
        debug.debug('Purging...')
        for i in fg_util.AllFactions():
            fg_util.PurgeZeroShips(i)
            systemcount[i] = 0
        debug.debug('StartSystemCount')
        ForEachSys(CountSystems)
        debug.debug(systemcount)
        debug.debug('EndSystemCount')
        genUniverse = 0
        needNewUniverse = 0
        curfaclist = fg_util.AllFactions()
        reflist = fg_util.ReadStringList(cp, "FactionRefList")

        if (reflist != curfaclist):
            needNewUniverse = 1
            debug.debug('reflist is ' + str(reflist))
            debug.debug('curfaclist is ' + str(curfaclist))

        if (fg_util.HasLegacyFGFormat()):
            needNewUniverse = 1
            debug.warn(
                'save using legacy FG format... resetting universe to reformat'
            )
            fg_util.DeleteLegacyFGLeftovers()

        if needNewUniverse:
            fg_util.WriteStringList(cp, "FactionRefList", curfaclist)
            debug.debug('generating ships... ... ...')
            GenerateAllShips(
            )  ###Insert number of flight groups and max ships per fg
            debug.debug('placing ships... ... ...')
            genUniverse = MakeUniverse()
            #now every system has distributed ships in the save data!
        else:
            GenerateAllShips()
            debug.debug("Second Load")
            for i in range(len(fgnames)):
                fg_util.origfgoffset = fgoffset[i]
                fgnames[i] = fg_util.TweakFGNames(origfgnames[i])
                fgoffset[i] += 1
        campaigns.loadAll(cp)
        hasUniverse = True
        #TODO: add ships to current system (for both modes)  uru?
        ShowProgress.deactivateProgressScreen('loading')
    else:
        debug.error('fatal error: no cockpit')
Example #32
def _create_WAKE_FluidTexture(oceanShader='',
                              size='',
                              pathToPreset='',
                              wakeFluidShapeName=CONST.WAKE_FLUID_SHAPENODE):
    """
    Create a 3d fluid texture, make it a 2d simulation and texture the waveHeightOffset of the ocean shader with its outAlpha.
    @param oceanShader:
    @param size:
    @type oceanShader:
    @type size:
    """
    debug(None,
          method='_create_WAKE_FluidTexture',
          message='Building fluid %s' % wakeFluidShapeName,
          verbose=False)
    fluidShape = cmds.shadingNode('fluidTexture3D',
                                  asTexture=True,
                                  name=wakeFluidShapeName)

    # Get parent of shape and set attrs
    fluid = cmds.listRelatives(fluidShape, parent=True)
    _setBaseFluidAttrs(fluid)

    ## Connect to time
    cmds.connectAttr("time1.outTime", (fluidShape + ".currentTime"))
    debug(None,
          method='_create_WAKE_FluidTexture',
          message='%s connected to time1' % fluidShape,
          verbose=False)

    ## Apply wake preset
    mel.eval("""applyPresetToNode " """ + fluidShape +
             """ " "" "" "%s" 1;""" % pathToPreset)
    debug(None,
          method='_create_WAKE_FluidTexture',
          message='Mel preset applied: %s' % pathToPreset,
          verbose=False)

    #expression to maintain resolution/container size relationship
    expStringList = [
        "int $width = %s.dimensionsW;\r\n" % fluidShape,
        'int $height = %s.dimensionsH;\r\n' % fluidShape,
        'if ($width>= $height)\r\n', '{\r\n',
        '%s.baseResolution = $width*4;\r\n' % fluidShape, '}\r\n', 'else\r\n',
        '{\r\n%s.baseResolution = $height*4;\r\n}' % fluidShape
    ]

    utils.checkExpressionExists('waterSurfaceFluidTexture')
    cmds.expression(n='waterSurfaceFluidTexture',
                    string=utils.processExpressionString(expStringList))
    debug(None,
          method='_create_WAKE_FluidTexture',
          message=' Expression %s_foamTexture_ContainerSize built' %
          fluidShape,
          verbose=False)

    baseTextureName = wakeFluidShapeName.split('Shape')[0]
    cmds.rename(fluid[0], '%s' % baseTextureName)
    utils.createTypeTag(obj='%s' % baseTextureName,
                        typeName='%s' % baseTextureName)
    debug(None,
          method='_create_WAKE_FluidTexture',
          message=' Rename and Tag successful..',
          verbose=False)

    ## Connect new wake fluid tex to ocean
    cmds.connectAttr("%s.outAlpha" % wakeFluidShapeName,
                     "%s.waveHeightOffset" % oceanShader,
                     force=True)

    debug(None,
          method='_create_WAKE_FluidTexture',
          message=' Returning %s:' % fluidShape,
          verbose=False)
    return fluidShape
Example #33
sys.path.append('T:/software/lsapipeline/custom')
import maya_genericSettings as settings
import utils as utils
from debug import debug
import CONST as CONST
#reload(utils)
#reload(CONST)
#reload(settings)

## METALCORE / XML STUFF
try:
    from mentalcore import mapi
    from mentalcore import mlib
except:
    debug(None,
          method='core_archive_lib',
          message='metalcore mapi and mlib failed to load!!',
          verbose=False)
    pass

import xml.etree.ElementTree as xml
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement, Comment
from xml.dom import minidom
import tempfile
import gzip


def _convertOceanToPolys():
    oceanName = 'ocean_srf'
    dispName = '%s_displacement_geo' % oceanName
Example #34
def should_ignore(client):
    # Don't waste time on clients we'll never possibly tile
    if client in ignore:
        return True

    nm = ewmh.get_wm_name(client).reply()

    wm_class = icccm.get_wm_class(client).reply()
    if wm_class is not None:
        try:
            inst, cls = wm_class
            matchNames = set([inst.lower(), cls.lower()])

            if matchNames.intersection(config.ignore):
                debug('Ignoring %s because it is in the ignore list' % nm)
                return True

            if hasattr(config, 'tile_only') and config.tile_only:
                if not matchNames.intersection(config.tile_only):
                    debug('Ignoring %s because it is not in the tile_only '
                          'list' % nm)
                    return True
        except ValueError:
            pass

    if icccm.get_wm_transient_for(client).reply() is not None:
        debug('Ignoring %s because it is transient' % nm)
        ignore.append(client)
        return True

    wtype = ewmh.get_wm_window_type(client).reply()
    if wtype:
        for atom in wtype:
            aname = util.get_atom_name(atom)

            if aname in (
                    '_NET_WM_WINDOW_TYPE_DESKTOP', '_NET_WM_WINDOW_TYPE_DOCK',
                    '_NET_WM_WINDOW_TYPE_TOOLBAR', '_NET_WM_WINDOW_TYPE_MENU',
                    '_NET_WM_WINDOW_TYPE_UTILITY',
                    '_NET_WM_WINDOW_TYPE_SPLASH', '_NET_WM_WINDOW_TYPE_DIALOG',
                    '_NET_WM_WINDOW_TYPE_DROPDOWN_MENU',
                    '_NET_WM_WINDOW_TYPE_POPUP_MENU',
                    '_NET_WM_WINDOW_TYPE_TOOLTIP',
                    '_NET_WM_WINDOW_TYPE_NOTIFICATION',
                    '_NET_WM_WINDOW_TYPE_COMBO', '_NET_WM_WINDOW_TYPE_DND'):
                debug('Ignoring %s because it has type %s' % (nm, aname))
                ignore.append(client)
                return True

    wstate = ewmh.get_wm_state(client).reply()
    if wstate is None:
        debug('Ignoring %s because it does not have a state' % nm)
        return True

    for atom in wstate:
        aname = util.get_atom_name(atom)

        # For now, while I decide how to handle these guys
        if aname == '_NET_WM_STATE_STICKY':
            debug('Ignoring %s because it is sticky and they are weird' % nm)
            return True
        if aname in ('_NET_WM_STATE_SHADED', '_NET_WM_STATE_HIDDEN',
                     '_NET_WM_STATE_FULLSCREEN', '_NET_WM_STATE_MODAL'):
            debug('Ignoring %s because it has state %s' % (nm, aname))
            return True

    d = ewmh.get_wm_desktop(client).reply()
    if d == 0xffffffff:
        debug('Ignoring %s because it\'s on all desktops '
              '(not implemented)' % nm)
        return True

    return False
Example #35
    def database(self, datas=None, type='storage', table_name=None):
        #type: storage | database
        config = configset.configset()
        config.configname = 'lfd.ini'
        dbname = config.read_config('DATABASE', 'name', value='lfd.db3')
        host = config.read_config('DATABASE', 'host', value='127.0.0.1')
        port = config.read_config('DATABASE', 'port', value='3306')
        dbtype = config.read_config('DATABASE', 'type', value='sqlite')
        username = config.read_config('DATABASE', 'username', value='root')
        password = config.read_config('DATABASE', 'password', value='')

        debug(dbname=dbname)
        debug(host=host)
        debug(port=port)
        debug(dbtype=dbtype)

        if dbtype == 'sqlite':
            try:
                # from sqlite3 import dbapi2 as sqlite
                import sqlite3 as sqlite
            except ImportError:
                sys.exit(
                    "You not have module \"pysqlite2\", please download before ! \n"
                )
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")

            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                       'id' INTEGER PRIMARY KEY  AUTOINCREMENT, \
                       'name' VARCHAR(255) NOT NULL, \
                       'description' VARCHAR(255) NOT NULL, \
                       'relpath' VARCHAR(255) NOT NULL);'''
            SQL_CREATE_REPO = '''CREATE TABLE IF NOT EXISTS localstorage ( \
                       'total' VARCHAR(255) NOT NULL, \
                       'packages' VARCHAR(255) NOT NULL);'''
            SQL_DROP = "DROP TABLE %s;" % (table_name)
            conn = sqlite.connect(dbname)
            cursor = conn.cursor()

            def create():
                cursor.execute(SQL_CREATE)
                conn.commit()
                cursor.execute(SQL_CREATE_REPO)
                conn.commit()

            def get(table_name):
                exc01 = cursor.execute('SELECT * FROM %s;' % (table_name))
                conn.commit()
                return exc01.fetchall()

            def insert():
                if datas:
                    SQL_INSERT = 'INSERT INTO packages (\'name\', \'description\', \'relpath\') VALUES("%s", "%s", "%s");' % (
                        datas[0], convert.convert(
                            datas[1]), convert.convert(datas[2]))
                    SQL_INSERT_LOCALSTORAGE = 'INSERT INTO localstorage (\'total\', \'packages\') VALUES("%s", "%s");' % (
                        datas[0], convert.convert(datas[1]))
                    try:
                        # print "SQL_INSERT =", SQL_INSERT
                        if type == 'storage':
                            cursor.execute(SQL_INSERT_LOCALSTORAGE)
                        elif type == 'database':
                            cursor.execute(SQL_INSERT)
                        conn.commit()
                    except:
                        if type == 'database':
                            SQL_INSERT = "INSERT INTO packages ('name', 'relpath') VALUES('%s', '%s');" % (
                                datas[0], convert.convert(datas[2]))
                        cursor.execute(SQL_INSERT)
                        conn.commit()

            def truncate(table_name):
                cursor.execute('DELETE FROM %s;' % (table_name))
                conn.commit()
                cursor.execute('VACUUM;')
                conn.commit()

            def drop():
                cursor.execute(SQL_DROP)
                conn.commit()

        elif dbtype == 'mysql':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                       `id` BIGINT(100) AUTO_INCREMENT NOT NULL PRIMARY KEY, \
                       `name` VARCHAR(255) NOT NULL, \
                       `description` VARCHAR(255) NOT NULL, \
                       `relpath` VARCHAR(255) NOT NULL)'''

            try:
                import MySQLdb
                conn = MySQLdb.connect(host, username, password, dbname, port)
                cursor = conn.cursor()
            except ImportError:
                sys.exit(
                    "You not have module \"MySQLdb\", please download before ! \n"
                )
            except:
                try:
                    conn = MySQLdb.connect(host, username, password, port=port)
                    cursor = conn.cursor()
                    cursor.execute(SQL_CREATE)
                    conn.commit()
                except:
                    traceback.format_exc()
                    sys.exit("ERROR by SYSTEM")
                if datas:
                    try:
                        SQL_INSERT = "INSERT INTO packages (`name`, `description`, `relpath`) VALUES(%s, %s, %s);" % (
                            datas[0], datas[1], datas[2])
                        cursor.execute(SQL_INSERT)
                        conn.commit()
                    except:
                        SQL_INSERT = "INSERT INTO packages (`name`, `description`, `relpath`) VALUES(%s, %s, %s);" % (
                            datas[0], ' ', datas[2])
                        cursor.execute(SQL_INSERT)
                        conn.commit()

        elif dbtype == 'oracle':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                       'id' BIGINT(100) GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1) NOT NULL PRIMARY KEY, \
                       'name' VARCHAR(255) NOT NULL, \
                       'description' VARCHAR(255) NOT NULL, \
                       'relpath' VARCHAR(255) NOT NULL)'''
            sys.exit("STILL DEVELOPMENT, PLEASE USE ANOTHER DATABASE TYPE")

        elif dbtype == 'postgres':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                       'id' BIGSERIAL NOT NULL PRIMARY KEY, \
                       'name' VARCHAR(255) NOT NULL, \
                       'description' VARCHAR(255) NOT NULL, \
                       'relpath' VARCHAR(255) NOT NULL)'''

            try:
                import psycopg2
                conn = psycopg2.connect(
                    "dbname=%s, user=%s, password=%s, host=%s, port=%s" %
                    (dbname, username, password, host, port))
                cursor = conn.cursor()
                cursor.execute(SQL_CREATE)
                conn.commit()
            except ImportError:
                sys.exit(
                    "You not have module \"Psycopg2\", please download before ! \n"
                )
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")
Example #36
    def profile(self, profile):
        # read addon settings
        sVolume = ADDON.getSetting('volume')
        sPlayer = ADDON.getSetting('player')
        sVideo = ADDON.getSetting('video')
        sCec = ADDON.getSetting('profile' + profile + '_cec')

        # read settings from profile
        f = xbmcvfs.File(ADDON_PATH_DATA + 'profile' + profile + '.json', 'r')
        result = f.read()
        try:
            jsonResult = json.loads(result)
            f.close()
        except:
            debug.notify(
                ADDON_LANG(32104) + ' ' + profile + ' (' +
                sName[int(profile)] + ')')
            debug.error(
                '[LOAD JSON FROM FILE]: Error reading from profile - ' +
                str(profile))
            return False

        # settings whose values need to be quoted
        quote_needed = [
            'audiooutput.audiodevice', 'audiooutput.passthroughdevice',
            'locale.audiolanguage', 'lookandfeel.soundskin'
        ]

        # apply the settings read from the profile file
        debug.notice('[RESTORING SETTING]: ' + sName[int(profile)])
        for setName, setValue in jsonResult.items():
            # skip settings whose type the user has disabled from changing
            if 'false' in sPlayer and setName.startswith('videoplayer'):
                continue
            if 'false' in sVideo and setName.startswith('videoscreen'):
                continue

            debug.debug('[RESTORING SETTING]: ' + setName + ': ' + setValue)
            # add quotes
            if setName in quote_needed:
                setValue = '"' + setValue + '"'
            # set setting
            if 'true' in sVolume and setName == 'volume':
                xbmc.executeJSONRPC(
                    '{"jsonrpc": "2.0", "method": "Application.SetVolume", "params": {"volume": '
                    + jsonResult['volume'] + '}, "id": 1}')
            else:
                xbmc.executeJSONRPC(
                    '{"jsonrpc": "2.0", "method": "Settings.SetSettingValue", "params": {"setting": "'
                    + setName + '", "value": ' + setValue.encode('utf-8') +
                    '}, "id": 1}')

        debug.notify(sName[int(profile)].decode('utf-8'))

        # write current profile
        f = xbmcvfs.File(ADDON_PATH_DATA + 'profile', 'w')
        f.write(profile)
        f.close()

        # CEC
        if sCec != '' and int(sCec) > 0:
            debug.notice('[SENDING CEC COMMAND]: ' + cecCommands[int(sCec)])
            xbmc.executebuiltin(cecCommands[int(sCec)])
Example #37
    def save(self):
        xbmc_version = int(xbmc.getInfoLabel('System.BuildVersion')[0:2])
        debug.debug('[XBMC VERSION]: ' + str(xbmc_version))

        enabledProfiles = self.getEnabledProfiles()
        ret = dialog.DIALOG().start('script-audio-profiles-menu.xml',
                                    labels={10071: ADDON_LANG(32100)},
                                    buttons=enabledProfiles[1],
                                    list=10070)
        if ret is None:
            return False
        else:
            button = enabledProfiles[0][ret]

        settingsToSave = {}

        if xbmc_version < 17:
            json_s = [
                # get all settings from System / Audio section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"system","category":"audiooutput"}},"id":1}',
                # get volume level
                '{"jsonrpc": "2.0", "method": "Application.GetProperties", "params": {"properties": ["volume"]}, "id": 1}',
                # get all settings from Video / Playback section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"videos","category":"videoplayer"}}, "id":1}',
                # get all settings from System / Video section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"system","category":"videoscreen"}}, "id":1}'
            ]
        else:
            json_s = [
                # get all settings from System / Audio section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"system","category":"audio"}},"id":1}',
                # get volume level
                '{"jsonrpc": "2.0", "method": "Application.GetProperties", "params": {"properties": ["volume"]}, "id": 1}',
                # get all settings from Video / Playback section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"player","category":"videoplayer"}}, "id":1}',
                # get all settings from System / Video section
                '{"jsonrpc":"2.0","method":"Settings.GetSettings", "params":{"level": "expert", "filter":{"section":"system","category":"display"}}, "id":1}'
            ]

        # send json requests
        for j in json_s:
            jsonGet = xbmc.executeJSONRPC(j)
            jsonGet = json.loads(unicode(jsonGet, 'utf-8'))
            debug.debug('[JSON]: ' + str(jsonGet))

            if 'result' in jsonGet:
                if 'settings' in jsonGet['result']:
                    for set in jsonGet['result']['settings']:
                        if 'value' in set.keys():

                            if set['value'] == True or set[
                                    'value'] == False:  # lowercase boolean values
                                settingsToSave[set['id']] = str(
                                    set['value']).lower()
                            else:
                                if type(set['value']) is int:
                                    settingsToSave[set['id']] = str(
                                        set['value'])
                                else:
                                    settingsToSave[set['id']] = str(
                                        set['value']).encode('utf-8')

                if 'volume' in jsonGet['result']:
                    settingsToSave['volume'] = str(jsonGet['result']['volume'])

        # prepare JSON string to save to file
        jsonToWrite = json.dumps(settingsToSave)

        # create dir in addon data if not exist
        if not xbmcvfs.exists(ADDON_PATH_DATA):
            xbmcvfs.mkdir(ADDON_PATH_DATA)

        # save profile file
        debug.notice('[SAVING SETTING]: ' + sName[button])
        f = xbmcvfs.File(ADDON_PATH_DATA + 'profile' + str(button) + '.json',
                         'w')
        result = f.write(jsonToWrite)
        f.close()

        debug.notify(
            ADDON_LANG(32102) + ' ' + str(button) + ' (' + sName[button] + ')')
Example #38
 def _register_publish(self, path, name, sg_task, publish_version, tank_type, comment, thumbnail_path, dependency_paths):
     """
     Helper method to register publish using the 
     specified publish info.
     """
     debug(None, method = '_register_publish', message = "path: %s" % path, verbose = False)
     debug(None, method = '_register_publish', message = "name: %s" % name, verbose = False)
     debug(None, method = '_register_publish', message = "sg_task: %s" % sg_task, verbose = False)
     debug(None, method = '_register_publish', message = "publish_version: %s" % publish_version, verbose = False)
     debug(None, method = '_register_publish', message = "tank_type: %s" % tank_type, verbose = False)
     debug(None, method = '_register_publish', message = "comment: %s" % comment, verbose = False)
     debug(None, method = '_register_publish', message = "thumbnail_path: %s" % thumbnail_path, verbose = False)
     debug(None, method = '_register_publish', message = "dependency_paths: %s" % dependency_paths, verbose = False)
     
     # construct args:
     args = {
     "tk": self.parent.tank,
     "context": self.parent.context,
     "comment": comment,
     "path": path,
     "name": name,
     "version_number": publish_version,
     "thumbnail_path": thumbnail_path,
     "task": sg_task,
     "dependency_paths": dependency_paths,
     "published_file_type":tank_type,
     }
     
     self.parent.log_debug("Register publish in shotgun: %s" % str(args))
     debug(None, method = '_register_publish', message = "args: %s" %  str(args), verbose = False)
     debug(None, method = '_register_publish', message = "Registering the publish now...", verbose = False)
     # register publish;
     sg_data = tank.util.register_publish(**args)
     
     return sg_data
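
The eight near-identical debug() calls at the top of _register_publish can be collapsed into one loop over the keyword arguments. A small readability sketch that assumes the same debug(None, method=..., message=..., verbose=...) signature used throughout these examples; the helper name is made up:

def log_params(debug_fn, method, **params):
    # Log each keyword argument through the debug() helper used in the examples
    for key in sorted(params):
        debug_fn(None, method=method,
                 message="%s: %s" % (key, params[key]), verbose=False)

# usage sketch inside _register_publish:
# log_params(debug, '_register_publish', path=path, name=name, sg_task=sg_task, ...)
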
Example #39
    def execute(self, tasks, work_template, progress_cb, **kwargs):
        """
        Main hook entry point
        :tasks:         List of tasks to be pre-published.  Each task is a
                        dictionary containing the following keys:
                        {   
                            item:   Dictionary
                                    This is the item returned by the scan hook 
                                    {   
                                        name:           String
                                        description:    String
                                        type:           String
                                        other_params:   Dictionary
                                    }
                                   
                            output: Dictionary
                                    This is the output as defined in the configuration - the 
                                    primary output will always be named 'primary' 
                                    {
                                        name:             String
                                        publish_template: template
                                        tank_type:        String
                                    }
                        }
                        
        :work_template: template
                        This is the template defined in the config that
                        represents the current work file
               
        :progress_cb:   Function
                        A progress callback to log progress during pre-publish.  Call:
                        
                            progress_cb(percentage, msg)
                             
                        to report progress to the UI
                        
        :returns:       A list of any tasks that were found which have problems that
                        need to be reported in the UI.  Each item in the list should
                        be a dictionary containing the following keys:
                        {
                            task:   Dictionary
                                    This is the task that was passed into the hook and
                                    should not be modified
                                    {
                                        item:...
                                        output:...
                                    }
                                    
                            errors: List
                                    A list of error messages (strings) to report    
                        }
        """
        results = []

        # validate tasks:
        for task in tasks:
            item = task["item"]
            output = task["output"]
            errors = []

            # report progress:
            progress_cb(0, "Validating", task)
            if item["type"] == "fx_caches":
                debug(app=None,
                      method='shotFX_PrePublishHook',
                      message='fx_caches type found',
                      verbose=False)
                errors.extend(self._validate_item_for_publish(item))
            elif item["type"] == "nparticle_caches":
                debug(app=None,
                      method='shotFX_PrePublishHook',
                      message='nparticle_caches type found',
                      verbose=False)
                errors.extend(self._validate_item_for_publish(item))
            elif item["type"] == "fx_renders":
                debug(app=None,
                      method='shotFX_PrePublishHook',
                      message='fx_renders type found',
                      verbose=False)
                errors.extend(self._validate_item_for_publish(item))
            else:
                # don't know how to publish this output type!
                errors.append(
                    "Don't know how to publish output '%s'!\nPlease contact your supervisor or lead..."
                    % output["name"])

            # if there is anything to report then add to result
            if len(errors) > 0:
                # add result:
                results.append({"task": task, "errors": errors})

            progress_cb(100)

        return results
Example #40
    def _do_maya_publish(self, task, work_template, comment, thumbnail_path, sg_task, progress_cb):
        """
        Publish the main Maya scene
        """
        import maya.cmds as cmds
        
        progress_cb(0.0, "Finding scene dependencies", task)
        dependencies = self._maya_find_additional_scene_dependencies()
        debug(None, method = '_do_maya_publish', message = 'dependencies: %s' % dependencies, verbose = False)
        ## Get scene path
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        debug(None, method = '_do_maya_publish', message = 'scene_path: %s' % scene_path, verbose = False)
        
        ## Test if it's a valid scene path
        if not work_template.validate(scene_path):
            raise TankError("File '%s' is not a valid work path, unable to publish!" % scene_path)
        
        ## Use templates to convert to publish path:
        output = task["output"]
        fields = work_template.get_fields(scene_path)
        fields["TankType"] = output["tank_type"]
        
        ## Now update the name to be the actual assetName from shotgun and remove the _ for saving
        fields['name'] = fields['Shot'].replace('_', '')
    
        publish_template = output["publish_template"]
        publish_path = publish_template.apply_fields(fields)

        if os.path.exists(publish_path):
            debug(None, method = '_do_maya_publish', message = 'Publish path exists! Altering file v# now', verbose = False)
            ## If it already exists version up one. 
            ## We should never fail a publish just because a published asset already exists
            
            cmds.warning('Found existing publish_path: %s' % publish_path)
            cmds.warning('Adjusting publish_path now...')
            path = '\\'.join(publish_path.split('\\')[0:-1])
            getfiles = os.listdir(path)
            debug(None, method = '_do_maya_publish', message = 'path: %s' % path, verbose = False)
            
            if 'Keyboard' in getfiles:
                getfiles.remove('Keyboard')
            
            ## legacy check remove any ma files from the list as we're now publishing only to mb!
            for each in list(getfiles):
                if not each.endswith('mb'):
                    getfiles.remove(each)
            
            ## Now process the rest of the list..
            ## Get the max of the list
            highestVersFile = max(getfiles).split('.')[1].split('v')[-1]

            ## Update the fields with a new version number
            fields["version"]  = int(highestVersFile) + 1
            
            ## Apply the fields to the templates paths..
            publish_path = publish_template.apply_fields(fields)
            
            ## Output the new publish path to the scripteditor
            cmds.warning('NewPublishPath: %s' % publish_path)
            debug(None, method = '_do_maya_publish', message = 'NEW publish_path: %s' % publish_path, verbose = False)
            
        ## Save the scene so we have a valid os.rename path
        progress_cb(10.0, "Saving the current working scene")
        self.parent.log_debug("Saving the current working scene...")
        publish_name = self._get_publish_name(publish_path, publish_template, fields)
        if fields['version'] < 10:
            padding = '00'
        elif fields['version'] < 100:
            padding = '0'
        else:
            padding = ''
        cmds.file(rename = '%s.v%s%s' % (publish_name, padding, fields['version']))
        cmds.file(save=True, force= True)
        debug(None, method = '_do_maya_publish', message = 'Saved scene to %s.v%s%s' % (publish_name, padding, fields['version']), verbose = False)
        print 'Saved scene to %s.v%s%s' % (publish_name, padding, fields['version'])
        
        progress_cb(50.0, "Publishing the file to publish area")
        
        try:
            publish_folder = os.path.dirname(publish_path)
            self.parent.ensure_folder_exists(publish_folder)
            debug(None, method = '_do_maya_publish', message = 'publish_folder: %s' % publish_folder, verbose = False)
            
            getCurrentScenePath = os.path.abspath(cmds.file(query=True, sn= True))
            os.rename(getCurrentScenePath, publish_path)
            self.parent.log_debug("Publishing %s --> %s..." % (getCurrentScenePath, publish_path))
            debug(None, method = '_do_maya_publish', message = "Publishing %s --> %s..." % (getCurrentScenePath, publish_path), verbose = False)
            progress_cb(65.0, "Moved the publish")       
             
        except Exception, e:
            raise TankError("Failed to move file: \n%s \nto\n%s\nError: %s" % (getCurrentScenePath, publish_path, e))
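The version-bump block above derives the next version by string-splitting the lexically largest filename, which only works while every file follows the '<name>.v###.mb' pattern exactly. A small, more defensive helper is sketched below; the function name and the regex pattern are assumptions and not part of the original hook.

import os
import re

def next_publish_version(publish_folder, current_version=1):
    """Sketch: scan existing '<name>.v###.mb' publishes and return the next free version."""
    versions = [current_version]
    if os.path.isdir(publish_folder):
        for fileName in os.listdir(publish_folder):
            match = re.search(r'\.v(\d+)\.mb$', fileName)
            if match:
                versions.append(int(match.group(1)) + 1)
    return max(versions)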
Exemple #41
0
    def processTemplates(self,
                         tk,
                         templateFile='',
                         id='',
                         shotNum='',
                         inprogressBar='',
                         type=''):
        """
        Used to fetch the most recent publishes for a shot.
        @param tk: the tank api instance
        @param templateFile: the tank template file specified in the shot_step.yml
        @param id: the shotgun id of the shot
        @param shotNum: the shot name, eg ep100_sh010
        @param inprogressBar: the progress bar UI to update while processing
        @param type: one of 'lighting', 'shaders' or 'renderglobals'
        @type templateFile: template
        """
        ## Now fetch all the template paths from shotgun
        getTemplatePaths = tk.paths_from_template(templateFile, {
            'Step': 'Light',
            'id': id,
            'Shot': shotNum
        })
        debug(app=self,
              method='processTemplates',
              message='getTemplatePaths:    %s' % getTemplatePaths,
              verbose=False)

        ## Pick the newest template path; guard against an empty result
        ## because max() on an empty list raises a ValueError
        xmlFile = max(getTemplatePaths) if getTemplatePaths else None
        debug(app=self,
              method='processTemplates',
              message='Max Version xmlFile.... %s' % xmlFile,
              verbose=False)

        ## Bail out early if no xml publish was found..
        if not xmlFile:
            debug(app=self,
                  method='processTemplates',
                  message='Can not find any xml files for %s' % shotNum,
                  verbose=False)
            return
        else:
            debug(app=self,
                  method='processTemplates',
                  message='Valid Xml Path?: %s' %
                  os.path.isfile(xmlFile.replace(os.path.sep, "/")),
                  verbose=False)

            if os.path.isfile(xmlFile.replace(
                    os.path.sep, "/")):  ## is this a valid xml file!?
                if type == 'lighting':
                    inprogressBar.updateProgress(
                        percent=35, doingWhat='Rebuilding Lights XML...')
                    debug(app=self,
                          method='processTemplates',
                          message='Loading Lighting XML NOW...',
                          verbose=False)
                    read_light_xml.actionLightXML(
                        pathToXML=xmlFile.replace(os.path.sep, "/"))

                    debug(app=self,
                          method='processTemplates',
                          message='Lighting XML Load Complete...',
                          verbose=False)

                elif type == 'shaders':
                    inprogressBar.updateProgress(
                        percent=20, doingWhat='createAll shaders...')

                    shd.createAll(XMLPath=xmlFile.replace(os.path.sep, "/"),
                                  parentGrp='',
                                  Namespace='',
                                  Root='MaterialNodes')

                    inprogressBar.updateProgress(
                        percent=30, doingWhat='connectAll shaders...')

                    shd.connectAll(XMLPath=xmlFile.replace(os.path.sep, "/"),
                                   parentGrp='',
                                   Namespace='',
                                   Root='MaterialNodes')

                elif type == 'renderglobals':  ## this render globals
                    inprogressBar.updateProgress(
                        percent=45,
                        doingWhat=
                        'recreating renderglobals and render passes now.....')

                    readXML.readCoreData(
                        pathToXML=xmlFile.replace(os.path.sep, "/"))

                    inprogressBar.updateProgress(
                        percent=50,
                        doingWhat=
                        'renderglobals and render passes recreated.....')
                else:
                    pass
            else:
                debug(
                    app=self,
                    method='processTemplates',
                    message=
                    'FAILED Can not find a valid published xml file at %s ...'
                    % xmlFile.replace(os.path.sep, "/"),
                    verbose=False)
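processTemplates() picks the newest publish with max() over raw path strings, which only stays correct while version numbers are zero-padded. A version-aware pick is sketched below, under the assumption that the template exposes a 'version' field; the helper name is made up for illustration.

def latest_path_by_version(tk, template, template_fields):
    """Sketch: return the published path with the highest 'version' field, or None."""
    paths = tk.paths_from_template(template, template_fields)
    if not paths:
        return None
    return max(paths, key=lambda p: template.get_fields(p).get('version', 0))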
Exemple #42
0
class PrimaryPublishHook(Hook):
    """
    Single hook that implements publish of the primary task
    """    
    def execute(self, task, work_template, comment, thumbnail_path, sg_task, progress_cb, **kwargs):
        """
        Main hook entry point
        :task:          Primary task to be published.  This is a
                        dictionary containing the following keys:
                        {   
                            item:   Dictionary
                                    This is the item returned by the scan hook 
                                    {   
                                        name:           String
                                        description:    String
                                        type:           String
                                        other_params:   Dictionary
                                    }
                                   
                            output: Dictionary
                                    This is the output as defined in the configuration - the 
                                    primary output will always be named 'primary' 
                                    {
                                        name:             String
                                        publish_template: template
                                        tank_type:        String
                                    }
                        }
                        
        :work_template: template
                        This is the template defined in the config that
                        represents the current work file
               
        :comment:       String
                        The comment provided for the publish
                        
        :thumbnail:     Path string
                        The default thumbnail provided for the publish
                        
        :sg_task:       Dictionary (shotgun entity description)
                        The shotgun task to use for the publish    
                        
        :progress_cb:   Function
                        A progress callback to log progress during pre-publish.  Call:
                        
                            progress_cb(percentage, msg)
                             
                        to report progress to the UI
        
        :returns:       Path String
                        Hook should return the path of the primary publish so that it
                        can be passed as a dependency to all secondary publishes
        
                        Hook should raise a TankError if publish of the 
                        primary task fails
        """
        # get the engine name from the parent object (app/engine/etc.)
        engine_name = self.parent.engine.name
       
        # depending on engine:
        if engine_name == "tk-maya":
            return self._do_maya_publish(task, work_template, comment, thumbnail_path, sg_task, progress_cb)
        else:
            raise TankError("Unable to perform publish for unhandled engine %s \n Check with your TD that the config is loading the right primary publish hook." % engine_name)
       
    def _do_maya_publish(self, task, work_template, comment, thumbnail_path, sg_task, progress_cb):
        """
        Publish the main Maya scene
        """
        import maya.cmds as cmds
        
        progress_cb(0.0, "Finding scene dependencies", task)
        dependencies = self._maya_find_additional_scene_dependencies()
        debug(None, method = '_do_maya_publish', message = 'dependencies: %s' % dependencies, verbose = False)
        ## Get scene path
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        debug(None, method = '_do_maya_publish', message = 'scene_path: %s' % scene_path, verbose = False)
        
        ## Test if it's a valid scene path
        if not work_template.validate(scene_path):
            raise TankError("File '%s' is not a valid work path, unable to publish!" % scene_path)
        
        ## Use templates to convert to publish path:
        output = task["output"]
        fields = work_template.get_fields(scene_path)
        fields["TankType"] = output["tank_type"]
        
        ## Now update the name to be the actual assetName from shotgun and remove the _ for saving
        fields['name'] = fields['Shot'].replace('_', '')
    
        publish_template = output["publish_template"]
        publish_path = publish_template.apply_fields(fields)

        if os.path.exists(publish_path):
            debug(None, method = '_do_maya_publish', message = 'Publish path exists! Altering file v# now', verbose = False)
            ## If it already exists version up one. 
            ## We should never fail a publish just because a published asset already exists
            
            cmds.warning('Found existing publish_path: %s' % publish_path)
            cmds.warning('Adjusting publish_path now...')
            path = os.path.dirname(publish_path)
            getfiles = os.listdir(path)
            debug(None, method = '_do_maya_publish', message = 'path: %s' % path, verbose = False)
            
            if 'Keyboard' in getfiles:
                getfiles.remove('Keyboard')
            
            ## legacy check: keep only .mb files in the list as we're now publishing only to mb!
            getfiles = [each for each in getfiles if each.endswith('mb')]
            
            ## Now process the rest of the list..
            ## Get the max of the list
            highestVersFile = max(getfiles).split('.')[1].split('v')[-1]

            ## Update the fields with a new version number
            fields["version"]  = int(highestVersFile) + 1
            
            ## Apply the fields to the templates paths..
            publish_path = publish_template.apply_fields(fields)
            
            ## Output the new publish path to the scripteditor
            cmds.warning('NewPublishPath: %s' % publish_path)
            debug(None, method = '_do_maya_publish', message = 'NEW publish_path: %s' % publish_path, verbose = False)
            
        ## Save the scene so we have a valid os.rename path
        progress_cb(10.0, "Saving the current working scene")
        self.parent.log_debug("Saving the current working scene...")
        publish_name = self._get_publish_name(publish_path, publish_template, fields)
        if fields['version'] < 10:
            padding = '00'
        elif fields['version'] < 100:
            padding = '0'
        else:
            padding = ''
        cmds.file(rename = '%s.v%s%s' % (publish_name, padding, fields['version']))
        cmds.file(save=True, force= True)
        debug(None, method = '_do_maya_publish', message = 'Saved scene to %s.v%s%s' % (publish_name, padding, fields['version']), verbose = False)
        print 'Saved scene to %s.v%s%s' % (publish_name, padding, fields['version'])
        
        progress_cb(50.0, "Publishing the file to publish area")
        
        try:
            publish_folder = os.path.dirname(publish_path)
            self.parent.ensure_folder_exists(publish_folder)
            debug(None, method = '_do_maya_publish', message = 'publish_folder: %s' % publish_folder, verbose = False)
            
            getCurrentScenePath = os.path.abspath(cmds.file(query=True, sn= True))
            os.rename(getCurrentScenePath, publish_path)
            self.parent.log_debug("Publishing %s --> %s..." % (getCurrentScenePath, publish_path))
            debug(None, method = '_do_maya_publish', message = "Publishing %s --> %s..." % (getCurrentScenePath, publish_path), verbose = False)
            progress_cb(65.0, "Moved the publish")       
             
        except Exception, e:
            raise TankError("Failed to move file: \n%s \nto\n%s\nError: %s" % (getCurrentScenePath, publish_path, e))
        
        print
        debug(None, method = '_do_maya_publish', message = "publish_path: %s" % publish_path, verbose = False)
        debug(None, method = '_do_maya_publish', message = "publishName: %s_LAYOUT" % publish_name, verbose = False)
        debug(None, method = '_do_maya_publish', message = "sg_task: %s" % sg_task, verbose = False)
        debug(None, method = '_do_maya_publish', message = "publish_version: %s" % fields["version"], verbose = False)
        debug(None, method = '_do_maya_publish', message = "tank_type: %s" % output["tank_type"], verbose = False)
        debug(None, method = '_do_maya_publish', message = "comment: %s" % comment, verbose = False)
        debug(None, method = '_do_maya_publish', message = "thumbnail_path: %s" % thumbnail_path, verbose = False)
        debug(None, method = '_do_maya_publish', message = "dependencies: %s" % dependencies, verbose = False)
        
        # finally, register the publish:
        progress_cb(75.0, "Registering the publish")
        self._register_publish(publish_path, 
                               '%s_LAYOUT'%publish_name, 
                               sg_task, 
                               fields["version"], 
                               output["tank_type"],
                               comment,
                               thumbnail_path, 
                               dependencies)
        
        progress_cb(100)
        
        ## Now put it back to Ascii
        cmds.file(save=True, force=True, type = 'mayaAscii')
        
        return publish_path
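For reference, a primary task shaped like the structure described in the execute() docstring above could look as follows. Every value here is made up for illustration only; in practice publish_template is a tank template object and the strings come from the scan hook and the app configuration.

example_task = {
    "item": {
        "name": "ep100_sh010",             # made-up item name
        "description": "Current Maya work file",
        "type": "work_file",               # assumed type string
        "other_params": {},
    },
    "output": {
        "name": "primary",                 # the primary output is always named 'primary'
        "publish_template": None,          # a tank template object in practice
        "tank_type": "Maya Scene",         # assumed published file type
    },
}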
Exemple #43
0
    def opened(self):
        '''Call callback on_opened'''

        debug('Connected to the data server :D')
        if 'on_opened' in dir(self):
            self.on_opened()
Exemple #44
0
    def run_app(self):
        """
        Callback from when the menu is clicked.
        """
        ## Tell the artist to be patient... eg not genY
        inprogressBar = pbui.ProgressBarUI(title='Rebuilding Lighting Scene:')
        inprogressBar.show()
        inprogressBar.updateProgress(percent=1,
                                     doingWhat='Processing scene info...')
        ## Instantiate the API
        tk = sgtk.sgtk_from_path("T:/software/bubblebathbay")
        debug(app=self,
              method='run_app',
              message='API instanced...\n%s' % tk,
              verbose=False)
        debug(app=self,
              method='run_app',
              message='Fetch Lighting Assets launched...',
              verbose=False)

        context = self.context  ## To get the step
        debug(app=self,
              method='run_app',
              message='Context Step...\n%s' % context.step['name'],
              verbose=False)
        if context.step['name'] != 'Light':
            inprogressBar.close()
            cmds.warning(
                "Current context is not a valid Lighting context. Please make sure you are under a valid shotgun Lighting context!"
            )
            return

        scene_path = '%s' % os.path.abspath(cmds.file(query=True, sn=True))
        debug(app=self,
              method='run_app',
              message='scene_path... %s' % scene_path,
              verbose=False)

        ## Build an entity type to get some values from.
        entity = self.context.entity  ## returns {'type': 'Shot', 'name': 'ep100_sh010', 'id': 1166}
        debug(app=self,
              method='run_app',
              message='entity... %s' % entity,
              verbose=False)

        ## Filter for the matching ID for the shot
        sg_filters = [["id", "is", entity["id"]]]
        debug(app=self,
              method='run_app',
              message='sg_filters... %s' % sg_filters,
              verbose=False)

        ## Build an entity type to get some values from.
        sg_entity_type = self.context.entity["type"]  ## returns Shot
        debug(app=self,
              method='run_app',
              message='sg_entity_type...\n%s' % sg_entity_type,
              verbose=False)

        ## DATA
        ## NOTES SO HERE WE DON'T NEED TO CALL THE ASSETS FIELD FROM SHOTGUN
        ## WE CAN JUST GRAB THE LATEST PUBLISH FILE FROM EACH OF THE TEMPLATE STEPS
        inprogressBar.updateProgress(percent=3,
                                     doingWhat='Processing scene info...')
        shadersTemplate = tk.templates[self.get_setting(
            'maya_shot_SHD_XML_template')]
        debug(app=self,
              method='run_app',
              message='shadersTemplate...\n%s' % shadersTemplate,
              verbose=False)

        lightingTemplate = tk.templates[self.get_setting(
            'maya_shot_lighting_template')]
        debug(app=self,
              method='run_app',
              message='lightingTemplate...\n%s' % lightingTemplate,
              verbose=False)

        renderglobalsTemplate = tk.templates[self.get_setting(
            'maya_shot_renderglobals_template')]
        debug(app=self,
              method='run_app',
              message='renderglobalsTemplate...\n%s' % renderglobalsTemplate,
              verbose=False)

        ## PROCESS TEMPLATE NOW
        inprogressBar.updateProgress(percent=5,
                                     doingWhat='Processing shaders xml...')
        debug(app=self,
              method='run_app',
              message='Processing template... %s' % shadersTemplate,
              verbose=False)

        ## SHADERS
        self.processTemplates(tk=tk,
                              templateFile=shadersTemplate,
                              id=entity["id"],
                              shotNum=entity["name"],
                              inprogressBar=inprogressBar,
                              type='shaders')
        ## Attach ocean shader now...
        shd.oceanAttach(self)

        ## LIGHTS
        inprogressBar.updateProgress(percent=32,
                                     doingWhat='Processing lights xml...')
        self.processTemplates(tk=tk,
                              templateFile=lightingTemplate,
                              id=entity["id"],
                              shotNum=entity["name"],
                              inprogressBar=inprogressBar,
                              type='lighting')

        ## Render globals
        inprogressBar.updateProgress(
            percent=40, doingWhat='Processing renderglobals xml...')
        self.processTemplates(tk=tk,
                              templateFile=renderglobalsTemplate,
                              id=entity["id"],
                              shotNum=entity["name"],
                              inprogressBar=inprogressBar,
                              type='renderglobals')

        ## Attach subDiv
        settings.attachMentalRaySubDiv()

        ## Now cleanup
        inprogressBar.updateProgress(percent=90, doingWhat='Cleaning...')
        cleanup.shotCleanupLights()

        inprogressBar.close()
        inprogressBar = None
        ############################################
        ## CORE ACHIVES
        ## Now process the assembly References
        debug(app=self,
              method='run_app',
              message='Processing mentalCore assemblies..',
              verbose=False)
        inprogressBar = pbui.ProgressBarUI(title='Rebuilding Core Archives:')
        inprogressBar.show()
        inprogressBar.updateProgress(percent=0,
                                     doingWhat='Processing core archives...')
        if cmds.objExists('CORE_ARCHIVES_hrc'):
            inprogressBar.updateProgress(percent=100, doingWhat='Complete...')
            inprogressBar.close()
            inprogressBar = None
        else:
            ## Get the assembly paths from the transforms in the scene with the correct tags to load now..
            self.getAssemblyPaths = coreLib.getCorePaths()
            debug(app=self,
                  method='run_app',
                  message='self.getAssemblyPaths.. %s' % self.getAssemblyPaths,
                  verbose=False)

            if self.getAssemblyPaths:
                ## Now load the assemblies from the paths
                coreLib.loadCoreArchives(paths=self.getAssemblyPaths)
                debug(
                    app=self,
                    method='run_app',
                    message=
                    'self.loadCoreArchives Successful all assemblies loaded moving on to reconnect now...',
                    verbose=False)
                inprogressBar.updateProgress(
                    percent=40, doingWhat='Core archives loaded...')

                ## Now connect the assemblies.
                inprogressBar.updateProgress(
                    percent=60, doingWhat='Reconnecting core archives...')
                coreLib.doReconnect()
                debug(
                    app=self,
                    method='run_app',
                    message=
                    'Ahh core archive assemblies reconnected successfully!!...',
                    verbose=False)

                cleanup.shotCleanupPlacements()

                cmds.group([
                    'CORE_ARCHIVES_hrc', 'REBUILT_CORE_ARCHIVES_hrc',
                    'placements_hrc', 'LIGHTS_hrc'
                ],
                           n='LIGHTING_hrc',
                           em=False)
            else:
                pass

            inprogressBar.updateProgress(percent=100, doingWhat='COMPLETE...')
            inprogressBar.close()
            inprogressBar = None
Exemple #45
0
    def _publish_ocean_for_item(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
        """
        Export an xml file for the specified item and publish it
        to Shotgun.
        """
        debug(app = None, method = '_publish_ocean_for_item', message = 'Processing item: %s' % item['name'], verbose = False)
        
        group_name = '%s' % ''.join(item["name"].strip("|").split('_hrc')[0].split('_'))
        debug(app = None, method = '_publish_ocean_for_item', message = 'group_name: %s' % group_name, verbose = False)

        tank_type = output["tank_type"]
        debug(app = None, method = '_publish_ocean_for_item', message = 'tank_type: %s' % tank_type, verbose = False)
        
        publish_template = output["publish_template"]
        debug(app = None, method = '_publish_ocean_for_item', message = 'publish_template: %s' % publish_template, verbose = False)

        # get the current scene path and extract fields from it
        # using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]

        # update fields with the group name:
        fields["grp_name"] = group_name

        ## create the publish path by applying the fields 
        ## with the publish template:
        publish_path = publish_template.apply_fields(fields)
        debug(app = None, method = '_publish_ocean_for_item', message = 'FINAL publish_path: %s' % publish_path, verbose = False)

        ## Make the directory now...
        if not os.path.isdir(os.path.dirname(publish_path)):
            debug(app = None, method = '_publish_ocean_for_item', message = 'Building dir: %s' % os.path.dirname(publish_path), verbose = False)
            os.makedirs(os.path.dirname(publish_path))

        try:
            self.parent.log_debug("Executing command: OCEAN EXPORT!")
            print '====================='
            print 'Exporting the ocean and fluids %s' % publish_path
            
            ## Now export
            cmds.select(item['name'], r = True)
            cmds.file(publish_path, force = True, options =  "v=0;", typ = "mayaAscii", es = True)
            
            ## Now register publish with shotgun
            self._register_publish(publish_path, 
                                  group_name, 
                                  sg_task, 
                                  publish_version, 
                                  tank_type,
                                  comment,
                                  thumbnail_path, 
                                  [primary_publish_path])
            print 'Finished ocean and fluids export...'
            print '====================='
        except Exception, e:
            raise TankError("Failed to export %s: %s" % (group_name, e))
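These hooks call self._register_publish() throughout, but the helper itself never appears in the excerpts. In stock Toolkit publish hooks it is usually a thin wrapper around sgtk.util.register_publish; the sketch below assumes that convention, and the exact argument mapping is not confirmed by the original code.

    def _register_publish(self, path, name, sg_task, publish_version, tank_type,
                          comment, thumbnail_path, dependency_paths):
        """Sketch: register the published file with Shotgun via the Toolkit core API."""
        import sgtk.util

        args = {
            "tk": self.parent.tank,
            "context": self.parent.context,
            "comment": comment,
            "path": path,
            "name": name,
            "version_number": publish_version,
            "thumbnail_path": thumbnail_path,
            "task": sg_task,
            "dependency_paths": dependency_paths,
            "published_file_type": tank_type,
        }
        return sgtk.util.register_publish(**args)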
Exemple #46
0
    def closed(self, code, reason):
        '''Call callback on_closed.'''

        debug(("Closed down :(", code, reason))
        if 'on_closed' in dir(self):
            self.on_closed(self.data)
Exemple #47
0
    def execute(self, tasks, work_template, comment, thumbnail_path, sg_task, primary_publish_path, progress_cb, **kwargs):
        """
        Main hook entry point
        :tasks:         List of secondary tasks to be published.  Each task is a 
                        dictionary containing the following keys:
                        {
                            item:   Dictionary
                                    This is the item returned by the scan hook 
                                    {   
                                        name:           String
                                        description:    String
                                        type:           String
                                        other_params:   Dictionary
                                    }

                            output: Dictionary
                                    This is the output as defined in the configuration - the 
                                    primary output will always be named 'primary' 
                                    {
                                        name:             String
                                        publish_template: template
                                        tank_type:        String
                                    }
                        }

        :work_template: template
                        This is the template defined in the config that
                        represents the current work file

        :comment:       String
                        The comment provided for the publish

        :thumbnail:     Path string
                        The default thumbnail provided for the publish

        :sg_task:       Dictionary (shotgun entity description)
                        The shotgun task to use for the publish    

        :primary_publish_path: Path string
                        This is the path of the primary published file as returned
                        by the primary publish hook

        :progress_cb:   Function
                        A progress callback to log progress during pre-publish.  Call:

                            progress_cb(percentage, msg)

                        to report progress to the UI

        :returns:       A list of any tasks that had problems that need to be reported 
                        in the UI.  Each item in the list should be a dictionary containing 
                        the following keys:
                        {
                            task:   Dictionary
                                    This is the task that was passed into the hook and
                                    should not be modified
                                    {
                                        item:...
                                        output:...
                                    }

                            errors: List
                                    A list of error messages (strings) to report    
                        }
        """
        results = []
        errors = []
        ## PROCESS STUFF BEFORE DOWNGRADING
        shadingDone = False
        for task in tasks:
            debug(app = None, method = 'lightingSecPublish.execute', message = 'task: %s' % task, verbose = False)
            item = task["item"]
            debug(app = None, method = 'lightingSecPublish.execute', message = 'item: %s' % item, verbose = False)
            output = task["output"]
            # report progress:
            
            ### SHD XML
            if output["name"] == 'shd_xml':
                progress_cb(0, "Publishing SHD xml now...")
                # type: mesh_grp
                ## Because shading exports from the shaders and not the actual groups we can just run this step ONCE!
                ## If we do this for every item we're wasting serious time outputting the same thing over and over.
                if not shadingDone: 
                    try:
                        debug(app = None, method = 'lightingSecPublish.execute', message = 'item: %s' % item, verbose = False)
                        self._publish_shading_xml_for_item(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
                        ## Now fix the fileNodes back to a work folder structure not the publish folder structure
                        self.repathFileNodesForWork()
                        shadingDone =  True
                        debug(app = None, method = 'lightingSecPublish.execute', message = 'shadingDone: %s' % shadingDone, verbose = False)
                    except Exception, e:
                        errors.append("Publish failed - %s" % e)
                else:
                    pass
            
            ### LIGHTS XML
            elif output["name"] == 'lights_xml':
                progress_cb(0, "Publishing Light xml now...")
                # type: light_grp
                ## Because we have only found in the scan scene just the LIGHTS_hrc group there should only be one light item to process...
                try:
                    
                    self._publish_lighting_xml_for_item(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
                    
                except Exception, e:
                    errors.append("Publish failed - %s" % e)
Exemple #48
0
    def submitFinalToDeadline(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
        debug(app = None, method = 'submitFinalToDeadline', message = 'Entered submitFinalToDeadline', verbose = False)
        render_name = '%s_%s' % ('_'.join( self._getMostRecentPublish()[0].split('.')[:-1] ), item['name'])
        debug(app = None, method = 'submitPreviewToDeadline', message = 'render_name: %s' % render_name, verbose = False)

        tank_type = output["tank_type"]
        debug(app = None, method = 'submitPreviewToDeadline', message = 'tank_type: %s' % tank_type, verbose = False)
        
        publish_template = output["publish_template"]
        debug(app = None, method = 'submitPreviewToDeadline', message = 'publish_template: %s' % publish_template, verbose = False)

        # get the current scene path and extract fields from it
        # using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]

        # update fields with the group name:
        fields["grp_name"] = render_name

        ## BUILD THE Job Info File for deadline
        ##deadlinecommand.exe [Job Info File] [Plug-in Info File]
        publish_path = publish_template.apply_fields(fields)
        debug(app = None, method = 'submitPreviewToDeadline', message = 'FINAL publish_path: %s' % publish_path, verbose = False)
        
        ## define log paths etc
        logTo = r'%s/deadlinebg_output' % tempfile.gettempdir().replace('\\', '/')
        jobInfoPath = r'%s/maya_job_info.job' % tempfile.gettempdir().replace('\\', '/')
        pluginInfoPath = r'%s/maya_plugin_info.job' % tempfile.gettempdir().replace('\\', '/')
        debug(app = None, method = 'submitPreviewToDeadline', message = 'logTo: %s' % logTo, verbose = False)
        debug(app = None, method = 'submitPreviewToDeadline', message = 'jobInfoPath: %s' % jobInfoPath, verbose = False)
        debug(app = None, method = 'submitPreviewToDeadline', message = 'pluginInfoPath: %s' % pluginInfoPath, verbose = False)
        
        ## Job Info stuff
        jobname = '.'.join( self._getMostRecentPublish()[0].split('.')[:-1] )
        debug(app = None, method = 'submitPreviewToDeadline', message = 'jobname: %s' % jobname, verbose = False)
        projectPath  = cmds.workspace(query = True, fullName = True)
        debug(app = None, method = 'submitPreviewToDeadline', message = 'projectPath: %s' % projectPath, verbose = False)
        outputFilePath  = publish_path
        debug(app = None, method = 'submitPreviewToDeadline', message = 'outputFilePath: %s' % outputFilePath, verbose = False)
        ## check and build output path if it doesn't exist
        if not os.path.isdir(outputFilePath):
            os.makedirs(outputFilePath)
        sceneFilePath = os.path.join(self._getMostRecentPublish()[1], self._getMostRecentPublish()[0]).replace('\\', '/')
        debug(app = None, method = 'submitPreviewToDeadline', message = 'sceneFilePath: %s' % sceneFilePath, verbose = False)
        outputFileName = '_'.join((self._getMostRecentPublish()[0].split('.')))
        debug(app = None, method = 'submitPreviewToDeadline', message = 'outputFileName: %s' % outputFileName, verbose = False)
        debug(app = None, method = 'submitPreviewToDeadline', message = 'comment: %s' % comment, verbose = False)
        version = '2013.5'
        debug(app = None, method = 'submitPreviewToDeadline', message = 'version: %s' % version, verbose = False)
        pool = 'maya'
        debug(app = None, method = 'submitPreviewToDeadline', message = 'pool: %s' % pool, verbose = False)
        machineLimit = 5
        debug(app = None, method = 'submitPreviewToDeadline', message = 'machineLimit: %s' % machineLimit, verbose = False)
        priority = 100
        debug(app = None, method = 'submitPreviewToDeadline', message = 'priority: %s' % priority, verbose = False)
        taskTimeoutMinutes = 240
        debug(app = None, method = 'submitPreviewToDeadline', message = 'taskTimeoutMinutes: %s' % taskTimeoutMinutes, verbose = False)
        minRenderTimeMinutes = 0
        debug(app = None, method = 'submitPreviewToDeadline', message = 'minRenderTimeMinutes: %s' % minRenderTimeMinutes, verbose = False)
        concurrentTasks = 1
        debug(app = None, method = 'submitPreviewToDeadline', message = 'concurrentTasks: %s' % concurrentTasks, verbose = False)
        department = getuser()
        debug(app = None, method = 'submitPreviewToDeadline', message = 'department: %s' % department, verbose = False)
        limitGroups = 0
        debug(app = None, method = 'submitPreviewToDeadline', message = 'limitGroups: %s' % limitGroups, verbose = False)
        renderer = 'MentalRay'
        debug(app = None, method = 'submitPreviewToDeadline', message = 'renderer: %s' % renderer, verbose = False)
        autoMemoryLimit = 1
        debug(app = None, method = 'submitPreviewToDeadline', message = 'autoMemoryLimit: %s' % autoMemoryLimit, verbose = False)
        memoryLimit = 1
        debug(app = None, method = 'submitPreviewToDeadline', message = 'memoryLimit: %s' % memoryLimit, verbose = False)
        camera = self._getShotCam()
        debug(app = None, method = 'submitPreviewToDeadline', message = 'camera: %s' % camera, verbose = False)
        startFrame = cmds.playbackOptions(query = True, animationStartTime = True)
        endFrame = cmds.playbackOptions(query = True, animationEndTime = True)

        ## Process the data into the right formats
        submitString = [
                        'Plugin=MayaBatch',
                        'Name=%s' % jobname,
                        'Comment=%s' % comment,
                        'Department=%s' % department,
                        'Pool=%s' % pool,
                        'Group=none',
                        'Priority=%s' % priority,
                        'TaskTimeoutMinutes=%s' % taskTimeoutMinutes,
                        'EnableAutoTimeout=False',
                        'ConcurrentTasks=%s' % concurrentTasks,
                        'LimitConcurrentTasksToNumberOfCpus=True',
                        'MachineLimit=%s' % machineLimit,
                        'Whitelist=',
                        'LimitGroups=',
                        'JobDependencies=',
                        'OnJobComplete=Nothing',
                        'Frames=%s-%s' % (int(startFrame), int(endFrame)),
                        'ChunkSize=1',
                        'OutputDirectory0=%s/' % outputFilePath,
                        ]
                        
        ## write to file
        jobInfoFile = open(jobInfoPath, "w")
        for eachLine in submitString:
            jobInfoFile.write('%s\n' % eachLine)
        jobInfoFile.close()
        debug(app = None, method = 'submitFinalToDeadline', message = 'Wrote jobInfoFile successfully...', verbose = False)
        
        ### Plugin Info File
        _MAYA_PLUGIN_INFO_ATTRS =   [
                                    'SceneFile=%s' %(str(cmds.file(q = 1, sceneName = 1)).replace('\\', '/')),
                                    'Version=%s' % version,
                                    'Build=64bit',
                                    'ProjectPath=//192.168.5.253/BBB_main/bbb',
                                    'StrictErrorChecking=False',
                                    'LocalRendering=True',
                                    'MaxProcessors=0',
                                    'OutputFilePath=%s/' % outputFilePath,
                                    'Renderer=%s' % renderer,
                                    'MentalRayVerbose=Progress Messages',
                                    'AutoMemoryLimit=True',
                                    'MemoryLimit=0',
                                    'CommandLineOptions=',
                                    'UseOnlyCommandLineOptions=0',
                                    'IgnoreError211=False',
                                    'Camera=%s' % [cam for cam in cmds.ls(type = 'camera')  if 'shotCam_bake' in cam][0].replace('Shape', ''),
                                    ]

                                    
        ## write to file
        pluginInfoFile = open(pluginInfoPath, "w")
        for eachLine in _MAYA_PLUGIN_INFO_ATTRS:
            pluginInfoFile.write('%s\n' % eachLine)
        pluginInfoFile.close()
        debug(app = None, method = 'submitFinalToDeadline', message = 'Wrote pluginInfoFile successfully...', verbose = False)
        
        try:
            self.parent.log_debug("Executing command: RENDER FINAL!")
            print '====================='
            print 'Submitting to deadlines %s' % publish_path

            ## Pass the arguments as a list so paths containing spaces survive submission
            subprocess.call( ['Deadlinecommand.exe', jobInfoPath, pluginInfoPath, sceneFilePath] )

            ## Now register publish with shotgun
            self._register_publish(publish_path,
                                  render_name,
                                  sg_task,
                                  publish_version,
                                  tank_type,
                                  comment,
                                  thumbnail_path,
                                  [primary_publish_path])

            print 'Finished submitting render preview to deadline.....'
            print '====================='
        except Exception, e:
            raise TankError("Failed to submit %s to Deadline: %s" % (render_name, e))
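The submission above fires and forgets; nothing checks whether Deadline actually accepted the job. A minimal sketch of a submit helper that captures the command output and raises on failure is below. It assumes deadlinecommand is on the PATH and is not part of the original hook.

import subprocess

def submit_to_deadline(job_info_path, plugin_info_path, scene_file_path=None):
    """Sketch: submit a job to Deadline and return the command line output."""
    cmd = ['deadlinecommand', job_info_path, plugin_info_path]
    if scene_file_path:
        cmd.append(scene_file_path)   # optional auxiliary file submitted with the job
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    if proc.returncode != 0:
        raise RuntimeError('Deadline submission failed:\n%s' % output)
    return output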
Exemple #49
0
             self._publish_ocean_for_item(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
         except Exception, e:
             errors.append("Publish failed - %s" % e)
     # elif output["name"] == 'renderPreview':
     #     progress_cb(0, "Publishing Render Preview to Deadline now...")
     #     ## Export the renderPreview  found to submit to deadline
     #     try:
     #         debug(app = None, method = 'lightingSecPublish.execute.renderPreview', message = 'item: %s' % item, verbose = False)
     #         self.submitPreviewToDeadline(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
     #     except Exception, e:
     #         errors.append("Publish failed - %s" % e)
     elif output["name"] == 'renderFinal':
         progress_cb(0, "Publishing Render Final to Deadline now...")
         ## Export the renderFinal found to submit to deadline
         try:
             debug(app = None, method = 'lightingSecPublish.execute.renderFinal', message = 'item: %s' % item, verbose = False)
             self.submitFinalToDeadline(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
         except Exception, e:
             errors.append("Publish failed - %s" % e)
     elif output["name"] == 'renderglobals_xml':
         progress_cb(0, "Publishing renderglobals_xml now...")
         ## Export the renderglobals_xml
         try:
             debug(app = None, method = 'lightingSecPublish.execute.renderglobals_xml', message = 'item: %s' % item, verbose = False)
             self._publish_renderglobals_xml_for_item(item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb)
         except Exception, e:
             errors.append("Publish failed - %s" % e)
     else:
         # don't know how to publish this output type!
         errors.append("Don't know how to publish this item!")
 
Exemple #50
0
    def _publish_nukeCamera_for_item(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
        """
        Export an xml file for the specified item and publish it to Shotgun.
        """        
        debug(app = None, method = '_publish_nukeCamera_for_item', message = 'item["name"]: %s' % item["name"], verbose = False)
        
        tank_type = output["tank_type"]
        debug(app = None, method = '_publish_nukeCamera_for_item', message = 'tank_type: %s' % tank_type, verbose = False)
        
        publish_template = output["publish_template"]
        debug(app = None, method = '_publish_nukeCamera_for_item', message = 'publish_template: %s' % publish_template, verbose = False)

        # get the current scene path and extract fields from it
        # using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]
        
        ## create the publish path by applying the fields 
        ## with the publish template:            
        try:
            print '====================='
            print 'Exporting the nukeCamera'
            startFrame = cmds.playbackOptions(query =True, animationStartTime = True) 
            debug(app = None, method = '_publish_nukeCamera_for_item', message = 'startFrame: %s' % startFrame, verbose = False)

            endFrame = cmds.playbackOptions(query =True, animationEndTime= True)
            debug(app = None, method = '_publish_nukeCamera_for_item', message = 'endFrame: %s' % endFrame, verbose = False)
            
            cleanup.turnOffModelEditors()
            
            shotCams = []
            for eachCamera in cmds.listRelatives(item["name"], children = True) or []:
                if cmds.getAttr('%s.type' % eachCamera) == 'shotCam':
                    debug(app = None, method = '_publish_nukeCamera_for_item', message = 'eachCamera: %s' % eachCamera, verbose = False)
                    shotCams.extend([eachCamera])
            debug(app = None, method = '_publish_nukeCamera_for_item', message = 'shotCams: %s' % shotCams, verbose = False)
            
            debug(app = None, method = '_publish_nukeCamera_for_item', message = 'len(shotCams): %s' % len(shotCams), verbose = False)
            group_name = ''
            if len(shotCams) == 1:
                # update fields with the group name:
                group_name = '%s_NUKECAM' % shotCams[0]
                fields["grp_name"] = group_name
                debug(app = None, method = '_publish_nukeCamera_for_item', message = 'grp_name: %s' % group_name, verbose = False)
                
                fields["cam_name"] = shotCams[0]
                debug(app = None, method = '_publish_nukeCamera_for_item', message = 'cam_name: %s' % shotCams[0], verbose = False)
    
                publish_path = publish_template.apply_fields(fields)                 
                debug(app = None, method = '_publish_nukeCamera_for_item', message = 'FINAL publish_path: %s' % publish_path, verbose = False)
                
                ## Make the directory now...
                if not os.path.isdir(os.path.dirname(publish_path)):
                    debug(app = None, method = '_publish_nukeCamera_for_item', message = 'Building dir: %s' % os.path.dirname(publish_path), verbose = False)
                    os.makedirs(os.path.dirname(publish_path))

                frame_start = cmds.playbackOptions(query = True, animationStartTime = True)
                frame_end = cmds.playbackOptions(query = True, animationEndTime = True)
    
                cmds.select(shotCams[0], r = True)
                #Switching to alembic output for camera.
                rootList = ''
                for eachRoot in cmds.ls(sl= True):
                    rootList = '-root %s %s' % (str(cmds.ls(eachRoot, l = True)[0]), rootList)
                
                debug(app = None, method = '_publish_nukeCamera_for_item', message = 'rootList: %s' % rootList, verbose = False)
                abc_export_cmd = "preRollStartFrame -15 -ro -attr SubDivisionMesh -attr smoothed -attr mcAssArchive -wholeFrameGeo -worldSpace -writeVisibility -uvWrite -fr %d %d %s -file %s" % (frame_start, frame_end, rootList, publish_path)
                cmds.AbcExport(verbose = False, j = abc_export_cmd)
                ##fm2n.FromMaya2Nuke(exportPath = os.path.dirname(publish_path), nukePath = 'C:\\"Program Files\"\Nuke7.0v6\\', nukeExec = 'Nuke7.0.exe', scriptName = '%s' % shotCams[0], startFrame = startFrame, endFrame = endFrame, camera = shotCams[0])
                #fm2n.FromMaya2Nuke(exportPath = os.path.dirname(publish_path), nukePath = '', nukeExec = '', scriptName = '%s' % shotCams[0], startFrame = startFrame, endFrame = endFrame)
                debug(app = None, method = '_publish_nukeCamera_for_item', message = 'Export Complete...', verbose = False)

                ## Now register publish with shotgun
                self._register_publish(publish_path,
                                      group_name,
                                      sg_task,
                                      publish_version, 
                                      tank_type,
                                      comment,
                                      thumbnail_path,
                                      [primary_publish_path])
                debug(app = None, method = '_publish_nukeCamera_for_item', message = '_register_publish complete for %s...' % shotCams[0], verbose = False)

                print 'Finished camera export for %s...' % shotCams[0]
                print '====================='
                cleanup.turnOnModelEditors()
            else:
                cmds.warning('Expected exactly one shotCam under %s, found %s. Skipping camera export!' % (item["name"], len(shotCams)))
                cleanup.turnOnModelEditors()
        except Exception, e:
            cleanup.turnOnModelEditors()
            raise TankError("Failed to export NukeCamera: %s" % e)
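cmds.getAttr('%s.type' % eachCamera) above raises if a child of the camera group has no custom 'type' attribute. A guarded lookup using attributeQuery is sketched below; the 'type'/'shotCam' tagging convention is taken from the code above, while the function name is made up for illustration.

import maya.cmds as cmds

def find_shot_cams(parent_node):
    """Sketch: return children of parent_node whose custom 'type' attribute is 'shotCam'."""
    shotCams = []
    for child in cmds.listRelatives(parent_node, children=True) or []:
        ## Only read the attribute if it actually exists on this node
        if cmds.attributeQuery('type', node=child, exists=True):
            if cmds.getAttr('%s.type' % child) == 'shotCam':
                shotCams.append(child)
    return shotCams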
Exemple #51
0
 def _write_menu_lst(self, data, path):
     debug.verbose('writing %s' % path)
     debug.debug(data)
     f = open(path, 'w')
     f.write(data)
     f.close()
Exemple #52
0
    def _publish_renderglobals_xml_for_item(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
        """
        Export an xml file for the specified item and publish it
        to Shotgun.
        """
        group_name = '%s_LIGHTING_RENDERGLOBALS_XML' % ''.join(item["name"].strip("|").split('_hrc')[0].split('_'))
        debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'group_name: %s' % group_name, verbose = False)
        
        tank_type = output["tank_type"]
        debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'tank_type: %s' % tank_type, verbose = False)

        publish_template = output["publish_template"]
        debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'publish_template: %s' % publish_template, verbose = False)

        # get the current scene path and extract fields from it
        # using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn= True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]

        # update fields with the group name:
        fields["grp_name"] = group_name

        ## create the publish path by applying the fields 
        ## with the publish template:
        publish_path = publish_template.apply_fields(fields)
        debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'FINAL publish_path: %s' % publish_path, verbose = False)

        try:
            self.parent.log_debug("Executing command: RENDERGLOBALS XML EXPORT PREP!")
            print '====================='
            print 'Exporting the renderglobals xml %s' % publish_path
            
            if not os.path.isdir(os.path.dirname(publish_path)):
                debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'PATH NOT FOUND.. MAKING DIRS NOW...', verbose = False)
                os.makedirs(os.path.dirname(publish_path))
                
            ## Now write to xml
            debug(app = None, method = '_publish_renderglobals_xml_for_item', message = 'writeXML now...', verbose = False)
            writeXML.writeRenderGlobalData(pathToXML = publish_path)
            
            self._register_publish(publish_path, 
                                  group_name, 
                                  sg_task, 
                                  publish_version, 
                                  tank_type,
                                  comment,
                                  thumbnail_path, 
                                  [primary_publish_path])
            print 'Finished xml export...'
            print '====================='
        except Exception, e:
            raise TankError("Failed to export xml: %s" % e)
Exemple #53
0
 def run(self):
     debug("News: UpdateThread")
     # download cannot contain any tkinter changes
     self.widget.download()
Exemple #54
0
			self.send("atz")   # initialize
			time.sleep(1)
			self.ELMver = self.get()

			if self.ELMver is None :
				self.error("ELMver did not return")
				return
			
			debug("atz response: " + self.ELMver)
		
		except Exception, inst: 
			self.error(inst)
			return

		self.send("ate0")  # echo off
		debug("ate0 response: " + self.get())
		debug("Connected to ECU")
		self.state  = State.Connected

	def error(self, msg=None):
		""" called when connection error has been encountered """
		debug("Connection Error:", True)

		if msg is not None: debug(msg, True);
		if self.port is not None: self.port.close();
		
		self.state = State.Unconnected

	def get_port_name(self):
		return str(self.port) if (self.port is not None) else "No Port"
Exemple #55
0
 def initiate_paxos(self, v):
     debug('PAXOS NODE id={}: Initiating Paxos algorithm! Default v={}'.
           format(self.id, v))
     self.P_propose(v)
Exemple #56
0
 def resetBox(self, strPathBoxName):
     "reset the sub tree of one box"
     debug.debug("INF: choregraphetools.FrameNumber.resetBox( '%s' )" %
                 strPathBoxName)
     self.animations_FrameNumber[strPathBoxName] = 0
Exemple #57
0
 def __init__(self, amentry):
     self.amentry_id = AddActiveMissionEntry(amentry)
     debug.debug("New Mission entry: %s (%s)" %
                 (self.amentry_id, amentry.get('MISSION_SHORTDESC', '')))
Exemple #58
0
    def P_rx_prepare_response(self, d):

        # Phase 2a. If the proposer receives a response to its prepare requests
        # (numbered n) from a majority of acceptors, then it sends an accept
        # request to each of those acceptors for a proposal numbered n with
        # a value v, where v is the value of the highest-numbered proposal
        # among the responses, or is any value if the responses reported no proposals.

        debug('P{}: rxd prepresp from {} with n={}, p={}'.format(
            self.id, d['from'], d['n'], d['p']))

        n = d['n']
        if n not in self.prepare_responses:
            print(
                "!!!!!!!!!P{}: Shouldn't happen: Somehow I rxd a response to a prepare-request with an n I didn't send: {}"
                .format(self.id, d))

        self.prepare_responses[n][
            d['from']] = d  # for this n, remember who voted for what.

        if len(
                self.prepare_responses[n]
        ) > self.MAJORITY:  # if we get a majority response for this n, good.
            debug('P{}: For n={}, I have prepresps from {} > majority={}!'.
                  format(self.id, n, len(self.prepare_responses[n]),
                         self.MAJORITY))
            # Fetch the 'real' proposals - the non-None proposals from each prepare response. Also, disallow proposals that propose the value 'None'.
            # This allows us to get a node up-to-date just by trying to propose the value 'None', without risking reaching consensus
            # on 'None' if the network has not yet reached consensus.
            # IOW, a proposal isn't considered if it's None, or if it's non-None but its proposed value is None.
            proposals = [
                r['p'] for r in self.prepare_responses[n].values()
                if not (r['p'] is None or
                        (r['p'] is not None and r['p'].v is None))
            ]
            if len(proposals) > 0:
                highest_numbered_proposal = max(proposals)
                v = highest_numbered_proposal.v
                debug(
                    'P{}: Highest-numbered proposal\'s val in prepresps is v={}'
                    .format(self.id, v))
            else:
                debug('P{}: No vals in prepresps, I pick v={}'.format(
                    self.id, self.vdefault))
                v = self.vdefault

            p = Proposal(n, v)

            for to in self.prepare_responses[n].keys():  # to each Acceptor I've heard from
                debug('P{}: txing accept req to A={} with proposal={}'.format(
                    self.id, to, p))
                r = {
                    'from': self.id,
                    'to': to,
                    'type': 'accept request',
                    'p': p
                }
                if to == self.id:  # my own self-vote
                    self.A_rx_accept_request(r)
                else:
                    self.send(r)
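
The Phase 2a comment block above reduces to one selection step: among the prepare responses, take the value of the highest-numbered real proposal, or fall back to the proposer's own value if none was reported. A minimal, self-contained sketch of that step follows; the Proposal class and choose_value helper are illustrative assumptions, not this project's actual API:

from functools import total_ordering

@total_ordering
class Proposal(object):
    """Hypothetical stand-in for the proposal objects used above, ordered by number n."""
    def __init__(self, n, v):
        self.n = n  # proposal number
        self.v = v  # proposed value
    def __eq__(self, other):
        return self.n == other.n
    def __lt__(self, other):
        return self.n < other.n

def choose_value(responses_for_n, vdefault):
    # Mirror the filtering above: ignore empty proposals and proposals whose value is None.
    proposals = [r['p'] for r in responses_for_n.values()
                 if r['p'] is not None and r['p'].v is not None]
    if proposals:
        return max(proposals).v  # value of the highest-numbered proposal
    return vdefault              # nothing reported: the proposer may pick any value

For example, choose_value({1: {'p': Proposal(3, 'x')}, 2: {'p': None}}, 'y') returns 'x', while choose_value({2: {'p': None}}, 'y') returns 'y'.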
def LoadLastMission(which=None):
    """ Makes a mission an active mission. """
    print("#given mission argument: ", which)
    plr = getMissionPlayer()
    if which is None:
        which = str(players[plr].lastMission)
        print("#loading mission: ", which)
    if VS.networked():
        custom.run('mission_lib', ['LoadLastMission', which], None)
        return

    last_constructor = players[plr].last_constructor
    last_args = players[plr].last_args
    last_briefing_vars = players[plr].last_briefing_vars
    last_briefing = players[plr].last_briefing
    ret = True
    if which in last_constructor and which in last_args:
        if last_constructor[which] is None:
            if type(last_args[which]) == str:
                script = "%(args)s"
            else:
                script = "%(args)r()"
            vars = dict(args=last_args[which])
        else:
            script = '''#
import %(constructor)s
temp=%(constructor)s.%(constructor)s%(args)s
mission_lib.AddMissionHooks(temp)
temp=0
'''
            cons = last_constructor[which]
            if type(cons) != str:
                cons = cons.__name__
            if type(last_args[which]) == str:
                args = last_args[which]
            else:
                args = repr(last_args[which])
            vars = dict(constructor=cons, args=args)
        script = script % vars
        if script[:1] == '#':
            prescript = '''#
import mission_lib
mission_lib.SetMissionHookArgs(%(amentry)r)
%(postscript)s'''
            amentry = last_briefing_vars[0].get(which, dict())
            try:
                amentry.update(
                    iter(last_briefing_vars[1].get(which, dict()).items()))
                amentry.update([
                    #('MISSION_NAME',which),
                    ('DESCRIPTION', last_briefing[0].get(which, '')),
                    ('ACCEPT_MESSAGE', last_briefing[1].get(which, ''))
                ])
            except:
                debug.error("TRACING BACK")
                import sys
                debug.error(sys.exc_info()[0])
                debug.error(sys.exc_info()[1])
                debug.error("BACKTRACE done")
                ret = False
            vars = dict(amentry=amentry, postscript=script)
            script = prescript % vars
        debug.debug("Loading mission:\n%s" % script)
        VS.LoadNamedMissionScript(which, script)
    else:
        debug.debug('No last mission with name "' + str(which) + '"')
        ret = False
    RemoveLastMission(which)
    return ret
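
To make the string templating in LoadLastMission concrete, here is a rough, hedged illustration of the script it assembles for a constructor-built mission; the constructor name 'cargo_mission' and its argument tuple are invented placeholders, not values from the game data:

# Illustration only; 'cargo_mission' and its arguments are made up.
cons = 'cargo_mission'
args = "('Iron', 3)"
script = '''#
import %(constructor)s
temp=%(constructor)s.%(constructor)s%(args)s
mission_lib.AddMissionHooks(temp)
temp=0
''' % dict(constructor=cons, args=args)
print(script)

Because the assembled script starts with '#', the prescript branch above then wraps it with the mission_lib.SetMissionHookArgs(...) call before handing it to VS.LoadNamedMissionScript.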
Exemple #60
0
def commander(c, genList, circleList, starList):
    # this function opens the commands.txt file and converts it into a list of
    # commands on how to play the level accompanying the song and put them into
    # a list. If a command is unrecognized, the game will close (for now).
    # It will take the commands and their parameters and organize them into a
    # 2D list
    # RETURNS: A list of lists
    # DEFAULT VALUES

    try:
        genFile = open(genList, 'r')
    except IOError:  # open() raises IOError, not pygame.error
        debug(c.DEBUG, ('Cannot open file: ', genList))
        raise SystemExit(str(geterror()))

    try:
        circleFile = open(circleList, 'r')
    except IOError:
        debug(c.DEBUG, ('Cannot open file: ', circleList))
        raise SystemExit(str(geterror()))

    try:
        starFile = open(starList, 'r')
    except IOError:
        debug(c.DEBUG, ('Cannot open file: ', starList))
        raise SystemExit(str(geterror()))

    # list of general commands
    genCommands = genFile.read()
    genCommands = genCommands.split()
    genCommandList = []

    # list of circle commands
    circleCommands = circleFile.read()
    circleCommands = circleCommands.split()
    circleCommandList = []

    # list of star commands
    starCommands = starFile.read()
    starCommands = starCommands.split()
    starCommandList = []

    for action in genCommands:
        # the action[0] just checks the first letter in the action.
        # Option 1: set the BPM
        if action[0] == 'B':
            try:
                bpm = action.replace('BPM', '')
                bpm = float(bpm)
            except Exception:
                print "Invalid BPM possible. See commands.txt"
                sys.exit(UserWarning)
            # calculate the global wait times.
            # (how many frames until the next action is committed.)
            cWait = c.FPS / (bpm / 60.0)
            fWait = cWait
            # calculate the move speed of circle and star
            # this equation sets the speed as a fraction of the radius of the
            # ring, so that it takes X seconds to reach the ring.
            # the X seconds will either be user defined, or bpm/60
            # (how quickly an action is completed)
            cSpeed = (c.RING_SIZE / c.FPS) * (bpm / 60.0)
            fSpeed = (c.RING_RADIUS / c.FPS) * (bpm / 60.0)
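            # Worked example with hypothetical values: at c.FPS = 60 and a 'BPM120'
            # token, bpm / 60.0 = 2.0 beats per second, so cWait = fWait = 60 / 2.0
            # = 30 frames between actions, and cSpeed = (c.RING_SIZE / 60) * 2.0
            # pixels per frame, i.e. a circle covers c.RING_SIZE in 30 frames (one beat).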
            # the BPM list is formatted as such:'B', WC, WF, CSP, and FSP
            genCommandList.append(['B', cWait, fWait, cSpeed, fSpeed])
        #=======================================================================
        # elif action[0] == 'P':
        #     if action == 'Play:':
        #         commandList.append(['P'])
        #     else:
        #         print "Invalid Play action given. See commands.txt"
        #         sys.exit(UserWarning)
        #=======================================================================
        elif action[0] == 'J':
            if "JumpTo," in action:
                try:
                    startTime = float(action.replace('JumpTo,', ''))
                except Exception:
                    print "Invalid start time given. Must be in seconds. See commands.txt"
                    sys.exit(UserWarning)
                genCommandList.append(['J', startTime])
            else:
                print "Invalid start time given. Must be 'JumpTo,X'. See commands.txt"
                sys.exit(UserWarning)
        elif action[0] == 'W':
            if action[1] == 'G':
                # a global constant wait time between each action
                gWait = action.replace('W', '')
                gWait = gWait.replace('G', '')
                try:
                    gWait = c.FPS * float(gWait)
                except Exception:
                    print "Invalid WG# given. See commands.txt"
                    sys.exit(UserWarning)
                genCommandList.append(['WG', gWait])
            elif action[1] == 'C':
                # a global constant wait time before each circle creation
                cWait = action.replace('W', '')
                cWait = cWait.replace('C', '')
                try:
                    cWait = c.FPS * float(cWait)
                except Exception:
                    print "Invalid WC# given. See commands.txt"
                    sys.exit(UserWarning)
                genCommandList.append(['WC', cWait])
            elif action[1] == 'F':
                # a global constant wait time before each star creation
                fWait = action.replace('W', '')
                fWait = fWait.replace('F', '')
                try:
                    fWait = c.FPS * float(fWait)
                except Exception:
                    print "Invalid WF# given. See commands.txt"
                    sys.exit(UserWarning)
                genCommandList.append(['WF', fWait])

    for action in circleCommands:
        if action[0] == 'C':
            color = ''
            # we test to see if the action is for changing speed, or making circ
            if action[1] == 'S':
                try:
                    cSpeed = action.replace('CSP', '')
                    cSpeed = float(cSpeed)
                    cSpeed = (c.RING_SIZE / c.FPS) / cSpeed
                    debug(c.DEBUG, cSpeed)
                except Exception:
                    print "Invalid CSP given. See commands.txt"
                    sys.exit(UserWarning)
                circleCommandList.append(['CS', cSpeed])
            # if it does not begin with CS, that means it is for making a circle
            else:
                cSpeed = action.replace('C', '')
                cSpeed = cSpeed.replace(',', '')
                # now we iterate through the string, creating a color variable
                # and remove the letters, leaving speed with only numbers.
                # if no speed was given, then the len will be 0, so we can exit.
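                # Worked example with a hypothetical token 'CRG,2.5': after the
                # replaces, cSpeed starts as 'RG2.5'; the loop below moves 'R'
                # and 'G' into color, leaving cSpeed = '2.5' for the float() call.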
                while len(cSpeed) != 0 and cSpeed[0].isalpha():
                    debug(c.DEBUG, ('cSpeed2: ', cSpeed))
                    color = color + cSpeed[0]
                    cSpeed = cSpeed.replace(cSpeed[0], '')
                # if anything is left in the variable cSpeed, then it SHOULD be
                # the circle speed number
                if len(cSpeed) != 0:
                    try:
                        cSpeed = float(cSpeed)
                        cSpeed = (c.RING_SIZE / c.FPS) / cSpeed
                    except Exception:
                        print "Invalid CSpeed given. See commands.txt"
                        sys.exit(UserWarning)
                # grab the right colors!
                R, G, B = 0, 0, 0
                debug(c.DEBUG, ("COLOR: ", color))
                if color.find('R') != -1:
                    R = 255
                if color.find('G') != -1:
                    G = 255
                if color.find('B') != -1:
                    B = 255
                if (R, G, B) == (0, 0, 0):
                    print "No colors found. See commands.txt"
                    sys.exit(UserWarning)
                circleCommandList.append(['C', (R, G, B), cSpeed])
        elif action[0] == 'W':
            if action[1] == 'C':
                # a global constant wait time before each circle creation
                cWait = action.replace('W', '')
                cWait = cWait.replace('C', '')
                try:
                    cWait = c.FPS * float(cWait)
                except Exception:
                    print "Invalid WC# given. See commands.txt"
                    sys.exit(UserWarning)
                circleCommandList.append(['WC', cWait])
            elif action[1].isdigit():
                waitTime = action.replace('W', '')
                # an instance wait, for only that call.
                try:
                    # how many frames before the next action occurs
                    waitTime = c.FPS * float(waitTime)
                except Exception:
                    print "Invalid W# given. See commands.txt"
                    sys.exit(UserWarning)
                circleCommandList.append(['W', waitTime])
        elif action[0] == ':':
            if action == ':Stop':
                circleCommandList.append(['S'])
            else:
                print "Invalid Stop given. See commands.txt"
                sys.exit(UserWarning)

    for action in starCommands:
        if action[0] == 'F':
            # we test to see if the action is for changing speed, or making star
            if action[1] == 'S':
                try:
                    fSpeed = action.replace('FSP', '')
                    fSpeed = float(fSpeed)
                    fSpeed = (c.RING_RADIUS / c.FPS) / fSpeed
                except Exception:
                    print "Invalid FSP given. See commands.txt"
                    sys.exit(UserWarning)
                starCommandList.append(['FS', fSpeed])
            else:
                if action.find(',') != -1:
                    # if both an angle and speed is defined, we must split the
                    # numbers by the comma.
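                    # Worked example with a hypothetical token 'F90,2': fAngle
                    # becomes 90.0 and fSpeed becomes (c.RING_RADIUS / c.FPS) / 2.0,
                    # so the star takes roughly 2 seconds to reach the ring.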
                    starTemp = action.replace('F', '')
                    fAngle, fSpeed = starTemp.split(',')
                    try:
                        fAngle = float(fAngle)
                        fSpeed = float(fSpeed)
                        fSpeed = (c.RING_RADIUS / c.FPS) / fSpeed
                    except Exception:
                        print "Invalid  Fx/# given. See commands.txt"
                        sys.exit(UserWarning)
                    starCommandList.append(['F', fAngle, fSpeed])
                else:
                    fAngle = action.replace('F', '')
                    try:
                        fAngle = float(fAngle)
                    except Exception:
                        print "Invalid  Fx given. See commands.txt"
                        sys.exit(UserWarning)
                    starCommandList.append(['F', fAngle, ''])
        elif action[0] == 'W':
            if action[1].isdigit():
                waitTime = action.replace('W', '')
                # an instance wait, for only that call.
                try:
                    # how many frames before the next action occurs
                    waitTime = c.FPS * float(waitTime)
                except Exception:
                    print "Invalid W# given. See commands.txt"
                    sys.exit(UserWarning)
                starCommandList.append(['W', waitTime])
            elif action[1] == 'F':
                # a global constant wait time before each star creation
                fWait = action.replace('W', '')
                fWait = fWait.replace('F', '')
                try:
                    fWait = c.FPS * float(fWait)
                except Exception:
                    print "Invalid WF# given. See commands.txt"
                    sys.exit(UserWarning)
                starCommandList.append(['WF', fWait])
        elif action[0] == ':':
            if action == ':Stop':
                starCommandList.append(['S'])
            else:
                print "Invalid Stop given. See commands.txt"
                sys.exit(UserWarning)

    debug(c.DEBUG, (genCommandList, circleCommandList, starCommandList))
    return genCommandList, circleCommandList, starCommandList
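
As a rough guide to the shapes commander returns, the sketch below walks the general command list; the function name and the flat state dict are assumptions for illustration, not part of the game code:

def apply_gen_commands(genCommandList):
    # Hedged sketch of consuming the general commands produced above:
    # ['B', cWait, fWait, cSpeed, fSpeed], ['J', startTime],
    # ['WG', gWait], ['WC', cWait], ['WF', fWait].
    state = {}
    for cmd in genCommandList:
        if cmd[0] == 'B':
            state['cWait'], state['fWait'], state['cSpeed'], state['fSpeed'] = cmd[1:]
        elif cmd[0] == 'J':
            state['startTime'] = cmd[1]
        elif cmd[0] in ('WG', 'WC', 'WF'):
            state[cmd[0]] = cmd[1]
    return state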