Example #1
    def execute(self):
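        """Check a dropped file into TACTIC.

        Looks up (or creates) the target sobject, runs a simple check-in
        in 'move' mode, appends the outcome to TACTIC_log.txt and removes
        the source file from the drop folder once it has been handled.
        """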

        file_path = self.kwargs.get("path")
        site = self.kwargs.get("site")
        project_code = self.kwargs.get("project_code")
        base_dir = self.kwargs.get("base_dir")
        search_type = self.kwargs.get("search_type")
        process = self.kwargs.get("process")
        watch_script_path = self.kwargs.get("script_path")
        if not process:
            process = "publish"

        basename = os.path.basename(file_path)

        context = self.kwargs.get("context")
        if not context:
            context = '%s/%s' % (process, basename)

        # find the relative_dir and relative_path
        relative_path = file_path.replace("%s/" % base_dir, "")
        relative_dir = os.path.dirname(relative_path)

        file_name = os.path.basename(file_path)
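        # Every check-in result (success or failure) is appended to
        # TACTIC_log.txt in the watched base directory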
        log_path = '%s/TACTIC_log.txt' % (base_dir)
        self.create_checkin_log()

        # Define asset type of the file
        asset_type = self.get_asset_type(file_path)
        description = "drop folder check-in of %s" % file_name

        from client.tactic_client_lib import TacticServerStub
        server = TacticServerStub.get(protocol='local')
        server.set_project(project_code)

        transaction = Transaction.get(create=True)
        server.start(title='Check-in of media',
                     description='Check-in of media')

        server_return_value = {}

        try:
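            # Look for an existing sobject with the same file name
            # (the relative_dir filter below is currently disabled)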
            filters = [
                ['name', '=', file_name],
                #[ 'relative_dir', '=', relative_dir ]
            ]
            sobj = server.query(search_type, filters=filters, single=True)

            if not sobj:
                # create sobject if it does not yet exist
                sobj = SearchType.create(search_type)
                if SearchType.column_exists(search_type, "name"):
                    sobj.set_value("name", basename)
                if SearchType.column_exists(search_type, "media_type"):
                    sobj.set_value("media_type", asset_type)

                if SearchType.column_exists(search_type, "relative_dir"):
                    sobj.set_value("relative_dir", relative_dir)

                if SearchType.column_exists(search_type, "keywords"):
                    keywords = Common.extract_keywords_from_path(relative_path)
                    keywords = " ".join(keywords)
                    sobj.set_value("keywords", keywords)

                sobj.commit()
                search_key = sobj.get_search_key()
            else:
                search_key = sobj.get("__search_key__")

            #task = server.create_task(sobj.get('__search_key__'),process='publish')
            #server.update(task, {'status': 'New'})
            """
            #TEST: simulate different check-in duration
            from random import randint
            sec = randint(1, 5)
            print "checking in for ", sec, "sec"
            server.eval("@SOBJECT(sthpw/login)")
            import shutil
            dir_name,base_name = os.path.split(file_path)
            dest_dir = 'C:/ProgramData/Southpaw/watch_temp'
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            shutil.move(file_path, '%s/%s'%(dest_dir, base_name))
            time.sleep(sec)
            # move back the file in a few seconds 
            shutil.move('%s/%s'%(dest_dir, base_name), file_path)
            """
            server_return_value = server.simple_checkin(
                search_key,
                context,
                file_path,
                description=description,
                mode='move')
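            # mode='move' transfers the dropped file into the TACTIC asset repository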

            if watch_script_path:
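                # Run the custom post-check-in script configured for this
                # drop folder, passing the drop path and search key along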
                cmd = PythonCmd(script_path=watch_script_path,
                                search_type=search_type,
                                drop_path=file_path,
                                search_key=search_key)
                cmd.execute()

        except Exception as e:
            print("Error occurred: %s" % e)
            error_message = str(e)

            import traceback
            tb = sys.exc_info()[2]
            stacktrace = traceback.format_tb(tb)
            stacktrace_str = "".join(stacktrace)
            print("-" * 50)
            print(stacktrace_str)

            version_num = 'Error:'
            system_time = strftime("%Y/%m/%d %H:%M", gmtime())
            pre_log = file_name.ljust(50) + system_time.ljust(33) \
                    + version_num.ljust(15) + error_message + '\n' \
                    + stacktrace_str + '\n' + (watch_script_path or '')
            # Append the failure to the TACTIC_log file in the drop folder
            with open(log_path, 'a') as f:
                f.write(pre_log)

            #server.abort()
            transaction.rollback()
            raise

        else:
            transaction.commit()

        #server.finish()

        if server_return_value:
            # Record this check-in in the TACTIC_log file
            checkin_time = server_return_value.get('timestamp')
            version_num = str(server_return_value.get('version'))
            try:
                value = parser.parse(checkin_time)
                value = value.strftime("%Y/%m/%d %H:%M")
            except Exception:
                value = checkin_time

            pre_log = file_name.ljust(50) + value.ljust(33) \
                    + version_num.ljust(15) + 'ok\n'
            with open(log_path, 'a') as f:
                f.write(pre_log)

            # Invoke Trigger:called_triggers
            from pyasm.command import Trigger
            Trigger.call_all_triggers()

            # Delete the source file or folder after the check-in step
            print("File handled.")
            if os.path.exists(file_path):
                if os.path.isdir(file_path):
                    import shutil
                    shutil.rmtree(file_path)
                else:
                    os.unlink(file_path)
                print("Source file [%s] deleted." % file_name)
Example #2
    def handle_keywords(self):
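        """Keep an asset's or collection's keywords_data in sync.

        Handles collection relationships being added or removed, rebuilds
        path keywords when a file is moved or renamed, and updates user
        keywords when a collection is created, renamed, or edited.
        """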

        input = self.get_input()
        caller = self.get_caller()

        sobj = caller

        base_search_type = sobj.get_base_search_type()

        has_keywords_data = False
        rename_collection = False
        if sobj:
            has_keywords_data = sobj.column_exists("keywords_data")

        is_collection = input.get("sobject").get("_is_collection")

        if input.get("is_delete") == True:
            # Collection relationships being removed
            mode = "delete"
            self.update_collection_keywords(mode, base_search_type, input)
            return

        # Collection relationships being created or added
        elif input.get("is_insert"):
            mode = "insert"
            self.update_collection_keywords(mode, base_search_type, input)

        # If keywords_data column exists and collection is being changed
        # or folder structure changed
        if has_keywords_data:

            update_data = input.get("update_data")

            # If Relative dir is changed or file is renamed, update path keywords
            if ("relative_dir" in update_data or "name" in update_data) \
                    and input.get("mode") != "insert":

                file_path = input.get("sobject").get("relative_dir")
                asset_name = input.get("sobject").get("name")

                project_code = Project.get_project_code()

                # Ignore the common keywords path
                ignore_path = "%s/asset" % project_code
                if ignore_path in file_path:
                    file_path = file_path.replace(ignore_path, "")

                path_keywords = Common.extract_keywords_from_path(file_path)
                path_keywords.append(asset_name.lower())
                path_keywords = " ".join(path_keywords)

                keywords_data = sobj.get_json_value("keywords_data", {})

                keywords_data['path'] = path_keywords
                sobj.set_json_value("keywords_data", keywords_data)
                sobj.commit(triggers=False)
                self.set_searchable_keywords(sobj)

            else:
                if "user_keywords" in update_data:
                    has_user_keywords = True

                    user_keywords = update_data.get("user_keywords")

                    if not user_keywords:
                        user_keywords = ""

                else:
                    has_user_keywords = False

                if is_collection:
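                    # Collections store their name (and any user keywords)
                    # under the 'user' key of keywords_data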
                    if input.get("mode") == "update" and "name" in update_data:
                        rename_collection = True

                    # New Collection created
                    if input.get("is_insert"):
                        collection_keywords = update_data.get("user_keywords")
                        collection_name = update_data.get("name")

                        keywords_data = sobj.get_json_value(
                            "keywords_data", {})
                        if collection_keywords:
                            keywords_data['user'] = "%s %s" % (
                                collection_name, collection_keywords)
                        else:
                            keywords_data['user'] = "%s" % collection_name

                        sobj.set_json_value("keywords_data", keywords_data)
                        sobj.commit(triggers=False)
                        self.set_searchable_keywords(sobj)

                    # If collection is renamed
                    elif rename_collection:
                        collection_name = update_data.get("name")
                        keywords_data = sobj.get_json_value(
                            "keywords_data", {})

                        if 'user' in keywords_data:
                            user = keywords_data.get('user')
                            old_collection_name = input.get("prev_data").get(
                                "name")

                            user = user.replace(old_collection_name, "")
                            keywords_data['user'] = user

                            sobj.set_json_value("keywords_data", keywords_data)
                            sobj.commit(triggers=False)

                            self.update_user_keywords(sobj, user,
                                                      base_search_type)

                    # If user_keywords column is changed
                    elif has_user_keywords:

                        self.update_user_keywords(sobj, user_keywords,
                                                  base_search_type)

                # If regular asset keywords being changed
                else:
                    if has_user_keywords:

                        self.update_user_keywords(sobj, user_keywords,
                                                  base_search_type)