def verify(self, login_name, password):
    if login_name.find("\\") != -1:
        domain, base_login_name = login_name.split("\\")
    else:
        base_login_name = login_name
        domain = None

    # confirm that there is a domain present if required
    require_domain = Config.get_value("active_directory", "require_domain")
    domain_component = Config.get_value("active_directory", "domain_component")

    script_path = Config.get_value("active_directory", "allow_script")
    if script_path:
        flag = False
        try:
            from tactic.command import PythonCmd
            from pyasm.command import Command
            kwargs = {'login': login_name}
            cmd = PythonCmd(script_path=script_path, **kwargs)
            #flag = Command.execute_cmd(cmd)
            flag = cmd.execute()
        except Exception, e:
            print e
            raise

        if flag != True:
            return False
def delete_files(my, nodes):
    # clean out all of the files
    for node in nodes:
        name = my.xml.get_node_name(node)
        if name == "include":
            path = my.xml.get_attribute(node, "path")
            if not path:
                print("WARNING: No path found for search type in manifest")
                continue

            path = "%s/%s" % (my.plugin_dir, path)
            if path.endswith(".py"):
                from tactic.command import PythonCmd
                cmd = PythonCmd(file_path=path)
                manifest = cmd.execute()

                if manifest:
                    xml = Xml()
                    xml.read_string(manifest)
                    include_nodes = xml.get_nodes("manifest/*")
                    my.delete_files(include_nodes)

        elif name == "python":
            # don't delete python node file
            pass

        else:
            path = my.get_path_from_node(node)
            if path and os.path.exists(path):
                print "Deleting: ", path
                os.unlink(path)
def handle_include(my, node):
    path = my.xml.get_attribute(node, "path")
    if not path:
        raise TacticException("No path found for include in manifest")

    path = "%s/%s" % (my.plugin_dir, path)
    if path.endswith(".py"):
        from tactic.command import PythonCmd
        cmd = PythonCmd(file_path=path)
        manifest = cmd.execute()

        if not manifest:
            print "No manifest discovered in [%s]" % path
            return

        xml = Xml()
        xml.read_string(manifest)
        nodes = xml.get_nodes("manifest/*")

        sobjects = []
        for i, node in enumerate(nodes):
            name = my.xml.get_node_name(node)
            if name == 'sobject':
                dumped_sobjects = my.handle_sobject(node)
                if not dumped_sobjects:
                    dumped_sobjects = []
                sobjects.extend(dumped_sobjects)
            elif name == 'search_type':
                my.handle_search_type(node)
            elif name == 'include':
                my.handle_include(node)
def verify(my, login_name, password):
    if login_name.find("\\") != -1:
        domain, base_login_name = login_name.split("\\")
    else:
        base_login_name = login_name
        domain = None

    # confirm that there is a domain present if required
    require_domain = Config.get_value("active_directory", "require_domain")
    domain_component = Config.get_value("active_directory", "domain_component")

    script_path = Config.get_value("active_directory", "allow_script")
    if script_path:
        flag = False
        try:
            from tactic.command import PythonCmd
            from pyasm.command import Command
            kwargs = {'login': login_name}
            cmd = PythonCmd(script_path=script_path, **kwargs)
            #flag = Command.execute_cmd(cmd)
            flag = cmd.execute()
        except Exception, e:
            print e
            raise

        if flag != True:
            return False
def get_result(my, sobject):
    result = None
    sobject_dict = sobject.get_sobject_dict()
    filter_data = my.filter_data.get_data()
    try:
        cmd = PythonCmd(code=my.code, sobject=sobject_dict, filter_data=filter_data)
        result = cmd.execute()
    except Exception, e:
        return str(e)
def get_text_value(my):
    sobject = my.get_current_sobject()
    sobject_dict = sobject.get_sobject_dict()
    try:
        cmd = PythonCmd(code=my.code, sobject=sobject_dict)
        result = cmd.execute()
    except Exception, e:
        return str(e)
def get_display(self):
    script_path = self.get_option("script_path")
    script_code = self.get_option("script_code")

    from tactic.command import PythonCmd
    if script_path:
        cmd = PythonCmd(script_path=script_path, values=self.values, search=search, show_title=self.show_title)
    elif script_code:
        cmd = PythonCmd(script_code=script_code, values=self.values, search=search, show_title=self.show_title)

    cmd.execute()
def get_result(self, sobject):
    result = None
    sobject_dict = sobject.get_sobject_dict()
    filter_data = self.filter_data.get_data()
    try:
        cmd = PythonCmd(code=self.code, sobject=sobject_dict, filter_data=filter_data)
        result = cmd.execute()
    except Exception as e:
        return str(e)

    return result
def alter_search(my, search):
    script_path = my.get_option("alter_search_script_path")
    script_code = my.get_option("alter_search_script_code")

    from tactic.command import PythonCmd
    if script_path:
        cmd = PythonCmd(script_path=script_path, values=my.values, search=search, show_title=my.show_title)
    elif script_code:
        cmd = PythonCmd(script_code=script_code, values=my.values, search=search, show_title=my.show_title)

    cmd.execute()
def get_display(my):
    script_path = my.kwargs.get("script_path")
    if not script_path:
        return {}

    python_cmd = PythonCmd(**my.kwargs)
    ret_val = python_cmd.execute()

    return ret_val
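# NOTE: the call sites in this file all follow the same pattern: build a
# PythonCmd with an inline "code" string, a "script_path" pointing at a stored
# custom script, or a "file_path" on disk, pass along any keyword arguments
# the script should see, then call execute() and use its return value.
# A minimal usage sketch of that pattern follows.  The script path and the
# 'login' kwarg are hypothetical, and this assumes a running TACTIC
# environment in which keyword arguments surface to the executed script.
from tactic.command import PythonCmd

kwargs = {'login': 'beth'}                              # hypothetical argument
cmd = PythonCmd(script_path="security/allow_login", **kwargs)
flag = cmd.execute()                                    # whatever the script returned
if flag != True:
    print "login rejected by allow_script"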
def preprocess(my):
    code = my.kwargs.get('data')
    if not code:
        my.data = {}
        return

    # preprocess using mako
    #include_mako = my.kwargs.get("include_mako")
    #if not include_mako:
    #    include_mako = my.view_attrs.get("include_mako")

    from tactic.command import PythonCmd
    python_cmd = PythonCmd(code=code)
    my.data = python_cmd.execute()
def handle_include(my, node):
    path = my.xml.get_attribute(node, "path")
    if not path:
        raise TacticException("No path found for search type in manifest")

    path = "%s/%s" % (my.plugin_dir, path)
    if path.endswith(".py"):
        from tactic.command import PythonCmd
        cmd = PythonCmd(file_path=path)
        manifest = cmd.execute()

        xml = Xml()
        xml.read_string(manifest)
        nodes = xml.get_nodes("manifest/*")
        nodes.reverse()
        my.handle_nodes(nodes)
def handle_python(my, node):
    '''during uninstall, handle the python undo_path'''
    path = my.xml.get_attribute(node, "undo_path")
    # if no path, then nothing to undo
    if not path:
        print "No undo_path defined for this python node"
        return

    if not path.endswith('.py'):
        raise TacticException("Path should have the .py extension for python in manifest")

    path = "%s/%s" % (my.plugin_dir, path)
    if not os.path.exists(path):
        raise TacticException("Undo Path [%s] does not exist for python node in manifest" % path)

    if path.endswith(".py"):
        from tactic.command import PythonCmd
        cmd = PythonCmd(file_path=path)
        cmd.execute()
def run_callback(my, pipeline, process, status):
    # get the node triggers
    # TODO: make this more efficient
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline.get_code())
    search.add_filter("process", process)
    process_sobj = search.get_sobject()

    print "callback process: ", process, pipeline.get_code()
    assert(process_sobj)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    ret_val = None
    action = triggers.get("on_%s" % status)
    js_action = triggers.get("cbjs_%s" % status)
    action_path = triggers.get("on_%s_path" % status)

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif js_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=js_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call a trigger
        event = "process|%s" % status

        # how to get the value here?
        process_code = process_sobj.get_code()
        triggers = Trigger.call(my, event, kwargs, process=process_code)
        if triggers:
            ret_val = triggers[0].get_ret_val()

    return ret_val
def get_text_value(self):
    sobject = self.get_current_sobject()
    sobject_dict = sobject.get_sobject_dict()
    try:
        cmd = PythonCmd(code=self.code, sobject=sobject_dict)
        result = cmd.execute()
    except Exception as e:
        return str(e)

    if result == "":
        return result

    sobject.set_value(self.get_name(), result, temp=True)

    display_format = self.get_option("display_format")
    if display_format:
        expr = "@FORMAT(@GET(.%s), '%s')" % (self.get_name(), display_format)
        result = Search.eval(expr, sobject, single=True)

    return result
def handle_include(my, node):
    path = my.xml.get_attribute(node, "path")
    if not path:
        raise TacticException("No path found for search type in manifest")

    path = "%s/%s" % (my.plugin_dir, path)
    if path.endswith(".py"):
        from tactic.command import PythonCmd
        cmd = PythonCmd(file_path=path)
        manifest = cmd.execute()
        if not manifest:
            return

        xml = Xml()
        xml.read_string(manifest)
        nodes = xml.get_nodes("manifest/*")
        nodes.reverse()
        my.handle_nodes(nodes)
def run_callback(my, pipeline, process, status):
    # get the node triggers
    # TODO: make this more efficient
    search = Search("config/process")
    search.add_filter("pipeline_code", pipeline.get_code())
    search.add_filter("process", process)
    process_sobj = search.get_sobject()

    #print "callback process: ", process, pipeline.get_code()
    if not process_sobj:
        raise TacticException('Process item [%s] has not been created. Please save your pipeline in the Project Workflow Editor to refresh the processes.' % process)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    ret_val = None
    action = triggers.get("on_%s" % status)
    js_action = triggers.get("cbjs_%s" % status)
    action_path = triggers.get("on_%s_path" % status)

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif js_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=js_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call a trigger
        event = "process|%s" % status

        # how to get the value here?
        process_code = process_sobj.get_code()
        triggers = Trigger.call(my, event, kwargs, process=process_code)
        if triggers:
            ret_val = triggers[0].get_ret_val()

    return ret_val
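# For reference, run_callback() reads its callbacks from the "workflow" JSON
# column of the config/process entry.  The dict below is a hypothetical
# illustration of the keys it looks up (the script bodies and path are
# placeholders, not taken from the original source).
workflow = {
    "on_complete": "print 'node completed'",        # inline Python -> PythonCmd(code=...)
    "cbjs_revise": "console.log('sent back');",     # inline JavaScript -> JsCmd(code=...)
    "on_pending_path": "workflow/on_pending",       # stored script path -> PythonCmd(script_path=...)
}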
def handle_action(my):
    my.log_message(my.sobject, my.process, "in_progress")

    process_obj = my.pipeline.get_process(my.process)

    # get the node's triggers
    search = Search("config/process")
    search.add_filter("process", my.process)
    search.add_filter("pipeline_code", my.pipeline.get_code())
    process_sobj = search.get_sobject()
    #process_sobj = my.pipeline.get_process_sobject(my.process)

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    action = triggers.get("on_action")
    cbjs_action = triggers.get("cbjs_action")
    action_path = triggers.get("on_action_path")

    kwargs, input = my.build_trigger_input()

    if action or action_path:
        from tactic.command import PythonCmd
        if action:
            cmd = PythonCmd(code=action, input=input, **kwargs)
        else:
            cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
        ret_val = cmd.execute()

    elif cbjs_action:
        from tactic.command import JsCmd
        cmd = JsCmd(code=cbjs_action, input=input, **kwargs)
        ret_val = cmd.execute()

    else:
        # or call an action trigger
        Trigger.call(my, "process|action", input, process=process_sobj.get_code())

    Trigger.call(my, "process|complete", my.input)
                if manifest:
                    xml = Xml()
                    xml.read_string(manifest)
                    nodes = xml.get_nodes("manifest/*")
                    my.import_manifest(nodes)

            elif node_name == 'python':
                path = my.xml.get_attribute(node, "path")
                path = "%s/%s" % (my.plugin_dir, path)

                # just run the python script
                from tactic.command import PythonCmd
                cmd = PythonCmd(file_path=path)
                cmd.execute()

        '''
        TODO: do we store the transaction here???
        try:
            from pyasm.search import Transaction
            transaction = Transaction.get()
            transaction_str = transaction.xml.to_string()

            my.plugin.set_value("transaction", transaction_str)
            my.plugin.commit()
class PluginInstaller(PluginBase):

    def execute(my):
        # install the plugin
        mode = my.kwargs.get("mode")
        if not mode:
            mode = 'install'

        # register mode actually reads the manifest.
        if mode == 'install':
            nodes = my.xml.get_nodes("manifest/*")
        else:
            # DEPRECATED
            # dynamically create the plugin node in the manifest
            plugin_node = my.xml.create_element("sobject")
            my.xml.set_attribute(plugin_node, "path", "config_plugin.spt")
            nodes = []
            nodes.insert(0, plugin_node)

        # register the plugin.
        register = my.kwargs.get("register")
        code = None
        plugin = None
        if register in ['true', True]:
            node = my.xml.get_node("manifest/data")
            data = my.xml.get_node_values_of_children(node)

            code = data.get("code")
            version = data.get("version")

            # first check if a plugin with this code already exists
            plugin = Search.get_by_code("config/plugin", code)
            if plugin:
                # uninstall the plugin???
                pass
            else:
                # create a new one
                plugin = SearchType.create("config/plugin")
                plugin.set_value("code", code)

            # update the information
            if version:
                plugin.set_value("version", version)

            # NOTE: is this really needed?
            plugin.set_value("manifest", my.manifest)

            if my.plugin_dir.startswith(my.base_dir):
                rel_dir = my.plugin_dir.replace(my.base_dir, "")
                rel_dir = rel_dir.lstrip("/")
                plugin.set_value("rel_dir", rel_dir)

            plugin.commit()

        my.plugin = plugin

        my.import_manifest(nodes)

        # Users see Activate in the UI
        label = mode
        if mode == 'install':
            label = 'activate'
        if code:
            my.add_description('%s plugin [%s]' % (label.capitalize(), code))

    def import_manifest(my, nodes):
        paths_read = []

        for node in nodes:
            node_name = my.xml.get_node_name(node)

            if node_name == 'search_type':
                search_type = my.xml.get_attribute(node, 'code')

                # implicitly add the entry to the schema table.
                # Reset the cache every time to ensure that any updates to
                # the schema are reflected here.
                schema = Schema.get(reset_cache=True)
                xml = schema.get_xml()
                schema_node = xml.get_node("schema/search_type[@name='%s']" % search_type)
                parent = xml.get_node("schema")
                if schema_node == None:
                    schema_node = xml.create_element("search_type")
                    xml.set_attribute(schema_node, "name", search_type)
                    #parent = xml.get_parent(node)
                    xml.append_child(parent, schema_node)

                    schema.set_value('schema', xml.to_string())
                    schema.commit()

                # TODO: connections?
                path = my.xml.get_attribute(node, "path")
                if not path:
                    path = "%s.spt" % search_type.replace("/", "_")
                path = "%s/%s" % (my.plugin_dir, path)
                if path in paths_read:
                    continue

                if my.verbose:
                    print "Reading search_type: ", path

                # NOTE: privileged knowledge of the order of return values
                jobs = my.import_data(path, commit=True)

                paths_read.append(path)

                if not jobs:
                    continue

                search_type_obj = jobs[0]
                if len(jobs) == 1:
                    # only the search type was defined
                    table = None
                else:
                    table = jobs[1]

                try:
                    # check to see if the search type already exists
                    search_type_chk = SearchType.get(search_type)
                    if search_type_chk:
                        if my.verbose:
                            print 'WARNING: Search Type [%s] is already registered' % search_type_chk.get_value("search_type")
                    else:
                        search_type_obj.commit()
                except SearchException, e:
                    if e.__str__().find('not registered') != -1:
                        search_type_obj.commit()

                # check if table exists
                has_table = False
                if has_table:
                    if my.verbose:
                        print 'WARNING: Table [%s] already exists'
                elif table:
                    #print table.get_statement()
                    if table:
                        database = table.get_database()
                        table_name = table.get_table()
                        TableUndo.log(search_type, database, table_name)

            elif node_name == 'sobject':
                path = my.xml.get_attribute(node, "path")
                search_type = my.xml.get_attribute(node, "search_type")
                seq_max = my.xml.get_attribute(node, "seq_max")
                try:
                    if seq_max:
                        seq_max = int(seq_max)
                except ValueError:
                    seq_max = 0

                if not path:
                    if search_type:
                        path = "%s.spt" % search_type.replace("/", "_")
                if not path:
                    raise TacticException("No path specified")

                path = "%s/%s" % (my.plugin_dir, path)
                if path in paths_read:
                    continue

                unique = my.xml.get_attribute(node, "unique")
                if unique == 'true':
                    unique = True
                else:
                    unique = False

                if my.verbose:
                    print "Reading: ", path

                # jobs doesn't matter for sobject node
                jobs = my.import_data(path, unique=unique)

                # compare sequence
                st_obj = SearchType.get(search_type)
                SearchType.sequence_nextval(search_type)
                cur_seq_id = SearchType.sequence_currval(search_type)

                sql = DbContainer.get("sthpw")
                if seq_max > 0 and seq_max > cur_seq_id:
                    # TODO: SQL Server - Reseed the sequences instead of passing.
                    if sql.get_database_type() == 'SQLServer':
                        pass
                    else:
                        SearchType.sequence_setval(search_type, seq_max)
                else:
                    cur_seq_id -= 1

                    # TODO: SQL Server - Reseed the sequences instead of passing.
                    if sql.get_database_type() == 'SQLServer':
                        pass
                    else:
                        # this is a db requirement
                        if cur_seq_id > 0:
                            SearchType.sequence_setval(search_type, cur_seq_id)

                paths_read.append(path)

            elif node_name == 'include':
                path = my.xml.get_attribute(node, "path")
                path = "%s/%s" % (my.plugin_dir, path)
                from tactic.command import PythonCmd
                cmd = PythonCmd(file_path=path)
                manifest = cmd.execute()

                if manifest:
                    xml = Xml()
                    xml.read_string(manifest)
                    nodes = xml.get_nodes("manifest/*")
                    my.import_manifest(nodes)
def execute(my):
    # get the pipeline
    pipeline = my.input.get("pipeline")
    process = my.input.get("process")
    sobject = my.input.get("sobject")

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()

    #print "action: ", process, node_type

    if node_type not in ["node", "manual", "approval"]:
        my.set_all_tasks(sobject, process, "in_progress")

    # get the node's triggers
    search = Search("config/process")
    search.add_filter("process", process)
    process_sobj = search.get_sobject()

    triggers = {}
    if process_sobj:
        triggers = process_sobj.get_json_value("workflow")
    if not triggers:
        triggers = {}

    process_obj = pipeline.get_process(process)
    node_type = process_obj.get_type()

    if node_type == "condition":
        my.handle_condition_node(sobject, pipeline, process, triggers)

    elif node_type == "action":
        action = triggers.get("on_action")
        cbjs_action = triggers.get("cbjs_action")
        action_path = triggers.get("on_action_path")

        kwargs, input = my.build_trigger_input()

        if action or action_path:
            from tactic.command import PythonCmd
            if action:
                cmd = PythonCmd(code=action, input=input, **kwargs)
            else:
                cmd = PythonCmd(script_path=action_path, input=input, **kwargs)
            ret_val = cmd.execute()

        elif cbjs_action:
            from tactic.command import JsCmd
            cmd = JsCmd(code=cbjs_action, input=input, **kwargs)
            ret_val = cmd.execute()

        else:
            # or call a trigger
            Trigger.call(my, "process|action", input, process=process_sobj.get_code())

        Trigger.call(my, "process|complete", my.input)
def handle_condition_node(my, sobject, pipeline, process, triggers):
    ret_val = my.run_callback(pipeline, process, "action")

    # if a None return value was given, then probably no condition exists
    # yet, so just let it flow through
    if ret_val == None:
        ret_val = True

    # run the completion trigger for this node
    Trigger.call(my, "process|complete", my.input)

    if ret_val == True:
        success_cbk = triggers.get("on_success")
        if success_cbk:
            cmd = PythonCmd(code=success_cbk, sobject=sobject)
            cmd.execute()
            return
        else:
            event = "process|pending"
            attr = "success"
            direction = "output"
            processes = pipeline.get_output_processes(process, from_attr=attr)
            if not processes:
                attr = None

    elif ret_val == False:
        fail_cbk = triggers.get("on_fail")
        if fail_cbk:
            cmd = PythonCmd(code=fail_cbk, sobject=sobject)
            cmd.execute()
            return
        else:
            event = "process|revise"

            # check to see if there is an output process
            attr = "fail"
            processes = pipeline.get_output_processes(process, from_attr=attr)
            if processes:
                direction = "output"
            else:
                direction = "input"
                attr = None

    else:
        event = "process|pending"

        if isinstance(ret_val, basestring):
            ret_val = [ret_val]

        output_processes = []
        for attr in ret_val:
            outputs = pipeline.get_output_processes(process, from_attr=attr)
            if outputs:
                output_processes.extend(outputs)

        # if there are no output attrs, then check the node names
        if not output_processes:
            outputs = pipeline.get_output_processes(process)
            for output in outputs:
                if output.get_name() in ret_val:
                    output_processes.append(output)

        for output_process in output_processes:
            output_process_name = output_process.get_name()
            output = {
                'sobject': sobject,
                'pipeline': pipeline,
                'process': output_process_name,
            }
            Trigger.call(my, event, output)

        return

    # by default, go back to incoming or outgoing
    if direction == "input":
        processes = pipeline.get_input_processes(process, to_attr=attr)
    else:
        processes = pipeline.get_output_processes(process, from_attr=attr)

    for process in processes:
        process_name = process.get_name()

        output = {
            'sobject': sobject,
            'pipeline': pipeline,
            'process': process_name,
        }
        Trigger.call(my, event, output)
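# handle_condition_node() above interprets the condition callback's return
# value: True routes to the "success" stream, False routes to the "fail"
# stream (or back to the input processes), and a string or list of strings
# selects named output streams or processes by name.  Below is a hypothetical
# on_success / on_fail pair as it might appear in a condition node's triggers;
# the bodies are placeholders, run as inline code via PythonCmd(code=...,
# sobject=sobject) per the function above.
triggers = {
    "on_success": "print 'condition passed'",
    "on_fail": "print 'condition failed, routing to revise'",
}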
def get_from_db_naming(self, protocol):
    project_code = Project.get_project_code()
    if project_code in ["admin", "sthpw"]:
        return None

    # get the naming object
    naming = Naming.get(self.sobject, self.snapshot)
    if not naming:
        return None

    if not self.verify_checkin_type(naming):
        return None

    if protocol == 'sandbox':
        mode = 'sandbox_dir'
    else:
        mode = 'dir'

    # Provide a mechanism for a custom class
    naming_class = naming.get_value("class_name", no_exception=True)
    #naming_class = "pyasm.biz.TestFileNaming"
    if naming_class:
        kwargs = {
            'sobject': self.sobject,
            'snapshot': self.snapshot,
            'file_object': self._file_object,
            #'ext': self.get_ext(),
            'file_type': self.file_type,
            'mode': mode
        }
        naming = Common.create_from_class_path(naming_class, [], kwargs)
        dirname = naming.get_dir()
        if dirname:
            return dirname

    # provide a mechanism for a custom client side script
    script_path = naming.get_value("script_path", no_exception=True)
    if script_path:
        project_code = self.sobject.get_project_code()
        input = {
            'sobject': self.sobject,
            'snapshot': self.snapshot,
            'file_object': self._file_object,
            #'ext': self.get_ext(),
            'file_type': self.file_type,
            'mode': mode,
            'project': project_code
        }
        from tactic.command import PythonCmd
        cmd = PythonCmd(script_path=script_path, input=input)
        results = cmd.execute()
        if results:
            return results

    naming_util = NamingUtil()
    naming_expr = ''
    if protocol == 'sandbox':
        naming_expr = naming.get_value("sandbox_dir_naming")
    if not naming_expr:
        naming_expr = naming.get_value("dir_naming")

    # so it can take the default
    if not naming_expr:
        return None

    file_type = self.get_file_type()

    alias = naming.get_value("base_dir_alias", no_exception=True)

    # build the dir name
    dir_name = naming_util.naming_to_dir(naming_expr, self.sobject, self.snapshot, file=self._file_object, file_type=file_type)

    return dir_name
def process_path(self, path, pattern, checkin_type):
    is_matched = self.process_pattern(path, pattern)
    if not is_matched:
        return

    status = self.kwargs.get("mode")

    action_type = self.kwargs.get("action_type")
    if action_type == 'ignore':
        print "ignoring: ", path
        (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
        self.data[path] = {
            'size': size,
            'ctime': ctime,
            'status': status
        }
        return

    search_key = self.kwargs.get("search_key")
    search_type = self.kwargs.get("search_type")

    # create an sobject to store data
    if search_key:
        self.sobject = Search.get_by_search_key(search_key)
        self.parent = self.sobject.get_parent()
    elif search_type:
        code = self.sobject_tags.get("code")
        id = self.sobject_tags.get("id")
        if code:
            self.sobject = Search.get_by_code(search_type, code)
        elif id:
            self.sobject = Search.get_by_id(search_type, id)
        else:
            self.sobject = None

        if not self.sobject:
            # create a new sobject
            self.sobject = SearchType.create(search_type)
        self.parent = None

    # create a snapshot to store snapshot data
    snapshot = SearchType.create("sthpw/snapshot")

    # extras
    keywords = []

    search_type_sobj = SearchType.get(search_type)

    # create the new sobject
    for name, value in self.sobject_tags.items():
        if search_type_sobj.column_exists(name):
            self.sobject.set_value(name, value)
            keywords.append(value)

    extra_names = self.kwargs.get("extra_name")
    extra_values = self.kwargs.get("extra_value")
    for name, value in zip(extra_names, extra_values):
        if not name:
            continue
        if search_type_sobj.column_exists(name):
            self.sobject.set_value(name, value)
            keywords.append(value)

    extra_keywords = self.kwargs.get("keywords")
    if extra_keywords:
        extra_keywords = extra_keywords.split(",")
        for k in extra_keywords:
            keywords.append(k)

    #has_keyword = True
    #if has_keyword:
    if search_type_sobj.column_exists('keywords'):
        keywords = " ".join(keywords)
        self.sobject.set_value("keywords", keywords)

    self.validation_script = self.kwargs.get("validation_script")
    if self.validation_script:
        from tactic.command import PythonCmd
        input = {
            "path": path,
            "sobject": self.sobject,
            "parent": self.parent,
            "snapshot": snapshot
        }
        validation_cmd = PythonCmd(script_path=self.validation_script, input=input, IngestException=IngestException)
        try:
            result = validation_cmd.execute()
        except IngestException as e:
            print e.message
            result = None
        except Exception as e:
            print "ERROR: ", e.message
            result = None

        if not result:
            self.paths_invalid.append(path)
            return

    # handle metadata for the files
    # FIXME: this should go directly onto the snapshot
    (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
    metadata = snapshot.get_value("metadata")
    if not metadata:
        metadata = {}
    else:
        metadata = jsonloads(metadata)
    metadata['size'] = size
    metadata['ctime'] = ctime
    metadata['mtime'] = mtime

    # add metadata if there is any
    self.sobject_tags['metadata'] = metadata

    mode = self.kwargs.get("mode")

    self.data[path] = {
        'size': size,
        'ctime': ctime,
        'status': mode
    }

    # skip if no action takes place
    if mode == 'scan':
        return

    # TODO: not sure if we need to make the distinction between
    # a process script and a validation script.  They basically
    # do the same thing???
    self.process_script = self.kwargs.get("process_script")
    if self.process_script:
        from tactic.command import PythonCmd
        input = {
            "path": path,
            "sobject": self.sobject,
            "parent": self.parent,
            "snapshot": snapshot
        }
        process_cmd = PythonCmd(script_path=self.process_script, input=input)
        result = process_cmd.execute()
        if not result:
            return

    #print "keywords: ", keywords

    #
    # action
    #

    # first commit the data that has changed to the sobject
    self.sobject.commit()

    # check in the files to the new sobject
    context = self.snapshot_tags.get("context")
    if not context:
        context = "publish"

    #checkin.execute()
    from tactic_client_lib import TacticServerStub
    server = TacticServerStub.get(protocol='local')

    if checkin_type == "directory":
        print "dir checkin: ", self.sobject_tags.get("code"), context, path
        server.directory_checkin(self.sobject.get_search_key(), context, path, mode='copy')
    elif checkin_type == "file":
        # use the client api
        snapshot = server.simple_checkin(self.sobject.get_search_key(), context, path, mode='copy', metadata=metadata)
def get_from_db_naming(my, search_type):
    project_code = Project.get_project_code()
    if project_code in ["admin", "sthpw"]:
        return ""

    file_type = my.get_file_type()
    filename = my.file_object.get_full_file_name()

    naming = Naming.get(my.sobject, my.snapshot, file_path=filename)
    if not naming:
        return None

    if naming and my.checkin_type:
        checkin_type = naming.get_value('checkin_type')
        if checkin_type and my.checkin_type != checkin_type:
            print "mismatched checkin_type!"
            naming = None
            return None

    naming_util = NamingUtil()

    # Provide a mechanism for a custom class
    naming_class = naming.get_value("class_name", no_exception=True)
    if naming_class:
        kwargs = {
            'sobject': my.sobject,
            'snapshot': my.snapshot,
            'file_object': my.file_object,
            'ext': my.get_ext(),
            'mode': 'file'
        }
        naming = Common.create_from_class_path(naming_class, kwargs)
        filename = naming.get_file()
        if filename:
            return filename

    # provide a mechanism for a custom client side script
    script_path = naming.get_value("script_path", no_exception=True)
    if script_path:
        project_code = my.sobject.get_project_code()
        input = {
            'sobject': my.sobject,
            'snapshot': my.snapshot,
            'file_object': my.file_object,
            'ext': my.get_ext(),
            'mode': 'file',
            'project': project_code
        }
        from tactic.command import PythonCmd
        cmd = PythonCmd(script_path=script_path, input=input)
        results = cmd.execute()
        if results:
            return results

    naming_value = naming.get_value("file_naming")
    if not naming_value:
        is_versionless = naming.get_value("latest_versionless") or naming.get_value("current_versionless")
        if not is_versionless:
            return ""

        # FIXME:
        # if this is a versionless naming, then empty uses a default
        # This is put here because the check-in type is determined by the
        # naming here.  Normally, this is passed through with "naming_expr"
        # but in snapshot.py, it is not yet known that this is an "auto"
        # checkin_type because it is defined in the naming and not the
        # process
        server = Config.get_value("install", "server")
        if server:
            naming_value = "{basefile}_{snapshot.process}_%s.{ext}" % server
        else:
            naming_value = "{basefile}_{snapshot.process}.{ext}"

    # check for manual_version
    manual_version = naming.get_value('manual_version')
    if manual_version == True:
        # if the file version is not the same as the snapshot version
        # then check to see if the snapshot already exists
        filename = my.file_object.get_full_file_name()
        version = my.get_version_from_file_name(filename)
        context = my.snapshot.get_context()
        if version > 0 and version != my.snapshot.get_value("version"):
            existing_snap = Snapshot.get_snapshot(
                my.snapshot.get_value("search_type"),
                my.snapshot.get_value("search_id"),
                context=context,
                version=version,
                show_retired=True)
            if existing_snap:
                raise TacticException('You have chosen manual version in Naming for this SObject. A snapshot with context "%s" and version "%s" already exists.' % (context, version))

            my.snapshot.set_value("version", version)
            my.snapshot.commit()

    file_type = my.get_file_type()
    return naming_util.naming_to_file(naming_value, my.sobject, my.snapshot, my.file_object, ext=my.get_ext(), file_type=file_type)
def execute(my):
    file_path = my.kwargs.get("path")
    project_code = my.kwargs.get("project_code")
    base_dir = my.kwargs.get("base_dir")
    search_type = my.kwargs.get("search_type")
    process = my.kwargs.get("process")
    watch_script_path = my.kwargs.get("script_path")
    if not process:
        process = "publish"

    basename = os.path.basename(file_path)

    context = my.kwargs.get("context")
    if not context:
        context = '%s/%s' % (process, basename)

    # find the relative_dir and relative_path
    relative_path = file_path.replace("%s/" % base_dir, "")
    relative_dir = os.path.dirname(relative_path)

    file_name = os.path.basename(file_path)
    log_path = '%s/TACTIC_log.txt' % (base_dir)
    my.create_checkin_log()

    # Define asset type of the file
    asset_type = my.get_asset_type(file_path)
    description = "drop folder check-in of %s" % file_name

    from client.tactic_client_lib import TacticServerStub
    server = TacticServerStub.get(protocol='local')
    server.set_project(project_code)

    transaction = Transaction.get(create=True)
    server.start(title='Check-in of media', description='Check-in of media')

    server_return_value = {}

    try:
        filters = [
            ['name', '=', file_name],
            #[ 'relative_dir', '=', relative_dir ]
        ]
        sobj = server.query(search_type, filters=filters, single=True)

        if not sobj:
            # create sobject if it does not yet exist
            sobj = SearchType.create(search_type)
            if SearchType.column_exists(search_type, "name"):
                sobj.set_value("name", basename)
            if SearchType.column_exists(search_type, "media_type"):
                sobj.set_value("media_type", asset_type)
            if SearchType.column_exists(search_type, "relative_dir"):
                sobj.set_value("relative_dir", relative_dir)
            if SearchType.column_exists(search_type, "keywords"):
                keywords = Common.get_keywords_from_path(relative_path)
                keywords = " ".join(keywords)
                sobj.set_value("keywords", keywords)
            sobj.commit()
            search_key = sobj.get_search_key()
        else:
            search_key = sobj.get("__search_key__")

        #task = server.create_task(sobj.get('__search_key__'), process='publish')
        #server.update(task, {'status': 'New'})

        server_return_value = server.simple_checkin(search_key, context, file_path, description=description, mode='move')

        if watch_script_path:
            cmd = PythonCmd(script_path=watch_script_path, search_type=search_type, drop_path=file_path, search_key=search_key)
            cmd.execute()

    except Exception, e:
        print "Error occurred", e
        error_message = str(e)

        import traceback
        tb = sys.exc_info()[2]
        stacktrace = traceback.format_tb(tb)
        stacktrace_str = "".join(stacktrace)
        print "-" * 50
        print stacktrace_str

        version_num = 'Error:'
        system_time = strftime("%Y/%m/%d %H:%M", gmtime())
        pre_log = file_name + (50-len(file_name))*' ' + system_time + (33-len(system_time))*' ' \
            + version_num + (15-len(version_num))*' ' + error_message + '\n' \
            + stacktrace_str + '\n' + watch_script_path

        # Write data into TACTIC_log file under /tmp/drop
        f = open(log_path, 'a')
        f.write(pre_log)
        f.close()

        #server.abort()
        transaction.rollback()
        raise
def execute(self):
    file_path = self.kwargs.get("path")
    site = self.kwargs.get("site")
    project_code = self.kwargs.get("project_code")
    base_dir = self.kwargs.get("base_dir")
    search_type = self.kwargs.get("search_type")
    process = self.kwargs.get("process")
    watch_script_path = self.kwargs.get("script_path")
    if not process:
        process = "publish"

    basename = os.path.basename(file_path)

    context = self.kwargs.get("context")
    if not context:
        context = '%s/%s' % (process, basename)

    # find the relative_dir and relative_path
    relative_path = file_path.replace("%s/" % base_dir, "")
    relative_dir = os.path.dirname(relative_path)

    file_name = os.path.basename(file_path)
    log_path = '%s/TACTIC_log.txt' % (base_dir)
    self.create_checkin_log()

    # Define asset type of the file
    asset_type = self.get_asset_type(file_path)
    description = "drop folder check-in of %s" % file_name

    from client.tactic_client_lib import TacticServerStub
    server = TacticServerStub.get(protocol='local')
    server.set_project(project_code)

    transaction = Transaction.get(create=True)
    server.start(title='Check-in of media', description='Check-in of media')

    server_return_value = {}

    try:
        filters = [
            ['name', '=', file_name],
            #[ 'relative_dir', '=', relative_dir ]
        ]
        sobj = server.query(search_type, filters=filters, single=True)

        if not sobj:
            # create sobject if it does not yet exist
            sobj = SearchType.create(search_type)
            if SearchType.column_exists(search_type, "name"):
                sobj.set_value("name", basename)
            if SearchType.column_exists(search_type, "media_type"):
                sobj.set_value("media_type", asset_type)
            if SearchType.column_exists(search_type, "relative_dir"):
                sobj.set_value("relative_dir", relative_dir)
            if SearchType.column_exists(search_type, "keywords"):
                keywords = Common.extract_keywords_from_path(relative_path)
                keywords = " ".join(keywords)
                sobj.set_value("keywords", keywords)
            sobj.commit()
            search_key = sobj.get_search_key()
        else:
            search_key = sobj.get("__search_key__")

        #task = server.create_task(sobj.get('__search_key__'), process='publish')
        #server.update(task, {'status': 'New'})

        """
        #TEST: simulate different check-in duration
        from random import randint
        sec = randint(1, 5)
        print "checking in for ", sec, "sec"
        server.eval("@SOBJECT(sthpw/login)")

        import shutil
        dir_name, base_name = os.path.split(file_path)
        dest_dir = 'C:/ProgramData/Southpaw/watch_temp'
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.move(file_path, '%s/%s' % (dest_dir, base_name))
        time.sleep(sec)
        # move back the file in a few seconds
        shutil.move('%s/%s' % (dest_dir, base_name), file_path)
        """

        server_return_value = server.simple_checkin(search_key, context, file_path, description=description, mode='move')

        if watch_script_path:
            cmd = PythonCmd(script_path=watch_script_path, search_type=search_type, drop_path=file_path, search_key=search_key)
            cmd.execute()

    except Exception as e:
        print "Error occurred", e
        error_message = str(e)

        import traceback
        tb = sys.exc_info()[2]
        stacktrace = traceback.format_tb(tb)
        stacktrace_str = "".join(stacktrace)
        print "-" * 50
        print stacktrace_str

        version_num = 'Error:'
        system_time = strftime("%Y/%m/%d %H:%M", gmtime())
        pre_log = file_name + (50-len(file_name))*' ' + system_time + (33-len(system_time))*' ' \
            + version_num + (15-len(version_num))*' ' + error_message + '\n' \
            + stacktrace_str + '\n' + watch_script_path

        # Write data into TACTIC_log file under /tmp/drop
        f = open(log_path, 'a')
        f.write(pre_log)
        f.close()

        #server.abort()
        transaction.rollback()
        raise

    else:
        transaction.commit()
        #server.finish()

        if server_return_value:
            # Create the TACTIC_log file
            # to record every check-in.
            # Search for all required data
            checkin_time = server_return_value.get('timestamp')
            version_nu = server_return_value.get('version')
            version_num = str(version_nu)
            try:
                value = parser.parse(checkin_time)
                value = value.strftime("%Y/%m/%d %H:%M")
            except:
                value = checkin_time

            pre_log = file_name + (50-len(file_name))*' ' + value + (33-len(value))*' ' \
                + version_num + (15-len(version_num))*' ' + 'ok\n'

            # Write data into TACTIC_log file under /tmp/drop
            f = open(log_path, 'a')
            f.write(pre_log)
            f.close()

            # Invoke Trigger: called_triggers
            from pyasm.command import Trigger
            Trigger.call_all_triggers()

            # Delete the source file after check-in step.
            print "File handled."
            if os.path.exists(file_path):
                if os.path.isdir(file_path):
                    # the dropped source may be a folder: remove the whole tree
                    import shutil
                    shutil.rmtree(file_path)
                else:
                    os.unlink(file_path)
                print "Source file [%s] deleted: " % file_name
        return

    # TODO: not sure if we need to make the distinction between
    # a process script and a validation script.  They basically
    # do the same thing???
    my.process_script = my.kwargs.get("process_script")
    if my.process_script:
        from tactic.command import PythonCmd
        input = {
            "path": path,
            "sobject": my.sobject,
            "parent": my.parent,
            "snapshot": snapshot
        }
        process_cmd = PythonCmd(script_path=my.process_script, input=input)
        result = process_cmd.execute()
        if not result:
            return

    #print "keywords: ", keywords

    #
    # action
    #
def process_path(my, path, pattern, checkin_type):
    is_matched = my.process_pattern(path, pattern)
    if not is_matched:
        return

    status = my.kwargs.get("mode")

    action_type = my.kwargs.get("action_type")
    if action_type == 'ignore':
        print "ignoring: ", path
        (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
        my.data[path] = {
            'size': size,
            'ctime': ctime,
            'status': status
        }
        return

    search_key = my.kwargs.get("search_key")
    search_type = my.kwargs.get("search_type")

    # create an sobject to store data
    if search_key:
        my.sobject = Search.get_by_search_key(search_key)
        my.parent = my.sobject.get_parent()
    elif search_type:
        code = my.sobject_tags.get("code")
        id = my.sobject_tags.get("id")
        if code:
            my.sobject = Search.get_by_code(search_type, code)
        elif id:
            my.sobject = Search.get_by_id(search_type, id)
        else:
            my.sobject = None

        if not my.sobject:
            # create a new sobject
            my.sobject = SearchType.create(search_type)
        my.parent = None

    # create a snapshot to store snapshot data
    snapshot = SearchType.create("sthpw/snapshot")

    # extras
    keywords = []

    search_type_sobj = SearchType.get(search_type)

    # create the new sobject
    for name, value in my.sobject_tags.items():
        if search_type_sobj.column_exists(name):
            my.sobject.set_value(name, value)
            keywords.append(value)

    extra_names = my.kwargs.get("extra_name")
    extra_values = my.kwargs.get("extra_value")
    for name, value in zip(extra_names, extra_values):
        if not name:
            continue
        if search_type_sobj.column_exists(name):
            my.sobject.set_value(name, value)
            keywords.append(value)

    extra_keywords = my.kwargs.get("keywords")
    if extra_keywords:
        extra_keywords = extra_keywords.split(",")
        for k in extra_keywords:
            keywords.append(k)

    #has_keyword = True
    #if has_keyword:
    if search_type_sobj.column_exists('keywords'):
        keywords = " ".join(keywords)
        my.sobject.set_value("keywords", keywords)

    my.validation_script = my.kwargs.get("validation_script")
    if my.validation_script:
        from tactic.command import PythonCmd
        input = {
            "path": path,
            "sobject": my.sobject,
            "parent": my.parent,
            "snapshot": snapshot
        }
        validation_cmd = PythonCmd(script_path=my.validation_script, input=input, IngestException=IngestException)
        try:
            result = validation_cmd.execute()
        except IngestException, e:
            print e.message
            result = None
        except Exception, e:
            print "ERROR: ", e.message
            result = None
def get_from_db_naming(my, protocol):
    project_code = Project.get_project_code()
    if project_code in ["admin", "sthpw"]:
        return None

    # get the naming object
    naming = Naming.get(my.sobject, my.snapshot)
    if not naming:
        return None

    if protocol == 'sandbox':
        mode = 'sandbox_dir'
    else:
        mode = 'dir'

    # Provide a mechanism for a custom class
    naming_class = naming.get_value("class_name", no_exception=True)
    #naming_class = "pyasm.biz.TestFileNaming"
    if naming_class:
        kwargs = {
            'sobject': my.sobject,
            'snapshot': my.snapshot,
            'file_object': my._file_object,
            #'ext': my.get_ext(),
            'file_type': my.file_type,
            'mode': mode
        }
        naming = Common.create_from_class_path(naming_class, [], kwargs)
        dirname = naming.get_dir()
        if dirname:
            return dirname

    # provide a mechanism for a custom client side script
    script_path = naming.get_value("script_path", no_exception=True)
    if script_path:
        project_code = my.sobject.get_project_code()
        input = {
            'sobject': my.sobject,
            'snapshot': my.snapshot,
            'file_object': my._file_object,
            #'ext': my.get_ext(),
            'file_type': my.file_type,
            'mode': mode,
            'project': project_code
        }
        from tactic.command import PythonCmd
        cmd = PythonCmd(script_path=script_path, input=input)
        results = cmd.execute()
        if results:
            return results

    naming_util = NamingUtil()
    naming_expr = ''
    if protocol == 'sandbox':
        naming_expr = naming.get_value("sandbox_dir_naming")
    if not naming_expr:
        naming_expr = naming.get_value("dir_naming")

    # so it can take the default
    if not naming_expr:
        return None

    file_type = my.get_file_type()

    # build the dir name
    dir_name = naming_util.naming_to_dir(naming_expr, my.sobject, my.snapshot, file=my._file_object, file_type=file_type)

    return dir_name