def _test_csv_export(self):
    """Exercise CsvExportWdg in 'export_matched' and 'export_displayed'
    modes against a remote TACTIC server, verifying the returned
    columns, generated SQL and row count."""
    from tactic.ui.widget import CsvExportWdg
    from pyasm.common import jsondumps, jsonloads

    view = 'table'
    search_type = 'sthpw/task'
    search_view = 'auto_search:table'
    #search_view = ''
    simple_search_view = 'simple_search'
    search_class = ''
    mode = 'export_matched'
    element_name = 'project_tasks'
    filter = [
        {"prefix": "main_body", "main_body_enabled": "on",
         "main_body_column": "project_code",
         "main_body_relation": "is", "main_body_value": "{$PROJECT}"},
        {"prefix": "main_body", "main_body_enabled": "on",
         "main_body_column": "search_type",
         "main_body_relation": "is not", "main_body_value": "sthpw/project"},
    ]
    values = {'json': jsondumps(filter)}
    element_names = ['code', 'id', 'description']

    server = TacticServerStub(protocol='xmlrpc')
    current_project = 'vfx'
    server.set_project(current_project)

    rtn = server.get_widget('tactic.ui.widget.CsvExportWdg',
        args={'search_type': search_type, 'view': view,
              'filter': filter, 'element_name': element_name,
              'show_search_limit': 'false', 'search_limit': -1,
              'search_view': search_view,
              'element_names': element_names, 'mode': mode,
              'search_class': search_class,
              'simple_search_view': simple_search_view,
              'init_load_num': -1, 'test': True},
        values=values)

    expected_columns = ['code', 'id', 'description']
    expected_sql = '''SELECT "sthpw"."public"."task".* FROM "sthpw"."public"."task" WHERE ( "task"."project_code" = \'%s\' AND ( "task"."search_type" != \'sthpw/project\' OR "task"."search_type" is NULL ) ) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) ORDER BY "task"."search_type", "task"."search_code"''' % current_project

    # BUG FIX: the closing quote after sthpw/project was missing,
    # leaving the expression filter malformed.
    expr = "@COUNT(sthpw/task['project_code','%s']['search_type','!=','sthpw/project'])" % current_project
    expected_count = Search.eval(expr, single=True)

    rtn = jsonloads(rtn)
    self.assertEquals(expected_columns, rtn.get('columns'))
    self.assertEquals(expected_sql, rtn.get('sql'))
    self.assertEquals(expected_count, rtn.get('count'))

    # second pass: export only the explicitly selected rows
    mode = 'export_displayed'
    selected_search_keys = ['sthpw/task?id=4385',
                            'sthpw/task?id=4386',
                            'sthpw/task?id=4387']
    rtn = server.get_widget('tactic.ui.widget.CsvExportWdg',
        args={'search_type': search_type, 'view': view,
              'filter': filter, 'element_name': element_name,
              'show_search_limit': 'false', 'search_limit': -1,
              'search_view': search_view,
              'element_names': element_names, 'mode': mode,
              'search_class': search_class,
              'simple_search_view': simple_search_view,
              'init_load_num': -1, 'test': True,
              'selected_search_keys': selected_search_keys},
        values=values)
    expected_count = 3
    rtn = jsonloads(rtn)
    self.assertEquals(expected_columns, rtn.get('columns'))
    self.assertEquals(expected_count, rtn.get('count'))
def __init__(my, data=[]):
    # Initialize my.data from *data*: a ready-made value (used as-is
    # when non-empty) or a JSON string, decoded through a per-request
    # cache kept in Container.
    # NOTE(review): the [] default is a shared mutable default; it is
    # never mutated here, but None would be the safer idiom.
    if not data:
        my.data = []
    elif type(data) in types.StringTypes:
        try:
            # optimize the loading of json data: cache decoded strings
            # for the duration of the request
            json_data = Container.get("json_data")
            if json_data == None:
                json_data = {}
                Container.put("json_data", json_data)
            my.data = json_data.get(data)
            if my.data == None:
                my.data = jsonloads(data)
                json_data[data] = my.data
        except ValueError, e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException('Data is not decodable as JSON.')
            # try a straight eval
            # SECURITY NOTE(review): eval() of the raw string -- only
            # safe if *data* never comes from an untrusted source;
            # confirm with callers.
            my.data = eval(data)
        except Exception, e:
            # NOTE(review): any other exception whose message does not
            # match is silently swallowed, leaving my.data unset --
            # verify this is intentional.
            if e.__str__().find('cannot parse JSON description') != -1:
                raise SetupException('Data is not valid JSON.')
def get_display(my):
    """Build the top widget: a bordered, colored box whose size and
    optional extra CSS are taken from the kwargs."""
    top = my.top
    # top.add_border()

    width = my.kwargs.get("width") or "50px"
    height = my.kwargs.get("height") or "50px"

    extra_class = my.kwargs.get("class")
    if extra_class:
        top.add_class(extra_class)

    top.add_style("width: 100%")
    top.add_border()
    top.add_color("background", "background3")

    font_size = my.kwargs.get("font-size")
    if font_size:
        top.add_style("font-size: %s" % font_size)

    # per-widget CSS overrides arrive as a JSON object of
    # style-name -> value pairs
    css = my.kwargs.get("css")
    if css:
        for style_name, style_value in jsonloads(css).items():
            top.add_style(style_name, style_value)

    top.add_style("height: %s" % height)
    top.add_style("width: %s" % width)
    return top
def get_files(my):
    """Collect the file paths belonging to the sobject(s) named by the
    'search_key' / 'search_keys' kwargs."""
    paths = []

    # remember these here for now
    my.files = {}
    my.snapshots = {}

    single_key = my.kwargs.get("search_key")
    multi_keys = my.kwargs.get("search_keys")

    if single_key:
        my.sobjects = [SearchKey.get_by_search_key(single_key)]

    if multi_keys:
        if isinstance(multi_keys, basestring):
            # keys may arrive as a JSON-ish string using single quotes
            multi_keys = jsonloads(multi_keys.replace("'", '"'))
        my.sobjects = Search.get_by_search_keys(multi_keys)

    if not my.sobjects:
        return []

    my.sobject = my.sobjects[0]

    for sobject in my.sobjects:
        paths.extend(my.get_sobject_files(sobject))

    return paths
def run(code, kwargs):
    """Run *code* through the external js_cmd.py helper process and
    return its JSON-decoded result."""
    import subprocess

    script = '%s/src/tactic/command/js_cmd.py' % tacticenv.get_install_dir()
    interpreter = Config.get_value("services", "python")

    proc = subprocess.Popen(
        [interpreter, script, jsondumps(code), jsondumps(kwargs)],
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    out, err = proc.communicate()

    # the helper prints a marker line of 20 tildes; everything after
    # that marker is the JSON payload
    marker_seen = False
    payload = []
    for line in out.split("\n"):
        if line.startswith("~" * 20):
            marker_seen = True
            continue
        if marker_seen:
            payload.append(line)

    return jsonloads("\n".join(payload))
def __init__(self, data=[]):
    """Initialize self.data from *data*.

    *data* may be a non-string value (empty values normalize to [])
    or a JSON string, which is decoded through a per-request cache in
    Container; a non-JSON string falls back to eval().
    """
    # FIX: the original mixed the removed-in-Python-3 form
    # "except ValueError, e" with "except Exception as e" in the same
    # function; both handlers now use the "as" form consistently.
    if not data:
        self.data = []
    elif type(data) in types.StringTypes:
        try:
            # optimize the loading of json data: cache decoded strings
            # for the duration of the request
            json_data = Container.get("json_data")
            if json_data == None:
                json_data = {}
                Container.put("json_data", json_data)
            self.data = json_data.get(data)
            if self.data == None:
                self.data = jsonloads(data)
                json_data[data] = self.data
        except ValueError as e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException('Data is not decodable as JSON.')
            # try a straight eval
            # SECURITY: eval() of the raw string -- only safe for
            # trusted configuration data
            self.data = eval(data)
        except Exception as e:
            if e.__str__().find('cannot parse JSON description') != -1:
                raise SetupException('Data is not valid JSON.')
def send_request(self, url, headers, data=None):
    """Send an HTTP request with the current login ticket attached.

    POST requests carry the ticket in the form body; other methods
    append it to the query string.  The response body is returned,
    JSON-decoded when the Accept header is application/json.
    """
    # BUG FIX: *data* defaulted to a shared mutable dict {}; writing
    # login_ticket into it leaked state across calls.
    if data is None:
        data = {}

    ticket = Environment.get_ticket()
    method = headers.get("Method")
    if method == 'POST':
        data['login_ticket'] = ticket
        import urllib
        data = urllib.urlencode(data)
        request = urllib2.Request(url, data)
    else:
        url = "%s?login_ticket=%s" % (url, ticket)
        print("url: ", url)
        request = urllib2.Request(url)

    for key, value in headers.items():
        request.add_header(key, value)

    try:
        response = urllib2.urlopen(request)
    except Exception as e:
        # try again once on a transient failure
        print("WARNING: ", e)
        response = urllib2.urlopen(request)
    #print(response.info().headers)

    value = response.read()

    accept = headers.get("Accept")
    if accept == "application/json":
        value = jsonloads(value)
    return value
def execute(my):
    # Rebuild the plugin manifest's <sobject> entries from the list of
    # search keys submitted in the web form, then commit the plugin.
    plugin = my.sobject
    web = WebContainer.get_web()
    value = web.get_form_value( my.get_input_name() )
    if not value:
        return
    src_search_keys = jsonloads(value)
    manifest = plugin.get_xml_value("manifest")
    top_node = manifest.get_node("manifest")
    for search_key in src_search_keys:
        sobject = SearchKey.get_by_search_key(search_key)
        node = manifest.create_element("sobject")
        # For now, a plugin must contain project-specific entries,
        # identified by base search type + code (not by search key)
        search_type = sobject.get_base_search_type()
        code = sobject.get_value("code")
        manifest.set_attribute(node, "search_type", search_type)
        manifest.set_attribute(node, "code", code)
        #search_key = SearchKey.get_by_sobject(sobject)
        #manifest.set_attribute(node, "search_key", search_key)
        manifest.append_child(top_node, node)
    plugin.set_value("manifest", manifest.to_string() )
    plugin.commit()
def run(code, kwargs):
    """Execute *code* via the js_cmd.py helper in a child process and
    return the JSON result printed after the tilde marker line."""
    import subprocess

    helper = '%s/src/tactic/command/js_cmd.py' % tacticenv.get_install_dir()
    python_bin = Config.get_value("services", "python")
    argv = [python_bin, helper, jsondumps(code), jsondumps(kwargs)]

    child = subprocess.Popen(argv, shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    stdout_text, stderr_text = child.communicate()

    # skip everything up to (and including) the 20-tilde marker line;
    # the remainder of stdout is the JSON payload
    collected = []
    in_payload = False
    for raw_line in stdout_text.split("\n"):
        if raw_line.startswith("~" * 20):
            in_payload = True
        elif in_payload:
            collected.append(raw_line)

    return jsonloads("\n".join(collected))
def get_input_state(self, sobject, process):
    """Return the decoded state dict stored for the first input process
    feeding *process* in this sobject's pipeline, or {} when there is
    no input process or no stored message."""
    from tactic_client_lib import TacticServerStub
    from pyasm.common import jsonloads

    pipeline = Pipeline.get_by_sobject(sobject)
    upstream = pipeline.get_input_processes(process)
    if not upstream:
        return {}

    # only the first upstream process is consulted for now
    state_key = self.get_state_key(sobject, upstream[0].get_name())

    server = TacticServerStub.get()
    payload = server.get_message(state_key).get("message")
    if not payload:
        return {}
    return jsonloads(payload)
def get_display(self):
    """Assemble and return the top widget: a bordered, colored box
    sized and styled from the kwargs."""
    top = self.top
    #top.add_border()

    requested_width = self.kwargs.get("width")
    requested_height = self.kwargs.get("height")
    width = requested_width if requested_width else '50px'
    height = requested_height if requested_height else '50px'

    css_class = self.kwargs.get("class")
    if css_class:
        top.add_class(css_class)

    top.add_style("width: 100%")
    top.add_border()
    top.add_color("background", "background3")

    font_size = self.kwargs.get("font-size")
    if font_size:
        top.add_style("font-size: %s" % font_size)

    # optional JSON dict of extra style-name -> value pairs
    css = self.kwargs.get("css")
    if css:
        decoded = jsonloads(css)
        for style_name, style_value in decoded.items():
            top.add_style(style_name, style_value)

    top.add_style("height: %s" % height)
    top.add_style("width: %s" % width)
    return top
def _test_csv_export(self):
    """Exercise CsvExportWdg ('export_matched' then 'export_displayed')
    against a remote server and verify columns, SQL and count."""
    from tactic.ui.widget import CsvExportWdg
    from pyasm.common import jsondumps, jsonloads

    view = 'table'
    search_type = 'sthpw/task'
    search_view = 'auto_search:table'
    #search_view = ''
    simple_search_view = 'simple_search'
    search_class = ''
    mode = 'export_matched'
    element_name = 'project_tasks'
    filter = [
        {"prefix": "main_body", "main_body_enabled": "on",
         "main_body_column": "project_code",
         "main_body_relation": "is", "main_body_value": "{$PROJECT}"},
        {"prefix": "main_body", "main_body_enabled": "on",
         "main_body_column": "search_type",
         "main_body_relation": "is not", "main_body_value": "sthpw/project"},
    ]
    values = {'json': jsondumps(filter)}
    element_names = ['code', 'id', 'description']

    server = TacticServerStub(protocol='xmlrpc')
    current_project = 'vfx'
    server.set_project(current_project)

    rtn = server.get_widget('tactic.ui.widget.CsvExportWdg',
        args={'search_type': search_type, 'view': view,
              'filter': filter, 'element_name': element_name,
              'show_search_limit': 'false', 'search_limit': -1,
              'search_view': search_view,
              'element_names': element_names, 'mode': mode,
              'search_class': search_class,
              'simple_search_view': simple_search_view,
              'init_load_num': -1, 'test': True},
        values=values)

    expected_columns = ['code', 'id', 'description']
    expected_sql = '''SELECT "sthpw"."public"."task".* FROM "sthpw"."public"."task" WHERE ( "task"."project_code" = \'%s\' AND ( "task"."search_type" != \'sthpw/project\' OR "task"."search_type" is NULL ) ) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) AND ("task"."s_status" != \'retired\' or "task"."s_status" is NULL) ORDER BY "task"."search_type", "task"."search_code"''' % current_project

    # BUG FIX: the closing quote after sthpw/project was missing in the
    # expression, making the filter malformed.
    expr = "@COUNT(sthpw/task['project_code','%s']['search_type','!=','sthpw/project'])" % current_project
    expected_count = Search.eval(expr, single=True)

    rtn = jsonloads(rtn)
    self.assertEquals(expected_columns, rtn.get('columns'))
    self.assertEquals(expected_sql, rtn.get('sql'))
    self.assertEquals(expected_count, rtn.get('count'))

    # second pass: export only the explicitly selected rows
    mode = 'export_displayed'
    selected_search_keys = ['sthpw/task?id=4385',
                            'sthpw/task?id=4386',
                            'sthpw/task?id=4387']
    rtn = server.get_widget('tactic.ui.widget.CsvExportWdg',
        args={'search_type': search_type, 'view': view,
              'filter': filter, 'element_name': element_name,
              'show_search_limit': 'false', 'search_limit': -1,
              'search_view': search_view,
              'element_names': element_names, 'mode': mode,
              'search_class': search_class,
              'simple_search_view': simple_search_view,
              'init_load_num': -1, 'test': True,
              'selected_search_keys': selected_search_keys},
        values=values)
    expected_count = 3
    rtn = jsonloads(rtn)
    self.assertEquals(expected_columns, rtn.get('columns'))
    self.assertEquals(expected_count, rtn.get('count'))
def add_data(self, data):
    '''add data dictionary.'''
    # Accepts a JSON string; a non-JSON string falls back to eval().
    if type(data) in types.StringTypes:
        try:
            data = jsonloads(data)
        except ValueError, e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException('Data is not decodable as JSON.')
            # try a straight eval
            # SECURITY NOTE(review): eval() of the raw string -- only
            # safe for trusted data; confirm callers.
            data = eval(data)
    # NOTE(review): the decoded value is only rebound to the local
    # *data*; nothing stores it on self here -- looks incomplete
    # compared with set_data. Verify against the full method.
class RestTest(unittest.TestCase): def test_all(my): test_env = UnittestEnvironment() test_env.create() try: my._setup() print print print my._test_accept() my._test_method() my._test_custom_handler() print print print finally: test_env.delete() def send_request(my, url, headers, data={}): ticket = Environment.get_ticket() method = headers.get("Method") if method == 'POST': data['login_ticket'] = ticket import urllib data = urllib.urlencode(data) request = urllib2.Request(url, data) else: url = "%s?login_ticket=%s" % (url, ticket) request = urllib2.Request(url) for key, value in headers.items(): request.add_header(key, value) try: response = urllib2.urlopen(request) except Exception, e: # try again print "WARNING: ", e response = urllib2.urlopen(request) #print response.info().headers value = response.read() accept = headers.get("Accept") if accept == "application/json": value = jsonloads(value) return value
def add_data(my, data):
    '''add data dictionary.'''
    # Accepts a JSON string; a non-JSON string falls back to eval().
    if type(data) in types.StringTypes:
        try:
            data = jsonloads(data)
        except ValueError, e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException('Data is not decodable as JSON.')
            # try a straight eval
            # SECURITY NOTE(review): eval() of the raw string -- only
            # safe for trusted data; confirm callers.
            data = eval(data)
    # NOTE(review): the decoded value is only rebound to the local
    # *data*; nothing stores it here -- verify the remainder of this
    # method elsewhere (compare set_data).
def preprocess(self):
    """Decode the 'options' option (a JSON list of group dicts) into
    self.group_list, falling back to an error placeholder on bad
    JSON, then defer to the parent preprocess."""
    self.options = self.get_option('options')
    if self.options:
        try:
            self.group_list = jsonloads(self.options)
        # FIX: narrowed the former bare "except:" so KeyboardInterrupt
        # and SystemExit are no longer swallowed
        except Exception:
            self.group_list = [{'label': 'Syntax Error', 'context': []}]
    else:
        self.group_list = [{'label': 'default', 'context': []}]
    super(TaskGroupCompletionWdg, self).preprocess()
def preprocess(my):
    """Decode the 'options' option (a JSON list of group dicts) into
    my.group_list, falling back to an error placeholder on bad JSON,
    then defer to the parent preprocess."""
    my.options = my.get_option('options')
    if my.options:
        try:
            my.group_list = jsonloads(my.options)
        # FIX: narrowed the former bare "except:" so KeyboardInterrupt
        # and SystemExit are no longer swallowed
        except Exception:
            my.group_list = [{'label': 'Syntax Error', 'context': []}]
    else:
        my.group_list = [{'label': 'default', 'context': []}]
    super(TaskGroupCompletionWdg, my).preprocess()
def preprocess(my):
    """Parse the widget's 'options' (JSON list of group definitions)
    into my.group_list; invalid JSON yields a 'Syntax Error'
    placeholder and no options yields a single default group."""
    my.options = my.get_option('options')
    if my.options:
        try:
            my.group_list = jsonloads(my.options)
        # FIX: bare "except:" narrowed to Exception so interrupts
        # propagate
        except Exception:
            my.group_list = [{'label': 'Syntax Error', 'context': []}]
    else:
        my.group_list = [{'label': 'default', 'context': []}]
    super(TaskGroupCompletionWdg, my).preprocess()
def execute(self):
    """Synchronize instance connections of self.sobject with the
    search keys submitted in the web form: instances that were not
    resubmitted are deleted, and new sobjects are connected."""
    web = WebContainer.get_web()
    value = web.get_form_value( self.get_input_name() )
    if not value:
        value = self.get_data()
    if not value:
        return

    src_search_keys = jsonloads(value)

    instance_type = self.get_option("instance_type")
    # path is used for self-relating in an instance table
    src_path = self.get_option("path")

    # split the submitted keys into existing instances and plain
    # sobjects that need a new connection
    src_sobjects = []
    src_instances = []
    for src_search_key in src_search_keys:
        src_sobject = SearchKey.get_by_search_key(src_search_key)
        if src_sobject.get_base_search_type() == instance_type:
            src_instances.append(src_sobject)
        else:
            src_sobjects.append(src_sobject)

    dst_sobject = self.sobject

    # delete current instances that were not resubmitted
    # PERF FIX: use a set of search keys instead of the former
    # O(n^2) nested scan
    kept_keys = set([x.get_search_key() for x in src_instances])
    instances = dst_sobject.get_related_sobjects(instance_type)
    for instance in instances:
        if instance.get_search_key() not in kept_keys:
            instance.delete()

    # add all the new sobjects
    for src_sobject in src_sobjects:
        instance = SearchType.create(instance_type)
        instance.add_related_connection(src_sobject, dst_sobject, src_path=src_path)
        instance.commit()
def execute(my):
    # Synchronize instance connections of my.sobject with the search
    # keys submitted in the web form: removed instances are deleted
    # and new sobjects are connected.
    web = WebContainer.get_web()
    value = web.get_form_value( my.get_input_name() )
    if not value:
        value = my.get_data()
    if not value:
        return
    src_search_keys = jsonloads(value)
    #print "xxx: ", type(src_search_keys), src_search_keys
    # get all fo the sobjects from the search keys
    #src_sobjects = SearchKey.get_by_search_keys(src_search_keys)
    instance_type = my.get_option("instance_type")
    # path is used for self-relating in an instance table
    src_path = my.get_option("path")
    # split submitted keys into existing instances vs new sobjects
    src_sobjects = []
    src_instances = []
    for src_search_key in src_search_keys:
        src_sobject = SearchKey.get_by_search_key(src_search_key)
        if src_sobject.get_base_search_type() == instance_type:
            src_instances.append(src_sobject)
        else:
            src_sobjects.append(src_sobject)
    dst_sobject = my.sobject
    # get all of the current instances and see if any were removed
    instances = dst_sobject.get_related_sobjects(instance_type)
    for instance in instances:
        exists = False
        for src_instance in src_instances:
            if src_instance.get_search_key() == instance.get_search_key():
                exists = True
        if not exists:
            instance.delete()
    # add all the new sobjects
    for src_sobject in src_sobjects:
        instance = SearchType.create(instance_type)
        instance.add_related_connection(src_sobject, dst_sobject, src_path=src_path)
        instance.commit()
def execute(self, func_name, args=[], kwargs={}): server = TacticServerStub.get() if args: args = jsonloads(args) if kwargs: kwargs = jsonloads(kwargs) if kwargs: # Quirk ... when there is a kwargs, the last args is the kwargs if args: args.pop() call = "server.%s(*args, **kwargs)" % func_name else: call = "server.%s(*args)" % func_name try: ret_val = eval(call) except Exception, e: print "ERROR: ", e raise
def execute_func(self, js, kwargs={}):
    """Run *js* as the body of a JS function and return the function's
    JSON-decoded return value."""
    # wrap the snippet so its return value can be stringified
    wrapped = '''
var func = function() {
%s
}
var ret_val = func();
ret_val = JSON.stringify(ret_val);
''' % js
    result = self.execute(wrapped, kwargs)
    return jsonloads(result)
def execute(my, func_name, args=[], kwargs={}):
    # Proxy a client API call: decode the JSON-encoded args/kwargs and
    # invoke the named method on the server stub.
    # NOTE(review): [] and {} are shared mutable defaults; args.pop()
    # below only mutates the freshly decoded list, never the default.
    server = TacticServerStub.get()
    if args:
        args = jsonloads(args)
    if kwargs:
        kwargs = jsonloads(kwargs)
    if kwargs:
        # Quirk ... when there is a kwargs, the last args is the kwargs
        if args:
            args.pop()
        call = "server.%s(*args, **kwargs)" % func_name
    else:
        call = "server.%s(*args)" % func_name
    try:
        # SECURITY NOTE(review): eval() of a constructed call string;
        # func_name is not validated here -- confirm it is trusted.
        ret_val = eval(call)
    except Exception, e:
        print "ERROR: ", e
        raise
def execute(self):
    # Trigger: stamp the configured timestamp column ('column') with
    # the current time on the affected task(s).
    trigger_sobj = self.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    #data = """
    #{ "columns": [column1, column2]
    #"""
    data = jsonloads(data)
    column = data.get('column')
    src_status = data.get('src_status')

    item = self.get_caller()
    if isinstance(item, SObject):
        if isinstance(item, Task):
            # only stamp when the task status matches the configured
            # source status (if one was configured)
            if src_status != None:
                if item.get_value("status") != src_status:
                    return
            item.set_now(column)
            item.commit()
        #Item can be a note when trigger input is adding or modifying notes
        else:
            process = item.get_value('process')
            expr = '@SOBJECT(parent.sthpw/task["process","%s"])'%process
            tasks = Search.eval(expr, sobjects=[item])
            if tasks:
                for task in tasks:
                    task.set_now(column)
                    task.commit()
    #item can be a command such as check-in
    else:
        if hasattr(item, 'process'):
            process = item.process
            expr = '@SOBJECT(sthpw/task["process","%s"])'%process
            tasks = Search.eval(expr, sobjects=[item.sobject])
            if tasks:
                for task in tasks:
                    task.set_now(column)
                    task.commit()
def loads(my, search_type, sobjects_str):
    """Deserialize a JSON list of attribute dicts into newly created
    sobjects of *search_type*; None values are skipped."""
    records = jsonloads(sobjects_str)
    sobjects = []
    for record in records:
        sobject = SearchType.create(search_type)
        for column, column_value in record.items():
            if column_value == None:
                continue
            sobject.set_value(column, column_value)
        sobjects.append(sobject)
    return sobjects
def loads(self, search_type, sobjects_str):
    """Build sobjects of *search_type* from a JSON-encoded list of
    attribute dictionaries, ignoring null attribute values."""
    decoded = jsonloads(sobjects_str)
    result = []
    for attrs in decoded:
        new_sobject = SearchType.create(search_type)
        for attr_name, attr_value in attrs.items():
            if attr_value == None:
                continue
            new_sobject.set_value(attr_name, attr_value)
        result.append(new_sobject)
    return result
def execute(self):
    """Trigger: propagate the configured 'dst_status' to the caller's
    parent sobject (no-op when there is no parent)."""
    trigger_sobj = self.get_trigger_sobj()
    config = jsonloads(trigger_sobj.get_value("data"))
    dst_status = config.get('dst_status')

    parent = self.get_caller().get_parent()
    if not parent:
        return

    parent.set_value("status", dst_status)
    parent.commit()
def execute(self):
    """Trigger: write the current time into the configured column on
    the task(s) related to the calling item.

    The caller may be a Task (stamped directly), another SObject such
    as a note (its parent's tasks are stamped), or a command object
    carrying a 'process' attribute.
    """
    trigger_sobj = self.get_trigger_sobj()
    raw = trigger_sobj.get_value("data")
    #data = """
    #{ "columns": [column1, column2]
    #"""
    settings = jsonloads(raw)
    column = settings.get('column')
    src_status = settings.get('src_status')

    item = self.get_caller()

    if not isinstance(item, SObject):
        #item can be a command such as check-in
        if hasattr(item, 'process'):
            expr = '@SOBJECT(sthpw/task["process","%s"])' % item.process
            for task in Search.eval(expr, sobjects=[item.sobject]) or []:
                task.set_now(column)
                task.commit()
        return

    if isinstance(item, Task):
        # skip unless the task is in the configured source status
        if src_status != None and item.get_value("status") != src_status:
            return
        item.set_now(column)
        item.commit()
        return

    #Item can be a note when trigger input is adding or modifying notes
    expr = '@SOBJECT(parent.sthpw/task["process","%s"])' % item.get_value('process')
    for task in Search.eval(expr, sobjects=[item]) or []:
        task.set_now(column)
        task.commit()
def set_data(self, data):
    '''add data dictionary or a JSON string'''
    self.data = []
    # protect against empty spaces/lines from xml
    if isinstance(data, basestring):
        data = data.strip()
        if not data:
            return
    if isinstance(data, basestring):
        try:
            # normalize single quotes so Python-dict-style strings can
            # be decoded as JSON
            # NOTE(review): this also rewrites apostrophes *inside*
            # values -- confirm data never contains literal quotes
            data = data.replace("'", '"')
            data = jsonloads(data)
        except ValueError, e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException('Data is not decodable as JSON. [%s]'%data)
            # try a straight eval
            # SECURITY NOTE(review): eval() fallback -- trusted input only
            data = eval(data)
    # NOTE(review): the decoded value is left in the local *data*;
    # self.data is not updated in this visible span -- verify the
    # remainder of the method.
def execute(my):
    """Trigger: stamp the configured timestamp column on the calling
    task when its status matches the configured source status."""
    trigger_sobj = my.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    #data = """
    #{ "columns": [column1, column2]
    #"""
    data = jsonloads(data)
    column = data.get('column')
    src_status = data.get('src_status')

    task = my.get_caller()
    # FIX: match the newer set-now triggers -- only filter on status
    # when src_status is actually configured.  Previously a missing
    # src_status (None) made every task fail the comparison, so the
    # trigger never fired.
    if src_status != None:
        if task.get_value("status") != src_status:
            return
    task.set_now(column)
    task.commit()
def set_data(my, data):
    '''add data dictionary or a JSON string'''
    my.data = []
    # protect against empty spaces/lines from xml
    if isinstance(data, basestring):
        data = data.strip()
        if not data:
            return
    if isinstance(data, basestring):
        try:
            # normalize single quotes so Python-dict-style strings can
            # be decoded as JSON
            # NOTE(review): this also rewrites apostrophes *inside*
            # values -- confirm data never contains literal quotes
            data = data.replace("'", '"')
            data = jsonloads(data)
        except ValueError, e:
            if e.__str__().find('No JSON object') != -1:
                raise SetupException( 'Data is not decodable as JSON. [%s]' % data)
            # try a straight eval
            # SECURITY NOTE(review): eval() fallback -- trusted input only
            data = eval(data)
    # NOTE(review): the decoded value is left in the local *data*;
    # my.data is not updated in this visible span -- verify the
    # remainder of the method.
def execute(self):
    """Trigger: when a task reaches the configured src_status, create
    the configured 'output' tasks on its parent, skipping processes
    that already have a task."""
    input = self.get_input()
    search_key = input.get("search_key")
    task = Search.get_by_search_key(search_key)
    parent = task.get_parent()
    if not parent:
        raise TacticException("Task parent not found.")

    # get the definition of the trigger
    trigger_sobj = self.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    try:
        data = jsonloads(data)
    # FIX: narrowed the former bare "except:" so interrupts are not
    # swallowed
    except Exception:
        raise TacticException("Incorrect formatting of trigger [%s]." % trigger_sobj.get_value("code"))

    # check against source status if present
    src_status = data.get("src_status")
    if src_status:
        task_status = task.get_value("status")
        if task_status != src_status:
            return

    process_names = data.get("output")
    if not process_names:
        return

    # only create new task if another of the same
    # process does not already exist
    search = Search("sthpw/task")
    search.add_filters("process", process_names)
    search.add_parent_filter(parent)
    search.add_project_filter()
    tasks = search.get_sobjects()
    existing_processes = set([x.get_value("process") for x in tasks])

    for process in process_names:
        if process not in existing_processes:
            Task.create(parent, process, start_date=None, end_date=None)
def execute(my):
    """Trigger: create a follow-up task for the configured 'output'
    process on the calling task's parent."""
    input = my.get_input()
    search_key = input.get("search_key")
    task = Search.get_by_search_key(search_key)
    parent = task.get_parent()
    # FIX: guard against a missing parent instead of crashing inside
    # Task.create (the newer version of this trigger checks too)
    if not parent:
        return

    # get the definition of the trigger
    trigger_sobj = my.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    data = jsonloads(data)

    process = data.get("output")
    description = ""

    # FIXME:
    # find out if there is already a task of that process
    Task.create(parent, process, description, start_date=None, end_date=None)
def get_files(self):
    """Collect the file paths for the sobjects named by the
    'search_key' / 'search_keys' kwargs.  Tasks and notes carry no
    files themselves, so they are resolved to their parent."""
    paths = []

    # remember these here for now
    self.files = {}
    self.snapshots = {}

    single_key = self.kwargs.get("search_key")
    multi_keys = self.kwargs.get("search_keys")

    if single_key:
        self.sobjects = [SearchKey.get_by_search_key(single_key)]

    if multi_keys:
        if isinstance(multi_keys, basestring):
            # keys may arrive as a JSON-ish string using single quotes
            multi_keys = jsonloads(multi_keys.replace("'", '"'))
        self.sobjects = Search.get_by_search_keys(multi_keys)

    if not self.sobjects:
        return []

    self.sobject = self.sobjects[0]

    for sobject in self.sobjects:
        if sobject.get_base_search_type() in ['sthpw/task', 'sthpw/note']:
            target = sobject.get_parent()
        else:
            target = sobject
        paths.extend(self.get_sobject_files(target))

    return paths
def POST(self): from pyasm.web import WebContainer web = WebContainer.get_web() method = web.get_form_value("method") print "method: ", method # make sure there are no special characters in there ie: () p = re.compile('^\w+$') if not re.match(p, method): raise Exception("Mathod [%s] does not exist" % method) from tactic_client_lib import TacticServerStub server = TacticServerStub.get() if not eval("server.%s" % method): raise Exception("Mathod [%s] does not exist" % method) keys = web.get_form_keys() kwargs = {} for key in keys: if key in ["method", "login_ticket", "password"]: continue if key == 'kwargs': args = web.get_form_value(key) args = jsonloads(args) for name, value in args.items(): kwargs[name] = value else: kwargs[key] = web.get_form_value(key) call = "server.%s(**kwargs)" % method return eval(call)
def preprocess(my):
    # Determine the visible date window, fetch the matching work_hour
    # entries for the widget's tasks, and bucket them per task per day
    # (and per category).  Nesting reconstructed from a collapsed
    # source -- NOTE(review): verify against the original layout.
    my.preprocessed = True
    my.today = datetime.date.today()
    wday = int(my.today.strftime("%w"))
    web = WebContainer.get_web()
    start_date = web.get_form_value('start_date')
    web_data = web.get_form_value('web_data')
    if web_data:
        web_data = jsonloads(web_data)
    workhour_data = None
    if web_data:
        web_data = web_data[0]
        workhour_data = web_data.get('workhour_data')
    # pick the window start: explicit form value, then the embedded
    # workhour_data, then today / start of the current week
    if start_date:
        start_date = my._get_date_obj(start_date)
    elif workhour_data:
        workhour_data = jsonloads(workhour_data)
        start_date = workhour_data.get('start_date')
        start_date = my._get_date_obj(start_date)
    else:
        if my.days_per_page < 7:
            start_date = my.today
        else:
            start_date = my.today - datetime.timedelta(days=wday)
    my.start_date = start_date
    end_date = start_date + datetime.timedelta(days=my.days_per_page - 1)
    # this may not be necessary any more
    """
    if not my.sobjects:
        sk = my.kwargs.get('search_key')
        task = SearchKey.get_by_search_key(sk)
        my.sobjects = [task]
    """
    task_codes = [x.get_code() for x in my.sobjects]
    search = Search("sthpw/work_hour")
    if my.kwargs.get('show_all_users') == 'false':
        search.add_user_filter()
    search.add_filter("day", start_date, ">=")
    search.add_filter("day", end_date, "<=")
    search.add_filters("task_code", task_codes)
    entries = search.get_sobjects()
    # NOTE:
    # This widget assumes one entry per day. This is not the case
    # when time for each entry must be recorded and you may have
    # multiple entries per day

    # organize into days: my.entries[task_code][YYYY_MM_DD] -> [entry]
    my.entries = {}
    for entry in entries:
        day = entry.get_value("day")
        if not day:
            continue
        day = parser.parse(day)
        day = day.strftime("%Y_%m_%d")
        task_code = entry.get_value("task_code")
        task_entries = my.entries.get(task_code)
        if task_entries == None:
            task_entries = {}
            my.entries[task_code] = task_entries
        entry_list = task_entries.get(day)
        if entry_list == None:
            entry_list = []
            task_entries[day] = entry_list
        entry_list.append(entry)

    # break into 2 categories
    for key, sub_dict in my.entries.items():
        if my.use_straight_time:
            # split each day's entries into overtime vs straight time
            for key2, entry_list in sub_dict.items():
                entry_list_dict = {my.OT: [], my.ST: []}
                for entry in entry_list:
                    if entry.get_value('category') == my.OT:
                        entry_list_dict[my.OT].append(entry)
                    elif entry.get_value('category') == my.ST:
                        entry_list_dict[my.ST].append(entry)
                    else:
                        # in case they haven't run the upgrade script
                        # (potentially include some custom-entered category)
                        entry_list_dict[my.ST].append(entry)
                sub_dict[key2] = entry_list_dict
        else:
            # start/end-time mode: every entry appears in both buckets
            for key2, entry_list in sub_dict.items():
                entry_list_dict = {my.STT: [], my.ENT: []}
                for entry in entry_list:
                    entry_list_dict[my.STT].append(entry)
                    entry_list_dict[my.ENT].append(entry)
                sub_dict[key2] = entry_list_dict

    my.dates = list( rrule.rrule(rrule.DAILY, dtstart=start_date, until=end_date))
    # per-weekday summary accumulators
    for idx in xrange(0, 8):
        my.summary_st[idx] = {}
    for idx in xrange(0, 8):
        my.summary_ot[idx] = {}
def execute(my):
    # Persist submitted work-hour values for a task: one work_hour
    # sobject per day, either as straight/overtime amounts or as
    # start/end times depending on the use_straight_time option.
    # NOTE(review): nesting reconstructed from a collapsed source --
    # verify the if/else levels against the original layout.
    value = my.get_value()
    if value:
        data = jsonloads(value)
    else:
        data = {}
    task = my.sobject
    parent = task.get_parent()
    my.unit = my.get_option("unit")
    if not my.unit:
        my.unit = "hour"
    my.use_straight_time = my.get_option("use_straight_time")
    #my.use_straight_time = 'false'
    if my.use_straight_time == 'false':
        my.use_straight_time = False
    else:
        my.use_straight_time = True
    # Do this for now. EXIT if the parent of task can't be found..
    if not parent:
        return
    # TODO: make it work if my.sobject is not an instance of a Task
    use_task_code = True
    # match assigned to avoid deleting work hours entries made on the same task by other users
    user = Environment.get_user_name()
    entries = parent.get_related_sobjects("sthpw/work_hour")
    # filter out just for this task
    if use_task_code:
        entries = [x for x in entries if x.get_value('task_code') == task.get_code() and x.get_value('login') == user]
    entry_dict = {}
    # NOTE(review): the loop variable *value* shadows the outer form
    # value read above -- confirm intentional.
    for key, value in data.items():
        # only process the day-column keys relevant to the active mode
        if my.use_straight_time:
            if not (key.startswith("day_") or key.startswith("otday_")):
                continue
        else:
            if not (key.startswith("day_") or key.startswith("sttday_") or key.startswith("entday_")):
                continue
        start_value = data
        tmp, year, month, day = key.split("_")
        date = "%s-%s-%s 00:00:00" % (year, month, day)
        #OVER_TIME_TYPE = 'ot'
        exists = False
        # TODO: we should allow multiple entiries per task per day and just
        # have a special UI to edit individual entries post creation.
        # scan the existing entries for one matching this day/category
        for entry in entries:
            entry_day = entry.get_value("day")
            if entry_day == date:
                if my.use_straight_time:
                    if key.startswith("day_"):
                        if entry.get_value("category") in ['', WorkHoursElementWdg.ST]:
                            exists = True
                            break
                    if key.startswith("otday_"):
                        if WorkHoursElementWdg.OT == entry.get_value("category"):
                            exists = True
                            break
                else:
                    # only supports regular hours for start and end time
                    if key.startswith("sttday_"):
                        if entry.get_value("category") in ['', WorkHoursElementWdg.ST]:
                            exists = True
                            break
                    elif key.startswith("entday_"):
                        if entry.get_value("category") in ['', WorkHoursElementWdg.ST]:
                            exists = True
                            break
        if not exists:
            # reuse an entry already built for this date this request,
            # otherwise create a fresh work_hour sobject
            entry = entry_dict.get(date)
            if not entry:
                # create a new one
                entry = SearchType.create("sthpw/work_hour")
                if parent:
                    entry.set_parent(parent)
                entry.set_value("task_code", task.get_code())
                entry.set_value("process", task.get_value('process'))
                entry.set_value("day", date)
                entry.set_user()
                entry.set_project()
        if not my.use_straight_time:
            # only enter standard time for now
            entry.set_value("project_code", task.get_value('project_code'))
            entry.set_value("category", WorkHoursElementWdg.ST)
            if not value:
                continue
            date_part = ''
            if key.startswith("entday_"):
                date_part = key.replace('entday_', '')
                date_part = date_part.replace('_', '-')
                value = value.zfill(4)
                time = parser.parse('%s %s' % (date_part, value))
                entry.set_value("end_time", time)
            elif key.startswith("sttday_"):
                date_part = key.replace('sttday_', '')
                date_part = date_part.replace('_', '-')
                value = value.zfill(4)
                time = parser.parse('%s %s' % (date_part, value))
                entry.set_value("start_time", time)
            entry_dict[date] = entry
            #entry.commit()
        else:
            if value == '' or value == '0':
                # blank/zero submission removes a matched existing entry
                if exists:
                    entry.delete()
            elif value == '%s' % entry.get_value('straight_time'):
                # prevent commit the same value again
                continue
            else:
                # entry.set_value("straight_time", value)
                entry.set_value("project_code", task.get_value('project_code'))
                if key.startswith("otday_"):
                    entry.set_value("category", WorkHoursElementWdg.OT)
                else:
                    entry.set_value("category", WorkHoursElementWdg.ST)
                entry.commit()
    # for start/end-time mode: derive straight_time from the interval
    # and commit the accumulated per-date entries
    for key, entry in entry_dict.items():
        # set the straight_time as well
        st_time = str(entry.get_value('start_time'))
        end_time = str(entry.get_value('end_time'))
        if st_time and end_time:
            st_time_obj = parser.parse(st_time)
            end_time_obj = parser.parse(end_time)
            delta = (end_time_obj - st_time_obj).seconds / WorkHoursElementWdg.UNIT_DICT[my.unit]
            entry.set_value("straight_time", delta)
        entry.commit()
########################################################### # # Copyright (c) 2012, Southpaw Technology # All Rights Reserved # # PROPRIETARY INFORMATION. This software is proprietary to # Southpaw Technology, and is not to be reproduced, transmitted, # or disclosed in any way without written permission. # # # import tacticenv from pyasm.common import jsonloads, jsondumps print "function: ", function print "kwargs: ", kwargs, type(kwargs) print "protocol: ", protocol print "server: ", server kwargs = jsonloads(kwargs) # convert the args to a dict method = eval('''server.%s''' % function) ret_val = method(**kwargs) ret_val = jsondumps(ret_val)
def get_display(my):
    """Build the Perforce changelist browser panel.

    Renders a table of the user's changelists (radio-selectable), wires up
    client-side JS to (re)load a changelist's files via spt.scm, and shows
    either a loading placeholder, a directory listing of the files in the
    current changelist, or a "no files" notice.

    kwargs used: changelist, changelists, files, sizes, path_info.
    """
    top = my.top
    top.add_class("spt_changelist_content")
    my.set_as_panel(top)
    top.add_color("color", "color")
    top.add_color("background", "background")
    #top.add_border()
    #top.add_style("padding", "10px")
    top.add_style("min-width: 600px")
    top.add_style("min-height: 400px")

    # inject the SCM client-side library on load
    top.add_behavior( {
        'type': 'load',
        'cbjs_action': scm_get_onload_js()
    } )

    sync_dir = Environment.get_sandbox_dir()

    # HARD CODED
    project = Project.get()
    depot = project.get_value("location", no_exception=True)
    if not depot:
        depot = project.get_code()
    # depot-style path prefix, eg //<depot>
    location = '//%s' % depot

    # current changelist: explicit kwarg wins and is persisted; otherwise
    # fall back to the saved widget setting, then to 'default'
    changelist = my.kwargs.get("changelist")
    if not changelist:
        changelist = WidgetSettings.get_value_by_key("current_changelist")
    else:
        WidgetSettings.set_value_by_key("current_changelist", changelist)
    if not changelist:
        changelist = 'default'

    # changelists may arrive as a JSON string (single-quoted) from the client
    changelists = my.kwargs.get("changelists")
    if not changelists:
        changelists = []
    elif isinstance(changelists, basestring):
        changelists = changelists.replace("'", '"')
        changelists = jsonloads(changelists)

    # publish sandbox/depot locations to the client-side spt.scm namespace
    top.add_behavior( {
        'type': 'load',
        'sync_dir': sync_dir,
        'depot': depot,
        'cbjs_action': '''
        spt.scm.sync_dir = bvr.sync_dir;
        spt.scm.depot = bvr.depot;
        '''
    } )

    inner = DivWdg()
    top.add(inner)

    # summary table of all changelists
    table = Table()
    inner.add(table)
    table.add_style("width: 100%")
    table.add_color("background", "background", -3)
    table.add_row()
    th = table.add_header("")
    th = table.add_header("Changelist")
    th.add_style("text-align: left")
    th = table.add_header("Description")
    th.add_style("text-align: left")
    th = table.add_header("# Items")
    th.add_style("text-align: left")
    th = table.add_header("Status")
    th.add_style("text-align: left")
    th = table.add_header("View")
    th.add_style("text-align: left")
    th = table.add_header("Delete")
    th.add_style("text-align: left")

    #table.set_unique_id()
    #table.add_smart_styles("spt_changelist_item", {
    #    'text-align: right'
    #} ))

    # hover highlight for changelist rows
    bgcolor = table.get_color("background", -8)
    table.add_relay_behavior( {
        'type': 'mouseover',
        'bvr_match_class': 'spt_changelist_item',
        'bgcolor': bgcolor,
        'cbjs_action': '''
        bvr.src_el.setStyle("background-color", bvr.bgcolor);
        '''
    } )
    table.add_relay_behavior( {
        'type': 'mouseout',
        'bvr_match_class': 'spt_changelist_item',
        'cbjs_action': '''
        bvr.src_el.setStyle("background-color", '');
        '''
    } )
    # selecting a radio switches the active changelist and reloads the panel
    table.add_relay_behavior( {
        'type': 'mouseup',
        'bvr_match_class': "spt_changelist_radio",
        'cbjs_action': '''
        var changelist = bvr.src_el.value;
        var top = bvr.src_el.getParent(".spt_changelist_content");
        top.setAttribute("spt_changelist", changelist);
        spt.app_busy.show("Loading Changelists Information");
        spt.changelist.load(bvr.src_el, changelist);
        spt.app_busy.hide();
        '''
    } )

    # one row per changelist
    for c in changelists:
        num_items = len(c.get("info"))
        name = c.get("change")
        tr = table.add_row()
        tr.add_class("spt_changelist_item")

        radio = RadioWdg("changelist")
        radio.add_class("spt_changelist_radio")
        table.add_cell(radio)
        radio.set_option("value", name)
        if name == changelist:
            radio.set_checked()

        table.add_cell(name)
        table.add_cell(c.get("desc"))
        table.add_cell(num_items)
        table.add_cell(c.get("status"))

        # view button only when the changelist has files
        if num_items:
            icon = IconButtonWdg(title="View", icon=IconWdg.ZOOM)
            icon.add_behavior( {
                'type': 'click_up',
                'changelist': c.get("change"),
                'cbjs_action': '''
                var top = bvr.src_el.getParent(".spt_changelist_content");
                top.setAttribute("spt_changelist", bvr.changelist);
                spt.app_busy.show("Loading Changelist");
                spt.changelist.load(bvr.src_el, bvr.changelist);
                spt.app_busy.hide();
                '''
            } )
        else:
            icon = ''
        table.add_cell(icon)

        # only empty, non-default changelists may be deleted
        # NOTE(review): the delete icon has no click behavior attached here
        if not num_items and name != 'default':
            icon = IconButtonWdg(title="Delete Changelist", icon=IconWdg.DELETE)
        else:
            icon = ""
        table.add_cell(icon)

    inner.add("<hr/>")

    # files may arrive as a JSON string (single-quoted) from the client
    files = my.kwargs.get("files")
    if isinstance(files, basestring):
        files = files.replace("'", '"')
        files = jsonloads(files)

    # no files passed in: define the client-side loader which fetches the
    # changelists + files via the applet and re-loads this panel with them
    if files == None:
        inner.add_behavior( {
            'type': 'load',
            'location': location,
            'changelist': changelist,
            'sync_dir': sync_dir,
            'cbjs_action': '''
            spt.changelist = {}
            spt.changelist.load = function(el, changelist) {
                var applet = spt.Applet.get();
                var changelists = spt.scm.run("get_changelists_by_user",[]);
                var dflt = {
                    'change': 'default',
                    'status': 'pending'
                }
                changelists.push(dflt);
                changelists = changelists.reverse();
                for (var i = 0; i < changelists.length; i++) {
                    var info = spt.scm.run("get_changelist_files",[changelists[i].change]);
                    changelists[i]['info'] = info;
                }

                // get the current chnage list
                var files = spt.scm.run("get_changelist_files",[changelist]);

                var path_info = {};
                var sizes = [];
                for ( var i = 0; i < files.length; i++) {
                    // FIXME: perforce specific
                    var path = files[i].depotFile;
                    path = path.replace(bvr.location, bvr.sync_dir);
                    files[i].path = path;

                    var size;
                    if (applet.exists(path)) {
                        var info = applet.get_path_info(path);
                        size = info.size;
                    }
                    else {
                        size = 0;
                    }
                    sizes.push(size);
                    path_info[path] = 'editable';
                }

                //var ret_val = spt.scm.status(bvr.sync_dir);
                //console.log(ret_val);

                var class_name = 'tactic.ui.checkin.changelist_wdg.ChangelistWdg';
                var kwargs = {
                    files: files,
                    sizes: sizes,
                    changelist: changelist,
                    changelists: changelists,
                    path_info: path_info,
                }
                var top = el.getParent(".spt_changelist_content");
                spt.panel.load(top, class_name, kwargs);
            }
            spt.changelist.load(bvr.src_el, bvr.changelist);
            '''
        } )

    content = DivWdg()
    inner.add(content)
    content.add_class("spt_changelist_content")

    if files == None:
        # first pass: client-side loader above will refresh with real data
        loading_wdg = DivWdg()
        content.add(loading_wdg)
        loading_wdg.add("<b>Loading ...</b>")
        loading_wdg.add_style("padding: 30px")
    elif files:
        # show the files of the current changelist as a directory listing
        title_wdg = DivWdg()
        title_wdg.add_style("height: 15px")
        title_wdg.add("Changelist: [%s]" % changelist)
        content.add(title_wdg)
        title_wdg.add_gradient("background", "background", -5)
        title_wdg.add_style("padding: 5px")
        title_wdg.add_border()

        #button = SingleButtonWdg(tip='Add New Changelist', icon=IconWdg.ADD)
        #content.add(button)

        content.add("<br/>")
        paths = [x.get("path") for x in files]
        sizes = my.kwargs.get("sizes")
        path_info = my.kwargs.get("path_info")
        from scm_dir_list_wdg import ScmDirListWdg
        # dummy search_key
        search_key = "sthpw/virtual?code=xx001"
        dir_list_wdg = ScmDirListWdg(
            base_dir=sync_dir,
            paths=paths,
            sizes=sizes,
            path_info=path_info,
            all_open=True,
            #search_key=search_key
        )
        content.add(dir_list_wdg)
    else:
        # changelist exists but contains no files
        content.add("Changelist: [%s]" % changelist)
        content.add("<br/>"*2)
        no_files_wdg = DivWdg()
        content.add(no_files_wdg)
        no_files_wdg.add_style("padding: 20px")
        no_files_wdg.add_border()
        no_files_wdg.add("No files in changelist")
        no_files_wdg.add_color("color", "color3")
        no_files_wdg.add_color("background", "background3")
        no_files_wdg.add_style("font-weight: bold")
        no_files_wdg.add_style("text-align: center")

    return top
def get_display(my):
    """Display handler for the Perforce changelist panel.

    Draws the changelist summary table with per-row select/view/delete
    controls, registers the spt.changelist JS loader, and then shows one
    of: a loading stub (no file data yet), the file tree for the active
    changelist, or an empty-changelist message.

    kwargs read: changelist, changelists, files, sizes, path_info.
    """
    top = my.top
    top.add_class("spt_changelist_content")
    my.set_as_panel(top)
    top.add_color("color", "color")
    top.add_color("background", "background")
    #top.add_border()
    #top.add_style("padding", "10px")
    top.add_style("min-width: 600px")
    top.add_style("min-height: 400px")

    # load the SCM javascript helpers into the page
    top.add_behavior({'type': 'load', 'cbjs_action': scm_get_onload_js()})

    sync_dir = Environment.get_sandbox_dir()

    # HARD CODED
    project = Project.get()
    depot = project.get_value("location", no_exception=True)
    if not depot:
        depot = project.get_code()
    # Perforce depot prefix used to map depot paths to the sandbox
    location = '//%s' % depot

    # resolve the active changelist: kwarg (persisted) > saved setting > 'default'
    changelist = my.kwargs.get("changelist")
    if not changelist:
        changelist = WidgetSettings.get_value_by_key("current_changelist")
    else:
        WidgetSettings.set_value_by_key("current_changelist", changelist)
    if not changelist:
        changelist = 'default'

    # changelists kwarg may be a JSON string with single quotes
    changelists = my.kwargs.get("changelists")
    if not changelists:
        changelists = []
    elif isinstance(changelists, basestring):
        changelists = changelists.replace("'", '"')
        changelists = jsonloads(changelists)

    # expose sandbox dir and depot to client-side spt.scm
    top.add_behavior({
        'type': 'load',
        'sync_dir': sync_dir,
        'depot': depot,
        'cbjs_action': '''
        spt.scm.sync_dir = bvr.sync_dir;
        spt.scm.depot = bvr.depot;
        '''
    })

    inner = DivWdg()
    top.add(inner)

    # changelist summary table
    table = Table()
    inner.add(table)
    table.add_style("width: 100%")
    table.add_color("background", "background", -3)
    table.add_row()
    th = table.add_header("")
    th = table.add_header("Changelist")
    th.add_style("text-align: left")
    th = table.add_header("Description")
    th.add_style("text-align: left")
    th = table.add_header("# Items")
    th.add_style("text-align: left")
    th = table.add_header("Status")
    th.add_style("text-align: left")
    th = table.add_header("View")
    th.add_style("text-align: left")
    th = table.add_header("Delete")
    th.add_style("text-align: left")

    #table.set_unique_id()
    #table.add_smart_styles("spt_changelist_item", {
    #    'text-align: right'
    #} ))

    # row hover highlight
    bgcolor = table.get_color("background", -8)
    table.add_relay_behavior({
        'type': 'mouseover',
        'bvr_match_class': 'spt_changelist_item',
        'bgcolor': bgcolor,
        'cbjs_action': '''
        bvr.src_el.setStyle("background-color", bvr.bgcolor);
        '''
    })
    table.add_relay_behavior({
        'type': 'mouseout',
        'bvr_match_class': 'spt_changelist_item',
        'cbjs_action': '''
        bvr.src_el.setStyle("background-color", '');
        '''
    })
    # radio click switches the active changelist and reloads its files
    table.add_relay_behavior({
        'type': 'mouseup',
        'bvr_match_class': "spt_changelist_radio",
        'cbjs_action': '''
        var changelist = bvr.src_el.value;
        var top = bvr.src_el.getParent(".spt_changelist_content");
        top.setAttribute("spt_changelist", changelist);
        spt.app_busy.show("Loading Changelists Information");
        spt.changelist.load(bvr.src_el, changelist);
        spt.app_busy.hide();
        '''
    })

    # render one table row per changelist
    for c in changelists:
        num_items = len(c.get("info"))
        name = c.get("change")
        tr = table.add_row()
        tr.add_class("spt_changelist_item")

        radio = RadioWdg("changelist")
        radio.add_class("spt_changelist_radio")
        table.add_cell(radio)
        radio.set_option("value", name)
        if name == changelist:
            radio.set_checked()

        table.add_cell(name)
        table.add_cell(c.get("desc"))
        table.add_cell(num_items)
        table.add_cell(c.get("status"))

        # view icon only when there is something to view
        if num_items:
            icon = IconButtonWdg(title="View", icon=IconWdg.ZOOM)
            icon.add_behavior({
                'type': 'click_up',
                'changelist': c.get("change"),
                'cbjs_action': '''
                var top = bvr.src_el.getParent(".spt_changelist_content");
                top.setAttribute("spt_changelist", bvr.changelist);
                spt.app_busy.show("Loading Changelist");
                spt.changelist.load(bvr.src_el, bvr.changelist);
                spt.app_busy.hide();
                '''
            })
        else:
            icon = ''
        table.add_cell(icon)

        # delete icon only for empty non-default changelists
        # NOTE(review): no click handler is wired to this icon here
        if not num_items and name != 'default':
            icon = IconButtonWdg(title="Delete Changelist", icon=IconWdg.DELETE)
        else:
            icon = ""
        table.add_cell(icon)

    inner.add("<hr/>")

    # files kwarg may be a JSON string with single quotes
    files = my.kwargs.get("files")
    if isinstance(files, basestring):
        files = files.replace("'", '"')
        files = jsonloads(files)

    # no server-side file data: install the JS loader that gathers
    # changelists + files through the applet and re-loads this panel
    if files == None:
        inner.add_behavior({
            'type': 'load',
            'location': location,
            'changelist': changelist,
            'sync_dir': sync_dir,
            'cbjs_action': '''
            spt.changelist = {}
            spt.changelist.load = function(el, changelist) {
                var applet = spt.Applet.get();
                var changelists = spt.scm.run("get_changelists_by_user",[]);
                var dflt = {
                    'change': 'default',
                    'status': 'pending'
                }
                changelists.push(dflt);
                changelists = changelists.reverse();
                for (var i = 0; i < changelists.length; i++) {
                    var info = spt.scm.run("get_changelist_files",[changelists[i].change]);
                    changelists[i]['info'] = info;
                }

                // get the current chnage list
                var files = spt.scm.run("get_changelist_files",[changelist]);

                var path_info = {};
                var sizes = [];
                for ( var i = 0; i < files.length; i++) {
                    // FIXME: perforce specific
                    var path = files[i].depotFile;
                    path = path.replace(bvr.location, bvr.sync_dir);
                    files[i].path = path;

                    var size;
                    if (applet.exists(path)) {
                        var info = applet.get_path_info(path);
                        size = info.size;
                    }
                    else {
                        size = 0;
                    }
                    sizes.push(size);
                    path_info[path] = 'editable';
                }

                //var ret_val = spt.scm.status(bvr.sync_dir);
                //console.log(ret_val);

                var class_name = 'tactic.ui.checkin.changelist_wdg.ChangelistWdg';
                var kwargs = {
                    files: files,
                    sizes: sizes,
                    changelist: changelist,
                    changelists: changelists,
                    path_info: path_info,
                }
                var top = el.getParent(".spt_changelist_content");
                spt.panel.load(top, class_name, kwargs);
            }
            spt.changelist.load(bvr.src_el, bvr.changelist);
            '''
        })

    content = DivWdg()
    inner.add(content)
    content.add_class("spt_changelist_content")

    if files == None:
        # placeholder shown until the client-side loader refreshes the panel
        loading_wdg = DivWdg()
        content.add(loading_wdg)
        loading_wdg.add("<b>Loading ...</b>")
        loading_wdg.add_style("padding: 30px")
    elif files:
        # show the directory listing for the active changelist
        title_wdg = DivWdg()
        title_wdg.add_style("height: 15px")
        title_wdg.add("Changelist: [%s]" % changelist)
        content.add(title_wdg)
        title_wdg.add_gradient("background", "background", -5)
        title_wdg.add_style("padding: 5px")
        title_wdg.add_border()

        #button = SingleButtonWdg(tip='Add New Changelist', icon=IconWdg.ADD)
        #content.add(button)

        content.add("<br/>")
        paths = [x.get("path") for x in files]
        sizes = my.kwargs.get("sizes")
        path_info = my.kwargs.get("path_info")
        from scm_dir_list_wdg import ScmDirListWdg
        # dummy search_key
        search_key = "sthpw/virtual?code=xx001"
        dir_list_wdg = ScmDirListWdg(
            base_dir=sync_dir,
            paths=paths,
            sizes=sizes,
            path_info=path_info,
            all_open=True,
            #search_key=search_key
        )
        content.add(dir_list_wdg)
    else:
        # empty changelist message
        content.add("Changelist: [%s]" % changelist)
        content.add("<br/>" * 2)
        no_files_wdg = DivWdg()
        content.add(no_files_wdg)
        no_files_wdg.add_style("padding: 20px")
        no_files_wdg.add_border()
        no_files_wdg.add("No files in changelist")
        no_files_wdg.add_color("color", "color3")
        no_files_wdg.add_color("background", "background3")
        no_files_wdg.add_style("font-weight: bold")
        no_files_wdg.add_style("text-align: center")

    return top
# NOTE(review): the statements below read `k` and are presumably the tail of
# a run_batch(k) function whose def line is above this view — confirm.
command = k.get("command")
kwargs = k.get("kwargs")
login = k.get("login")
project_code = k.get("project_code")

# authenticate a batch (non-web) session for the given project/login
from pyasm.security import Batch
Batch(project_code=project_code, login_code=login)

# instantiate the command class by dotted path and run it through the
# standard Command dispatcher (transactions, triggers, etc.)
cmd = Common.create_from_class_path(command, kwargs=kwargs)
Command.execute_cmd(cmd)


__all__.append("QueueTest")
class QueueTest(Command):
    # Test command that fails most of the time, used to exercise the
    # queue's retry/error handling.
    def execute(my):
        # this command has only a one in 10 chance of succeeding
        import random
        value = random.randint(0, 10)
        if value != 5:
            # deliberately undefined name: raises NameError to simulate
            # a command failure (presumably intentional — see comment above)
            sdaffsfda


if __name__ == '__main__':
    # CLI entry: first argument is a JSON dict with command/kwargs/login/
    # project_code, handed to run_batch()
    import sys
    args = sys.argv[1:]
    k = args[0]
    k = jsonloads(k)
    run_batch(k)
        # NOTE(review): tail of a `run` staticmethod whose def is above this
        # view — parses the collected output lines as JSON and returns it.
        value = jsonloads("\n".join(lines))
        return value
    run = staticmethod(run)


if __name__ == '__main__':
    # CLI test driver: runs a JsCmd against a hard-coded project/site.
    # argv[1] = JSON-encoded javascript code, argv[2] = JSON-encoded kwargs
    project_code = "vfx"
    site = "vfx_test"

    import sys
    args = sys.argv[1:]
    code = args[0]
    code = jsonloads(code)
    kwargs = args[1]
    kwargs = jsonloads(kwargs)

    # authenticate a batch session before executing the command
    from pyasm.security import Batch
    Batch(site=site, project_code=project_code)

    cmd = JsCmd(code=code, input=kwargs)
    Command.execute_cmd(cmd)

    # the command stores its javascript return value under this info key
    ret_val = cmd.info.get("spt_ret_val")
    print "~" * 20
    print ret_val
# NOTE(review): reads `k`, `project_code`, `login`, `command`, `kwargs`
# defined above this view — presumably the tail of run_batch(k); this
# variant also supports a multi-tenant `site`.
site = k.get("site")

# authenticate a batch (non-web) session for the given site/project/login
from pyasm.security import Batch
Batch(site=site, project_code=project_code, login_code=login)

# instantiate the command class by dotted path and dispatch it
cmd = Common.create_from_class_path(command, kwargs=kwargs)
Command.execute_cmd(cmd)


__all__.append("QueueTest")
class QueueTest(Command):
    # Test command that fails most of the time, used to exercise the
    # queue's retry/error handling.
    def execute(self):
        # this command has only a one in 10 chance of succeeding
        import random
        value = random.randint(0, 10)
        if value != 5:
            # deliberately undefined name: raises NameError to simulate
            # a command failure (presumably intentional — see comment above)
            sdaffsfda


if __name__ == '__main__':
    # CLI entry: first argument is a JSON dict handed to run_batch()
    import sys
    args = sys.argv[1:]
    k = args[0]
    k = jsonloads(k)
    run_batch(k)
def execute(my):
    """Task trigger: propagate attribute changes to connected tasks.

    The trigger's configuration is stored as JSON (FilterData format) in
    the trigger sobject's "data" column:
      - "trigger" values name the process this trigger applies to
      - "rule" entries are match conditions on the calling task
        ("status") or on its parent ("pipeline" -> pipeline_code)
      - "action" entries say what attribute/value to set and where:
        type 'output' -> all output tasks, 'input' -> all input tasks,
        'process' -> the first input and first output task of a named
        process

    Returns None.  Side effects: commits attribute changes on related
    task sobjects.
    """
    trigger_sobj = my.get_trigger_sobj()
    data = trigger_sobj.get_value("data")
    # Example payload:
    #data = """[
    #{ "prefix": "rule", "name": "status", "value": "Approved" },
    #{ "prefix": "rule", "name": "pipeline", "value": "model" },
    #{ "prefix": "action", "type": "output", "name": "status", "value": "Pending" }
    #]
    #"""
    data = jsonloads(data)

    from tactic.ui.filter import FilterData
    filter_data = FilterData(data)

    task = my.get_caller()

    # check that the process is correct
    trigger_info = filter_data.get_values_by_index("trigger")
    process = trigger_info.get("process")
    if task.get_value("process") != process:
        return

    # parent is fetched lazily (at most once) by the pipeline rule
    parent = None

    # go through each rule and determine if this trigger applies
    rules = filter_data.get_values_by_prefix("rule")
    is_valid = True
    for rule in rules:
        attribute = rule.get('name')
        value = rule.get('value')
        if attribute in ['status']:
            # if condition does not match
            if task.get_value(attribute) != value:
                is_valid = False
        elif attribute in ['pipeline']:
            attribute = 'pipeline_code'
            if parent == None:
                parent = task.get_parent()
            if parent == None:
                # no parent to test against: skip this rule
                continue
            if parent.get_value(attribute) != value:
                is_valid = False
        else:
            # unknown rule names invalidate the whole trigger
            is_valid = False

    if not is_valid:
        return

    # get the connected process tasks
    output_tasks = task.get_output_tasks()
    input_tasks = task.get_input_tasks()

    actions = filter_data.get_values_by_prefix("action")
    for action in actions:
        type = action.get("type")
        attribute = action.get('name')
        value = action.get('value')
        if type == 'output':
            for output_task in output_tasks:
                output_task.set_value(attribute, value)
                output_task.commit()
        elif type == 'input':
            # FIX: this branch previously iterated output_tasks (copy/paste
            # error) — an "input" action must apply to the input tasks
            for input_task in input_tasks:
                input_task.set_value(attribute, value)
                input_task.commit()
        elif type == 'process':
            # apply to the first input task and the first output task
            # belonging to the named process
            process = action.get("process")
            for input_task in input_tasks:
                task_process = input_task.get_value("process")
                if task_process == process:
                    input_task.set_value(attribute, value)
                    input_task.commit()
                    break
            for output_task in output_tasks:
                task_process = output_task.get_value("process")
                if task_process == process:
                    output_task.set_value(attribute, value)
                    output_task.commit()
                    break
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. 
Please first create a "name" column for this sType.') for count, filename in enumerate(filenames): # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobject = search.get_sobject() # else create a new one if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) if SearchType.column_exists(search_type, "keywords"): if 
keywords: sobject.set_value("keywords", keywords) for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename) server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def _process_video(self, file_name):
    """Generate web and icon thumbnails for a video via ffmpeg.

    Probes the video's dimensions with ffprobe, chooses an ffmpeg sizing
    argument that only downscales when necessary (preserving aspect
    ratio when the configured web height is -1), then grabs a single
    frame for the web image and a fixed 120x100 frame for the icon.

    Side effects: writes "<base>_web.jpg" / "<base>_icon.png" into
    self.tmp_dir and sets self.web_path / self.icon_path (None on
    failure).  Falls back to _process_image() for gifs that ffmpeg
    could not handle.  No-op when ffmpeg is unavailable.
    """
    if not HAS_FFMPEG:
        return

    thumb_web_size = self.get_web_file_size()
    thumb_icon_size = (120, 100)

    exts = File.get_extensions(file_name)
    base, ext = os.path.splitext(file_name)
    icon_file_name = "%s_icon.png" % base
    web_file_name = "%s_web.jpg" % base

    tmp_icon_path = "%s/%s" % (self.tmp_dir, icon_file_name)
    tmp_web_path = "%s/%s" % (self.tmp_dir, web_file_name)

    #cmd = '''"%s" -i "%s" -r 1 -ss 00:00:01 -t 1 -s %sx%s -vframes 1 "%s"''' % (ffmpeg, self.file_path, thumb_web_size[0], thumb_web_size[1], tmp_web_path)
    #os.system(cmd)

    import subprocess
    try:
        # Attempt to resize only if necessary.  Requires an ffprobe call.
        # (More recent versions of ffmpeg support
        # -vf scale="'if(gt(iw, 640), 640, iw)':'if(gt(ih, 6400), 6400, -1)'"
        # which scales conditionally while preserving aspect ratio; for
        # now the video size must be queried explicitly.)
        free_aspect_ratio = thumb_web_size[1] == -1
        try:
            command = ["ffprobe", "-print_format", "json", "-select_streams", "v:0", "-show_entries", "stream=height,width", self.file_path]
            p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            data = jsonloads(out)
            streams = data.get("streams") or []
            sample_stream = streams[0]
            width = int(sample_stream.get("width"))
            height = int(sample_stream.get("height"))
            max_width = thumb_web_size[0]
            # with a free aspect ratio, cap height at 10x the width limit
            max_height = max_width*10 if free_aspect_ratio else thumb_web_size[1]
            if width < max_width and height < max_height:
                # Resizing is not necessary
                size_option = ""
                size = ""
            elif not free_aspect_ratio and (width > max_width or height > max_height):
                # fixed output size (may distort aspect ratio)
                size_option = "-s"
                size = "%sx%s" % (thumb_web_size[0], thumb_web_size[1])
            else:
                if width > height:
                    # landscape: constrain width, let height follow
                    size_option = "-vf"
                    size = "scale=%s:-1" % thumb_web_size[0]
                elif height > width:
                    aspect_ratio = float(float(height)/(width))
                    if aspect_ratio >= 10:
                        # extremely tall: constrain height only
                        size_option = "-vf"
                        size = "scale=-1:%s" % max_height
                    else:
                        new_height = max_height
                        # FIX: the projected width at max_height was
                        # computed without the width factor
                        # (was: float(new_height)/height), which made the
                        # width-constrained branch below unreachable
                        new_width = width*float(new_height)/height
                        if new_width > max_width:
                            new_width = max_width
                            new_height = height*float(new_width)/width
                            size_option = "-vf"
                            size = "scale=%s:-1" % max_width
                        else:
                            size_option = "-vf"
                            size = "scale=-1:%s" % max_height
        except Exception as e:
            # ffprobe failed: fall back to unconditional resize
            if free_aspect_ratio:
                size_option = "-vf"
                size = "scale=%s:-1" % thumb_web_size[0]
            else:
                size_option = "-s"
                size = "%sx%s" % (thumb_web_size[0], thumb_web_size[1])

        # grab one frame at t=0 for the web image
        command = [ffmpeg_exe, '-i', self.file_path, "-y", "-ss", "00:00:00","-t","1"]
        if size_option and size:
            command.extend([size_option, size])
        command.extend(["-vframes","1","-f","image2", tmp_web_path])
        subprocess.call(command)

        if os.path.exists(tmp_web_path):
            self.web_path = tmp_web_path
        else:
            self.web_path = None
    except Exception as e:
        Environment.add_warning("Could not process file", \
            "%s - %s" % (self.file_path, e.__str__()))
        pass

    # icon thumbnail: fixed 120x100 frame grab
    try:
        subprocess.call([ffmpeg_exe, '-i', self.file_path, "-y", "-ss", "00:00:00","-t","1",\
            "-s","%sx%s"%(thumb_icon_size[0], thumb_icon_size[1]),"-vframes","1","-f","image2", tmp_icon_path])
        if os.path.exists(tmp_icon_path):
            self.icon_path = tmp_icon_path
        else:
            self.icon_path = None
    except Exception as e:
        Environment.add_warning("Could not process file", \
            "%s - %s" % (self.file_path, e.__str__()))
        pass

    # animated gifs that ffmpeg could not thumbnail: treat as an image
    if (ext == ".gif" and not self.web_path):
        self._process_image( file_name )
def get_display(self):
    """Render a report of files under base_dir filtered against the scan
    session stored in /tmp/scan (a JSON dict keyed by absolute path).

    NOTE(review): `mode` and `base_dir` are hard-coded; only the 'not'
    branch is currently reachable.
    """
    top = self.top

    scan_path = "/tmp/scan"
    if not os.path.exists(scan_path):
        top.add("No results in current scan session")
        return top

    base_dir = "/home/apache"

    # total files walked (only incremented in the 'not' mode)
    total_count = 0

    mode = 'not'
    if mode == 'scan':
        # find all the files in the scanned data
        f = open(scan_path)
        data = jsonloads( f.read() )
        f.close()
    elif mode == 'not':
        # find all of the files not in the scanned data
        # NOTE(review): despite the comment above, the filter below keeps
        # only paths that ARE present in scan_data (it skips on a None
        # lookup).  Polarity left unchanged — confirm intent before
        # flipping the condition.
        f = open(scan_path)
        scan_data = jsonloads( f.read() )
        f.close()

        data = {}
        count = 0
        limit = 5000
        for root, dirs, files in os.walk(base_dir):
            for file in files:
                total_count += 1
                path = "%s/%s" % (root, file)
                if scan_data.get(path) == None:
                    continue
                count += 1
                if count > limit:
                    break
                # unpack into st_mode so the `mode` selector above is not
                # clobbered (previously this reused the name `mode`)
                (st_mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
                data[path] = {"size": size}
            if count > limit:
                break
    elif mode == 'bad':
        # files flagged irregular by the widget's own checker
        data = {}
        count = 0
        limit = 5000
        for root, dirs, files in os.walk(base_dir):
            for file in files:
                path = "%s/%s" % (root, file)
                if not self.check_irregular(path):
                    continue
                count += 1
                if count > limit:
                    break
                (st_mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
                data[path] = {"size": size}
            if count > limit:
                break
    elif mode == 'png':
        # png files only
        data = {}
        count = 0
        limit = 5000
        for root, dirs, files in os.walk(base_dir):
            for file in files:
                path = "%s/%s" % (root, file)
                if not path.endswith(".png"):
                    continue
                count += 1
                if count > limit:
                    break
                (st_mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
                data[path] = {"size": size}
            if count > limit:
                break
    elif mode == 'custom':
        data = {}
        count = 0
        limit = 5000
        # What does this look like???
        # NOTE(review): `Hander` is undefined (typo for a Handler class?);
        # this branch is unreachable while mode is hard-coded to 'not'.
        handler = Hander()
        for root, dirs, files in os.walk(base_dir):
            for file in files:
                path = "%s/%s" % (root, file)
                if not handler.validate():
                    continue
                count += 1
                if count > limit:
                    break
                (st_mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
                data[path] = {"size": size}
            if count > limit:
                break

    # sorted() works on both py2 lists and py3 dict views
    # (previously: paths = data.keys(); paths.sort() — py2 only)
    paths = sorted(data.keys())

    # build one virtual sobject per matched path for display
    sobjects = []
    for path in paths:
        sobject = SearchType.create("sthpw/virtual")
        basename = os.path.basename(path)
        dirname = os.path.dirname(path)
        reldir = dirname.replace("%s" % base_dir, "")
        if not reldir:
            reldir = ' '
        else:
            # FIX: lstrip returns a new string — the old code discarded
            # the result, leaving the leading "/" in place
            reldir = reldir.lstrip("/")
        if not basename:
            basename = ' '
        sobject.set_value("folder", reldir)
        sobject.set_value("file_name", basename)
        sobjects.append(sobject)

        info = data.get(path)
        if info:
            sobject.set_value("size", info.get("size"))

    from tactic.ui.panel import TableLayoutWdg
    element_names = ['folder','file_name', 'size']
    #element_names.extend( list(tags_keys) )
    #show_metadata = False
    #if not show_metadata:
    #    element_names.remove('metadata')
    #config_xml = self.get_config_xml(list(tags_keys))
    #layout = TableLayoutWdg(search_type='sthpw/virtual', view='report', element_names=element_names, mode='simple')
    #layout.set_sobjects(sobjects)
    #top.add(layout)

    top.add("Matched %s items of %s<br/>" % (len(sobjects), total_count) )

    # simple hand-rolled table: header row then one row per sobject
    table = Table()
    table.add_color("color", "color")
    top.add(table)

    table.add_row()
    for element_name in element_names:
        title = Common.get_display_title(element_name)
        td = table.add_cell("<b>%s</b>" % title)
        td.add_border()
        td.add_color("color", "color", +5)
        td.add_gradient('background', 'background', -20)
        td.add_style("height: 20px")
        td.add_style("padding: 3px")

    for row, sobject in enumerate(sobjects):
        tr = table.add_row()
        # alternate row shading
        if row % 2:
            background = tr.add_color("background", "background")
        else:
            background = tr.add_color("background", "background", -2)
        tr.add_attr("spt_background", background)
        for element_name in element_names:
            td = table.add_cell(sobject.get_value(element_name))
            td.add_border()

    return top
def get_display(self):
    """Render the ingest session directory listing with a Rescan button.

    Loads the ingest session's cached path data, builds a DirListWdg over
    base_dir (minus any paths recorded in /tmp/scan as already handled),
    and caches the discovered paths back onto the session.

    NOTE(review): several hard-coded debug overrides remain in this method
    (session_code, show_handled, rescan) — flagged inline below.
    """
    session_code = self.kwargs.get("session_code")
    # NOTE(review): debug override — the kwarg above is discarded
    session_code = 'session101'
    session = Search.get_by_code("config/ingest_session", session_code)
    data = session.get_json_value("data")
    if data == None:
        data = {}

    top = self.top
    top.add_class("spt_ingest_dir_list_top")
    self.set_as_panel(top)

    inner = DivWdg()
    top.add(inner)
    inner.add_style("padding: 10px")
    inner.add_color("background", "background")

    base_dir = self.kwargs.get("base_dir")
    location = self.kwargs.get("location")

    # normalize the rescan kwarg to a boolean
    # NOTE(review): this value is never used — `rescan` is reassigned to
    # the button widget just below, and forced to True further down
    rescan = self.kwargs.get("rescan")
    if rescan in [True, 'true']:
        rescan = True
    else:
        rescan = False

    # Rescan button: simply refreshes this panel
    rescan = ActionButtonWdg(title='Rescan', tip="Rescan file system")
    inner.add(rescan)
    rescan.add_style("float: right")
    rescan.add_behavior( {
        'type': 'click_up',
        'base_dir': base_dir,
        'cbjs_action': '''
        spt.app_busy.show("Scanning", "Scanning files in " + bvr.base_dir);
        var top = bvr.src_el.getParent(".spt_ingest_dir_list_top");
        spt.panel.refresh(top);
        spt.app_busy.hide();
        '''
    } )

    import datetime
    last_scan = datetime.datetime.now()
    last_scan_wdg = DivWdg()
    inner.add(last_scan_wdg)
    last_scan_wdg.add("Last Scan: %s" % last_scan)
    last_scan_wdg.add_style("margin-bottom: 5px")

    # placeholder filled in with the item count after the listing is built
    found_wdg = DivWdg()
    inner.add(found_wdg)
    found_wdg.add_style("margin-bottom: 5px")

    inner.add("<hr/>")

    show_handled = self.kwargs.get("show_handled")
    # NOTE(review): debug override — the kwarg above is discarded
    show_handled = False
    if show_handled in [False, 'false']:
        # hide already-handled paths recorded by the scan session
        try:
            scan_path = "/tmp/scan"
            import codecs
            f = codecs.open(scan_path, 'r', 'utf-8')
            ignore = jsonloads( f.read() )
            f.close()
        except Exception as e:
            # NOTE(review): e.message is deprecated/py2-only — confirm
            # the target Python version before relying on it
            print("Error: ", e.message)
            ignore = None
    else:
        ignore = None

    # check for rescan
    cache_key = self.kwargs.get("cache_key")
    # NOTE(review): debug override — forces a rescan on every display,
    # so the cached paths under cache_key are never used
    rescan = True
    if rescan == True:
        paths = []
    else:
        paths = data.get(cache_key)
        if not paths:
            paths = []

    dir_list_wdg = DirListWdg(paths=paths, base_dir=base_dir, location=location, ignore=ignore, depth=2)

    # cache the paths
    if rescan:
        paths = dir_list_wdg.get_paths()
        data['paths'] = paths
        session.set_json_value("data", data)
        session.commit()

    # add the paths found
    num_paths = dir_list_wdg.get_num_paths()
    found_wdg.add("Found: %s items" % num_paths)

    inner.add(dir_list_wdg)

    return top
def execute(my):
    """Compute which client-side dynamic updates need refreshing.

    Compares the search keys the client is watching (my.kwargs['updates'])
    against sobjects changed since my.kwargs['last_timestamp'], and stores
    {"updates": {id: value}, "timestamp": local-time string} in my.info.
    Entries whose watched keys saw no change are skipped; the rest get
    either a "Loading ..." placeholder (compare-expression entries) or an
    immediately evaluated value.
    """
    start = time.time()
    from pyasm.common import SPTDate
    # build a local-time timestamp string to hand back to the client
    timestamp = SPTDate.now()
    timestamp = SPTDate.add_gmt_timezone(timestamp)
    timestamp = SPTDate.convert_to_local(timestamp)
    format = '%Y-%m-%d %H:%M:%S'
    timestamp = timestamp.strftime(format)

    updates = my.kwargs.get("updates")
    # updates may arrive as a JSON string from the client
    if isinstance(updates, basestring):
        updates = jsonloads(updates)
    last_timestamp = my.kwargs.get("last_timestamp")
    #assert last_timestamp
    if not last_timestamp:
        # first poll: nothing to diff against, just return the timestamp
        my.info = {"updates": {}, "timestamp": timestamp}
        return
    last_timestamp = parser.parse(last_timestamp)
    last_timestamp = SPTDate.add_gmt_timezone(last_timestamp)

    # give 2 seconds of extra room
    last_timestamp = last_timestamp - timedelta(seconds=2)

    # get out all of the search_keys the client is watching
    client_keys = set()
    client_stypes = set()
    for id, values_list in updates.items():
        # a single dict entry is treated as a one-element list
        if isinstance(values_list, dict):
            values_list = [values_list]
        for values in values_list:
            handler = values.get("handler")
            if handler:
                handler = Common.create_from_class_path(handler)
                # it could be a list
                search_key = handler.get_search_key()
            else:
                search_key = values.get("search_key")
            if search_key:
                if isinstance(search_key, list):
                    search_key_set = set(search_key)
                else:
                    search_key_set = set()
                    search_key_set.add(search_key)
                client_keys.update(search_key_set)
            stype = values.get("search_type")
            if stype:
                client_stypes.add(stype)

    # find all of the sobjects that have changed since last_timestamp
    changed_keys = set()
    changed_types = set()
    for check_type in ['sthpw/change_timestamp', 'sthpw/sobject_log']:
        search = Search(check_type)
        search.add_filter("timestamp", last_timestamp, op=">")
        # log entries about the log tables themselves are noise
        search.add_filters("search_type", ["sthpw/sobject_log", "sthpw/status_log"], op="not in")
        changed_sobjects = search.get_sobjects()
        for sobject in changed_sobjects:
            search_type = sobject.get_value("search_type")
            search_code = sobject.get_value("search_code")
            # sthpw types use '?' in search keys, project types use '&'
            if search_type.startswith("sthpw/"):
                search_key = "%s?code=%s" % (search_type, search_code)
            else:
                search_key = "%s&code=%s" % (search_type, search_code)
            changed_keys.add(u'%s' % search_key)
            changed_types.add(search_type)

    # keys both watched by the client and actually changed
    intersect_keys = client_keys.intersection(changed_keys)

    from pyasm.web import HtmlElement

    results = {}
    for id, values_list in updates.items():
        if isinstance(values_list, dict):
            values_list = [values_list]
        for values in values_list:
            handler = values.get("handler")
            if handler:
                handler = Common.create_from_class_path(handler)
                # handler can return a list of search_keys
                search_key = handler.get_search_key()
            else:
                search_key = values.get("search_key")
            stype = values.get("search_type")
            if search_key:
                if isinstance(search_key, list):
                    search_key_set = set(search_key)
                else:
                    search_key_set = set()
                    search_key_set.add(search_key)
                # filter for search_type first if it exists
                # check if any search_key is contained in intersect_keys, skip if not
                # (len(A - B) == len(A) means A and B are disjoint)
                if stype and stype in changed_types:
                    if len(intersect_keys - search_key_set) == len(intersect_keys):
                        continue
                elif len(intersect_keys - search_key_set) == len(intersect_keys):
                    continue

            # evaluate any compare expressions
            compare = values.get("compare")
            if compare:
                search_key = values.get("search_key")
                if search_key:
                    sobject = Search.get_by_search_key(search_key)
                else:
                    sobject = None
                cmp_result = Search.eval(compare, sobject, single=True)
                # compare expression passing means "no update needed"
                if cmp_result == True:
                    continue
                # some value to display
                value = "Loading ..."
            else:
                value = HtmlElement.eval_update(values)
                if value == None:
                    continue
            results[id] = value

    my.info = {"updates": results, "timestamp": timestamp}
    #print "Dyn Cmd duration", time.time() - start
    return results
def get_display(self):
    """Build the ingestion rule editor panel.

    Lets the user pick an existing config/ingest_rule (or create a new
    one), then lays out a form: a Scan section (list / rule / script
    modes for finding paths) and an Action section (check-in options),
    plus Save / Scan / Test / Ingest buttons that post the form values
    to server-side commands.
    """
    top = self.top
    self.set_as_panel(top)
    top.add_class("spt_ingestion_top")
    top.add_color("background", "background", -5)

    self.data = {}

    rules_div = DivWdg()
    top.add(rules_div)
    rules_div.add_style("padding: 10px")
    rules_div.add("Rules: ")

    # rule selector; changing it reloads this panel for the chosen rule
    rules_select = SelectWdg("rule_code")
    rule_code = self.get_value('rule_code')
    if rule_code:
        rules_select.set_value(rule_code)
    rules_select.set_option("query", "config/ingest_rule|code|title")
    rules_select.add_empty_option("-- New --")
    rules_div.add(rules_select)
    rules_select.add_behavior( {
        'type': 'change',
        'cbjs_action': '''
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        value = bvr.src_el.value;
        var class_name = 'tactic.ui.tools.IngestionToolWdg';
        spt.panel.load(top, class_name, {rule_code: value} );
        '''
    } )
    rules_div.add("<hr/>")

    # read from the database
    if rule_code:
        search = Search("config/ingest_rule")
        search.add_filter("code", rule_code)
        sobject = search.get_sobject()
    else:
        sobject = None

    if sobject:
        self.data = sobject.get_value("data")
        if self.data:
            self.data = jsonloads(self.data)

    # resolve the ingest session: explicit kwarg wins, else the rule's
    # related session
    session_code = self.kwargs.get("session_code")
    if session_code:
        session = Search.get_by_code("config/ingest_session", session_code)
    else:
        if sobject:
            session = sobject.get_related_sobject("config/ingest_session")
            # NOTE(review): debug prints left in
            print("sobject: ", sobject.get_code(), sobject.get_value("spt_ingest_session_code"))
            print("parent: ", session)
        else:
            session = None

    if not session:
        #session = SearchType.create("config/ingest_session")
        #session.set_value("code", "session101")
        #session.set_value("location", "local")
        ##session.set_value("base_dir", "C:")
        top.add("No session defined!!!")
        return top

    rule = ""
    filter = ""
    ignore = ""

    # get the base path
    if sobject:
        base_dir = sobject.get_value("base_dir")
    else:
        base_dir = ''
    #else:
    #    base_dir = self.get_value("base_dir")
    #if not base_dir:
    #    base_dir = ''

    if sobject:
        title = sobject.get_value("title")
    else:
        title = ''
    if sobject:
        code = sobject.get_value("code")
    else:
        code = ''

    # previously-submitted form values (web state)
    file_list = self.get_value("file_list")
    scan_type = self.get_value("scan_type")
    action_type = self.get_value("action_type")
    rule = self.get_value("rule")
    if not rule:
        rule = base_dir

    # get the rule for this path
    checkin_mode = "dir"
    depth = 0

    table = Table()
    rules_div.add(table)
    table.add_color("color", "color")

    from tactic.ui.input.text_input_wdg import TextInputWdg

    # add the title
    table.add_row()
    td = table.add_cell()
    td.add("Title: ")
    td = table.add_cell()
    text = TextInputWdg(name="title")
    td.add(text)
    if title:
        text.set_value(title)
    text.add_class("spt_title")
    text.add_style("width: 400px")
    #text.add_color("background", "background", -10)

    # add the optional code (read-only once the rule exists)
    table.add_row()
    td = table.add_cell()
    td.add("Code (optional): ")
    td = table.add_cell()
    text = TextInputWdg(name="code")
    td.add(text)
    if code:
        text.set_value(code)
        text.set_readonly()
        text.add_color("background", "background", -10)
    text.add_class("spt_code")
    text.add_style("width: 400px")

    table.add_row()
    td = table.add_cell()
    td.add_style("height: 10px")
    td.add("<hr/>")

    table.add_row()
    td = table.add_cell()
    td.add("<b>Scan:</b><br/>")
    td.add("The following information will be used to find the paths that will be operated on by the ingestion process<br/><br/>")

    # add a scan type
    table.add_row()
    td = table.add_cell()
    td.add("Type: ")
    select = SelectWdg("scan_type")
    select.add_class("spt_scan_type")
    td = table.add_cell()
    td.add(select)
    # NOTE(review): seeds the scan_type select from the "action" value —
    # looks like a copy/paste from the Action section; it is overridden
    # below when scan_type is set.
    select.set_value( self.get_value("action") )
    labels = ['Simple List', 'Rule', 'Script']
    values = ['list', 'rule', 'script']
    select.set_option("values", values)
    select.set_option("labels", labels)
    if scan_type:
        select.set_value(scan_type)

    table.add_row()
    table.add_cell(" ")

    # toggle visibility of the three scan-mode tbodies on change
    select.add_behavior( {
        'type': 'change',
        'cbjs_action': '''
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        value = bvr.src_el.value;
        var elements = top.getElements(".spt_scan_list");
        for (var i = 0; i < elements.length; i++) {
            if (value == 'list') spt.show(elements[i]);
            else spt.hide(elements[i]);
        }
        var elements = top.getElements(".spt_scan_rule");
        for (var i = 0; i < elements.length; i++) {
            if (value == 'rule') spt.show(elements[i]);
            else spt.hide(elements[i]);
        }
        var elements = top.getElements(".spt_scan_script");
        for (var i = 0; i < elements.length; i++) {
            if (value == 'script') spt.show(elements[i]);
            else spt.hide(elements[i]);
        }
        '''
    } )

    # add in a list of stuff — "Simple List" scan mode
    tbody = table.add_tbody()
    tbody.add_class("spt_scan_list")
    if scan_type != 'list':
        tbody.add_style("display: none")

    tr = table.add_row()
    td = table.add_cell()
    td.add("List of files: ")
    td = table.add_cell()
    text = TextAreaWdg(name="file_list")
    td.add(text)
    text.add_style("width: 400px")
    #text.set_readonly()
    #text.add_color("background", "background", -10)
    text.set_value(file_list)

    table.close_tbody()

    # add rule scan mode
    tbody = table.add_tbody()
    tbody.add_class("spt_scan_rule")
    if scan_type != 'rule':
        tbody.add_style("display: none")

    # add the path
    tr = table.add_row()
    td = table.add_cell()
    td.add("Starting Path: ")
    td = table.add_cell()
    # carry the session code along with the form submission
    hidden = HiddenWdg("session_code", session.get_code() )
    td.add(hidden)
    text = TextInputWdg(name="base_dir")
    td.add(text)
    text.set_value(base_dir)
    text.add_style("width: 400px")
    #text.set_readonly()
    #text.add_color("background", "background", -10)
    text.set_value(base_dir)

    # add rule
    tr = table.add_row()
    td = table.add_cell()
    td.add("Tag Rule: ")
    td = table.add_cell()
    text = TextInputWdg(name="rule")
    td.add(text)
    text.add_style("width: 400px")
    text.set_value(rule)

    tr = table.add_row()
    td = table.add_cell()
    td.add("Filter: ")
    td = table.add_cell()
    text = TextWdg("filter")
    td.add(text)
    text.set_value( self.get_value("filter") )
    text.add_style("width: 400px")
    text.add_style("padding: 2px")
    text.add_style("-moz-border-radius: 5px")

    tr = table.add_row()
    td = table.add_cell()
    td.add("Ignore: ")
    td = table.add_cell()
    text = TextWdg("ignore")
    td.add(text)
    text.set_value( self.get_value("ignore") )
    # NOTE(review): immediately overwritten with the empty `ignore`
    # local — the submitted value above is discarded.
    text.set_value(ignore)
    text.add_style("width: 400px")
    text.add_style("padding: 2px")
    text.add_style("-moz-border-radius: 5px")

    table.add_row()
    td = table.add_cell()
    td.add("Validation script: ")
    td.add_style("vertical-align: top")
    td.add_style("padding-top: 5px")
    td = table.add_cell()
    text = TextInputWdg(name="validation_script")
    text.set_value( self.get_value("validation_script") )
    text.add_style("width: 400px")
    td.add(text)
    # shortcut to open the validation script in the script editor
    icon = IconButtonWdg(title='Edit Validation Script', icon=IconWdg.EDIT)
    icon.add_style("float: right")
    td.add(icon)
    icon.add_behavior( {
        'type': 'click_up',
        'cbjs_action': '''
        spt.named_events.fire_event("show_script_editor");
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        var values = spt.api.Utility.get_input_values(top, null, false);
        var kwargs = { script_path: values.validation_script }
        setTimeout( function() { spt.js_edit.display_script_cbk(evt, kwargs) }, 500 );
        '''
    } )

    table.close_tbody()

    # add the script path — "Script" scan mode
    tbody = table.add_tbody()
    tbody.add_class("spt_scan_script")
    if scan_type != 'script':
        tbody.add_style("display: none")

    tr = table.add_row()
    td = table.add_cell()
    td.add("Script Path: ")
    td = table.add_cell()
    text = TextInputWdg(name="script_path")
    td.add(text)
    text.add_style("width: 400px")

    table.close_tbody()

    table.add_row()
    td = table.add_cell("<hr/>")

    table.add_row()
    td = table.add_cell()
    td.add("<b>Action</b><br/>")
    td.add("The following information define the actions that will be used on each matched path<br/><br/>")

    # pick the type of action
    table.add_row()
    td = table.add_cell()
    td.add("Type: ")
    select = SelectWdg("action_type")
    td = table.add_cell()
    td.add(select)
    labels = ['Checkin', 'Ignore']
    values = ['checkin', 'ignore']
    select.set_option("values", values)
    select.set_option("labels", labels)
    select.add_empty_option("-- Select --")
    if action_type:
        select.set_value(action_type)

    # toggle the action tbodies on change
    select.add_behavior( {
        'type': 'change',
        'cbjs_action': '''
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        value = bvr.src_el.value;
        var elements = top.getElements(".spt_action_ignore");
        for (var i = 0; i < elements.length; i++) {
            if (value == 'ignore') spt.show(elements[i]);
            else spt.hide(elements[i]);
        }
        var elements = top.getElements(".spt_action_checkin");
        for (var i = 0; i < elements.length; i++) {
            if (value == 'checkin') spt.show(elements[i]);
            else spt.hide(elements[i]);
        }
        '''
    } )

    table.add_row()
    td = table.add_cell("<br/>")

    # add the script path — "Checkin" action options
    tbody = table.add_tbody()
    tbody.add_class("spt_action_checkin")
    if action_type != 'checkin':
        tbody.add_style("display: none")

    # add the checkin type
    table.add_row()
    td = table.add_cell()
    td.add("Action: ")
    select = SelectWdg("action")
    td = table.add_cell()
    td.add(select)
    select.set_value( self.get_value("action") )
    # NOTE(review): 3 labels vs 4 values ('ignore' has no label) — looks
    # like a leftover; verify intended options.
    labels = ['File Checkin', 'Directory Checkin', 'Sequence Checkin']
    values = ['file', 'directory', 'sequence', 'ignore']
    select.set_option("values", values)
    select.set_option("labels", labels)

    table.add_row()
    td = table.add_cell()
    td.add("Mode: ")
    select = SelectWdg("mode")
    td = table.add_cell()
    td.add(select)
    labels = ['Copy', 'Move', 'In Place']
    values = ['copy', 'move', 'inplace']
    select.set_option("values", values)
    select.set_option("labels", labels)

    # add the search_type
    table.add_row()
    td = table.add_cell()
    td.add("sType: ")
    td = table.add_cell()
    select = SelectWdg("search_type")
    td.add(select)
    search_types = Project.get().get_search_types()
    values = [x.get_value("search_type") for x in search_types]
    select.set_option("values", values)
    search_type = self.kwargs.get("search_type")
    if search_type:
        select.set_value(search_type)

    # add the context
    table.add_row()
    td = table.add_cell()
    td.add("Context: ")
    td = table.add_cell()
    select = SelectWdg("context")
    td.add(select)
    select.set_option("values", ['publish', 'by rule', 'custom'])

    # add extra values: one name=value pair plus a template row for the
    # DynamicListWdg below
    extra_div = DivWdg()
    text = TextWdg("extra_name")
    text.add_attr("spt_is_multiple", "true")
    extra_div.add(text)
    extra_div.add(" = ")
    text = TextWdg("extra_value")
    extra_div.add(text)
    text.add_attr("spt_is_multiple", "true")

    template_div = DivWdg()
    text = TextWdg("extra_name")
    text.add_attr("spt_is_multiple", "true")
    template_div.add(text)
    template_div.add(" = ")
    text = TextWdg("extra_value")
    template_div.add(text)
    text.add_attr("spt_is_multiple", "true")

    table.close_tbody()

    table.add_row()
    td = table.add_cell("<br/>")

    table.add_row()
    td = table.add_cell()
    td.add("Extra Keywords: ")
    td.add_style("vertical-align: top")
    td.add_style("padding-top: 5px")
    td = table.add_cell()
    text = TextWdg("keywords")
    text.add_style("width: 300px")
    td.add(text)

    table.add_row()
    td = table.add_cell()
    td.add("Extra Values: ")
    td.add_style("vertical-align: top")
    td.add_style("padding-top: 5px")
    td = table.add_cell()
    extra_list = DynamicListWdg()
    td.add(extra_list)
    extra_list.add_item(extra_div)
    extra_list.add_template(template_div)

    table.add_row()
    table.add_cell(" ")

    table.add_row()
    td = table.add_cell()
    td.add("Process script: ")
    td.add_style("vertical-align: top")
    td.add_style("padding-top: 5px")
    td = table.add_cell()
    text = TextWdg("process_script")
    text.add_style("width: 300px")
    td.add(text)
    text.set_value( self.get_value("process_script") )
    # shortcut to open the process script in the script editor
    icon = IconButtonWdg(title='Edit Process Script', icon=IconWdg.EDIT)
    icon.add_style("float: right")
    td.add(icon)
    icon.add_behavior( {
        'type': 'click_up',
        'cbjs_action': '''
        spt.named_events.fire_event("show_script_editor");
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        var values = spt.api.Utility.get_input_values(top, null, false);
        var kwargs = { script_path: values.process_script }
        // need to wait for this
        setTimeout( function() { spt.js_edit.display_script_cbk(evt, kwargs) }, 500 );
        '''
    } )

    table.add_row()
    td = table.add_cell()
    td.add("Custom Naming: ")
    td.add_style("vertical-align: top")
    td.add_style("padding-top: 5px")
    td = table.add_cell()
    text = TextWdg("naming")
    text.add_style("width: 300px")
    td.add(text)

    table.add_row()
    td = table.add_cell()
    #td.add("<br clear='all'/>")
    td.add("<hr/>")

    # shared client-side behavior for the Scan / Test / Ingest buttons;
    # copied per-button with a different bvr.mode
    behavior = {
        'type': 'click_up',
        'cbjs_action': '''
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        var values = spt.api.Utility.get_input_values(top, null, false);
        spt.app_busy.show("Scanning ...", values.base_dir);
        var class_name = 'tactic.ui.tools.IngestionProcessWdg';
        var server = TacticServerStub.get();
        values.mode = bvr.mode;
        values.is_local = 'true';
        // scan client side
        if (values.is_local == 'true') {
            var base_dir = values.base_dir;
            var applet = spt.Applet.get();
            var files = applet.list_recursive_dir(base_dir);
            // turn into a string
            var files_in_js = [];
            for (var i = 0; i < files.length; i++) {
                var file = files[i].replace(/\\\\/g, "/");
                files_in_js.push( file );
            }
            values.files = files_in_js;
            values.base_dir = base_dir;
            /*
            var server = TacticServerStub.get();
            var handoff_dir = server.get_handoff_dir();
            var applet = spt.Applet.get();
            for (var i = 0; i < files_in_js.length; i++) {
                try {
                    var parts = files_in_js[i].split("/");
                    var filename = parts[parts.length-1];
                    spt.app_busy.show("Copying files to handoff", filename);
                    applet.copy_file(files_in_js[i], handoff_dir+"/"+filename);
                } catch(e) {
                    log.error(e);
                }
            }
            */
        }
        var info_el = top.getElement(".spt_info");
        spt.panel.load(info_el, class_name, values);
        spt.app_busy.hide();
        '''
    }

    # Save button
    button = ActionButtonWdg(title="Save", tip="Save Rule")
    td.add(button)
    button.add_style("float: right")
    behavior['mode'] = 'save'
    button.add_behavior( {
        'type': 'click_up',
        'cbjs_action': '''
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        var values = spt.api.Utility.get_input_values(top, null, false);
        spt.app_busy.show("Saving ...");
        var class_name = 'tactic.command.CheckinRuleSaveCmd';
        var server = TacticServerStub.get();
        server.execute_cmd(class_name, values);
        spt.panel.refresh(top, {});
        spt.app_busy.hide();
        '''
    } )

    # Scan button
    button = ActionButtonWdg(title="Scan", tip="Click to Scan")
    td.add(button)
    button.add_style("float: left")

    # set a limit
    #limit = TextWdg("limit")
    #td.add(limit)
    #text.add_style("float: left")

    behavior = behavior.copy()
    behavior['mode'] = 'scan'
    button.add_behavior(behavior)

    # Test button
    button = ActionButtonWdg(title="Test", tip="Do a test of this rule")
    td.add(button)
    behavior = behavior.copy()
    behavior['mode'] = 'test'
    button.add_behavior(behavior)
    button.add_style("float: left")

    # Ingest button
    button = ActionButtonWdg(title="Ingest", tip="Click to start ingesting")
    td.add(button)
    behavior = behavior.copy()
    behavior['mode'] = 'checkin'
    button.add_behavior(behavior)

    # reload this panel when a directory is picked in the file browser
    table.add_behavior( {
        'type': 'listen',
        'event_name': 'file_browser|select',
        'cbjs_action': '''
        var dirname = bvr.firing_data.dirname;
        var top = bvr.src_el.getParent(".spt_ingestion_top");
        var kwargs = { base_dir: dirname };
        spt.panel.load(top, top.getAttribute("spt_class_name"), kwargs);
        '''
    })

    top.add( self.get_info_wdg() )

    return top
# Southpaw Technology, and is not to be reproduced, transmitted, # or disclosed in any way without written permission. # # # import tacticenv from pyasm.common import jsonloads, jsondumps from tactic_client_lib import TacticServerStub from pyasm.biz import Project # The variables "function" and "kwargs" are autofilled by Jepp print "function: ", function print "kwargs: ", kwargs kwargs_dict = jsonloads(kwargs) # remap keys so that they are strings kwargs = {} for key,value in kwargs_dict.items(): kwargs[key.encode('utf-8')] = value #server = TacticServerStub.get(protocol='xmlrpc') server = TacticServerStub.get() # convert the args to a dict method = eval('''server.%s''' % function) ret_val = method(**kwargs) ret_val = jsondumps(ret_val)
def execute(my): trigger_sobj = my.get_trigger_sobj() data = trigger_sobj.get_value("data") data = jsonloads(data) print "trigger data: ", data, type(data) data_list = data if isinstance(data, dict): data_list = [data] src_task = my.get_caller() for data in data_list: # get the src task caller dst_task = None # it could be the FileCheckin Command if not isinstance(src_task, SObject): input = my.get_input() snapshot = input.get('snapshot') if not snapshot: continue if isinstance(snapshot, dict): snapshot = SearchKey.get_by_search_key(snapshot.get('__search_key__')) src_process = data.get('src_process') src_task = Search.eval("@SOBJECT(parent.sthpw/task['process','%s'])"%src_process,\ sobjects=snapshot, single=True) if not src_task: continue # make sure the caller process is the same as the source process if src_task.get_value("process") != data.get("src_process"): continue #conditionx = "@GET(.status) != 'Approved'" #result = Search.eval(conditionx, src_task) #print "result: ", result # make sure that the appropriate status was set src_status = data.get("src_status") if src_status and src_task.get_value("status") != src_status: continue dst_process = data.get("dst_process") dst_status = data.get("dst_status") sobject = src_task.get_parent() tasks = Task.get_by_sobject(sobject) updated_tasks = [] use_parent = data.get("use_parent") if use_parent in [True,'true']: parent = sobject.get_parent() parent_tasks = Task.get_by_sobject(parent, dst_process) condition = data.get("condition") if not condition: condition = "all" if condition == "all": condition_met = True for task in tasks: if src_task.get_value("status") != src_status: condition_met = False elif condition == "any": condition_met = False for task in tasks: if task.get_value("status") == src_status: condition_met = True break if condition_met: for task in parent_tasks: if task.get_value("process") == dst_process: updated_tasks.append(task) else: for task in tasks: if task.get_value("process") == dst_process: 
updated_tasks.append(task) for task in updated_tasks: if task.get_value("process") == dst_process: task.set_value("status", dst_status) task.commit() """