def copy_sobject(self, sobject, dst_search_type, context=None, checkin_mode='inplace'):
    '''Copy an sobject's data and its current snapshot files to a new
    sobject of a different search type.

    @params
    sobject - source sobject to copy
    dst_search_type - search type of the new sobject
    context - if given, only the current snapshot for this context is
        copied; otherwise all current snapshots are copied
    checkin_mode - mode passed through to FileCheckin (default 'inplace')

    @return
    list of error messages from failed check-ins (empty on success)
    '''
    new_sobject = SearchType.create(dst_search_type)
    columns = SearchType.get_columns(dst_search_type)

    data = sobject.get_data()
    for name, value in data.items():
        # id is auto-generated and pipeline_code may not apply to the
        # destination search type
        if name in ['id', 'pipeline_code']:
            continue
        if name not in columns:
            continue
        if not value:
            continue
        if name == "code":
            # generate a fresh code so the copy does not collide
            value = Common.get_next_sobject_code(sobject, 'code')
            if not value:
                continue
        new_sobject.set_value(name, value)

    if SearchType.column_exists(dst_search_type, "project_code"):
        project_code = Project.get_project_code()
        new_sobject.set_value("project_code", project_code)

    new_sobject.commit()

    # get all of the current snapshots and file paths associated
    if not context:
        snapshots = Snapshot.get_all_current_by_sobject(sobject)
    else:
        snapshot = Snapshot.get_current_by_sobject(sobject, context)
        # guard: there may be no current snapshot for the requested context
        snapshots = [snapshot] if snapshot else []

    msgs = []
    if not snapshots:
        return msgs

    for snapshot in snapshots:
        file_paths_dict = snapshot.get_all_paths_dict()
        file_types = file_paths_dict.keys()
        if not file_types:
            continue

        # make sure the paths match the file_types
        file_paths = [file_paths_dict.get(x)[0] for x in file_types]

        # checkin the files; use a local name so the "context" parameter
        # is not clobbered by the snapshot's own context
        snapshot_context = None
        try:
            snapshot_context = snapshot.get_value('context')
            checkin = FileCheckin(new_sobject, context=snapshot_context,
                    file_paths=file_paths, file_types=file_types,
                    mode=checkin_mode)
            checkin.execute()
        except CheckinException as e:
            # collect errors so one failed snapshot does not abort the rest
            msgs.append('Post-process Check-in Error for %s: %s ' % (snapshot_context, e.__str__()))

    return msgs
def copy_sobject(my, sobject, dst_search_type, context=None, checkin_mode='inplace'):
    '''Copy an sobject's data and its current snapshot files to a new
    sobject of a different search type.

    @params
    sobject - source sobject to copy
    dst_search_type - search type of the new sobject
    context - if given, only the current snapshot for this context is
        copied; otherwise all current snapshots are copied
    checkin_mode - mode passed through to FileCheckin (default 'inplace')

    @return
    list of error messages from failed check-ins (empty on success)
    '''
    new_sobject = SearchType.create(dst_search_type)
    columns = SearchType.get_columns(dst_search_type)

    data = sobject.get_data()
    for name, value in data.items():
        # id is auto-generated and pipeline_code may not apply to the
        # destination search type
        if name in ['id', 'pipeline_code']:
            continue
        if name not in columns:
            continue
        if not value:
            continue
        if name == "code":
            # generate a fresh code so the copy does not collide
            value = Common.get_next_sobject_code(sobject, 'code')
            if not value:
                continue
        new_sobject.set_value(name, value)

    if SearchType.column_exists(dst_search_type, "project_code"):
        project_code = Project.get_project_code()
        new_sobject.set_value("project_code", project_code)

    new_sobject.commit()

    # get all of the current snapshots and file paths associated
    if not context:
        snapshots = Snapshot.get_all_current_by_sobject(sobject)
    else:
        snapshot = Snapshot.get_current_by_sobject(sobject, context)
        # guard: there may be no current snapshot for the requested context
        snapshots = [snapshot] if snapshot else []

    msgs = []
    if not snapshots:
        return msgs

    for snapshot in snapshots:
        file_paths_dict = snapshot.get_all_paths_dict()
        file_types = file_paths_dict.keys()
        if not file_types:
            continue

        # make sure the paths match the file_types
        file_paths = [file_paths_dict.get(x)[0] for x in file_types]

        # checkin the files; use a local name so the "context" parameter
        # is not clobbered by the snapshot's own context
        snapshot_context = None
        try:
            snapshot_context = snapshot.get_value('context')
            checkin = FileCheckin(new_sobject, context=snapshot_context,
                    file_paths=file_paths, file_types=file_types,
                    mode=checkin_mode)
            checkin.execute()
        except CheckinException as e:
            # collect errors so one failed snapshot does not abort the rest
            msgs.append('Post-process Check-in Error for %s: %s ' % (snapshot_context, e.__str__()))

    return msgs
def get_search_col(cls, search_type):
    '''Get the appropriate keyword search col based on column existence
    in this sType: the first candidate in SEARCH_COLS that exists as a
    column, falling back to the last candidate.'''
    candidates = cls.SEARCH_COLS
    existing = (col for col in candidates
                if SearchType.column_exists(search_type, col))
    return next(existing, candidates[-1])
def _test_create(my):
    '''Create a snapshot for the first unittest person and verify its
    version and search references point back at that person.'''
    search = Search("unittest/person")
    person = search.get_sobjects()[0]

    snapshot = Snapshot.create(person, context="publish", snapshot_type="file")
    my.assertEquals(1, snapshot.get_value("version"))

    my.assertEquals(snapshot.get_value("search_type"), person.get_search_type())
    my.assertEquals(snapshot.get_value("search_code"), person.get_value("code"))

    # also check search_id (the column is optional on sthpw/snapshot)
    if SearchType.column_exists("sthpw/snapshot", "search_id"):
        my.assertEquals(snapshot.get_value("search_id"), person.get_value("id"))

    # the snapshot must resolve back to its parent sobject
    my.assertEquals(snapshot.get_sobject().get_code(), person.get_code())
def execute(my):
    '''Add the sobjects given by search_keys to the collection given by
    collection_key, skipping items already in the collection, and merge
    the collection's keywords into each newly added sobject.'''
    collection_key = my.kwargs.get("collection_key")
    search_keys = my.kwargs.get("search_keys")

    collection = Search.get_by_search_key(collection_key)
    if not collection:
        raise Exception("Collection does not exist")

    search_type = collection.get_base_search_type()
    parts = search_type.split("/")
    # membership is stored in the <table>_in_<table> join sType
    collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])

    # codes already present in the collection
    item_search = Search(collection_type)
    item_search.add_filter("parent_code", collection.get_code())
    existing_codes = set([item.get_value("search_code")
                          for item in item_search.get_sobjects()])

    has_keywords = SearchType.column_exists(search_type, "keywords")
    if has_keywords:
        raw_keywords = collection.get_value("keywords", no_exception=True)
        collection_keywords = set(raw_keywords.split(" "))

    # create new items
    for sobject in Search.get_by_search_keys(search_keys):
        if sobject.get_code() in existing_codes:
            continue

        new_item = SearchType.create(collection_type)
        new_item.set_value("parent_code", collection.get_code())
        new_item.set_value("search_code", sobject.get_code())
        new_item.commit()

        # copy the metadata of the collection
        if has_keywords:
            merged = set(sobject.get_value("keywords").split(" "))
            merged = merged.union(collection_keywords)
            sobject.set_value("keywords", " ".join(merged))
            sobject.commit()
def execute(my):
    '''Insert the sobjects identified by search_keys into the collection
    identified by collection_key.  Items already present are skipped and
    the collection's keywords are merged into each new member.'''
    collection_key = my.kwargs.get("collection_key")
    search_keys = my.kwargs.get("search_keys")

    collection = Search.get_by_search_key(collection_key)
    if not collection:
        raise Exception("Collection does not exist")

    collection_code = collection.get_code()
    search_type = collection.get_base_search_type()
    parts = search_type.split("/")
    # membership lives in the <table>_in_<table> join sType
    collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])

    member_search = Search(collection_type)
    member_search.add_filter("parent_code", collection_code)
    member_codes = set()
    for member in member_search.get_sobjects():
        member_codes.add(member.get_value("search_code"))

    has_keywords = SearchType.column_exists(search_type, "keywords")
    if has_keywords:
        collection_keywords = set(
            collection.get_value("keywords", no_exception=True).split(" "))

    sobjects = Search.get_by_search_keys(search_keys)
    for sobject in sobjects:
        code = sobject.get_code()
        if code in member_codes:
            continue

        member = SearchType.create(collection_type)
        member.set_value("parent_code", collection_code)
        member.set_value("search_code", code)
        member.commit()

        # propagate the collection's keywords onto the new member
        if has_keywords:
            combined = set(sobject.get_value("keywords").split(" ")) | collection_keywords
            sobject.set_value("keywords", " ".join(combined))
            sobject.commit()
def get_defaults(my):
    '''specifies the defaults for this sobject'''
    task_process = my.get_value("process")

    # derive the task pipeline from the parent's pipeline process attrs
    pipeline_code = ''
    parent = None
    if task_process:
        # TODO: this is slow. Need presearch all of the parents
        parent = my.get_parent()
        if parent:
            parent_pipeline_code = parent.get_value('pipeline_code', no_exception=True)
            pipeline = Pipeline.get_by_code(parent_pipeline_code)
            if pipeline:
                attributes = pipeline.get_process_attrs(task_process)
                pipeline_code = attributes.get('task_pipeline')
                if not pipeline_code:
                    # approval and task nodes both fall back to "approval"
                    node_type = attributes.get('type')
                    if node_type in ("approval", "task"):
                        pipeline_code = "approval"

    if not pipeline_code:
        pipeline_code = 'task'

    # in case it's a subpipeline
    context = my._add_context_suffix(task_process, task_process, parent)

    # then use the project as a parent
    project = Project.get()

    defaults = {
        "pipeline_code": pipeline_code,
        "project_code": Project.get_project_code(),
        "context": context,
        "search_type": "sthpw/project",
        "search_id": project.get_id(),
    }

    if SearchType.column_exists("sthpw/task", "search_code"):
        defaults['search_code'] = project.get_code()

    return defaults
def resolve_search_type_relationship(self, attrs, search_type, search_type2):
    '''Resolve a generic "search_type" relationship to either
    "search_code" or "search_id", depending on whether the related sType
    has a code column.'''
    relationship = attrs.get('relationship')
    assert relationship == 'search_type'

    # determine the direction of the relationship, then inspect the
    # sType on the other side
    my_is_from = attrs['from'] == search_type
    related = search_type2 if my_is_from else search_type

    if SearchType.column_exists(related, "code"):
        return 'search_code'
    return 'search_id'
def get_defaults(my):
    '''specifies the defaults for this sobject'''
    task_process = my.get_value("process")
    parent = None
    pipeline_code = ''

    if task_process:
        # TODO: this is slow. Need presearch all of the parents
        parent = my.get_parent()
        parent_pipeline = None
        if parent:
            parent_pipeline = Pipeline.get_by_code(
                parent.get_value('pipeline_code', no_exception=True))
        if parent_pipeline:
            attributes = parent_pipeline.get_process_attrs(task_process)
            pipeline_code = attributes.get('task_pipeline')
            # approval and task nodes both default to the approval pipeline
            if not pipeline_code and attributes.get('type') in ("approval", "task"):
                pipeline_code = "approval"

    if not pipeline_code:
        pipeline_code = 'task'

    # in case it's a subpipeline
    context = my._add_context_suffix(task_process, task_process, parent)

    # then use the project as a parent
    project = Project.get()
    defaults = {
        "pipeline_code": pipeline_code,
        "project_code": Project.get_project_code(),
        "context": context,
        "search_type": "sthpw/project",
        "search_id": project.get_id(),
    }
    if SearchType.column_exists("sthpw/task", "search_code"):
        defaults['search_code'] = project.get_code()
    return defaults
def get_content_wdg(my):
    '''Lay out the collection UI: an error message when the sType lacks
    collection support, otherwise a two-column table with the collection
    list on the left and the collection contents on the right.'''
    my.search_type = my.kwargs.get("search_type")
    my.collection_key = my.kwargs.get("collection_key")

    top = DivWdg()
    top.add_class("spt_collection_top")

    # collections require the _is_collection marker column on the sType
    if not SearchType.column_exists(my.search_type, "_is_collection"):
        msg_div = DivWdg()
        top.add(msg_div)
        msg_div.add("Search Type [%s] does not support collections" % my.search_type)
        msg_div.add_style("padding: 40px")
        msg_div.add_style("width: 300px")
        msg_div.add_style("margin: 100px auto")
        msg_div.add_border()
        return top

    top.add_style("margin: 5px 20px")

    table = Table()
    top.add(table)
    table.add_row()
    table.add_style("width: 100%")

    # content row: fixed-width list on the left, fluid content on the right
    table.add_row()

    left = table.add_cell()
    left.add_style("vertical-align: top")
    left.add_style("width: 300px")
    left.add_style("max-width: 300px")
    left.add_style("height: auto")

    right = table.add_cell()
    right.add_style("vertical-align: top")
    right.add_style("width: auto")
    right.add_style("height: auto")

    left.add(my.get_collection_wdg())
    right.add(my.get_right_content_wdg())

    return top
def get_content_wdg(my):
    '''Render the collection browser: a message for unsupported sTypes,
    otherwise a two-cell table holding the collection list and content.'''
    my.search_type = my.kwargs.get("search_type")
    my.collection_key = my.kwargs.get("collection_key")

    top = DivWdg()
    top.add_class("spt_collection_top")

    if not SearchType.column_exists(my.search_type, "_is_collection"):
        # sType has no _is_collection column: show an explanatory message
        msg_div = DivWdg()
        top.add(msg_div)
        msg_div.add("Search Type [%s] does not support collections" % my.search_type)
        for style in ("padding: 40px", "width: 300px", "margin: 100px auto"):
            msg_div.add_style(style)
        msg_div.add_border()
        return top

    top.add_style("margin: 5px 20px")

    table = Table()
    top.add(table)
    table.add_row()
    table.add_style("width: 100%")
    table.add_row()

    # left column: collection list (fixed width)
    left = table.add_cell()
    for style in ("vertical-align: top", "width: 300px",
                  "max-width: 300px", "height: auto"):
        left.add_style(style)

    # right column: collection contents (fluid width)
    right = table.add_cell()
    for style in ("vertical-align: top", "width: auto", "height: auto"):
        right.add_style(style)

    left.add(my.get_collection_wdg())
    right.add(my.get_right_content_wdg())
    return top
def get_search_col(cls, search_type, simple_search_view=''):
    '''Get the appropriate keyword search col based on column existence
    in this sType.  A simple-search view's "keyword" element may override
    the column choice.'''
    if simple_search_view:
        from pyasm.widget import WidgetConfigView
        config = WidgetConfigView.get_by_search_type(search_type, simple_search_view)
        # assume the keyword filter is named "keyword"
        column = config.get_display_options('keyword').get('column')
        if column:
            return column

    for candidate in cls.SEARCH_COLS:
        if SearchType.column_exists(search_type, candidate):
            return candidate
    return cls.SEARCH_COLS[-1]
def get_search_col(cls, search_type, simple_search_view=''):
    '''Get the appropriate keyword search col based on column existence
    in this sType.  When a simple search view is supplied, its "keyword"
    element's column option takes precedence.'''
    if simple_search_view:
        from pyasm.widget import WidgetConfigView
        config = WidgetConfigView.get_by_search_type(search_type, simple_search_view)
        # assume the keyword filter is named "keyword"
        options = config.get_display_options('keyword')
        column = options.get('column')
        if column:
            return column

    # first existing candidate wins; fall back to the last candidate
    matches = (col for col in cls.SEARCH_COLS
               if SearchType.column_exists(search_type, col))
    return next(matches, cls.SEARCH_COLS[-1])
def get_display(my):
    '''Build a select widget listing the search types that can carry a
    pipeline (i.e. those with a pipeline_code column), labeled as
    "<title> (<search_type>)".'''
    #project = Project.get()
    schema = Schema.get()
    # no hierarchy to prevent all sthpw and parent sTypes
    search_type_names = schema.get_search_types(hierarchy=False)
    search = Search('sthpw/search_object')
    search.add_filters('search_type', search_type_names)
    search_types = search.get_sobjects()

    # tasks can always carry a pipeline
    task_search_type = SearchType.get("sthpw/task")
    search_types.append(task_search_type)

    # NOTE: the original computed a throwaway "values" list here that was
    # immediately overwritten; that dead work has been removed.
    values = []
    labels = []
    for search_type in search_types:
        base_type = search_type.get_base_key()
        # only sTypes with a pipeline_code column can be assigned a pipeline
        if not SearchType.column_exists(base_type, "pipeline_code"):
            continue
        label = "%s (%s)" % (search_type.get_value("title"),
                             search_type.get_value("search_type"))
        labels.append(label)
        values.append(base_type)

    sobject = my.get_current_sobject()
    if not sobject:
        value = ""
    else:
        value = sobject.get_value(my.get_name())

    my.set_option("values", values)
    my.set_option("labels", labels)
    my.add_empty_option("-- Select --")
    if value:
        my.set_value(value)

    return super(SearchTypeWithPipelineInputWdg, my).get_display()
def get_display(self):
    '''Build a select widget listing the search types that can carry a
    pipeline (i.e. those with a pipeline_code column), labeled as
    "<title> (<search_type>)".'''
    #project = Project.get()
    schema = Schema.get()
    # no hierarchy to prevent all sthpw and parent sTypes
    search_type_names = schema.get_search_types(hierarchy=False)
    search = Search('sthpw/search_object')
    search.add_filters('search_type', search_type_names)
    search_types = search.get_sobjects()

    # tasks can always carry a pipeline
    task_search_type = SearchType.get("sthpw/task")
    search_types.append(task_search_type)

    # NOTE: the original computed a throwaway "values" list here that was
    # immediately overwritten; that dead work has been removed.
    values = []
    labels = []
    for search_type in search_types:
        base_type = search_type.get_base_key()
        # only sTypes with a pipeline_code column can be assigned a pipeline
        if not SearchType.column_exists(base_type, "pipeline_code"):
            continue
        label = "%s (%s)" % (search_type.get_value("title"),
                             search_type.get_value("search_type"))
        labels.append(label)
        values.append(base_type)

    sobject = self.get_current_sobject()
    if not sobject:
        value = ""
    else:
        value = sobject.get_value(self.get_name())

    self.set_option("values", values)
    self.set_option("labels", labels)
    self.add_empty_option("-- Select --")
    if value:
        self.set_value(value)

    return super(SearchTypeWithPipelineInputWdg, self).get_display()
def _test_create(my):
    '''Verify Snapshot.create() produces a version-1 snapshot whose
    search references resolve back to the source person.'''
    persons = Search("unittest/person").get_sobjects()
    person = persons[0]

    snapshot_type = "file"
    snapshot = Snapshot.create(person, context="publish",
                               snapshot_type=snapshot_type)

    version = snapshot.get_value("version")
    my.assertEquals(1, version)

    my.assertEquals(snapshot.get_value("search_type"),
                    person.get_search_type())
    my.assertEquals(snapshot.get_value("search_code"),
                    person.get_value("code"))

    # search_id is an optional column on sthpw/snapshot
    if SearchType.column_exists("sthpw/snapshot", "search_id"):
        my.assertEquals(snapshot.get_value("search_id"),
                        person.get_value("id"))

    # the snapshot must resolve back to its parent sobject
    resolved = snapshot.get_sobject()
    my.assertEquals(resolved.get_code(), person.get_code())
def get_data_wdg(my):
    '''Build the "data" portion of the ingest UI: an optional process
    selector (when the sType has a pipeline), the Metadata button with
    its dialog, the update-mode selector, and the hidden fields
    (parent_key, extra_data) submitted with an ingest.'''
    div = DivWdg()

    from pyasm.biz import Pipeline
    from pyasm.widget import SelectWdg

    # offer a process selector when the sType has a pipeline defined
    search_type_obj = SearchType.get(my.search_type)
    base_type = search_type_obj.get_base_key()
    search = Search("sthpw/pipeline")
    search.add_filter("search_type", base_type)
    pipelines = search.get_sobjects()
    if pipelines:
        pipeline = pipelines[0]

        process_names = pipeline.get_process_names()
        if process_names:
            table = Table()
            div.add(table)
            table.add_row()
            table.add_cell("Process: ")
            select = SelectWdg("process")
            table.add_cell(select)
            # extra pseudo-processes appended after the pipeline's own
            process_names.append("---")
            process_names.append("publish")
            process_names.append("icon")
            select.set_option("values", process_names)

    ####
    buttons = Table()
    div.add(buttons)
    buttons.add_row()
    #button = IconButtonWdg(title="Fill in Data", icon=IconWdg.EDIT)
    button = ActionButtonWdg(title="Metadata")
    button.add_style("float: left")
    button.add_style("margin-top: -3px")
    buttons.add_cell(button)

    # label for the update-mode selector
    select_label = DivWdg("Update mode")
    select_label.add_style("float: left")
    select_label.add_style("margin-top: -3px")
    select_label.add_style("margin-left: 20px")
    buttons.add_cell(select_label)

    # update mode defaults to "true" unless overridden by kwargs
    update_mode_option = my.kwargs.get("update_mode")
    if not update_mode_option:
        update_mode_option = "true"
    update_mode = SelectWdg(name="update mode")
    update_mode.add_class("spt_update_mode_select")
    update_mode.set_option("values", ["false", "true", "sequence"])
    update_mode.set_option("labels", ["Off", "On", "Sequence"])
    update_mode.set_option("default", update_mode_option)
    update_mode.add_style("float: left")
    update_mode.add_style("margin-top: -3px")
    update_mode.add_style("margin-left: 5px")
    update_mode.add_style("margin-right: 5px")
    buttons.add_cell(update_mode)

    # info icon explaining the update modes on click
    update_info = DivWdg()
    update_info.add_class("glyphicon")
    update_info.add_class("glyphicon-info-sign")
    update_info.add_style("float: left")
    update_info.add_style("margin-top: -3px")
    update_info.add_style("margin-left: 10px")
    update_info.add_behavior({
        'type': 'click_up',
        'cbjs_action': ''' spt.info("When 
update mode is on, if a file shares the name of one other file in the asset library, the file will update on ingest. If more than one file shares the name of an ingested asset, a new asset is created.<br> If sequence mode is selected, the system will update the sobject on ingest if a file sequence sharing the same name already exists.", {type: 'html'}); '''
    })
    buttons.add_cell(update_info)

    # the Metadata dialog is opened by the button above
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x': -10, 'y': 10})

    dialog_data_div = DivWdg()
    dialog_data_div.add_color("background", "background")
    dialog_data_div.add_style("padding", "20px")
    dialog.add(dialog_data_div)

    # Order folders by date
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")

    # categorization radios only apply when files are stored under a
    # relative directory
    if SearchType.column_exists(my.search_type, "relative_dir"):
        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "none")
        category_div.add(checkbox)
        category_div.add(" No categories")
        category_div.add_style("margin-bottom: 5px")
        # "No categories" is the pre-selected option
        checkbox.set_option("checked", "true")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_day")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Day")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_week")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Week")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_year")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Year")
        category_div.add_style("margin-bottom: 5px")

        """
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "custom")
        name_div.add(checkbox)
        name_div.add(" Custom")
        """

        name_div.add("<br/>")

    # edit widget providing the ingest metadata fields
    ingest_data_view = my.kwargs.get('ingest_data_view')

    from tactic.ui.panel import EditWdg

    sobject = SearchType.create(my.search_type)
    edit = EditWdg(search_key=sobject.get_search_key(), mode='view', view=ingest_data_view)

    dialog_data_div.add(edit)

    hidden = HiddenWdg(name="parent_key")
    dialog_data_div.add(hidden)
    hidden.add_class("spt_parent_key")
    parent_key = my.kwargs.get("parent_key") or ""
    if parent_key:
        hidden.set_value(parent_key)

    extra_data = my.kwargs.get("extra_data")
    if not isinstance(extra_data, basestring):
        extra_data = jsondumps(extra_data)
    if extra_data and extra_data != "null":
        # it needs a TextArea instead of Hidden because of JSON data
        text = TextAreaWdg(name="extra_data")
        text.add_style('display: none')
        text.set_value(extra_data)
        dialog_data_div.add(text)

    """
    dialog_data_div.add("Keywords:<br/>")
    dialog.add(dialog_data_div)
    text = TextAreaWdg(name="keywords")
    dialog_data_div.add(text)
    text.add_class("spt_keywords")
    text.add_style("padding: 1px")
    dialog_data_div.add("<br/>"*2)
    text.add_class("spt_extra_data")
    text.add_style("padding: 1px")
    """

    #### TEST Image options
    """
    button = IconButtonWdg(title="Resize", icon=IconWdg.FILM)
    buttons.add_cell(button)
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})
    try:
        from spt.tools.convert import ConvertOptionsWdg
        convert_div = DivWdg()
        dialog.add(convert_div)
        convert_div.add_style("padding: 20px")
        convert_div.add_color("background", "background")
        convert_div.add_class("spt_image_convert")
        convert = ConvertOptionsWdg()
        convert_div.add(convert)
    except:
        pass
    """

    # use base name for name
    """
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")
    checkbox = CheckboxWdg("use_file_name")
    name_div.add(checkbox)
    name_div.add(" Use name of file for name")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("use_base_name")
    name_div.add(checkbox)
    name_div.add(" Remove extension")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("file_keywords")
    name_div.add(checkbox)
    name_div.add(" Use file name for keywords")
    """

    return div
def get_display(self):
    '''Render a raw value for display in a table cell: handles the
    empty/"--Select--" placeholder, timestamp formatting in the user's
    timezone, wiki conversion of strings, and wraps the value in a div
    when the column actually exists on the sType.'''
    sobject = self.get_current_sobject()

    # an explicit "column" kwarg overrides the element name
    column = self.kwargs.get('column')
    if column:
        name = column
    else:
        name = self.get_name()

    value = self.get_value(name=name)

    # show a centered placeholder for empty editable cells
    empty = self.get_option("empty")
    if empty and self.is_editable() and not value:
        from pyasm.web import SpanWdg
        div = DivWdg()
        div.add_style("text-align: center")
        div.add_style("width: 100%")
        div.add_style("white-space: nowrap")
        if empty in [True, 'true']:
            div.add("--Select--")
            div.add_style("opacity: 0.5")
        return div

    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        # no sobject available: treat the value as plain text
        data_type = 'text'

    # run string values through the wiki converter (links, markup, etc.)
    if type(value) in types.StringTypes:
        wiki = WikiUtil()
        value = wiki.convert(value)

    if name == 'id' and value == -1:
        # -1 looks like a placeholder id; display nothing
        value = ''
    elif data_type in ["timestamp", "time"] or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)
            # convert to user timezone (day-only columns are left as-is)
            if not SObject.is_day_column(name):
                date = self.get_timezone_value(date)
            try:
                # decode via the current locale so localized month names
                # display correctly; fall back to the raw bytes
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''
    else:
        if isinstance(value, Widget):
            # widgets render themselves
            return value
        elif not isinstance(value, basestring):
            # stringify non-numeric values; numbers pass through unchanged
            try:
                value + 1
            except TypeError:
                value = str(value)
        #else:
        #    value_wdg.add_style("float: right")
        #    value_wdg.add_style("padding-right: 3px")

    if sobject and SearchType.column_exists(sobject.get_search_type(), name):
        value_wdg = DivWdg()
        self.add_value_update(value_wdg, sobject, name)
        # don't call str() to prevent utf-8 encode error
        value_wdg.add(value)
        value_wdg.add_style("overflow-x: hidden")
        value_wdg.add_style("text-overflow: ellipsis")

        # some properties
        min_height = 25
        value_wdg.add_style("min-height: %spx" % min_height)

        single_line = self.get_option("single_line") or False
        if single_line in ["true", True]:
            value_wdg.add_style("line-height: %spx" % min_height)
            value_wdg.add_style("white-space: nowrap")

        #value_wdg.add_style("overflow-y: hidden")
        #value_wdg.add_class("spt_scrollable")
        #value_wdg.add_attr("title", value)

        # when a link expression is configured, style the cell as a link
        link_expression = self.get_option("link_expression")
        if link_expression:
            value_wdg.add_class("tactic_new_tab")
            value_wdg.add_style("display: inline-block")
            value_wdg.add_attr("search_key", sobject.get_search_key())
            value_wdg.add_style("text-decoration: underline")
            #value_wdg.add_attr("spt_class_name", "tactic.ui.tools.SObjectDetailWdg")
            value_wdg.add_class("hand")

        return value_wdg

    return value
def get_display(my): top = DivWdg() top.add_border() top.add_style("padding: 10px") top.add_color("color", "color") top.add_gradient("background", "background", 0, -5) #top.add_style("height: 550px") top.add_class("spt_reports_top") my.set_as_panel(top) inner = DivWdg() top.add(inner) title = DivWdg() title.add("Reports") title.add_style("font-size: 18px") title.add_style("font-weight: bold") title.add_style("text-align: center") title.add_style("padding: 10px") title.add_style("margin: -10px -10px 0px -10px") inner.add(title) title.add_gradient("background", "background3", 5, -10) from tactic.ui.widget import TitleWdg subtitle = TitleWdg(name_of_title='List of Built in Reports',help_alias='main') inner.add(subtitle) inner.add("<br/>") button_div = DivWdg() inner.add(button_div) button_div.add_class("spt_buttons_top") button_div.add_style("margin-top: -5px") button_div.add_style("margin-bottom: 30px") button_div.add_border() button_div.add_style("margin-top: -15px") button_div.add_style("margin-bottom: 0px") button_div.add_style("width: 100%") button_div.add_style("height: 33px") button_div.add_color("background", "background2") button_div.add_style("margin-left: auto") button_div.add_style("margin-right: auto") button = SingleButtonWdg(title="Collapse", icon=IconWdg.HOME) button_div.add(button) button.add_style("float: left") button.add_style("left: 5px") button.add_style("top: 5px") # FIXME: get home for the user #home = 'tactic.ui.startup.ContentCreatorWdg' home = 'tactic.ui.startup.MainWdg' button.add_behavior( { 'type': 'click_up', 'cbjs_action': ''' spt.tab.set_main_body_tab(); var class_name = 'tactic.ui.startup.MainWdg'; var kwargs = { help_alias: 'main' }; spt.tab.add_new("_startup", "Startup", class_name, kwargs); ''' } ) """ button = SingleButtonWdg(title="Collapse", icon=IconWdg.ARROW_UP) button_div.add(button) button.add_class("spt_collapse") inner.add(button_div) button.add_style("float: left") button.add_style("left: 5px") button.add_style("top: 
5px") button.add_behavior( { 'type': 'click_up', 'cbjs_action': ''' var top = bvr.src_el.getParent(".spt_reports_top"); var element = top.getElement(".spt_reports_list"); var buttons = bvr.src_el.getParent(".spt_buttons_top"); expand = buttons.getElement(".spt_expand"); new Fx.Tween(element).start('margin-top', "-400px"); expand.setStyle("display", ""); bvr.src_el.setStyle("display", "none"); ''' } ) button = SingleButtonWdg(title="Expand", icon=IconWdg.ARROW_DOWN) button.add_style("display: none") button.add_class("spt_expand") button_div.add(button) button.add_style("left: 5px") button.add_style("top: 5px") inner.add(button_div) button.add_style("float: left") button.add_behavior( { 'type': 'click_up', 'cbjs_action': ''' var top = bvr.src_el.getParent(".spt_reports_top"); var element = top.getElement(".spt_reports_list"); var buttons = bvr.src_el.getParent(".spt_buttons_top"); collapse = buttons.getElement(".spt_collapse"); new Fx.Tween(element).start('margin-top', "0px"); collapse.setStyle("display", ""); bvr.src_el.setStyle("display", "none"); ''' } ) """ reports = [] # read the config file from pyasm.widget import WidgetConfig tmp_path = __file__ dir_name = os.path.dirname(tmp_path) file_path="%s/../config/reports-conf.xml" % (dir_name) config = WidgetConfig.get(file_path=file_path, view="definition") category = my.kwargs.get('category') # get all of the configs from the database if not category or category in ["custom_reports", "custom_charts"]: search = Search("config/widget_config") search.add_op("begin") if category == "custom_reports": search.add_filter("widget_type", "report") elif category == "custom_charts": search.add_filter("widget_type", "chart") elif not category: search.add_filters("widget_type", ["chart","report"]) search.add_op("or") db_configs = search.get_sobjects() else: db_configs = [] element_names = my.kwargs.get("element_names") if element_names is None: element_names = config.get_element_names() project = Project.get() for element_name 
in element_names: key = {'project': project.get_code(), 'element': element_name} key2 = {'project': project.get_code(), 'element': '*'} key3 = {'element': element_name} key4 = {'element': '*'} keys = [key, key2, key3, key4] if not top.check_access("link", keys, "view", default="deny"): continue attrs = config.get_element_attributes(element_name) report_data = {} kwargs = config.get_display_options(element_name) class_name = kwargs.get('class_name') # the straight xml definition contains the sidebar class_name # with LinkWdg ... we shouldn't use this, so build the # element from scratch #xml = config.get_element_xml(element_name) from pyasm.search import WidgetDbConfig xml = WidgetDbConfig.build_xml_definition(class_name, kwargs) report_data['class_name'] = class_name report_data['kwargs'] = kwargs report_data['title'] = attrs.get("title") report_data['description'] = attrs.get("description") report_data['image'] = attrs.get("image") report_data['xml'] = xml reports.append(report_data) for db_config in db_configs: element_name = db_config.get_value("view") key = {'project': project.get_code(), 'element': element_name} key2 = {'project': project.get_code(), 'element': '*'} key3 = {'element': element_name} key4 = {'element': '*'} keys = [key, key2, key3, key4] if not top.check_access("link", keys, "view", default="deny"): continue report_data = {} view = db_config.get_value("view") kwargs = { 'view': view } parts = view.split(".") title = Common.get_display_title(parts[-1]) xml = db_config.get_value("config") report_data['class_name'] = "tactic.ui.panel.CustomLayoutWdg" report_data['kwargs'] = kwargs report_data['title'] = title report_data['description'] = title report_data['image'] = None report_data['xml'] = xml report_data['widget_type'] = db_config.get_value("widget_type") if report_data['widget_type'] == 'report': report_data['category'] = "custom_reports" elif report_data['widget_type'] == 'chart': report_data['category'] = "custom_charts" 
reports.append(report_data) """ report_data = { 'title': 'Tasks Completed This Week', 'class_name': 'tactic.ui.panel.ViewPanelWdg', 'kwargs': { 'search_type': 'sthpw/task', 'view': 'table' }, } reports.append(report_data) """ if category == 'list_item_reports' or not category: search_types = Project.get().get_search_types() for search_type in search_types: base_key = search_type.get_base_key() key = {'project': project.get_code(), 'code': base_key} key2 = {'project': project.get_code(), 'code': '*'} key3 = {'code': base_key} key4 = {'code': '*'} keys = [key, key2, key3, key4] if not top.check_access("search_type", keys, "view", default="deny"): continue if not SearchType.column_exists(base_key, "pipeline_code"): continue thumb_div = DivWdg() image = thumb_div thumb_div.add_border() thumb_div.set_box_shadow("1px 1px 1px 1px") thumb_div.add_style("width: 60px") thumb = ThumbWdg() thumb_div.add(thumb) thumb.set_sobject(search_type) thumb.set_icon_size(60) report_data = { 'title': '%s Workflow Status' % search_type.get_title(), 'description': 'Number of items in each process', 'class_name': 'tactic.ui.report.stype_report_wdg.STypeReportWdg', 'kwargs': { 'search_type': base_key }, 'image': thumb_div } reports.append(report_data) report_data = { 'title': '%s Labor Cost Report' % search_type.get_title(), 'description': 'Labor Cost Breakdown for each Item', 'class_name': 'tactic.ui.panel.ViewPanelWdg', 'kwargs': { 'search_type': search_type.get_code(), 'view': "table", 'show_header': False, 'mode': 'simple', 'element_names': "preview,code,title,cost_breakdown,bid_hours,bid_cost,actual_hours,actual_cost,overbudget,variance" }, 'image': IconWdg("", IconWdg.REPORT_03) } reports.append(report_data) table2 = Table() inner.add(table2) table2.add_style("width: 100%") categories_div = DivWdg() td = table2.add_cell(categories_div) td.add_style("vertical-align: top") td.add_style("width: 200px") td.add_color("background", "background3") td.add_border() 
#categories_div.add_style("margin: -1px 0px 0px -1px") categories_div.add_style("padding-top: 10px") #categories_div.add_style("float: left") categories_div.add_color("color", "color3") categories = config.get_all_views() categories.insert(-1, "list_item_reports") categories.insert(-1, "custom_charts") categories.insert(-1, "custom_reports") table_div = DivWdg() td = table2.add_cell(table_div) td.add_style("vertical-align: top") table_div.add_class("spt_reports_list") table_div.add_border() table_div.add_color("background", "background", -5) table_div.add_style("min-height: 500px") for i, category in enumerate(categories): if i == len(categories) - 1: categories_div.add("<hr/>") config.set_view(category) element_names = config.get_element_names() if category == "definition": title = "All Reports" else: title = Common.get_display_title(category) category_div = DivWdg() categories_div.add(category_div) category_div.add(title) category_div.add_style("padding: 5px") category_div.add_class("hand") category_div.add_behavior( { 'type': 'click_up', 'category': category, 'element_names': element_names, 'class_name': Common.get_full_class_name(my), 'cbjs_action': ''' var kwargs = { is_refresh: true, category: bvr.category, element_names: bvr.element_names } //spt.panel.refresh(top, kwargs); var top = bvr.src_el.getParent(".spt_reports_top"); spt.panel.load(top, bvr.class_name, kwargs); ''' } ) bgcolor = category_div.get_color("background3", -10) category_div.add_behavior( { 'type': 'mouseover', 'bgcolor': bgcolor, 'cbjs_action': ''' bvr.src_el.setStyle("background", bvr.bgcolor); ''' } ) category_div.add_behavior( { 'type': 'mouseout', 'bgcolor': bgcolor, 'cbjs_action': ''' bvr.src_el.setStyle("background", ""); ''' } ) # create a bunch of panels table = Table() table_div.add(table) table.add_color("color", "color") table.add_style("margin-top: 20px") table.center() table_div.add_style("margin: -3px -3px -1px -2px") if not reports: tr = table.add_row() td = table.add_cell() 
td.add("There are no reports defined.") td.add_style("padding: 50px") if my.kwargs.get("is_refresh") in ['true', True]: return inner else: return top for i, report in enumerate(reports): #if i == 0 or i%4 == 0: if i%3 == 0: tr = table.add_row() td = table.add_cell() td.add_style("vertical-align: top") td.add_style("padding: 3px") title = report #description = '''The schema is used to layout the basic components of your project. Each component represents a list of items that you use in your business everyday.''' description = report.get("title") # Each node will contain a list of "items" and will be stored as a table in the database.''' class_name = report.get("class_name") kwargs = report.get("kwargs") title = report.get("title") description = report.get("description") widget_type = report.get("widget_type") image = report.get("image") icon = report.get("icon") xml = report.get("xml") if image: div = DivWdg() if isinstance(image, basestring): image = image.upper() image = eval("IconWdg('', IconWdg.%s)" % image) div.add_style("margin-left: 15px") div.add_style("margin-top: 5px") else: image = image div.add(image) image = div elif icon: icon = icon.upper() image = eval("IconWdg('', IconWdg.%s)" % icon) else: div = DivWdg() """ import random num = random.randint(0,3) if num == 1: image = IconWdg("Bar Chart", IconWdg.GRAPH_BAR_01) elif num == 2: image = IconWdg("Bar Chart", IconWdg.GRAPH_LINE_01) else: image = IconWdg("Bar Chart", IconWdg.GRAPH_BAR_02) """ if widget_type == "chart": image = IconWdg("Chart", IconWdg.GRAPH_BAR_02) else: image = IconWdg("No Image", IconWdg.WARNING) div.add_style("margin-left: 15px") div.add_style("margin-top: 5px") div.add(image) image = div behavior = { 'type': 'click_up', 'title': title, 'class_name': class_name, 'kwargs': kwargs, 'cbjs_action': ''' spt.tab.set_main_body_tab(); //var top = bvr.src_el.getParent(".spt_reports_top"); //spt.tab.set_tab_top(top); spt.tab.add_new(bvr.title, bvr.title, bvr.class_name, bvr.kwargs); ''' } 
schema_wdg = my.get_section_wdg(title, description, image, behavior) schema_wdg.add_behavior( { 'type': 'load', 'title': title, 'class_name': class_name, 'xml': xml, 'kwargs': kwargs, 'cbjs_action': ''' var report_top = bvr.src_el; report_top.kwargs = bvr.kwargs; report_top.class_name = bvr.class_name; report_top.element_name = bvr.title; report_top.xml = bvr.xml; ''' } ) td.add(schema_wdg) inner.add("<br/>") #from tactic.ui.container import TabWdg #tab = TabWdg(show_add=False) #inner.add(tab) if my.kwargs.get("is_refresh") in ['true', True]: return inner else: return top
def get_data_wdg(my):
    """Build the metadata/data entry portion of the ingest UI.

    Assembles: an optional process selector (when the sType has a pipeline),
    a "Metadata" button with its popup dialog, an update-mode selector with
    an info icon, category-by-date radio options, an EditWdg for the ingest
    data view, and hidden parent_key / extra_data form fields.

    Returns:
        DivWdg containing the assembled widgets.
    """
    div = DivWdg()

    from pyasm.biz import Pipeline
    from pyasm.widget import SelectWdg

    # Offer a process selector only when this sType has a pipeline with
    # named processes.
    search_type_obj = SearchType.get(my.search_type)
    base_type = search_type_obj.get_base_key()
    search = Search("sthpw/pipeline")
    search.add_filter("search_type", base_type)
    pipelines = search.get_sobjects()
    if pipelines:
        pipeline = pipelines[0]
        process_names = pipeline.get_process_names()
        if process_names:
            table = Table()
            div.add(table)
            table.add_row()
            table.add_cell("Process: ")
            select = SelectWdg("process")
            table.add_cell(select)
            # pseudo-processes appended after a separator entry
            process_names.append("---")
            process_names.append("publish")
            process_names.append("icon")
            select.set_option("values", process_names)

    ####
    buttons = Table()
    div.add(buttons)
    buttons.add_row()
    #button = IconButtonWdg(title="Fill in Data", icon=IconWdg.EDIT)
    button = ActionButtonWdg(title="Metadata")
    button.add_style("float: left")
    button.add_style("margin-top: -3px")
    buttons.add_cell(button)

    # "Update mode" label + selector: Off / On / Sequence (default comes
    # from the "update_mode" kwarg, falling back to "true")
    select_label = DivWdg("Update mode");
    select_label.add_style("float: left")
    select_label.add_style("margin-top: -3px")
    select_label.add_style("margin-left: 20px")
    buttons.add_cell(select_label)

    update_mode_option = my.kwargs.get("update_mode")
    if not update_mode_option:
        update_mode_option = "true"
    update_mode = SelectWdg(name="update mode")
    update_mode.add_class("spt_update_mode_select")
    update_mode.set_option("values", ["false", "true", "sequence"])
    update_mode.set_option("labels", ["Off", "On", "Sequence"])
    update_mode.set_option("default", update_mode_option)
    update_mode.add_style("float: left")
    update_mode.add_style("margin-top: -3px")
    update_mode.add_style("margin-left: 5px")
    update_mode.add_style("margin-right: 5px")
    buttons.add_cell(update_mode)

    # info icon that pops up an explanation of the update modes
    update_info = DivWdg()
    update_info.add_class("glyphicon")
    update_info.add_class("glyphicon-info-sign")
    update_info.add_style("float: left")
    update_info.add_style("margin-top: -3px")
    update_info.add_style("margin-left: 10px")
    update_info.add_behavior( {
        'type': 'click_up',
        'cbjs_action': ''' spt.info("When update mode is on, if a file shares the name of one other file in the asset library, the file will update on ingest. If more than one file shares the name of an ingested asset, a new asset is created.<br> If sequence mode is selected, the system will update the sobject on ingest if a file sequence sharing the same name already exists.", {type: 'html'}); '''
    } )
    buttons.add_cell(update_info);

    # dialog opened by the Metadata button
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})

    dialog_data_div = DivWdg()
    dialog_data_div.add_color("background", "background")
    dialog_data_div.add_style("padding", "20px")
    dialog.add(dialog_data_div)

    # Order folders by date
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")

    # category radios only make sense when files can be re-homed by folder
    if SearchType.column_exists(my.search_type, "relative_dir"):
        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "none")
        category_div.add(checkbox)
        category_div.add(" No categories")
        category_div.add_style("margin-bottom: 5px")
        checkbox.set_option("checked", "true")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_day")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Day")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_week")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Week")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_year")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Year")
        category_div.add_style("margin-bottom: 5px")

        """
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "custom")
        name_div.add(checkbox)
        name_div.add(" Custom")
        """

        name_div.add("<br/>")

    # embed the edit form for the configured ingest data view
    ingest_data_view = my.kwargs.get('ingest_data_view')

    from tactic.ui.panel import EditWdg

    sobject = SearchType.create(my.search_type)
    edit = EditWdg(search_key =sobject.get_search_key(), mode='view', view=ingest_data_view )
    dialog_data_div.add(edit)

    # hidden parent_key field, pre-filled from kwargs when supplied
    hidden = HiddenWdg(name="parent_key")
    dialog_data_div.add(hidden)
    hidden.add_class("spt_parent_key")
    parent_key = my.kwargs.get("parent_key") or ""
    if parent_key:
        hidden.set_value(parent_key)

    extra_data = my.kwargs.get("extra_data")
    if not isinstance(extra_data, basestring):
        extra_data = jsondumps(extra_data)
    if extra_data and extra_data != "null":
        # it needs a TextArea instead of Hidden because of JSON data
        text = TextAreaWdg(name="extra_data")
        text.add_style('display: none')
        text.set_value(extra_data)
        dialog_data_div.add(text)

    """
    dialog_data_div.add("Keywords:<br/>")
    dialog.add(dialog_data_div)
    text = TextAreaWdg(name="keywords")
    dialog_data_div.add(text)
    text.add_class("spt_keywords")
    text.add_style("padding: 1px")
    dialog_data_div.add("<br/>"*2)
    text.add_class("spt_extra_data")
    text.add_style("padding: 1px")
    """

    #### TEST Image options
    """
    button = IconButtonWdg(title="Resize", icon=IconWdg.FILM)
    buttons.add_cell(button)
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})
    try:
        from spt.tools.convert import ConvertOptionsWdg
        convert_div = DivWdg()
        dialog.add(convert_div)
        convert_div.add_style("padding: 20px")
        convert_div.add_color("background", "background")
        convert_div.add_class("spt_image_convert")
        convert = ConvertOptionsWdg()
        convert_div.add(convert)
    except:
        pass
    """

    # use base name for name
    """
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")
    checkbox = CheckboxWdg("use_file_name")
    name_div.add(checkbox)
    name_div.add(" Use name of file for name")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("use_base_name")
    name_div.add(checkbox)
    name_div.add(" Remove extension")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("file_keywords")
    name_div.add(checkbox)
    name_div.add(" Use file name for keywords")
    """

    return div
def get_data_wdg(my):
    """Build the data-entry portion of the ingest UI (older variant).

    Like the other get_data_wdg, but uses a "Fill in Data" icon button and
    has no update-mode selector.  Assembles: optional process selector,
    the data dialog with category-by-date radios, an EditWdg for the
    ingest data view, and hidden parent_key / extra_data fields.

    Returns:
        DivWdg containing the assembled widgets.
    """
    div = DivWdg()

    from pyasm.biz import Pipeline
    from pyasm.widget import SelectWdg

    # Offer a process selector only when this sType has a pipeline with
    # named processes.
    search_type_obj = SearchType.get(my.search_type)
    base_type = search_type_obj.get_base_key()
    search = Search("sthpw/pipeline")
    search.add_filter("search_type", base_type)
    pipelines = search.get_sobjects()
    if pipelines:
        pipeline = pipelines[0]
        process_names = pipeline.get_process_names()
        if process_names:
            table = Table()
            div.add(table)
            table.add_row()
            table.add_cell("Process: ")
            select = SelectWdg("process")
            table.add_cell(select)
            # pseudo-processes appended after a separator entry
            process_names.append("---")
            process_names.append("publish")
            process_names.append("icon")
            select.set_option("values", process_names)

    ####
    buttons = Table()
    div.add(buttons)
    buttons.add_row()
    button = IconButtonWdg(title="Fill in Data", icon=IconWdg.EDIT)
    buttons.add_cell(button)

    # dialog opened by the button
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x': -10, 'y': 10})

    dialog_data_div = DivWdg()
    dialog_data_div.add_color("background", "background")
    dialog_data_div.add_style("padding", "20px")
    dialog.add(dialog_data_div)

    # Order folders by date
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")

    # category radios only make sense when files can be re-homed by folder
    if SearchType.column_exists(my.search_type, "relative_dir"):
        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "none")
        category_div.add(checkbox)
        category_div.add(" No categories")
        category_div.add_style("margin-bottom: 5px")
        checkbox.set_option("checked", "true")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_day")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Day")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_week")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Week")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_year")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Year")
        category_div.add_style("margin-bottom: 5px")

        """
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "custom")
        name_div.add(checkbox)
        name_div.add(" Custom")
        """

        name_div.add("<br/>")

    # embed the edit form for the configured ingest data view
    ingest_data_view = my.kwargs.get('ingest_data_view')

    from tactic.ui.panel import EditWdg

    sobject = SearchType.create(my.search_type)
    edit = EditWdg(search_key=sobject.get_search_key(), mode='view', view=ingest_data_view)
    dialog_data_div.add(edit)

    # hidden parent_key field, pre-filled from kwargs when supplied
    hidden = HiddenWdg(name="parent_key")
    dialog_data_div.add(hidden)
    hidden.add_class("spt_parent_key")
    parent_key = my.kwargs.get("parent_key") or ""
    if parent_key:
        hidden.set_value(parent_key)

    extra_data = my.kwargs.get("extra_data")
    if not isinstance(extra_data, basestring):
        extra_data = jsondumps(extra_data)
    if extra_data and extra_data != "null":
        # it needs a TextArea instead of Hidden because of JSON data
        text = TextAreaWdg(name="extra_data")
        text.add_style('display: none')
        text.set_value(extra_data)
        dialog_data_div.add(text)

    """
    dialog_data_div.add("Keywords:<br/>")
    dialog.add(dialog_data_div)
    text = TextAreaWdg(name="keywords")
    dialog_data_div.add(text)
    text.add_class("spt_keywords")
    text.add_style("padding: 1px")
    dialog_data_div.add("<br/>"*2)
    text.add_class("spt_extra_data")
    text.add_style("padding: 1px")
    """

    #### TEST Image options
    """
    button = IconButtonWdg(title="Resize", icon=IconWdg.FILM)
    buttons.add_cell(button)
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})
    try:
        from spt.tools.convert import ConvertOptionsWdg
        convert_div = DivWdg()
        dialog.add(convert_div)
        convert_div.add_style("padding: 20px")
        convert_div.add_color("background", "background")
        convert_div.add_class("spt_image_convert")
        convert = ConvertOptionsWdg()
        convert_div.add(convert)
    except:
        pass
    """

    # use base name for name
    """
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")
    checkbox = CheckboxWdg("use_file_name")
    name_div.add(checkbox)
    name_div.add(" Use name of file for name")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("use_base_name")
    name_div.add(checkbox)
    name_div.add(" Remove extension")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("file_keywords")
    name_div.add(checkbox)
    name_div.add(" Use file name for keywords")
    """

    return div
def execute(my):
    """Add the sobjects identified by kwarg "search_keys" to every
    collection identified by kwarg "collection_keys".

    Skips items already in a collection, refuses additions that would
    create a circular collection hierarchy, and merges the collection's
    keywords into each newly added member (when the sType has a
    "keywords" column).

    Results are reported through my.info['message']:
      - message['circular'] = "True" plus
        message['parent_collection_names'] when a circular relationship
        is detected (command returns early without inserting)
      - message[<collection name>] = "Insert OK" / "No insert" otherwise

    Raises:
        Exception: when a collection search key resolves to nothing.
    """
    collection_keys = my.kwargs.get("collection_keys")
    search_keys = my.kwargs.get("search_keys")

    message = {}

    # FIX: identity comparison (was "collection_keys == None")
    if collection_keys is None:
        collection_keys = []

    for collection_key in collection_keys:
        collection = Search.get_by_search_key(collection_key)
        if not collection:
            raise Exception("Collection does not exist")

        collection_name = collection.get("name")
        search_type = collection.get_base_search_type()
        parts = search_type.split("/")
        # name of the in-collection join table, eg. "project/asset_in_asset"
        collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])

        search = Search(collection_type)
        search.add_filter("parent_code", collection.get_code())
        items = search.get_sobjects()
        # codes already in this collection -- used below to skip duplicates
        search_codes = {x.get_value("search_code") for x in items}

        # Try to find all the parent codes of the destination, and see if
        # any match the codes in "search_codes".
        # Check for parent/child hierarchy in destination to prevent
        # circular relationships.
        src_collections_codes = []
        for search_key in search_keys:
            asset = Search.get_by_search_key(search_key)
            is_collection = asset.get("_is_collection")
            if is_collection:
                src_collections_codes.append(asset.get("code"))

        if src_collections_codes:
            collection_code = collection.get("code")
            my.kwargs["collection_code"] = collection_code
            my.kwargs["collection_type"] = collection_type
            my.kwargs["search_type"] = search_type

            # Run SQL to find all parent collections (and above) of the
            # selected collections.  all_parent_codes contains every parent
            # code up the relationship tree, ie. parent collections'
            # parents ... etc
            all_parent_codes = my.get_parent_codes()
            all_parent_codes.add(collection_code)
            all_parent_codes = list(all_parent_codes)

            # If any source collection appears among the destination's
            # ancestors, adding it would create a cycle: report and abort.
            parent_collection_names = []
            for parent_code in all_parent_codes:
                if parent_code in src_collections_codes:
                    message['circular'] = "True"
                    parent_collection_name = Search.get_by_code(search_type, parent_code).get("name")
                    parent_collection_names.append(parent_collection_name)
            if parent_collection_names:
                message['parent_collection_names'] = parent_collection_names
                my.info['message'] = message
                return

        has_keywords = SearchType.column_exists(search_type, "keywords")
        if has_keywords:
            collection_keywords = collection.get_value("keywords", no_exception=True)
            collection_keywords = set(collection_keywords.split(" "))

        # create new items
        has_inserted = False
        sobjects = Search.get_by_search_keys(search_keys)
        for sobject in sobjects:
            if sobject.get_code() in search_codes:
                continue
            new_item = SearchType.create(collection_type)
            new_item.set_value("parent_code", collection.get_code())
            new_item.set_value("search_code", sobject.get_code())
            new_item.commit()
            has_inserted = True

            # copy the metadata (keywords) of the collection onto the member
            if has_keywords:
                keywords = set(sobject.get_value("keywords").split(" "))
                keywords = keywords.union(collection_keywords)
                sobject.set_value("keywords", " ".join(keywords))
                sobject.commit()

        if not has_inserted:
            message[collection_name] = "No insert"
        else:
            message[collection_name] = "Insert OK"

    my.info['message'] = message
    my.add_description("Add [%s] item(s) to [%s] collection(s)" % (len(search_keys), len(collection_keys)))
def execute(my): file_path = my.kwargs.get("path") project_code = my.kwargs.get("project_code") base_dir = my.kwargs.get("base_dir") search_type = my.kwargs.get("search_type") process = my.kwargs.get("process") watch_script_path = my.kwargs.get("script_path") if not process: process = "publish" basename = os.path.basename(file_path) context = my.kwargs.get("context") if not context: context = '%s/%s' % (process, basename) # find the relative_dir and relative_path relative_path = file_path.replace("%s/" % base_dir, "") relative_dir = os.path.dirname(relative_path) file_name = os.path.basename(file_path) log_path = '%s/TACTIC_log.txt' %(base_dir) my.create_checkin_log() # Define asset type of the file asset_type = my.get_asset_type(file_path) description = "drop folder check-in of %s" %file_name from client.tactic_client_lib import TacticServerStub server = TacticServerStub.get(protocol='local') server.set_project(project_code) transaction = Transaction.get(create=True) server.start(title='Check-in of media', description='Check-in of media') server_return_value = {} try: filters = [ [ 'name', '=', file_name ], #[ 'relative_dir', '=', relative_dir ] ] sobj = server.query(search_type, filters=filters, single=True) if not sobj: # create sobject if it does not yet exist sobj = SearchType.create(search_type) if SearchType.column_exists(search_type, "name"): sobj.set_value("name", basename) if SearchType.column_exists(search_type, "media_type"): sobj.set_value("media_type", asset_type) if SearchType.column_exists(search_type, "relative_dir"): sobj.set_value("relative_dir", relative_dir) if SearchType.column_exists(search_type, "keywords"): relative_path = relative_path keywords = Common.get_keywords_from_path(relative_path) keywords = " ".join( keywords ) sobj.set_value("keywords", keywords) sobj.commit() search_key = sobj.get_search_key() else: search_key = sobj.get("__search_key__") #task = server.create_task(sobj.get('__search_key__'),process='publish') 
#server.update(task, {'status': 'New'}) server_return_value = server.simple_checkin(search_key, context, file_path, description=description, mode='move') if watch_script_path: cmd = PythonCmd(script_path=watch_script_path,search_type=search_type,drop_path=file_path,search_key=search_key) cmd.execute() except Exception, e: print "Error occurred", e error_message=str(e) import traceback tb = sys.exc_info()[2] stacktrace = traceback.format_tb(tb) stacktrace_str = "".join(stacktrace) print "-"*50 print stacktrace_str version_num='Error:' system_time=strftime("%Y/%m/%d %H:%M", gmtime()) pre_log=file_name+(50-len(file_name))*' '+system_time+(33-len(system_time))*' '+version_num+(15-len(version_num))*' ' +error_message+'\n'\ + stacktrace_str + '\n' + watch_script_path # Write data into TACTIC_log file under /tmp/drop f = open(log_path, 'a') f.write(pre_log) f.close() #server.abort() transaction.rollback() raise
def resolve_relationship_attrs(self, attrs, search_type, search_type2): if attrs.get("relationship") not in ("search_type","search_code","search_id"): return attrs search_type_obj = SearchType.get(search_type) search_type_obj2 = SearchType.get(search_type2) my_is_from = attrs['from'] == search_type_obj.get_base_key() db_resource = SearchType.get_db_resource_by_search_type(search_type) db_resource2 = SearchType.get_db_resource_by_search_type(search_type2) db_impl = db_resource.get_database_impl() db_impl2 = db_resource2.get_database_impl() # <connect from="sthpw/note" to="*" # type='hierarchy' relationship='search_type'/> prefix = attrs.get("prefix") if prefix: prefix = "%s_" % prefix else: prefix = "" if my_is_from: if db_impl2.get_database_type() == "MongoDb": attrs['from_col'] = '%ssearch_code' % prefix attrs['to_col'] = db_impl2.get_id_col(db_resource2,search_type2) attrs['relationship'] = 'search_code' else: code_column = "%ssearch_code" % prefix has_code = SearchType.column_exists(search_type, code_column) if has_code: attrs['from_col'] = '%ssearch_code' % prefix attrs['to_col'] = 'code' attrs['relationship'] = 'search_code' else: attrs['from_col'] = '%ssearch_id' % prefix attrs['to_col'] = db_impl2.get_id_col(db_resource2,search_type2) attrs['relationship'] = 'search_id' else: if db_impl.get_database_type() == "MongoDb": attrs['to_col'] = '%ssearch_code' % prefix attrs['from_col'] = db_impl.get_id_col(db_resource,search_type) attrs['relationship'] = 'search_code' else: code_column = "%ssearch_code" % prefix has_code = SearchType.column_exists(search_type2, code_column) if has_code: attrs['from_col'] = 'code' attrs['to_col'] = '%ssearch_code' % prefix attrs['relationship'] = 'search_code' else: attrs['from_col'] = db_impl.get_id_col(db_resource,search_type) attrs['to_col'] = '%ssearch_id' % prefix attrs['relationship'] = 'search_id' return attrs
def execute(my):
    """Process a web sign-in attempt (legacy variant).

    Reads login/password/domain from the web form, optionally lowercases
    the login (security.force_lowercase_login config), looks the user up
    by "upn" then "login", attempts authentication and, on failure,
    records the failed attempt and locks the account after
    security.max_login_attempt failures.

    Returns:
        False when the form is empty/invalid; otherwise falls through
        after attempting the login (messages reported via WebLoginWdg).
    """
    from pyasm.web import WebContainer
    web = WebContainer.get_web()

    from pyasm.widget import WebLoginWdg

    # If the tag <force_lowercase_login> is set to "true"
    # in the TACTIC config file,
    # then force the login string argument to be lowercase.
    # This tag is false by default.
    my.login = web.get_form_value("login")
    if Config.get_value("security","force_lowercase_login") == "true":
        my.login = my.login.lower()

    my.password = web.get_form_value("password")
    my.domain = web.get_form_value("domain")

    # completely empty form: silently ignore (initial page load)
    if my.login == "" and my.password == "":
        return False

    if my.login == "" or my.password == "":
        web.set_form_value(WebLoginWdg.LOGIN_MSG, \
            "Empty username or password")
        return False

    security = WebContainer.get_security()

    # handle windows domains
    #if my.domain:
    #    my.login = "%s\\%s" % (my.domain, my.login)

    # optional password-reset flow: both fields must match
    verify_password = web.get_form_value("verify_password")
    if verify_password:
        if verify_password != my.password:
            web.set_form_value(WebLoginWdg.LOGIN_MSG, \
                "Passwords do not match.")
            return False

    # look the user up by "upn" first, falling back to "login"
    login_sobject = None
    if SearchType.column_exists("sthpw/login", "upn"):
        search = Search("sthpw/login")
        search.add_filter('upn',my.login)
        login_sobject = search.get_sobject()
    if not login_sobject:
        search2 = Search("sthpw/login")
        search2.add_filter('login',my.login)
        login_sobject = search2.get_sobject()

    # FIXME: need to only be able to do this if admin password is empty
    if verify_password:
        if login_sobject and login_sobject.get_value("login") == "admin":
            login_sobject.set_password(verify_password)

    try:
        security.login_user(my.login, my.password, domain=my.domain)
    except SecurityException, e:
        msg = str(e)
        if not msg:
            msg = "Incorrect username or password"
        web.set_form_value(WebLoginWdg.LOGIN_MSG, msg)

        # track failed attempts and lock the account when the configured
        # maximum is reached
        max_attempts=-1
        try:
            max_attempts = int(Config.get_value("security", "max_login_attempt"))
        except:
            pass

        if max_attempts > 0:
            login_attempt = 0
            if login_sobject:
                login_attempt = login_sobject.get_value('login_attempt')

            login_attempt = login_attempt + 1
            # NOTE(review): login_sobject can still be None here (unknown
            # user); this set_value would then raise -- confirm upstream
            # guarantees before relying on this path
            login_sobject.set_value('login_attempt', login_attempt)

            if login_attempt == max_attempts:
                #set license_Type to disabled and set off the thread to re-enable it
                login_sobject.set_value('license_type', 'disabled')
                disabled_time = Config.get_value("security", "account_lockout_duration")
                if not disabled_time:
                    disabled_time = "30 minutes"
                delay,unit = disabled_time.split(" ",1)
                if "minute" in unit:
                    delay = int(delay)*60
                elif "hour" in unit:
                    delay =int(delay)*3600
                elif "second" in unit:
                    delay = int(delay)
                else:
                    #make delay default to 30 min
                    delay = 30*60
                my.reenable_user(login_sobject, delay)

            if login_sobject:
                login_sobject.commit(triggers=False)
def import_data(my, path, commit=True, unique=False):
    """Import sobject entries from a plugin data file.

    The file is a series of python statements delimited by
    "#-- Start Entry --#" / "#-- End Entry --#" markers; each entry is
    exec'd and is expected to bind an "insert" sobject.  Entries may be
    filtered through the optional 'filter_line_handler' /
    'filter_sobject_handler' kwargs.  sthpw/* sobjects are re-homed to
    the current project; with unique=True existing rows are updated
    rather than duplicated.

    NOTE(review): exec of file content -- the data file must be trusted.
    NOTE(review): the branch that accumulates lines into `statement`
    (and a final `return jobs`) is not visible in this chunk -- confirm
    against the full file before editing.

    Args:
        path: path to the data file.
        commit: when True, commit each sobject (triggers suppressed).
        unique: when True, convert inserts to updates for existing rows.

    Returns:
        [] when the path does not exist.
    """
    if not os.path.exists(path):
        # This is printed too often in harmless situations
        #print "WARNING: path [%s] does not exist" % path
        return []

    #f = codecs.open(path, 'r', 'utf-8')
    f = codecs.getreader('utf8')(open(path, 'r'))

    statement = []
    count = 1
    insert = None
    table = None
    sobject = None
    jobs = []

    filter_line_handler = my.kwargs.get('filter_line_handler')
    filter_sobject_handler = my.kwargs.get('filter_sobject_handler')

    for line in f:
        # FIXME: this SQLServer specific
        #if line.startswith("insert.set_value('Version'"):
        #    #line = "insert.set_value('Version', '')"
        #    continue

        # give the caller a chance to rewrite or drop the line
        if filter_line_handler:
            line = filter_line_handler(path, line)
            if line == None:
                continue

        if line.startswith("#-- Start Entry --#"):
            statement = []
        elif line.startswith("#-- End Entry --#"):
            if not statement:
                continue

            # strip out a line feeds and add proper new lines
            #statement_str = "\n".join([x.strip("\n") for x in statement])
            statement_str = "\n".join([x.rstrip("\r\n") for x in statement])

            try:
                # executes the entry; expected to bind "insert"
                exec(statement_str)
            except SqlException, e:
                print "ERROR (SQLException): ", e
            except Exception, e:
                print "ERROR: ", e
                print
                print statement_str
                print
                #raise
                continue

            sobject = insert

            if sobject:
                jobs.append(sobject)
                stype_id = 0
                if sobject.get_base_search_type() =='sthpw/search_object':
                    # map the imported sType row onto the existing row's id
                    stype_id = Search.eval("@GET(sthpw/search_object['search_type', '%s'].id)" %sobject.get_value('search_type'), single=True)
                else:
                    # if there is an id, then set the sobject to be insert
                    sobject_id = sobject.get_id()
                    if sobject_id and sobject_id != -1:
                        sobject.set_force_insert(True)

                # if unique, then check to see if it already exists.
                # Same idea with stype_exists
                # if so, take the id to turn this from an insert to an
                # update operation writing over existing data
                if stype_id:
                    sobject.set_value("id", stype_id)

            # give the caller a chance to rewrite or drop the sobject
            if filter_sobject_handler:
                sobject = filter_sobject_handler(sobject)

            # if the search type is in sthpw namespace, then change
            # the project code to the current project
            base_search_type = sobject.get_base_search_type()
            if base_search_type.startswith("sthpw/"):
                project = Project.get()
                project_code = project.get_value("code")
                if SearchType.column_exists(sobject.get_search_type(), "project_code"):
                    sobject.set_value("project_code", project_code)

                if base_search_type == "sthpw/schema":
                    # if a schema is already defined, the delete
                    # the current one.  This is not necessary
                    # if unique flag is on
                    if not unique:
                        search = Search("sthpw/schema")
                        search.add_filter("code", project_code)
                        old_schema = search.get_sobject()
                        if old_schema:
                            old_schema.delete()
                    sobject.set_value("code", project_code)

            if unique:
                unique_sobject = my.get_unique_sobject(sobject)
                if unique_sobject:
                    # reuse the existing id: insert becomes update
                    sobject.set_value("id", unique_sobject.get_id() )

            if sobject == None:
                continue

            try:
                if commit:
                    sobject.commit(triggers=False)

                # periodic progress output
                chunk = 100
                if my.verbose and count and count % chunk == 0:
                    print "\t... handled entry [%s]" % count

                # register the entry as plugin content for config plugins
                if my.plugin and my.plugin.get_value("type", no_exception=True) == "config":
                    plugin_content = SearchType.create("config/plugin_content")
                    plugin_content.set_value("search_type", sobject.get_search_type())
                    plugin_content.set_value("search_code", sobject.get_code())
                    plugin_content.set_value("plugin_code", my.plugin.get_code())
                    plugin_content.commit()
            except UnicodeDecodeError, e:
                print "Skipping due to unicode decode error: [%s]" % statement_str
                continue
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. 
Please first create a "name" column for this sType.') for count, filename in enumerate(filenames): # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobject = search.get_sobject() # else create a new one if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) if SearchType.column_exists(search_type, "keywords"): if 
keywords: sobject.set_value("keywords", keywords) for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename) server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(self):
    """Handle a web login form submission.

    Reads login/password/domain from the request form, validates that both
    fields are present, resolves the login sobject (matching the 'upn',
    then 'login', then 'email' column) and attempts authentication.  On
    failure the error message is posted back through WebLoginWdg and the
    max-login-attempt lockout policy from the TACTIC config is applied.

    Returns False on form-validation failure; otherwise returns None.
    """
    from pyasm.web import WebContainer
    web = WebContainer.get_web()
    from pyasm.widget import WebLoginWdg

    # If the tag <force_lowercase_login> is set to "true"
    # in the TACTIC config file,
    # then force the login string argument to be lowercase.
    # This tag is false by default.
    self.login = web.get_form_value("login")
    if Config.get_value("security", "force_lowercase_login") == "true":
        self.login = self.login.lower()
    password = web.get_form_value("password")
    self.password = password
    self.domain = web.get_form_value("domain")

    # reject empty credentials with a specific message for each case
    if self.login == "" and self.password == "":
        web.set_form_value(WebLoginWdg.LOGIN_MSG, "Username and password are empty")
        return False
    if self.login == "":
        web.set_form_value(WebLoginWdg.LOGIN_MSG, "Username is empty")
        return False
    if self.password == "":
        web.set_form_value(WebLoginWdg.LOGIN_MSG, "Password is empty")
        return False

    security = WebContainer.get_security()

    # handle windows domains
    #if self.domain:
    #    self.login = "******" % (self.domain, self.login)

    # verify_password is only submitted by the set-password form variant
    verify_password = web.get_form_value("verify_password")
    if verify_password:
        if verify_password != self.password:
            web.set_form_value(WebLoginWdg.LOGIN_MSG, "Passwords do not match.")
            return False

    # check to see if the login exists in the database
    # lookup order: upn column (if present), then login, then email
    login_sobject = None
    if SearchType.column_exists("sthpw/login", "upn"):
        search = Search("sthpw/login")
        search.add_filter('upn', self.login)
        login_sobject = search.get_sobject()
    if not login_sobject:
        search2 = Search("sthpw/login")
        search2.add_filter('login', self.login)
        login_sobject = search2.get_sobject()
    if not login_sobject:
        search2 = Search("sthpw/login")
        search2.add_filter('email', self.login)
        login_sobject = search2.get_sobject()

    # FIXME: need to only be able to do this if admin password is empty
    if verify_password:
        if login_sobject and login_sobject.get_value("login") == "admin":
            login_sobject.set_password(verify_password)

    try:
        # always use the login column regardless of what the user entered
        if login_sobject:
            login = login_sobject.get_value("login")
        else:
            login = self.login
        security.login_user(login, self.password, domain=self.domain)
    except SecurityException, e:
        msg = str(e)
        if not msg:
            msg = "Incorrect username or password"
        web.set_form_value(WebLoginWdg.LOGIN_MSG, msg)

        # lockout policy: after max_login_attempt consecutive failures,
        # disable the account and schedule re-enabling after a delay
        max_attempts = -1
        try:
            max_attempts = int(Config.get_value("security", "max_login_attempt"))
        except:
            # config value missing or non-numeric: lockout disabled
            pass

        if max_attempts > 0:
            login_attempt = 0
            if login_sobject:
                # NOTE(review): assumes login_attempt comes back numeric;
                # a NULL/'' value would break the increment — confirm the
                # column default upstream.
                login_attempt = login_sobject.get_value('login_attempt')
                login_attempt = login_attempt + 1
                login_sobject.set_value('login_attempt', login_attempt)
                if login_attempt == max_attempts:
                    #set license_Type to disabled and set off the thread to re-enable it
                    login_sobject.set_value('license_type', 'disabled')
                    disabled_time = Config.get_value("security", "account_lockout_duration")
                    if not disabled_time:
                        disabled_time = "30 minutes"
                    # parse "<number> <unit>" into a delay in seconds
                    delay, unit = disabled_time.split(" ", 1)
                    if "minute" in unit:
                        delay = int(delay) * 60
                    elif "hour" in unit:
                        delay = int(delay) * 3600
                    elif "second" in unit:
                        delay = int(delay)
                    else:
                        #make delay default to 30 min
                        delay = 30 * 60
                    self.reenable_user(login_sobject, delay)
            if login_sobject:
                # triggers=False: avoid firing change triggers on a bookkeeping update
                login_sobject.commit(triggers=False)
def import_data(my, path, commit=True, unique=False):
    """Replay a TACTIC data export file and build/commit the sobjects in it.

    The file at ``path`` contains python statements delimited by
    "#-- Start Entry --#" / "#-- End Entry --#" markers; each entry is
    executed with exec() and is expected to bind a variable named
    ``insert`` to the sobject it constructs.

    Parameters:
        path    -- path to the export file (utf-8 encoded)
        commit  -- when True, commit each sobject (triggers disabled)
        unique  -- when True, match against an existing sobject and turn
                   the insert into an update instead of duplicating

    Returns an empty list when the path does not exist.

    SECURITY NOTE(review): exec() on file content — the import file must
    come from a trusted source; do not point this at untrusted input.
    """
    if not os.path.exists(path):
        # This is printed too often in harmless situations
        #print "WARNING: path [%s] does not exist" % path
        return []

    #f = codecs.open(path, 'r', 'utf-8')
    f = codecs.getreader('utf8')(open(path, 'r'))

    statement = []
    count = 1
    insert = None          # rebound by each exec'd entry
    table = None
    sobject = None
    jobs = []              # every sobject produced so far

    # optional hooks supplied by the caller through kwargs
    filter_line_handler = my.kwargs.get('filter_line_handler')
    filter_sobject_handler = my.kwargs.get('filter_sobject_handler')

    for line in f:
        # FIXME: this SQLServer specific
        #if line.startswith("insert.set_value('Version'"):
        #    #line = "insert.set_value('Version', '')"
        #    continue
        if filter_line_handler:
            # hook may rewrite the line or drop it by returning None
            line = filter_line_handler(path, line)
            if line == None:
                continue

        if line.startswith("#-- Start Entry --#"):
            statement = []
        elif line.startswith("#-- End Entry --#"):
            if not statement:
                continue
            # strip out a line feeds and add proper new lines
            #statement_str = "\n".join([x.strip("\n") for x in statement])
            statement_str = "\n".join([x.rstrip("\r\n") for x in statement])
            try:
                # executes the entry; expected to assign ``insert``
                exec(statement_str)
            except SqlException, e:
                print "ERROR (SQLException): ", e
            except Exception, e:
                print "ERROR: ", e
                print
                print statement_str
                print
                #raise
                continue

            sobject = insert
            if sobject:
                jobs.append(sobject)
                stype_id = 0
                if sobject.get_base_search_type() =='sthpw/search_object':
                    # reuse the id of an already-registered search type
                    stype_id = Search.eval("@GET(sthpw/search_object['search_type', '%s'].id)" %sobject.get_value('search_type'), single=True)
                else:
                    # if there is an id, then set the sobject to be insert
                    sobject_id = sobject.get_id()
                    if sobject_id and sobject_id != -1:
                        sobject.set_force_insert(True)

                # if unique, then check to see if it already exists.
                # Same idea with stype_exists
                # if so, take the id to turn this from an insert to an
                # update operation writing over existing data
                if stype_id:
                    sobject.set_value("id", stype_id)

                if filter_sobject_handler:
                    # hook may transform or replace the sobject
                    sobject = filter_sobject_handler(sobject)

                # if the search type is in sthpw namespace, then change
                # the project code to the current project
                base_search_type = sobject.get_base_search_type()
                if base_search_type.startswith("sthpw/"):
                    project = Project.get()
                    project_code = project.get_value("code")
                    if SearchType.column_exists(sobject.get_search_type(), "project_code"):
                        sobject.set_value("project_code", project_code)
                    if base_search_type == "sthpw/schema":
                        # if a schema is already defined, the delete
                        # the current one. This is not necessary
                        # if unique flag is on
                        if not unique:
                            search = Search("sthpw/schema")
                            search.add_filter("code", project_code)
                            old_schema = search.get_sobject()
                            if old_schema:
                                old_schema.delete()
                        sobject.set_value("code", project_code)

                if unique:
                    # take over the existing row's id: insert becomes update
                    unique_sobject = my.get_unique_sobject(sobject)
                    if unique_sobject:
                        sobject.set_value("id", unique_sobject.get_id() )
                    if sobject == None:
                        continue

                try:
                    if commit:
                        sobject.commit(triggers=False)
                        chunk = 100
                        if my.verbose and count and count % chunk == 0:
                            print "\t... handled entry [%s]" % count
                    # register the entry as plugin content when importing a
                    # config-type plugin
                    if my.plugin and my.plugin.get_value("type", no_exception=True) == "config":
                        plugin_content = SearchType.create("config/plugin_content")
                        plugin_content.set_value("search_type", sobject.get_search_type())
                        plugin_content.set_value("search_code", sobject.get_code())
                        plugin_content.set_value("plugin_code", my.plugin.get_code())
                        plugin_content.commit()
                except UnicodeDecodeError, e:
                    print "Skipping due to unicode decode error: [%s]" % statement_str
                    continue
def execute(self): file_path = self.kwargs.get("path") site = self.kwargs.get("site") project_code = self.kwargs.get("project_code") base_dir = self.kwargs.get("base_dir") search_type = self.kwargs.get("search_type") process = self.kwargs.get("process") watch_script_path = self.kwargs.get("script_path") if not process: process = "publish" basename = os.path.basename(file_path) context = self.kwargs.get("context") if not context: context = '%s/%s' % (process, basename) # find the relative_dir and relative_path relative_path = file_path.replace("%s/" % base_dir, "") relative_dir = os.path.dirname(relative_path) file_name = os.path.basename(file_path) log_path = '%s/TACTIC_log.txt' %(base_dir) self.create_checkin_log() # Define asset type of the file asset_type = self.get_asset_type(file_path) description = "drop folder check-in of %s" %file_name from client.tactic_client_lib import TacticServerStub server = TacticServerStub.get(protocol='local') server.set_project(project_code) transaction = Transaction.get(create=True) server.start(title='Check-in of media', description='Check-in of media') server_return_value = {} try: filters = [ [ 'name', '=', file_name ], #[ 'relative_dir', '=', relative_dir ] ] sobj = server.query(search_type, filters=filters, single=True) if not sobj: # create sobject if it does not yet exist sobj = SearchType.create(search_type) if SearchType.column_exists(search_type, "name"): sobj.set_value("name", basename) if SearchType.column_exists(search_type, "media_type"): sobj.set_value("media_type", asset_type) if SearchType.column_exists(search_type, "relative_dir"): sobj.set_value("relative_dir", relative_dir) if SearchType.column_exists(search_type, "keywords"): relative_path = relative_path keywords = Common.extract_keywords_from_path(relative_path) keywords = " ".join( keywords ) sobj.set_value("keywords", keywords) sobj.commit() search_key = sobj.get_search_key() else: search_key = sobj.get("__search_key__") #task = 
server.create_task(sobj.get('__search_key__'),process='publish') #server.update(task, {'status': 'New'}) """ #TEST: simulate different check-in duration from random import randint sec = randint(1, 5) print "checking in for ", sec, "sec" server.eval("@SOBJECT(sthpw/login)") import shutil dir_name,base_name = os.path.split(file_path) dest_dir = 'C:/ProgramData/Southpaw/watch_temp' if not os.path.exists(dest_dir): os.makedirs(dest_dir) shutil.move(file_path, '%s/%s'%(dest_dir, base_name)) time.sleep(sec) # move back the file in a few seconds shutil.move('%s/%s'%(dest_dir, base_name), file_path) """ server_return_value = server.simple_checkin(search_key, context, file_path, description=description, mode='move') if watch_script_path: cmd = PythonCmd(script_path=watch_script_path,search_type=search_type,drop_path=file_path,search_key=search_key) cmd.execute() except Exception as e: print "Error occurred", e error_message=str(e) import traceback tb = sys.exc_info()[2] stacktrace = traceback.format_tb(tb) stacktrace_str = "".join(stacktrace) print "-"*50 print stacktrace_str version_num='Error:' system_time=strftime("%Y/%m/%d %H:%M", gmtime()) pre_log=file_name+(50-len(file_name))*' '+system_time+(33-len(system_time))*' '+version_num+(15-len(version_num))*' ' +error_message+'\n'\ + stacktrace_str + '\n' + watch_script_path # Write data into TACTIC_log file under /tmp/drop f = open(log_path, 'a') f.write(pre_log) f.close() #server.abort() transaction.rollback() raise else: transaction.commit() #server.finish() if server_return_value: # Create the TACTIC_log file to record every check-in. 
# Search for all required data checkin_time=server_return_value.get('timestamp') version_nu=server_return_value.get('version') version_num=str(version_nu) try: value = parser.parse(checkin_time) value = value.strftime("%Y/%m/%d %H:%M") except: value = checkin_time pre_log=file_name+(50-len(file_name))*' '+value+(33-len(value))*' '+version_num+(15-len(version_num))*' ' +'ok\n' # Write data into TACTIC_log file under /tmp/drop f = open(log_path, 'a') f.write(pre_log) f.close() # Invoke Trigger:called_triggers from pyasm.command import Trigger Trigger.call_all_triggers() # Delete the source file after check-in step. print "File handled." if os.path.exists(file_path): if os.path.isdir(file_path): os.rmdirs(file_path) else: os.unlink(file_path) print "Source file [%s] deleted: " %file_name
def get_display(my):
    """Render one table-cell value for the current sobject.

    Resolves the column to show (kwargs 'column' overrides the widget
    name), formats the value by column type (wiki-converts strings,
    localizes timestamps to the user's timezone, blanks id == -1), and
    wraps it in a DivWdg carrying update metadata when the column really
    exists on the sobject's search type.  Returns either a Widget or the
    plain formatted value.
    """
    sobject = my.get_current_sobject()
    column = my.kwargs.get('column')
    if column:
        name = column
    else:
        name = my.get_name()
    value = my.get_value(name=name)
    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        # no sobject in scope: treat the value as plain text
        data_type = 'text'

    if type(value) in types.StringTypes:
        # expand wiki markup in string values
        wiki = WikiUtil()
        value = wiki.convert(value)
    if name == 'id' and value == -1:
        # -1 is the sentinel for "not yet inserted"; show nothing
        value = ''
    elif data_type in ["timestamp","time"] or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)
            # convert to user timezone
            if not SObject.is_day_column(name):
                date = my.get_timezone_value(date)
            try:
                # decode with the active locale; fall back to the raw
                # strftime result if the locale/encoding is unavailable
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''
    else:
        if isinstance(value, Widget):
            # already renderable; pass through untouched
            return value
        elif not isinstance(value, basestring):
            # stringify anything non-numeric (duck-typed check via +1)
            try:
                value + 1
            except TypeError:
                value = str(value)
            #else:
            #    value_wdg.add_style("float: right")
            #    value_wdg.add_style("padding-right: 3px")

    if sobject and SearchType.column_exists(sobject.get_search_type(), name):
        # wrap in a div carrying metadata for client-side live updates
        value_wdg = DivWdg()
        value_wdg.add_update( {
            'search_key': sobject.get_search_key(),
            'column': name
        } )
        # don't call str() to prevent utf-8 encode error
        value_wdg.add(value)
        return value_wdg

    return value
def execute(my):
    """Add the sobjects in kwargs 'search_keys' to each collection in
    kwargs 'collection_keys'.

    For every target collection: refuses circular membership (a source
    collection that is already an ancestor of the target), inserts the
    missing <stype>_in_<stype> relation rows, and merges the collection's
    keywords onto each newly added sobject.  Results are reported per
    collection through my.info['message'].
    """
    collection_keys = my.kwargs.get("collection_keys")
    search_keys = my.kwargs.get("search_keys")
    message = {}

    if collection_keys == None:
        collection_keys = []

    for collection_key in collection_keys:
        collection = Search.get_by_search_key(collection_key)
        if not collection:
            raise Exception("Collection does not exist")

        collection_name = collection.get("name")
        search_type = collection.get_base_search_type()
        parts = search_type.split("/")
        # relation sType, e.g. "prod/asset" -> "prod/asset_in_asset"
        collection_type = "%s/%s_in_%s" % (parts[0], parts[1], parts[1])
        search = Search(collection_type)
        search.add_filter("parent_code", collection.get_code())
        items = search.get_sobjects()
        # codes already in the collection (used to skip duplicates)
        search_codes = [x.get_value("search_code") for x in items]
        search_codes = set(search_codes)

        # Try to find all the parent codes of the destination, and see if there's any that
        # matches the codes in "search_codes"
        # Check for parent/child hierarchy in destination to prevent circular relationships
        src_collections_codes = []
        for search_key in search_keys:
            asset = Search.get_by_search_key(search_key)
            is_collection = asset.get("_is_collection")
            if is_collection:
                src_collection_code = asset.get("code")
                src_collections_codes.append(src_collection_code)

        if src_collections_codes:
            collection_code = collection.get("code")
            my.kwargs["collection_code"] = collection_code
            my.kwargs["collection_type"] = collection_type
            my.kwargs["search_type"] = search_type

            # Run SQL to find all parent collections(and above) of the selected collections
            # The all_parent_codes contain all parent codes up the relationship tree
            # ie. parent collections' parents ...etc
            all_parent_codes = my.get_parent_codes()
            all_parent_codes.add(collection_code)
            all_parent_codes = list(all_parent_codes)

            # Once retrieve the parent codes, use a for loop to check if the the codes in
            # src_collections_codes are in parent_codes
            parent_collection_names = []
            for parent_code in all_parent_codes:
                if parent_code in src_collections_codes:
                    message['circular'] = "True"
                    parent_collection_name = Search.get_by_code(search_type, parent_code).get("name")
                    parent_collection_names.append(parent_collection_name)
            if parent_collection_names:
                # circular relationship detected: report and abort the
                # whole command (remaining collections are not processed)
                message['parent_collection_names'] = parent_collection_names
                my.info['message'] = message
                return

        has_keywords = SearchType.column_exists(search_type, "keywords")
        if has_keywords:
            collection_keywords = collection.get_value("keywords", no_exception=True)
            collection_keywords = collection_keywords.split(" ")
            collection_keywords = set(collection_keywords)

        # create new items
        has_inserted = False
        sobjects = Search.get_by_search_keys(search_keys)
        for sobject in sobjects:
            # skip anything already in the collection
            if sobject.get_code() in search_codes:
                continue
            new_item = SearchType.create(collection_type)
            new_item.set_value("parent_code", collection.get_code())
            new_item.set_value("search_code", sobject.get_code())
            new_item.commit()
            has_inserted = True

            # copy the metadata of the collection
            if has_keywords:
                # union of the item's keywords with the collection's
                keywords = sobject.get_value("keywords")
                keywords = keywords.split(" ")
                keywords = set(keywords)
                keywords = keywords.union(collection_keywords)
                keywords = " ".join(keywords)
                sobject.set_value("keywords", keywords)
                sobject.commit()

        if not has_inserted:
            message[collection_name] = "No insert"
        else:
            message[collection_name] = "Insert OK"

    my.info['message'] = message
    my.add_description("Add [%s] item(s) to [%s] collection(s)" % (len(search_keys), len(collection_keys)))
def get_data_wdg(my):
    """Build the "Add Data" options panel for the ingest tool.

    Returns a DivWdg containing: a process selector (populated from the
    sType's pipeline, when one exists), and an "Add Data" button opening
    a dialog with category-by-date radio options (only when the sType has
    a relative_dir column), a hidden parent_key field, and keywords /
    extra-data (JSON) text areas.
    """
    div = DivWdg()

    from pyasm.biz import Pipeline
    from pyasm.widget import SelectWdg
    search_type_obj = SearchType.get(my.search_type)
    base_type = search_type_obj.get_base_key()
    search = Search("sthpw/pipeline")
    search.add_filter("search_type", base_type)
    pipelines = search.get_sobjects()
    if pipelines:
        # only the first pipeline for the sType is offered
        pipeline = pipelines[0]
        process_names = pipeline.get_process_names()
        if process_names:
            table = Table()
            div.add(table)
            table.add_row()
            table.add_cell("Process: ")
            select = SelectWdg("process")
            table.add_cell(select)
            # generic fallback processes appended after a separator
            process_names.append("---")
            process_names.append("publish")
            process_names.append("icon")
            select.set_option("values", process_names)

    ####
    buttons = Table()
    div.add(buttons)
    buttons.add_row()
    button = IconButtonWdg(title="Add Data", icon=IconWdg.FOLDER)
    buttons.add_cell(button)

    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})

    dialog_data_div = DivWdg()
    dialog_data_div.add_color("background", "background")
    dialog_data_div.add_style("padding", "20px")
    dialog.add(dialog_data_div)

    # Order folders by date
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")
    if SearchType.column_exists(my.search_type, "relative_dir"):
        # "No categories" option, checked by default
        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "none")
        category_div.add(checkbox)
        category_div.add(" No categories")
        category_div.add_style("margin-bottom: 5px")
        checkbox.set_option("checked", "true")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_day")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Day")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_week")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Week")
        category_div.add_style("margin-bottom: 5px")

        category_div = DivWdg()
        name_div.add(category_div)
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "by_year")
        category_div.add(checkbox)
        category_div.add(" Categorize files by Year")
        category_div.add_style("margin-bottom: 5px")

        """
        checkbox = RadioWdg("category")
        checkbox.set_option("value", "custom")
        name_div.add(checkbox)
        name_div.add(" Custom")
        """

        name_div.add("<br/>")

    # parent_key is carried through to the ingest command
    hidden = HiddenWdg(name="parent_key")
    dialog_data_div.add(hidden)
    hidden.add_class("spt_parent_key")
    parent_key = my.kwargs.get("parent_key") or ""
    if parent_key:
        hidden.set_value(parent_key)

    dialog_data_div.add("Keywords:<br/>")
    dialog.add(dialog_data_div)
    text = TextAreaWdg(name="keywords")
    dialog_data_div.add(text)
    text.add_class("spt_keywords")
    text.add_style("padding: 1px")

    dialog_data_div.add("<br/>"*2)

    # extra_data kwarg may arrive as a dict; serialize for the text area
    extra_data = my.kwargs.get("extra_data")
    if not isinstance(extra_data, basestring):
        extra_data = jsondumps(extra_data)
    dialog_data_div.add("Extra Data (JSON):<br/>")
    text = TextAreaWdg(name="extra_data")
    dialog_data_div.add(text)
    if extra_data != "null":
        text.set_value(extra_data)
    text.add_class("spt_extra_data")
    text.add_style("padding: 1px")

    #### TEST Image options
    """
    button = IconButtonWdg(title="Resize", icon=IconWdg.FILM)
    buttons.add_cell(button)
    dialog = DialogWdg(display="false", show_title=False)
    div.add(dialog)
    dialog.set_as_activator(button, offset={'x':-10,'y':10})
    try:
        from spt.tools.convert import ConvertOptionsWdg
        convert_div = DivWdg()
        dialog.add(convert_div)
        convert_div.add_style("padding: 20px")
        convert_div.add_color("background", "background")
        convert_div.add_class("spt_image_convert")
        convert = ConvertOptionsWdg()
        convert_div.add(convert)
    except:
        pass
    """

    # use base name for name
    """
    name_div = DivWdg()
    dialog_data_div.add(name_div)
    name_div.add_style("margin: 15px 0px")
    checkbox = CheckboxWdg("use_file_name")
    name_div.add(checkbox)
    name_div.add(" Use name of file for name")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("use_base_name")
    name_div.add(checkbox)
    name_div.add(" Remove extension")
    name_div.add("<br/>")
    checkbox = CheckboxWdg("file_keywords")
    name_div.add(checkbox)
    name_div.add(" Use file name for keywords")
    """
    return div
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException( 'The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.' ) input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences( filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException( 'No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]' ) for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException( 'Please modify sequence naming to have at least three digits.' 
) search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % (base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", 
full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|' % input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$' % seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$' % seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count) + 1) / 
len(filenames) * 100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.') input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences(filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException('No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]') for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException('Please modify sequence naming to have at least three digits.') search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % 
(base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|'%input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ 
sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$'%seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$'%seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(my):
    """Check a single dropped file into TACTIC.

    Reads the drop path and project settings from my.kwargs, finds or
    creates the target sobject by file name, checks the file in through
    the local API server (mode='move'), then optionally runs a watch
    script.  On failure a fixed-width error line is appended to
    TACTIC_log.txt in the drop directory, the transaction is rolled
    back and the exception re-raised.
    """
    # --- gather parameters from kwargs ---
    file_path = my.kwargs.get("path")
    project_code = my.kwargs.get("project_code")
    base_dir = my.kwargs.get("base_dir")
    search_type = my.kwargs.get("search_type")
    process = my.kwargs.get("process")
    watch_script_path = my.kwargs.get("script_path")
    if not process:
        process = "publish"

    basename = os.path.basename(file_path)

    # context defaults to "<process>/<basename>" when not supplied
    context = my.kwargs.get("context")
    if not context:
        context = '%s/%s' % (process, basename)

    # find the relative_dir and relative_path
    relative_path = file_path.replace("%s/" % base_dir, "")
    relative_dir = os.path.dirname(relative_path)

    file_name = os.path.basename(file_path)
    # per-drop-folder log recording every check-in attempt
    log_path = '%s/TACTIC_log.txt' % (base_dir)
    my.create_checkin_log()

    # Define asset type of the file
    asset_type = my.get_asset_type(file_path)
    description = "drop folder check-in of %s" % file_name

    from client.tactic_client_lib import TacticServerStub
    server = TacticServerStub.get(protocol='local')
    server.set_project(project_code)

    transaction = Transaction.get(create=True)
    server.start(title='Check-in of media', description='Check-in of media')

    server_return_value = {}
    try:
        # look up an existing sobject by file name only
        filters = [
            ['name', '=', file_name],
            #[ 'relative_dir', '=', relative_dir ]
        ]
        sobj = server.query(search_type, filters=filters, single=True)

        if not sobj:
            # create sobject if it does not yet exist
            sobj = SearchType.create(search_type)
            if SearchType.column_exists(search_type, "name"):
                sobj.set_value("name", basename)

            if SearchType.column_exists(search_type, "media_type"):
                sobj.set_value("media_type", asset_type)

            if SearchType.column_exists(search_type, "relative_dir"):
                sobj.set_value("relative_dir", relative_dir)

            if SearchType.column_exists(search_type, "keywords"):
                # no-op self-assignment kept as-is
                relative_path = relative_path
                keywords = Common.get_keywords_from_path(relative_path)
                keywords = " ".join(keywords)
                sobj.set_value("keywords", keywords)

            sobj.commit()
            search_key = sobj.get_search_key()
        else:
            search_key = sobj.get("__search_key__")

        #task = server.create_task(sobj.get('__search_key__'),process='publish')
        #server.update(task, {'status': 'New'})

        # move the dropped file into the repository
        server_return_value = server.simple_checkin(
            search_key, context, file_path,
            description=description, mode='move')

        if watch_script_path:
            # post check-in hook script
            cmd = PythonCmd(script_path=watch_script_path,
                            search_type=search_type,
                            drop_path=file_path,
                            search_key=search_key)
            cmd.execute()

    except Exception, e:
        print "Error occurred", e
        error_message = str(e)

        import traceback
        tb = sys.exc_info()[2]
        stacktrace = traceback.format_tb(tb)
        stacktrace_str = "".join(stacktrace)
        print "-" * 50
        print stacktrace_str

        version_num = 'Error:'
        system_time = strftime("%Y/%m/%d %H:%M", gmtime())
        # fixed-width log line: name, time, version marker, message, stacktrace
        # NOTE(review): watch_script_path may be None here (the kwarg is
        # optional), in which case this concatenation itself raises
        # TypeError before the rollback below runs -- verify
        pre_log = file_name+(50-len(file_name))*' '+system_time+(33-len(system_time))*' '+version_num+(15-len(version_num))*' ' +error_message+'\n'\
            + stacktrace_str + '\n' + watch_script_path

        # Write data into TACTIC_log file under /tmp/drop
        f = open(log_path, 'a')
        f.write(pre_log)
        f.close()

        #server.abort()
        transaction.rollback()
        raise
def get_relationship_attrs(self, search_type, search_type2, path=None, cache=True, type=None):
    """Return the schema connect attributes relating two search types.

    Searches this schema's XML for a <connect> node matching the pair
    (in either direction, or with a '*' wildcard target), falling back
    to the parent schema and then the sthpw schema when no explicit
    connect is found.  The returned dict is the connect node's
    attributes, augmented with default from_col/to_col values and a
    'disabled' flag when the referenced columns do not exist.  Results
    are memoized per-request in the Container under
    "Schema:relationship" when cache=True.

    @params:
        search_type  - first search type (may carry a '?...' suffix)
        search_type2 - second search type (may carry a '?...' suffix)
        path         - optional connect path to select among multiple connects
        cache        - when True, read/write the Container-level cache
        type         - optional connect type to filter on
    @return: dict of connect attributes (empty dict when no relationship)
    """
    if cache:
        # request-scoped memoization keyed on all arguments
        key = "Schema:%s|%s|%s|%s" % (search_type, search_type2, path, type)
        attrs_dict = Container.get("Schema:relationship")
        if attrs_dict == None:
            attrs_dict = {}
            Container.put("Schema:relationship", attrs_dict)
        attrs = attrs_dict.get(key)
        if attrs != None:
            return attrs

    # Need to remove ? and get the base
    if search_type.find("?") != -1:
        parts = search_type.split("?")
        search_type = parts[0]
    if search_type2.find("?") != -1:
        parts = search_type2.split("?")
        search_type2 = parts[0]

    direction = 'forward'

    # find all the connects with the first search_type
    connect = None

    # assemble all of the connects: exact pairs first (both directions),
    # then wildcard '*' targets; the first match wins
    xpaths = []
    if type:
        xpath = "schema/connect[@from='%s' and @to='%s' and @type='%s']" %(search_type, search_type2, type)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='%s' and @type='%s']" %(search_type2, search_type, type)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='*' and @type='%s']" %(search_type, type)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='*' and @type='%s']" %(search_type2, type)
        xpaths.append( xpath )
    else:
        xpath = "schema/connect[@from='%s' and @to='%s']" %(search_type, search_type2)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='%s']" %(search_type2, search_type)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='*']" %(search_type)
        xpaths.append( xpath )
        xpath = "schema/connect[@from='%s' and @to='*']" %(search_type2)
        xpaths.append( xpath )

    # ExitLoop is used to break out of the nested loops on first match
    try:
        if path:
            for xpath in xpaths:
                # if a path is specified then use that
                connects = self.xml.get_nodes(xpath)
                for conn in connects:
                    # at some odd times, the cached value is None
                    if conn is None:
                        continue
                    conn_path = self.xml.get_attribute(conn, "path")
                    if conn_path == path:
                        connect = conn
                        raise ExitLoop
        else:
            for xpath in xpaths:
                connects = self.xml.get_nodes(xpath)
                for conn in connects:
                    # at some odd times, the cached value is None
                    if conn is None:
                        continue
                    # skip connects bound to an explicit path when no
                    # path was requested
                    conn_path = self.xml.get_attribute(conn, "path")
                    if conn_path:
                        continue
                    # this check is not really needed
                    #if conn is not None:
                    connect = conn
                    raise ExitLoop
    except ExitLoop:
        pass

    if connect is not None:
        # direction records which argument matched the connect's "from"
        if self.xml.get_attribute(connect, "from") == search_type:
            direction = 'forward'
        else:
            direction = 'backward'

    # since we are adding keys like 'disabled' below, processed is a boolean
    # to indicate if a found attrs dict has already been processed once
    # thru recursive running of the current method
    processed = False

    # if no explicit relationship is defined, find it in the parents
    if connect == None:
        if self.parent_schema:
            attrs = self.parent_schema.get_relationship_attrs(search_type, search_type2, path=path, cache=False, type=type)
            if not attrs:
                attrs = self.sthpw_schema.get_relationship_attrs(search_type, search_type2, path=path, cache=False, type=type)
            processed = True
        else:
            if self.sthpw_schema:
                attrs = self.sthpw_schema.get_relationship_attrs(search_type, search_type2, path=path, cache=False, type=type)
                processed = True
            else:
                attrs = {}
    else:
        attrs = self.xml.get_attributes(connect)

    if processed:
        return attrs

    relationship = attrs.get('relationship')

    # backwards compatibility mapping
    if not relationship:
        if attrs.get("type") == "hierarchy":
            attrs['relationship'] = 'code'
            relationship = 'code'

    # a_search_type holds the connect's "from" side, b the "to" side
    if direction == 'forward':
        a_search_type = search_type
        b_search_type = search_type2
    else:
        a_search_type = search_type2
        b_search_type = search_type

    # fill in some defaults
    from_col = attrs.get('from_col')
    to_col = attrs.get('to_col')

    if relationship == 'id':
        a_search_type_obj = SearchType.get(a_search_type)
        b_search_type_obj = SearchType.get(b_search_type)
        if not from_col:
            table = b_search_type_obj.get_table()
            attrs['from_col'] = '%s_id' % table
            # NOTE(review): unlike the 'code' branch below, the local
            # from_col is not refreshed here, so the column_exists check
            # a few lines down may be testing None -- verify intent
        if not to_col:
            attrs['to_col'] = 'id'
            to_col = 'id'

        # disable the relationship when either side's column is missing
        if not SearchType.column_exists(b_search_type, to_col):
            attrs['disabled'] = True
        if not SearchType.column_exists(a_search_type, from_col):
            attrs['disabled'] = True

    elif relationship in ['code']:
        a_search_type_obj = SearchType.get(a_search_type)
        b_search_type_obj = SearchType.get(b_search_type)
        if not from_col:
            table = b_search_type_obj.get_table()
            from_col = '%s_code' % table
            attrs['from_col'] = from_col
        if not to_col:
            attrs['to_col'] = 'code'
            to_col = 'code'

        if not SearchType.column_exists(b_search_type, to_col):
            attrs['disabled'] = True
        if not SearchType.column_exists(a_search_type, from_col):
            attrs['disabled'] = True

    elif relationship in ['instance']:
        #i_search_type = self.xml.get_attribute(conn, "instance")
        #i_search_type_obj = SearchType.get(i_search_type)
        a_search_type_obj = SearchType.get(a_search_type)
        b_search_type_obj = SearchType.get(b_search_type)
        # instance relationships join code-to-code by default
        if not from_col:
            attrs['from_col'] = 'code'
            from_col = 'code'
        if not to_col:
            attrs['to_col'] = 'code'
            to_col = 'code'

        if not SearchType.column_exists(b_search_type, to_col):
            attrs['disabled'] = True
        if not SearchType.column_exists(a_search_type, from_col):
            attrs['disabled'] = True

    # store
    if cache:
        attrs_dict[key] = attrs

    return attrs
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException( 'The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.' 
) input_prefix = update_data.get('input_prefix') for count, filename in enumerate(filenames): # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobject = search.get_sobject() # else create a new one if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) file_path = "%s/%s" % (base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in 
update_data.items(): if input_prefix: key = key.replace('%s|' % input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename) server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count) + 1) / len(filenames) * 100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(self): file_path = self.kwargs.get("path") site = self.kwargs.get("site") project_code = self.kwargs.get("project_code") base_dir = self.kwargs.get("base_dir") search_type = self.kwargs.get("search_type") process = self.kwargs.get("process") watch_script_path = self.kwargs.get("script_path") if not process: process = "publish" basename = os.path.basename(file_path) context = self.kwargs.get("context") if not context: context = '%s/%s' % (process, basename) # find the relative_dir and relative_path relative_path = file_path.replace("%s/" % base_dir, "") relative_dir = os.path.dirname(relative_path) file_name = os.path.basename(file_path) log_path = '%s/TACTIC_log.txt' % (base_dir) self.create_checkin_log() # Define asset type of the file asset_type = self.get_asset_type(file_path) description = "drop folder check-in of %s" % file_name from client.tactic_client_lib import TacticServerStub server = TacticServerStub.get(protocol='local') server.set_project(project_code) transaction = Transaction.get(create=True) server.start(title='Check-in of media', description='Check-in of media') server_return_value = {} try: filters = [ ['name', '=', file_name], #[ 'relative_dir', '=', relative_dir ] ] sobj = server.query(search_type, filters=filters, single=True) if not sobj: # create sobject if it does not yet exist sobj = SearchType.create(search_type) if SearchType.column_exists(search_type, "name"): sobj.set_value("name", basename) if SearchType.column_exists(search_type, "media_type"): sobj.set_value("media_type", asset_type) if SearchType.column_exists(search_type, "relative_dir"): sobj.set_value("relative_dir", relative_dir) if SearchType.column_exists(search_type, "keywords"): relative_path = relative_path keywords = Common.extract_keywords_from_path(relative_path) keywords = " ".join(keywords) sobj.set_value("keywords", keywords) sobj.commit() search_key = sobj.get_search_key() else: search_key = sobj.get("__search_key__") #task = 
server.create_task(sobj.get('__search_key__'),process='publish') #server.update(task, {'status': 'New'}) """ #TEST: simulate different check-in duration from random import randint sec = randint(1, 5) print "checking in for ", sec, "sec" server.eval("@SOBJECT(sthpw/login)") import shutil dir_name,base_name = os.path.split(file_path) dest_dir = 'C:/ProgramData/Southpaw/watch_temp' if not os.path.exists(dest_dir): os.makedirs(dest_dir) shutil.move(file_path, '%s/%s'%(dest_dir, base_name)) time.sleep(sec) # move back the file in a few seconds shutil.move('%s/%s'%(dest_dir, base_name), file_path) """ server_return_value = server.simple_checkin( search_key, context, file_path, description=description, mode='move') if watch_script_path: cmd = PythonCmd(script_path=watch_script_path, search_type=search_type, drop_path=file_path, search_key=search_key) cmd.execute() except Exception as e: print "Error occurred", e error_message = str(e) import traceback tb = sys.exc_info()[2] stacktrace = traceback.format_tb(tb) stacktrace_str = "".join(stacktrace) print "-" * 50 print stacktrace_str version_num = 'Error:' system_time = strftime("%Y/%m/%d %H:%M", gmtime()) pre_log=file_name+(50-len(file_name))*' '+system_time+(33-len(system_time))*' '+version_num+(15-len(version_num))*' ' +error_message+'\n'\ + stacktrace_str + '\n' + watch_script_path # Write data into TACTIC_log file under /tmp/drop f = open(log_path, 'a') f.write(pre_log) f.close() #server.abort() transaction.rollback() raise else: transaction.commit() #server.finish() if server_return_value: # Create the TACTIC_log file to record every check-in. 
# Search for all required data checkin_time = server_return_value.get('timestamp') version_nu = server_return_value.get('version') version_num = str(version_nu) try: value = parser.parse(checkin_time) value = value.strftime("%Y/%m/%d %H:%M") except: value = checkin_time pre_log = file_name + (50 - len(file_name)) * ' ' + value + ( 33 - len(value)) * ' ' + version_num + ( 15 - len(version_num)) * ' ' + 'ok\n' # Write data into TACTIC_log file under /tmp/drop f = open(log_path, 'a') f.write(pre_log) f.close() # Invoke Trigger:called_triggers from pyasm.command import Trigger Trigger.call_all_triggers() # Delete the source file after check-in step. print "File handled." if os.path.exists(file_path): if os.path.isdir(file_path): os.rmdirs(file_path) else: os.unlink(file_path) print "Source file [%s] deleted: " % file_name
def get_display(self):
    """Render this table cell's value as a widget (or plain value).

    Resolves the column to display (kwargs 'column' overrides the
    element name), formats the raw value by column type (wiki-converted
    text, user-timezone timestamps, stringified non-numerics), and wraps
    it in a DivWdg with update/link behavior when the column really
    exists on the sobject.  Returns a placeholder "--Select--" div for
    empty editable cells, a raw Widget value unchanged, or the bare
    value when no sobject/column applies.
    """
    sobject = self.get_current_sobject()

    # an explicit 'column' kwarg overrides the element name
    column = self.kwargs.get('column')
    if column:
        name = column
    else:
        name = self.get_name()

    value = self.get_value(name=name)

    # empty editable cells render a centered "--Select--" placeholder
    empty = self.get_option("empty")
    if empty and self.is_editable() and not value:
        # NOTE(review): SpanWdg is imported here but unused -- verify
        from pyasm.web import SpanWdg
        div = DivWdg()
        div.add_style("text-align: center")
        div.add_style("width: 100%")
        div.add_style("white-space: nowrap" )
        if empty in [True, 'true']:
            div.add("--Select--")
            div.add_style("opacity: 0.5")
        return div

    if sobject:
        data_type = SearchType.get_column_type(sobject.get_search_type(), name)
    else:
        data_type = 'text'

    # wiki-convert string values (py2: str or unicode)
    if type(value) in types.StringTypes:
        wiki = WikiUtil()
        value = wiki.convert(value)

    if name == 'id' and value == -1:
        # -1 id marks an uncommitted sobject; show nothing
        value = ''

    elif data_type in ["timestamp","time"] or name == "timestamp":
        if value == 'now':
            value = ''
        elif value:
            # This date is assumed to be GMT
            date = parser.parse(value)

            # convert to user timezone (day-level columns stay as-is)
            if not SObject.is_day_column(name):
                date = self.get_timezone_value(date)

            try:
                # decode with the user's locale when possible
                encoding = locale.getlocale()[1]
                value = date.strftime("%b %d, %Y - %H:%M").decode(encoding)
            except:
                value = date.strftime("%b %d, %Y - %H:%M")
        else:
            value = ''

    else:
        if isinstance(value, Widget):
            # already a widget: pass it through untouched
            return value
        elif not isinstance(value, basestring):
            # stringify anything non-numeric (numbers pass the + 1 probe)
            try:
                value + 1
            except TypeError:
                value = str(value)
            #else:
            #    value_wdg.add_style("float: right")
            #    value_wdg.add_style("padding-right: 3px")

    # wrap in a div only when the column truly exists on the sobject
    if sobject and SearchType.column_exists(sobject.get_search_type(), name):
        value_wdg = DivWdg()
        self.add_value_update(value_wdg, sobject, name)
        # don't call str() to prevent utf-8 encode error
        value_wdg.add(value)
        value_wdg.add_style("overflow-x: hidden")
        value_wdg.add_style("text-overflow: ellipsis")

        # some properties
        min_height = 25
        value_wdg.add_style("min-height: %spx" % min_height)

        single_line = self.get_option("single_line") or False
        if single_line in ["true", True]:
            value_wdg.add_style("line-height: %spx" % min_height)
            value_wdg.add_style("white-space: nowrap")
            #value_wdg.add_style("overflow-y: hidden")

        #value_wdg.add_class("spt_scrollable")
        #value_wdg.add_attr("title", value)

        # optional click-through link to the sobject detail tab
        link_expression = self.get_option("link_expression")
        if link_expression:
            value_wdg.add_class("tactic_new_tab")
            value_wdg.add_style("display: inline-block")
            value_wdg.add_attr("search_key", sobject.get_search_key())
            value_wdg.add_style("text-decoration: underline")
            #value_wdg.add_attr("spt_class_name", "tactic.ui.tools.SObjectDetailWdg")
            value_wdg.add_class("hand")

        return value_wdg

    return value