def execute(my):
    """Generate an icon for an uploaded file and copy it into the
    per-ticket temp area under the asset directory.

    On success my.info holds the icon's web path and library path;
    otherwise my.info is left empty.
    """
    file_name = my.kwargs.get("filename")
    ticket = my.kwargs.get("ticket")

    # can't rely on Environment.get_ticket() here, so the ticket is
    # passed in explicitly through kwargs
    #ticket = Environment.get_ticket()
    upload_dir = Environment.get_upload_dir(ticket=ticket)

    temp_dir = "%s/temp/%s" % (Environment.get_asset_dir(), ticket)
    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)

    source_path = "%s/%s" % (upload_dir, file_name)
    creator = IconCreator(source_path)
    creator.execute()

    generated_icon = creator.get_icon_path()
    dest_path = "%s/%s" % (temp_dir, file_name)

    if not generated_icon:
        my.info = {}
        return

    shutil.copy(generated_icon, dest_path)
    my.info = {
        "web_path": "/assets/temp/%s/%s" % (ticket, file_name),
        "lib_path": dest_path
    }
def execute(my):
    """Install an uploaded license file as the active TACTIC license
    and force the security layer to re-read it."""
    web = my.get_web()
    form_keys = web.get_form_keys()
    file_name = my.kwargs.get("file_name")

    # locate the uploaded license file
    upload_dir = Environment.get_upload_dir()
    license_file = "%s/%s" % (upload_dir, file_name)
    if not os.path.exists(license_file):
        raise TacticException("Error retrieving the license file in [%s]" % license_file)

    # the installed copy always uses the standard name; the upload
    # itself may be named anything
    std_name = 'tactic-license.xml'
    head, file_name = os.path.split(license_file)

    # no restrictions for license file
    #if file_name != std_name:
    #    raise TacticException("License file name should be named tactic-license.xml. The file given is [%s]" %file_name)

    license_dir = Environment.get_license_dir()
    current_license = "%s/%s" % (license_dir, std_name)

    # replace any previously installed license (undoable operations)
    if os.path.exists(current_license):
        FileUndo.remove(current_license)
    FileUndo.move(license_file, current_license)

    my.add_description('Renewed license file')

    security = Environment.get_security()
    security.reread_license()
def execute(my):
    """Re-import transaction logs from an uploaded text file.

    The uploaded file contains transaction XML blocks separated by
    blank lines; each block is recreated as an sthpw/transaction_log
    entry.
    """
    import os

    path = my.kwargs.get("path")
    path = path.replace("\\", "/")
    basename = os.path.basename(path)

    upload_dir = Environment.get_upload_dir()
    path = "%s/%s" % (upload_dir, basename)

    # split the file into transaction blocks on blank lines
    transactions = []
    xml = []
    f = open(path, 'r')
    try:
        for line in f:
            if line == '\n':
                transactions.append(xml)
                xml = []
                continue
            xml.append(line.strip())
    finally:
        # BUG FIX: the file handle was never closed
        f.close()
    transactions.append(xml)

    for transaction in transactions:
        value = "\n".join(transaction)

        # we have the transaction
        # recreate the log
        # BUG FIX: was "transacton_log" on create but "transaction_log"
        # on set_value (NameError at runtime), and stored the raw line
        # list instead of the joined XML string
        transaction_log = SearchType.create("sthpw/transaction_log")
        transaction_log.set_value("transaction", value)

        # NOTE(review): only the first transaction is processed; the
        # original code breaks here -- left unchanged to preserve intent
        break
def execute(my): from pyasm.common import ZipUtil ziputil = ZipUtil() paths = my.kwargs.get("paths") upload_dir = Environment.get_upload_dir() template_dir = Environment.get_template_dir() for path in paths: path = path.replace("\\", "/") basename = os.path.basename(path) upload_path = "%s/%s" % (upload_dir, basename) if not upload_path.endswith(".zip"): continue print "upload: ", upload_path if not os.path.exists(upload_path): continue print "template_dir: ", template_dir shutil.move(upload_path, template_dir) to_path = "%s/%s" % (template_dir, basename) # unzip the file ziputil.extract(to_path)
def execute(self):
    """Install an uploaded license file as the active TACTIC license
    and force the security layer to re-read it."""
    web = self.get_web()
    form_keys = web.get_form_keys()
    file_name = self.kwargs.get("file_name")

    # locate the uploaded license file
    upload_dir = Environment.get_upload_dir()
    license_file = "%s/%s" % (upload_dir, file_name)
    if not os.path.exists(license_file):
        raise TacticException("Error retrieving the license file in [%s]" % license_file)

    # the installed copy always uses the standard name; the upload
    # itself may be named anything
    std_name = 'tactic-license.xml'
    head, file_name = os.path.split(license_file)

    # no restrictions for license file
    #if file_name != std_name:
    #    raise TacticException("License file name should be named tactic-license.xml. The file given is [%s]" %file_name)

    license_dir = Environment.get_license_dir()
    current_license = "%s/%s" % (license_dir, std_name)

    # replace any previously installed license (undoable operations)
    if os.path.exists(current_license):
        FileUndo.remove(current_license)
    FileUndo.move(license_file, current_license)

    self.add_description('Renewed license file')

    security = Environment.get_security()
    security.reread_license()
def execute(self): from pyasm.common import ZipUtil ziputil = ZipUtil() paths = self.kwargs.get("paths") upload_dir = Environment.get_upload_dir() template_dir = Environment.get_template_dir() for path in paths: path = path.replace("\\", "/") basename = os.path.basename(path) upload_path = "%s/%s" % (upload_dir, basename) if not upload_path.endswith(".zip"): continue print "upload: ", upload_path if not os.path.exists(upload_path): continue print "template_dir: ", template_dir shutil.move(upload_path, template_dir) to_path = "%s/%s" % (template_dir, basename) # unzip the file ziputil.extract(to_path)
def handle_path(my, src_path): src_path = src_path.replace("\\", "/") # upload folder basename = os.path.basename(src_path) if my.mode =='copy': target_path = src_path target_dir = os.path.dirname(target_path) else: target_dir = Environment.get_upload_dir() target_path = "%s/%s" % (target_dir, basename) base_dir = Environment.get_template_dir() template_dir = "%s/%s" % (base_dir, my.project_code) if os.path.exists(template_dir): shutil.rmtree(template_dir) #raise TacticException("Template is already installed at [%s]" %template_dir) # unzip the file from pyasm.common import ZipUtil # this is fixed for windows if zipping doesn't use compression paths = ZipUtil.extract(target_path) # veryify that the paths extracted are the expected ones rootname, ext = os.path.splitext(basename) # check if it unzips at the templates folder directly unzip_at_template_dir = False # move the plugin zip file to the appropriate folder if my.mode == 'copy': # if they manually drop the zip file already here, skip if target_dir != base_dir: shutil.copy(target_path, base_dir) else: unzip_at_template_dir = True else: shutil.move(target_path, base_dir) # move unzipped files into the plugin area # remove any version info, only allow 1 particular version installed for now import re rootname = re.sub('(.*)(-)(\d.*)', r'\1', rootname) unzip_path = "%s/%s" % (target_dir, rootname) dest_dir = '%s/%s'%(base_dir, rootname) if not unzip_at_template_dir and os.path.exists(dest_dir): shutil.rmtree(dest_dir) shutil.move(unzip_path, dest_dir)
def handle_path(my, src_path):
    """Install the template archive at src_path into the template area."""
    src_path = src_path.replace("\\", "/")

    archive_name = os.path.basename(src_path)

    # 'copy' mode uses the zip where it sits; otherwise it comes from
    # the upload folder
    if my.mode == 'copy':
        target_path = src_path
        target_dir = os.path.dirname(target_path)
    else:
        target_dir = Environment.get_upload_dir()
        target_path = "%s/%s" % (target_dir, archive_name)

    base_dir = Environment.get_template_dir()
    template_dir = "%s/%s" % (base_dir, my.project_code)
    if os.path.exists(template_dir):
        shutil.rmtree(template_dir)
        #raise TacticException("Template is already installed at [%s]" %template_dir)

    # unzip the file
    # this is fixed for windows if zipping doesn't use compression
    from pyasm.common import ZipUtil
    extracted_paths = ZipUtil.extract(target_path)

    rootname, ext = os.path.splitext(archive_name)

    # does the zip already live at the templates folder?
    at_template_dir = False

    # move the plugin zip file to the appropriate folder
    if my.mode == 'copy':
        if target_dir == base_dir:
            # the zip was manually dropped in place: nothing to copy
            at_template_dir = True
        else:
            shutil.copy(target_path, base_dir)
    else:
        shutil.move(target_path, base_dir)

    # move unzipped files into the plugin area, stripping any version
    # suffix -- only one installed version is allowed for now
    import re
    rootname = re.sub('(.*)(-)(\d.*)', r'\1', rootname)
    unzip_path = "%s/%s" % (target_dir, rootname)
    dest_dir = '%s/%s' % (base_dir, rootname)
    if not at_template_dir and os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    shutil.move(unzip_path, dest_dir)
def execute(self):
    """Extract an uploaded transaction-session zip, install it via
    PluginInstaller, then replay every transaction log and queue it for
    propagation to remote machines."""
    import os

    raw_path = self.kwargs.get("path").replace("\\", "/")
    zip_name = os.path.basename(raw_path)

    upload_dir = Environment.get_upload_dir()
    zip_path = "%s/%s" % (upload_dir, zip_name)
    paths = ZipUtil.extract(zip_path)

    # TODO: why do we need to read the manifest here?
    # ... should be automatic
    manifest_path = "%s/transaction_log/manifest.xml" % upload_dir
    if not os.path.exists(manifest_path):
        raise TacticException("Cannot find manifest file [%s]" % manifest_path)

    manifest_file = codecs.open(manifest_path, 'r', 'utf-8')
    manifest_xml = manifest_file.read()
    manifest_file.close()

    installer = PluginInstaller(base_dir=upload_dir, manifest=manifest_xml)
    installer.execute()

    # run the transactions picked up by the installer
    for log in installer.get_jobs():
        transaction_xml = log.get_value("transaction")
        run_cmd = RunTransactionCmd(transaction_xml=transaction_xml)
        run_cmd.execute()

        # This is needed here because, normally, RunTransactionCmd is
        # run by a sync, so it blocks further syncs.  When a transaction
        # session is installed, we need to propagate it to the other
        # machines explicitly.
        queue_cmd = TransactionQueueAppendCmd()
        queue_cmd.input = {
            'search_type': 'sthpw/transaction_log',
            'sobject': log
        }
        queue_cmd.execute()
def execute(self):
    """Re-import transaction logs from an uploaded text file.

    The uploaded file contains transaction XML blocks separated by
    blank lines; each block is recreated as an sthpw/transaction_log
    entry.
    """
    import os

    path = self.kwargs.get("path")
    path = path.replace("\\", "/")
    basename = os.path.basename(path)

    upload_dir = Environment.get_upload_dir()
    path = "%s/%s" % (upload_dir, basename)

    # split the file into transaction blocks on blank lines
    transactions = []
    xml = []
    f = open(path, 'r')
    try:
        for line in f:
            if line == '\n':
                transactions.append(xml)
                xml = []
                continue
            xml.append(line.strip())
    finally:
        # BUG FIX: the file handle was never closed
        f.close()
    transactions.append(xml)

    for transaction in transactions:
        value = "\n".join(transaction)

        # we have the transaction
        # recreate the log
        # BUG FIX: was "transacton_log" on create but "transaction_log"
        # on set_value (NameError at runtime), and stored the raw line
        # list instead of the joined XML string
        transaction_log = SearchType.create("sthpw/transaction_log")
        transaction_log.set_value("transaction", value)

        # NOTE(review): only the first transaction is processed; the
        # original code breaks here -- left unchanged to preserve intent
        break
def __init__(my, **kwargs):
    """Resolve the plugin definition from one of several sources.

    Exactly one of the following kwargs selects the source (checked in
    this order): zip_path, upload_file_name, relative_dir, plugin_dir,
    search_key, manifest, code.  After this constructor runs, my.code,
    my.version, my.plugin_dir and my.manifest describe the plugin.
    """
    super(PluginBase,my).__init__(**kwargs)

    # plugin sobject (Not really used anymore?)
    my.search_key = my.kwargs.get("search_key")

    zip_path = my.kwargs.get("zip_path")
    upload_file_name = my.kwargs.get("upload_file_name")

    my.base_dir = my.kwargs.get("base_dir")
    my.plugin_dir = my.kwargs.get("plugin_dir")
    my.manifest = my.kwargs.get("manifest")
    my.code = my.kwargs.get("code")
    my.version = my.kwargs.get("version")

    relative_dir = my.kwargs.get("relative_dir")

    # verbose defaults to True unless explicitly disabled
    my.verbose = my.kwargs.get("verbose") not in [False, 'false']

    # at the end of this, the following variables are needed in order to
    # define the plugin
    #
    # version: the version of the plugin
    # plugin_dir: the directory where the plugin definition is located
    # manifest: the description of what is in the plugin

    if zip_path:
        # assume the zip path is the same as the basename
        basename = os.path.basename(zip_path)
        basename, ext = os.path.splitext(basename)
        assert ext == '.zip'

        tmp_dir = Environment.get_tmp_dir()
        unzip_dir = "%s/%s" % (tmp_dir, basename)
        if os.path.exists(unzip_dir):
            shutil.rmtree(unzip_dir)

        # unzip the file in to the tmp_dir or plugin_dir (for install)
        zip_util = ZipUtil()
        zip_util.extract(zip_path, base_dir=tmp_dir)

        # assume zip path
        my.plugin_dir, ext = os.path.splitext(zip_path)

        # mv from temp
        if my.plugin_dir != unzip_dir:
            if os.path.exists(my.plugin_dir):
                shutil.rmtree(my.plugin_dir)
            shutil.move(unzip_dir, my.plugin_dir)

        manifest_path = "%s/manifest.xml" % my.plugin_dir
        f = open(manifest_path, 'r')
        my.manifest = f.read()
        f.close()

    elif upload_file_name:
        # The path is moved to the plugin dir, if this process is taking
        # "local" file (such as one uploaded)
        upload_dir = Environment.get_upload_dir()
        upload_path = "%s/%s" % (upload_dir, upload_file_name)
        plugin_base_dir = Environment.get_plugin_dir()
        dist_dir = Environment.get_dist_dir()
        if not os.path.exists(dist_dir):
            os.makedirs(dist_dir)

        basename = os.path.basename(upload_path)

        #if os.path.exists("%s/%s" % (plugin_base_dir, basename)):
        #    os.unlink("%s/%s" % (plugin_base_dir, basename) )
        #shutil.move(upload_path, plugin_base_dir)

        # copy to dist folder
        if os.path.exists("%s/%s" % (dist_dir, basename)):
            os.unlink("%s/%s" % (dist_dir, basename) )
        shutil.move(upload_path, dist_dir)

        zip_path = "%s/%s" % (dist_dir, upload_file_name)

        zip_util = ZipUtil()
        zip_util.extract(zip_path, base_dir=plugin_base_dir)

        # strip the ".zip" extension to get the plugin directory
        my.plugin_dir = "%s/%s" % (plugin_base_dir, basename)
        my.plugin_dir = my.plugin_dir[:-4]

        manifest_path = "%s/manifest.xml" % (my.plugin_dir)
        if os.path.exists(manifest_path):
            f = open(manifest_path, 'r')
            my.manifest = f.read()
            f.close()
        else:
            # when uploading, this will likely not be needed
            my.manifest = "<manifest/>"
        # NOTE: upload path returns early and skips the code/version
        # resolution below
        return

    elif relative_dir:
        # look in the user plugin dir first, falling back to the
        # builtin plugin dir
        plugin_base_dir = Environment.get_plugin_dir()
        my.plugin_dir = "%s/%s" % (plugin_base_dir, relative_dir)
        manifest_path = "%s/manifest.xml" % my.plugin_dir
        if not os.path.exists(manifest_path):
            plugin_base_dir = Environment.get_builtin_plugin_dir()
            my.plugin_dir = "%s/%s" % (plugin_base_dir, relative_dir)
            manifest_path = "%s/manifest.xml" % my.plugin_dir
        f = open(manifest_path, 'r')
        my.manifest = f.read()
        f.close()

    elif my.plugin_dir:
        manifest_path = "%s/manifest.xml" % (my.plugin_dir)
        f = open(manifest_path, 'r')
        my.manifest = f.read()
        f.close()

    # get the plugin sobject
    elif my.search_key:
        plugin = SearchKey.get_by_search_key(my.search_key)
        my.manifest = plugin.get_value("manifest")
        my.code = plugin.get_code()
        my.version = plugin.get_value("version")

    elif my.manifest:
        # everything is extracted from the manifest later
        pass

    elif my.code:
        search = Search("config/plugin")
        search.add_filter("code", my.code)
        plugin = search.get_sobject()
        # In case there is extra plugins folder which is the case when the user
        # is developing.
        relative_dir = plugin.get_value("rel_dir")
        plugin_base_dir = Environment.get_plugin_dir()
        my.plugin_dir = "%s/%s" % (plugin_base_dir, relative_dir)

        # TODO: fix the ZipUtil.zip_dir()
        manifest_path = "%s/manifest.xml" % my.plugin_dir
        if not os.path.exists(manifest_path):
            plugin_base_dir = Environment.get_builtin_plugin_dir()
            my.plugin_dir = "%s/%s" % (plugin_base_dir, relative_dir)
            manifest_path = "%s/manifest.xml" % my.plugin_dir

        if os.path.exists(manifest_path):
            f = open(manifest_path, 'r')
            my.manifest = f.read()
            f.close()
        else:
            # this condition happens likely for a versioned installed plugin from a zip file
            # where it starts with an extra folder "plugins" and the rel_dir has not been recorded properly
            my.manifest = plugin.get_value("manifest")
            my.code = plugin.get_code()
            my.version = plugin.get_value("version")

    else:
        raise Exception("No plugin found")

    # assertions
    assert my.manifest

    # read the xml
    my.xml = Xml()
    my.xml.read_string(my.manifest)

    # if code is passed in, then use that.
    if not my.code:
        my.code = my.xml.get_value("manifest/data/code")
        # old implementation
        if not my.code:
            my.code = my.xml.get_value("manifest/@code")
    if not my.version:
        my.version = my.xml.get_value("manifest/data/version")

    assert my.code

    # builtin TACTIC plugins live in a separate base directory
    if not my.base_dir:
        if my.code.startswith("TACTIC"):
            my.base_dir = Environment.get_builtin_plugin_dir()
        else:
            my.base_dir = Environment.get_plugin_dir()

    # set the base directory for this particular plugin
    if not my.plugin_dir:
        if my.version:
            my.plugin_dir = "%s/%s-%s" % (my.base_dir, my.code, my.version)
        else:
            my.plugin_dir = "%s/%s" % (my.base_dir, my.code)
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.') input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences(filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException('No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]') for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException('Please modify sequence naming to have at least three digits.') search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % 
(base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|'%input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ 
sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$'%seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$'%seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException( 'The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.' 
) input_prefix = update_data.get('input_prefix') for count, filename in enumerate(filenames): # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobject = search.get_sobject() # else create a new one if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) file_path = "%s/%s" % (base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in 
update_data.items(): if input_prefix: key = key.replace('%s|' % input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename) server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count) + 1) / len(filenames) * 100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def main(server=None, input=None): """ The main function of the custom script. The entire script was copied and pasted into the body of the try statement in order to add some error handling. It's all legacy code, so edit with caution. :param server: the TacticServerStub object :param input: a dict with data like like search_key, search_type, sobject, and update_data :return: None """ if not input: input = {} try: # CUSTOM_SCRIPT00074 # Matthew Tyler Misenhimer # This is run when a new file is uploaded to an object or a note (notes handled differently) # Do Not send external if location is internal def make_right_code_ending(sid): ending = str(sid) ending_len = len(ending) if ending_len < 5: zeros = 5 - ending_len for num in range (0, zeros): ending = '0%s' % ending return ending def make_note_code_ending(sid): ending = str(sid) ending_len = len(ending) if ending_len < 8: zeros = 8 - ending_len for num in range (0, zeros): ending = '0%s' % ending return ending def make_timestamp(): import datetime now = datetime.datetime.now() return now.strftime("%Y-%m-%d %H:%M:%S") def get_time_date_dict(str_time): pre_split = str_time.split('.')[0] first_split = pre_split.split(' ') date = first_split[0] time = first_split[1] date_split = date.split('-') dt = {} dt['year'] = int(date_split[0]) dt['month'] = int(date_split[1]) dt['day'] = int(date_split[2]) dt['date'] = date dt['time'] = time time_split = time.split(':') dt['hour'] = int(time_split[0]) dt['minute'] = int(time_split[1]) dt['second'] = int(time_split[2]) dt['big_time'] = float((dt['hour'] * 3600) + (dt['minute'] * 60) + dt['second']) return dt def compare_dt_dicts(dt1, dt2): # This is rough. Don't use it for anything else. Should work for this though. # This is to see the difference in seconds between two special date&time dicts. 
# Result in first position in return array will be 1 if the first passed in variable is older, 0 if it is newer than the second passed in difference = 0 newest = -1 dt1_bignum = float(float((dt1['year'] - 2000) * 365 * 24 * 3600) + float((dt1['month'] - 1) * 31 * 24 * 3600) + float((dt1['day'] - 1) * 24 * 3600) + dt1['big_time']) dt2_bignum = float(float((dt2['year'] - 2000) * 365 * 24 * 3600) + float((dt2['month'] - 1) * 31 * 24 * 3600) + float((dt2['day'] - 1) * 24 * 3600) + dt2['big_time']) difference = dt2_bignum - dt1_bignum if difference < 0: newest = 0 else: newest = 1 return [difference, newest] def fix_date(date): #This is needed due to the way Tactic deals with dates (using timezone info), post v4.0 from pyasm.common import SPTDate return_date = '' date_obj = SPTDate.convert_to_local(date) if date_obj not in [None,'']: return_date = date_obj.strftime("%Y-%m-%d %H:%M") return return_date def fix_note_chars(note): import sys from json import dumps as jsondumps if sys.stdout.encoding: note = note.decode(sys.stdout.encoding) note = jsondumps(note) note = note.replace('||t',' ') note = note.replace('\\\\t',' ') note = note.replace('\\\t',' ') note = note.replace('\\t',' ') note = note.replace('\t',' ') note = note.replace('\\"','"') note = note.replace('\"','"') note = note.replace('||n','<br/>') note = note.replace('\\\\n','<br/>') note = note.replace('\\\n','<br/>') note = note.replace('\\n','<br/>') note = note.replace('\n','<br/>') return note import os from pyasm.common import Environment allow_client_emails = True sobject = input.get('sobject') parent_type_solid = sobject.get('search_type') parent_type = sobject.get('search_type').split('?')[0] find_str = parent_type.split('/')[1].upper().split('?')[0] file_name = sobject.get('file_name') #.replace(' ','_') #This only handles files that were attached to notes or orders if '_web.' not in file_name and '_icon.' 
not in file_name and 'icon' not in sobject.get('checkin_dir'): #see if the file was attached to a note, if not, see if it was connected to an order note_expr = "@SOBJECT(sthpw/snapshot['search_id','%s']['search_type','sthpw/note']['@ORDER_BY','timestamp asc'])" % sobject.get('search_id') snapshots = server.eval(note_expr) if len(snapshots) == 0: order_expr = "@SOBJECT(sthpw/snapshot['search_id','%s']['search_type','twog/order?project=twog'])" % sobject.get('search_id') snapshots = server.eval(order_expr) if len(snapshots) > 0: #Get the most recent snapshot that the file could have been attached to snapshot = snapshots[len(snapshots) - 1] is_latest = snapshot.get('is_latest') description = snapshot.get('description') process = snapshot.get('process') id = snapshot.get('id') search_id = snapshot.get('search_id') code = snapshot.get('code') snap_timestamp = snapshot.get('timestamp') sea_t = sobject.get('search_type').split('?')[0] upper_st = sea_t.split('/')[1].upper() srch_id = sobject.get('search_id') full_ending = make_right_code_ending(srch_id) parent_code = '%s%s' % (upper_st, full_ending) parent_sk = server.build_search_key(sea_t, parent_code) parent_type = parent_sk.split('?')[0] version = int(snapshot.get('version')) #There's a template for the internal emails, and one for those that go out to our clients internal_template_file = '/opt/spt/custom/formatted_emailer/internal_email_template.html' external_template_file = '/opt/spt/custom/formatted_emailer/external_email_template.html' if is_latest and find_str == 'ORDER': #Handle the case in which it was attached to an order order_code = parent_code if version not in [-1,'-1']: #The only process we care about sending alerts out for is the "PO" if process == 'PO': parent = server.eval("@SOBJECT(%s['code','%s'])" % (parent_type, parent_code))[0] sched = parent.get('login') sched_email = server.eval("@GET(sthpw/login['login','%s'].email)" % sched) if sched_email: sched_email = sched_email[0] else: sched_email = 
'imakestringnothinghappn' parent_timestamp = parent.get('timestamp') snap_tdict = get_time_date_dict(snap_timestamp) parent_tdict = get_time_date_dict(parent_timestamp) rez = compare_dt_dicts(parent_tdict, snap_tdict) #If the result is 1, the parent is older than the snapshot #If the snapshot is older than the order, the result will be 0, which means there is a problem if rez[1] == 1: #If the difference in seconds between the object creation is greater than 15 seconds, there will probably be no problems with the following queries (Had to put it in, because it was creating errors occasionally) if rez[0] > 15: #Get all the files associated with the file upload (there could be others attached to the same snapshot) #Then send an email, using the internal template from formatted_emailer import EmailDirections ed = EmailDirections(order_code=order_code) int_data = ed.get_internal_data() subject = '2G-PO-FILE-UPLOAD %s Order: "%s" PO#: %s' % (file_name, int_data['order_name'], int_data['po_number']) if int_data['client_email'] == '' and int_data['location'] == 'external': subject = 'NOT SENT TO CLIENT!? %s' % subject subject_see = subject subject = subject.replace(' ','..') message = '%s has uploaded a new PO File.' 
% int_data['from_name'].replace('.',' ') message = '%s<br/>Uploaded PO File: %s' % (message, file_name) if parent_type == 'twog/order': sales_repper = parent.get('sales_rep') sales_rep_email = server.eval("@GET(sthpw/login['login','%s']['location','internal'].email)" % sales_repper) if sales_rep_email not in [None,'',[]]: sales_rep_email = sales_rep_email[0] if int_data['ccs'] not in [None,'']: int_data['ccs'] = '%s;%s' % (int_data['ccs'], sales_rep_email) else: int_data['ccs'] = '%s' % sales_rep_email int_data['ccs'] = int_data['ccs'].replace(';%s' % sched_email, '').replace('%s;' % sched_email, '') template = open(internal_template_file, 'r') filled = '' for line in template: line = line.replace('[ORDER_CODE]', int_data['order_code']) line = line.replace('[PO_NUMBER]', int_data['po_number']) line = line.replace('[CLIENT_EMAIL]', int_data['client_email']) line = line.replace('[EMAIL_CC_LIST]', int_data['ccs']) line = line.replace('[SCHEDULER_EMAIL]', int_data['scheduler_email']) line = line.replace('[SUBJECT]', subject_see) line = line.replace('[MESSAGE]', message) line = line.replace('[CLIENT]', int_data['client_name']) line = line.replace('[CLIENT_LOGIN]', int_data['client_login']) line = line.replace('[ORDER_NAME]', int_data.get('order_hyperlink', int_data['order_name'])) line = line.replace('[START_DATE]', fix_date(int_data['start_date'])) line = line.replace('[DUE_DATE]', fix_date(int_data['due_date'])) line = line.replace('[TITLE_ROW]', '') line = line.replace('[PROJ_ROW]', '') filled = '%s%s' % (filled, line) transaction_ticket = server.get_transaction_ticket() upload_dir = Environment.get_upload_dir(transaction_ticket) filled = '%s\nMATTACHMENT:%s/%s' % (filled, upload_dir, file_name) template.close() filled_in_email = '/var/www/html/formatted_emails/int_snap_inserted_%s.html' % code filler = open(filled_in_email, 'w') filler.write(filled) filler.close() the_command = "php /opt/spt/custom/formatted_emailer/trusty_emailer.php '''%s''' '''%s''' '''%s''' 
'''%s''' '''%s''' '''%s'''" % (filled_in_email, int_data['to_email'], int_data['from_email'], int_data['from_name'], subject, int_data['ccs'].replace(';','#Xs*')) os.system(the_command) #If the location of the user is external, and we allow this client to receive emails, then send them an email as well if int_data['location'] == 'external' and allow_client_emails: ext_data = ed.get_external_data() template = open(external_template_file, 'r') filled = '' for line in template: line = line.replace('[ORDER_CODE]', ext_data['order_code']) line = line.replace('[PO_NUMBER]', ext_data['po_number']) line = line.replace('[CLIENT_EMAIL]', ext_data['client_email']) line = line.replace('[EMAIL_CC_LIST]', ext_data['ccs']) line = line.replace('[SCHEDULER_EMAIL]', ext_data['scheduler_email']) line = line.replace('[SUBJECT]', subject_see) line = line.replace('[MESSAGE]', message) line = line.replace('[CLIENT]', ext_data['client_name']) line = line.replace('[CLIENT_LOGIN]', ext_data['client_login']) line = line.replace('[ORDER_NAME]', ext_data['order_name']) line = line.replace('[START_DATE]', fix_date(ext_data['start_date'])) line = line.replace('[DUE_DATE]', fix_date(ext_data['due_date'])) filled = '%s%s' % (filled, line) filled = '%s\nMATTACHMENT:%s/%s' % (filled, upload_dir, file_name) template.close() filled_in_email = '/var/www/html/formatted_emails/ext_snap_inserted_%s.html' % code filler = open(filled_in_email, 'w') filler.write(filled) filler.close() os.system("php /opt/spt/custom/formatted_emailer/trusty_emailer.php '''%s''' '''%s''' '''%s''' '''%s''' '''%s''' '''%s'''" % (filled_in_email, ext_data['to_email'], ext_data['from_email'], ext_data['from_name'], subject, ext_data['ccs'].replace(';','#Xs*'))) else: print "THIS MAKES NO SENSE. THE SNAPSHOT WAS CREATED BEFORE THE ORDER?" 
elif find_str == 'NOTE': #So it was attached to a note #Need to make a code with the search id, as there's different ways the order and note deal with their snapshots full_ending = make_note_code_ending(srch_id) parent_code = 'NOTE%s' % (full_ending) parent_sk = server.build_search_key('sthpw/note', parent_code) #Get the note sobject #note_obj = server.eval("@SOBJECT(sthpw/note['code','%s'])" % parent_code)[0] note_obj = server.eval("@SOBJECT(sthpw/note['id','%s'])" % srch_id)[0] note = note_obj.get('note') #Need to wait until all files have been checked in to Tactic if process == 'note_attachment': timestamp = note_obj.get('timestamp').split('.')[0] search_id = note_obj.get('search_id') login = note_obj.get('login') process = note_obj.get('process') note_id = note_obj.get('id') addressed_to = note_obj.get('addressed_to') override_compression = True #if addressed_to not in [None,'']: # override_compression = True search_type = note_obj.get('search_type').split('?')[0] parent_tall_str = search_type.split('/')[1].upper() groups = Environment.get_group_names() note = note_obj.get('note') note = note.replace('\t',' ') note = note.replace('\n','<br/>') note = note.replace(' ', ' ') note = fix_note_chars(note) order = None title = None proj = None work_order = None order_code = '' display_ccs = '' subject_see = '' message = '' title_row = '' proj_row = '' #If the note was attached to an order, title, proj or work_order, and compression didn't write the note, then send it. If compression intended to send it, then go ahead and send. 
if parent_tall_str in ['ORDER','TITLE','PROJ','WORK_ORDER']: # and (('compression' not in groups and 'compression supervisor' not in groups) or override_compression): from formatted_emailer import EmailDirections #create the note parent's code from search_id right_ending = make_right_code_ending(search_id) parent_code = '%s%s' % (parent_tall_str, right_ending) parent = server.eval("@SOBJECT(%s['code','%s'])" % (search_type, parent_code)) if parent: parent = parent[0] ident_str = '' going_to_client = False #Get info from the related elements, going up the chain if parent_tall_str == 'WORK_ORDER': proj = server.eval("@SOBJECT(twog/work_order['code','%s'].twog/proj)" % parent_code)[0] title = server.eval("@SOBJECT(twog/work_order['code','%s'].twog/proj.twog/title)" % parent_code)[0] order = server.eval("@SOBJECT(twog/order['code','%s'])" % title.get('order_code'))[0] elif parent_tall_str == 'PROJ': proj = parent title = server.eval("@SOBJECT(twog/proj['code','%s'].twog/title)" % parent_code)[0] order = server.eval("@SOBJECT(twog/order['code','%s'])" % title.get('order_code'))[0] elif parent_tall_str == 'TITLE': title = parent order = server.eval("@SOBJECT(twog/order['code','%s'])" % title.get('order_code'))[0] elif parent_tall_str == 'ORDER': order = server.eval("@SOBJECT(twog/order['code','%s'])" % parent_code)[0] #If the note was attached to an order or a title, go ahead and send it to the client (as long as we allow emailing to that client), otherwise it will remain internal if parent_tall_str == 'ORDER' and process == 'client': order = parent going_to_client = True elif parent_tall_str == 'TITLE' and process == 'client': going_to_client = True display_heirarchy = '' #Get the different message elements and mail_to lists for internal and external emails ed = EmailDirections(order_code=order.get('code')) int_data = ed.get_internal_data() ext_data = ed.get_external_data() if title: title_display = title.get('title') if title.get('episode') not in [None,'']: 
title_display = '%s: %s' % (title_display, title.get('episode')) display_heirarchy = '"%s" in %s' % (title_display, display_heirarchy) ident_str = '' if parent_tall_str in ['ORDER','TITLE']: ident_str = '%s PO#: %s' % (display_heirarchy, ext_data['po_number']) else: ident_str = '%s (%s)' % (parent_code, ext_data['po_number']) subject = '2G-NOTE ATTACHMENT FOR %s (%s)' % (ident_str, file_name) #If it's not going to the client because we don't have their email, at least tell the people internally that it didn't go out to the client if ext_data['to_email'] == '' and ext_data['ext_ccs'] == '' and ext_data['location'] == 'external': subject = 'NOT SENT TO CLIENT!? %s' % subject subject_see = subject subject = subject.replace(' ','..') message = '<br/>%s has added a new note for %s:<br/><br/>Note:<br/>%s<br/>%s' % (ext_data['from_name'], ident_str, note, timestamp) if going_to_client and allow_client_emails: ext_template = open(external_template_file, 'r') filled = '' for line in ext_template: line = line.replace('[ORDER_CODE]', ext_data['order_code']) line = line.replace('[PO_NUMBER]', ext_data['po_number']) line = line.replace('[CLIENT_EMAIL]', ext_data['client_email']) line = line.replace('[EMAIL_CC_LIST]', ext_data['ext_ccs']) line = line.replace('[SCHEDULER_EMAIL]', ext_data['scheduler_email']) line = line.replace('[SUBJECT]', subject_see) line = line.replace('[MESSAGE]', message) line = line.replace('[CLIENT]', ext_data['client_name']) line = line.replace('[CLIENT_LOGIN]', ext_data['client_login']) line = line.replace('[ORDER_NAME]', ext_data['order_name']) line = line.replace('[START_DATE]', fix_date(ext_data['start_date'])) line = line.replace('[DUE_DATE]', fix_date(ext_data['due_date'])) filled = '%s%s' % (filled, line) #If there were files attached (which there should be), show what they are in the email transaction_ticket = server.get_transaction_ticket() upload_dir = Environment.get_upload_dir(transaction_ticket) filled = '%s\nMATTACHMENT:%s/%s' % (filled, 
upload_dir, file_name) ext_template.close() filled_in_email = '/var/www/html/formatted_emails/ext_note_inserted_%s.html' % note_id filler = open(filled_in_email, 'w') filler.write(filled) filler.close() if addressed_to not in [None,'']: adt = addressed_to.split(',') for adta in adt: if '@2gdigital' not in adta and adta not in ext_data['ext_ccs']: if ext_data['ext_ccs'] == '': ext_data['ext_ccs'] = adta else: ext_data['ext_ccs'] = '%s;%s' % (ext_data['ext_ccs'], adta) the_command = "php /opt/spt/custom/formatted_emailer/trusty_emailer.php '''%s''' '''%s''' '''%s''' '''%s''' '''%s''' '''%s'''" % (filled_in_email, ext_data['to_email'], ext_data['from_email'], ext_data['from_name'], subject, ext_data['ext_ccs'].replace(';','#Xs*')) if ext_data['to_email'] not in [None,''] and ext_data['ext_ccs'] not in [None,'',';']: os.system(the_command) #Now do internal email if title: full_title = title.get('title') if title.get('episode') not in [None,'']: full_title = '%s: %s' % (full_title, title.get('episode')) title_row = "<div id='pagesubTitle3'>Title: <strong>%s</strong> | Title Code: <strong>%s</strong></div>" % (full_title, title.get('code')) if proj: proj_row = "<div id='pagesubTitle3'>Project: <strong>%s</strong> | Project Code: <strong>%s</strong></div>" % (proj.get('process'), proj.get('code')) int_template = open(internal_template_file, 'r') filled = '' for line in int_template: line = line.replace('[ORDER_CODE]', int_data['order_code']) line = line.replace('[PO_NUMBER]', int_data['po_number']) line = line.replace('[CLIENT_EMAIL]', int_data['client_email']) line = line.replace('[EMAIL_CC_LIST]', int_data['int_ccs']) line = line.replace('[SCHEDULER_EMAIL]', int_data['scheduler_email']) line = line.replace('[SUBJECT]', subject_see) line = line.replace('[MESSAGE]', message) line = line.replace('[CLIENT]', int_data['client_name']) line = line.replace('[CLIENT_LOGIN]', int_data['client_login']) line = line.replace('[ORDER_NAME]', int_data.get('order_hyperlink', 
int_data['order_name'])) line = line.replace('[START_DATE]', fix_date(int_data['start_date'])) line = line.replace('[DUE_DATE]', fix_date(int_data['due_date'])) line = line.replace('[TITLE_ROW]', title_row) line = line.replace('[PROJ_ROW]', proj_row) filled = '%s%s' % (filled, line) #If there were files attached (which there should be), show what they are in the email transaction_ticket = server.get_transaction_ticket() upload_dir = Environment.get_upload_dir(transaction_ticket) filled = '%s\nMATTACHMENT:%s/%s' % (filled, upload_dir, file_name) int_template.close() filled_in_email = '/var/www/html/formatted_emails/int_note_inserted_%s.html' % note_id filler = open(filled_in_email, 'w') filler.write(filled) filler.close() if addressed_to not in [None,'']: adt = addressed_to.split(',') for adta in adt: if '@2gdigital' in adta and adta not in int_data['int_ccs']: if int_data['int_ccs'] == '': int_data['int_ccs'] = adta else: int_data['int_ccs'] = '%s;%s' % (int_data['int_ccs'], adta) login_email = server.eval("@GET(sthpw/login['login','%s'].email)" % login) if login_email: int_data['from_email'] = login_email[0] the_command = "php /opt/spt/custom/formatted_emailer/trusty_emailer.php '''%s''' '''%s''' '''%s''' '''%s''' '''%s''' '''%s'''" % (filled_in_email, int_data['to_email'], int_data['from_email'], int_data['from_name'], subject, int_data['int_ccs'].replace(';','#Xs*')) if int_data['to_email'] not in [None,''] and int_data['int_ccs'] not in [None,'',';']: #Do it. Send the email os.system(the_command) elif '_icon' in file_name: snapshot_code = sobject.get('snapshot_code') extension = 'jpg' #I don't know why I can't get the actual info on this file right now. Kinda tarded. 
We just have to assume that it will end up being a jpg fsplit = file_name.split('.') sexpr = "@SOBJECT(sthpw/snapshot['code','%s'])" % snapshot_code snapshot = server.eval(sexpr)[0] version = int(snapshot.get('version')) if version > 9: version = 'v0%s' % version elif version > 99: version = 'v%s' % version else: version = 'v00%s' % version parent_type = snapshot.get('search_type').split('?')[0] find_str = parent_type.split('/')[1].upper().split('?')[0] process = snapshot.get('process') if process == 'icon': id = snapshot.get('id') search_id = snapshot.get('search_id') sea_t = snapshot.get('search_type').split('?')[0] upper_st = sea_t.split('/')[1].upper() srch_id = snapshot.get('search_id') full_ending = make_right_code_ending(srch_id) parent_code = '%s%s' % (upper_st, full_ending) parent_sk = server.build_search_key(sea_t, parent_code) parent_type = parent_sk.split('?')[0] #This is to set the icon for orders if find_str == 'ORDER': preview_path = server.get_path_from_snapshot(snapshot.get('code'), mode="web") preview_path_i = preview_path.split('/') fn2 = preview_path_i[len(preview_path_i) - 1] fn3_s = fn2.split('.') fn3 = '%s_icon_%s.%s' % (fn3_s[0], version, extension) preview_path = preview_path.replace(fn2, fn3) server.update(parent_sk, {'icon_path': preview_path}) except AttributeError as e: traceback.print_exc() print str(e) + '\nMost likely the server object does not exist.' raise e except KeyError as e: traceback.print_exc() print str(e) + '\nMost likely the input dictionary does not exist.' raise e except Exception as e: traceback.print_exc() print str(e) raise e
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. 
Please first create a "name" column for this sType.') for count, filename in enumerate(filenames): # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobject = search.get_sobject() # else create a new one if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) if SearchType.column_exists(search_type, "keywords"): if 
keywords: sobject.set_value("keywords", keywords) for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename) server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException( 'The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.' ) input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences( filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException( 'No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]' ) for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException( 'Please modify sequence naming to have at least three digits.' 
) search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % (base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", 
full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|' % input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$' % seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$' % seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count) + 1) / 
len(filenames) * 100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")