def get_info(my, dirname, basename):
    """Return an info dict for the file at dirname/basename.

    Directory info is gathered with Common.get_dir_info() only when the
    "location" kwarg is 'server'; any other location yields an empty dict.
    The result is also stored on my.info before being returned.
    """
    location = my.kwargs.get("location")

    full_path = "%s/%s" % (dirname, basename)
    snap = my.snapshots.get(full_path)

    # Only a snapshotted file sequence carries a frame range; single files
    # (or sequences without a snapshot) pass file_range=None through.
    frame_range = None
    if FileGroup.is_sequence(full_path) and snap:
        frame_range = snap.get_file_range()
        #start_frame = frame_range.get_frame_start()
        #end_frame = frame_range.get_frame_end()

    my.info = Common.get_dir_info(full_path, file_range=frame_range) if location == 'server' else {}
    return my.info
def get_info(self, dirname, basename):
    """Collect info about the file located at dirname/basename.

    Real directory info (via Common.get_dir_info) is produced only when
    kwargs["location"] == 'server'; otherwise an empty dict is used.
    Caches the result on self.info and returns it.
    """
    path = "%s/%s" % (dirname, basename)
    snapshot = self.snapshots.get(path)

    # A frame range only exists for sequences that have a snapshot.
    if FileGroup.is_sequence(path) and snapshot:
        file_range = snapshot.get_file_range()
        #start_frame = file_range.get_frame_start()
        #end_frame = file_range.get_frame_end()
    else:
        file_range = None

    if self.kwargs.get("location") != 'server':
        self.info = {}
    else:
        self.info = Common.get_dir_info(path, file_range=file_range)
    return self.info
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException( 'The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.' ) input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences( filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException( 'No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]' ) for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException( 'Please modify sequence naming to have at least three digits.' 
) search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % (base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", 
full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|' % input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$' % seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$' % seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count) + 1) / 
len(filenames) * 100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")
def execute(my): filenames = my.kwargs.get("filenames") upload_dir = Environment.get_upload_dir() base_dir = upload_dir update_mode = my.kwargs.get("update_mode") search_type = my.kwargs.get("search_type") key = my.kwargs.get("key") relative_dir = my.kwargs.get("relative_dir") if not relative_dir: project_code = Project.get_project_code() search_type_obj = SearchType.get(search_type) table = search_type_obj.get_table() relative_dir = "%s/%s" % (project_code, table) server = TacticServerStub.get() parent_key = my.kwargs.get("parent_key") category = my.kwargs.get("category") keywords = my.kwargs.get("keywords") update_data = my.kwargs.get("update_data") extra_data = my.kwargs.get("extra_data") if extra_data: extra_data = jsonloads(extra_data) else: extra_data = {} # TODO: use this to generate a category category_script_path = my.kwargs.get("category_script_path") """ ie: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") return date.split(" ")[0] """ if not SearchType.column_exists(search_type, "name"): raise TacticException('The Ingestion puts the file name into the name column which is the minimal requirement. Please first create a "name" column for this sType.') input_prefix = update_data.get('input_prefix') non_seq_filenames = [] # For sequence mode, take all filenames, and regenerate the filenames based on the function "find_sequences" if update_mode == "sequence": non_seq_filenames_dict, seq_digit_length = my.find_sequences(filenames) # non_seq_filenames is a list of filenames that are stored in the None key, # which are the filenames that are not part of a sequence, or does not contain # a sequence pattern. 
non_seq_filenames = non_seq_filenames_dict[None] # delete the None key from list so filenames can be used in the latter for loop del non_seq_filenames_dict[None] filenames = non_seq_filenames_dict.keys() if filenames == []: raise TacticException('No sequences are found in files. Please follow the pattern of [filename] + [digits] + [file extension (optional)]. Examples: [abc_1001.png, abc_1002.png] [abc.1001.mp3, abc.1002.mp3] [abc_100_1001.png, abc_100_1002.png]') for count, filename in enumerate(filenames): # Check if files should be updated. # If so, attempt to find one to update. # If more than one is found, do not update. if update_mode in ["true", "True"]: # first see if this sobjects still exists search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if len(sobjects) > 1: sobject = None elif len(sobjects) == 1: sobject = sobjects[0] else: sobject = None elif update_mode == "sequence": if not FileGroup.is_sequence(filename): raise TacticException('Please modify sequence naming to have at least three digits.') search = Search(search_type) search.add_filter("name", filename) if relative_dir and search.column_exists("relative_dir"): search.add_filter("relative_dir", relative_dir) sobjects = search.get_sobjects() if sobjects: sobject = sobjects[0] else: sobject = None else: sobject = None # Create a new file if not sobject: sobject = SearchType.create(search_type) sobject.set_value("name", filename) if relative_dir and sobject.column_exists("relative_dir"): sobject.set_value("relative_dir", relative_dir) # extract metadata #file_path = "%s/%s" % (base_dir, File.get_filesystem_name(filename)) if update_mode == "sequence": first_filename = non_seq_filenames_dict.get(filename)[0] last_filename = non_seq_filenames_dict.get(filename)[-1] file_path = "%s/%s" % (base_dir, first_filename) else: file_path = "%s/%s" % 
(base_dir, filename) # TEST: convert on upload try: convert = my.kwargs.get("convert") if convert: message_key = "IngestConvert001" cmd = ConvertCbk(**convert) cmd.execute() except Exception, e: print "WARNING: ", e if not os.path.exists(file_path): raise Exception("Path [%s] does not exist" % file_path) # get the metadata from this image if SearchType.column_exists(search_type, "relative_dir"): if category and category not in ['none', None]: from pyasm.checkin import ExifMetadataParser parser = ExifMetadataParser(path=file_path) tags = parser.get_metadata() date = tags.get("EXIF DateTimeOriginal") if not date: date_str = "No-Date" else: date_str = str(date) # this can't be parsed correctly by dateutils parts = date_str.split(" ") date_str = parts[0].replace(":", "-") date_str = "%s %s" % (date_str, parts[1]) from dateutil import parser orig_date = parser.parse(date_str) if category == "by_day": date_str = orig_date.strftime("%Y/%Y-%m-%d") elif category == "by_month": date_str = orig_date.strftime("%Y-%m") elif category == "by_week": date_str = orig_date.strftime("%Y/Week-%U") full_relative_dir = "%s/%s" % (relative_dir, date_str) sobject.set_value("relative_dir", full_relative_dir) if parent_key: parent = Search.get_by_search_key(parent_key) if parent: sobject.set_sobject_value(sobject) for key, value in update_data.items(): if input_prefix: key = key.replace('%s|'%input_prefix, '') if SearchType.column_exists(search_type, key): if value: sobject.set_value(key, value) """ if SearchType.column_exists(search_type, "keywords"): if keywords: sobject.set_value("keywords", keywords) """ for key, value in extra_data.items(): if SearchType.column_exists(search_type, key): sobject.set_value(key, value) """ if category: if SearchType.column_exists(search_type, "category"): sobject.set_value("category", category) if SearchType.column_exists(search_type, "relative_dir"): full_relative_dir = "%s/%s" % (relative_dir, category) sobject.set_value("relative_dir", category) """ 
sobject.commit() search_key = sobject.get_search_key() # use API to check in file process = my.kwargs.get("process") if not process: process = "publish" if process == "icon": context = "icon" else: context = "%s/%s" % (process, filename.lower()) if update_mode == "sequence": pattern_expr = re.compile('^.*(\d{%d})\..*$'%seq_digit_length) m_first = re.match(pattern_expr, first_filename) m_last = re.match(pattern_expr, last_filename) # for files without extension # abc_1001, abc.1123_1001 if not m_first: no_ext_expr = re.compile('^.*(\d{%d})$'%seq_digit_length) m_first = re.match(no_ext_expr, first_filename) m_last = re.match(no_ext_expr, last_filename) # using second last index , to grab the set right before file type groups_first = m_first.groups() if groups_first: range_start = int(m_first.groups()[0]) groups_last = m_last.groups() if groups_last: range_end = int(m_last.groups()[0]) file_range = '%s-%s' % (range_start, range_end) file_path = "%s/%s" % (base_dir, filename) server.group_checkin(search_key, context, file_path, file_range, mode='uploaded') else: server.simple_checkin(search_key, context, filename, mode='uploaded') percent = int((float(count)+1) / len(filenames)*100) print "checking in: ", filename, percent msg = { 'progress': percent, 'description': 'Checking in file [%s]' % filename, } server.log_message(key, msg, status="in progress")