def init_fs_with_entity(entity_type, drive_name="home", entity_name="test1"):
    """Build a FileSystem fixture holding one drive and one entity on it.

    Returns a (filesystem, entity) tuple for use in tests.
    """
    filesystem = FileSystem()
    filesystem.create_drive(drive_name)
    entity = filesystem.create(entity_type, entity_name, drive_name)
    return filesystem, entity
def test_drive_cannot_be_moved(self):
    """Moving a drive must raise IllegalFileSystemOperationException."""
    name = "home"
    filesystem = FileSystem()
    filesystem.create_drive(name)
    with self.assertRaises(IllegalFileSystemOperationException):
        filesystem.move(name, f"new_{name}")
def test_delete_drive(self):
    """Deleting the only drive leaves the filesystem with zero drives."""
    name = "home"
    filesystem = FileSystem()
    filesystem.create_drive(name)
    filesystem.delete(name)
    self.assertEqual(len(filesystem._drives), 0)
def test_check_that_item_does_not_exist_and_parent_drive_does_not_exist(self):
    """exists() is False when neither the item nor its parent drive exist."""
    filesystem = FileSystem()
    self.assertFalse(filesystem.exists("home\\test_folder"))
def test_check_that_file_does_not_exist(self):
    """exists() is False for a missing file on a drive that does exist."""
    filesystem = FileSystem()
    filesystem.create_drive("home")
    self.assertFalse(filesystem.exists("home\\test1.txt"))
def test_create_new_filesystem(self):
    """Smoke test: constructing a FileSystem must not raise."""
    FileSystem()
def test_cannot_move_non_existent_file(self):
    """Moving a path that was never created raises PathNotFoundException."""
    filesystem = FileSystem()
    with self.assertRaises(PathNotFoundException):
        filesystem.move("home\\first", "home\\second")
def computeResponse(self):
    """Build the complete HTTP response: status line, headers and body.

    Returns bytes on success.  NOTE(review): when no file was requested
    the method returns the empty str, not bytes — callers must cope with
    both types; confirm this mixed return type is intentional.
    """
    string = ""
    # Nothing was requested, so there is nothing to respond with.
    if self.requested_file == '':
        return ''
    # Rebuild the FileSystem resolver for this request and copy the
    # request state into it before performing the lookup.
    self.Files = FileSystem(self.parent_folder, self.host)
    self.Files.request_method = self.request_method
    self.Files.content_type = self.content_type
    self._extension = self.Files._file_extension
    self.Files.post_data = self.requested_body
    self.Files.cookies = self.cookies
    self.Files.cookie_str = self.cookie_str
    self.Files.user_agent_str = self.user_agent_str
    self.Files.search(self.requested_file)
    # Pull the lookup results back out of the resolver.
    self.data = self.Files.data
    self._encoding = self.Files.encoding
    self._content_length = self.Files.contentLength
    self._extension = self.Files._file_extension
    self._contentType()
    self.send_headers['Content-Length'] = str(self._contentLength())
    self.status_code = self.Files.status_code
    self.status_str = self.Files.status_str
    # Status line, e.g. "HTTP/1.1 200 OK".
    string += self._status(self.status_str, self.status_code)
    # Date header for the moment the response was assembled.
    string += self._date()
    # Extra headers produced by the file handler (e.g. coming from PHP).
    self.send_headers.update(self.Files.additional_head_str)
    # Serialise every header as "Name: value\r\n".
    for header in self.send_headers:
        string += header
        string += ': '
        string += self.send_headers[header]
        string += '\r\n'
        # Set-Cookie lines are injected right after X-Powered-By.
        if header == 'X-Powered-By':
            string += self.add_set_cookies()
    # Blank line terminates the header section.
    string += '\r\n'
    # Append the body, encoding the headers (and body, if still text).
    if type(self.data) == str:
        total = bytes(string + self.data, self._encoding)
    else:
        total = bytes(string, self._encoding) + self.data
    return total
#!/usr/bin/env python3 # encoding: utf8 # # http://python3porting.com/problems.html ''' Core implementation of the encrypted git-like filesystem. ''' import logging from config import Config from fs import FileSystem from ccrypt import CCrypto logging.basicConfig() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) if __name__ == '__main__': fs = FileSystem(CCrypto, Config()) fs.analyze(wd='tests')
def test_create_drive(self):
    """create_drive registers exactly one drive."""
    filesystem = FileSystem()
    filesystem.create_drive("home")
    self.assertEqual(len(filesystem._drives), 1)
def test_cannot_create_drive_with_create(self):
    """Drives must go through create_drive; create() rejects DRIVE."""
    filesystem = FileSystem()
    with self.assertRaises(ValueError):
        filesystem.create(EntityType.DRIVE, "home", "")
# python psync.py source dest if __name__ == "__main__": source_root, dest_root = sys.argv[1:] # TODO: What else can we use for peerids when there are no peers? source_peerid = source_root dest_peerid = dest_root # TODO: implement reading .psync. source_groupids = Groupids({"": source_root}) dest_groupids = Groupids({"": dest_root}) conf = Config() clock = Clock() slog = StatusLog(clock) fs = FileSystem(slog) source_db_path = os.path.join(source_root, conf.db_path) dest_db_path = os.path.join(dest_root, conf.db_path) revisions_root = os.path.join(dest_root, conf.revisions_path) fs.create_parent_dirs(source_db_path) fs.create_parent_dirs(dest_db_path) with sqlite3.connect(source_db_path) as source_db: with sqlite3.connect(dest_db_path) as dest_db: source_history_store = HistoryStore(SqlDb(source_db), slog) dest_history_store = HistoryStore(SqlDb(dest_db), slog) revisions = RevisionStore(fs, revisions_root) merge_log = MergeLog(SqlDb(source_db), clock) source_history = scan_and_update_history(
class Header():
    """HTTP response builder.

    Parses a raw request header (getRequest) into key/value pairs and
    dispatches each recognised header to a handler method, then
    assembles the response to send back (computeResponse).  Every
    helper returns a string; only computeResponse returns bytes.
    """

    def __init__(self, parent_folder, url):
        # NOTE(review): `super.__self__` is a no-op attribute lookup on
        # the built-in super type — presumably meant to be
        # `super().__init__()`; confirm and clean up.
        super.__self__
        self.parent_folder = parent_folder  # document root on disk
        self.host = url                     # request host (reset by _getHost)
        self.port = 0
        self.content_type = ""
        self.request_method = ''
        self.requested_file = ''
        self.requested_body = ''
        self._encoding = 'UTF-8'
        self._extension = ''
        # File resolver; rebuilt per-request in computeResponse.
        self.Files = FileSystem(self.parent_folder, self.host)
        self._content_length = 0
        self.raw_headers = ""
        self.status_code = 0
        self.status_str = ""
        self.headerPair = {}  # parsed request headers: name -> value
        # Status-code -> reason-phrase map.
        # NOTE(review): 300 is labelled 'NOT FOUND' (should be Multiple
        # Choices) and 401 is misspelled — runtime strings left as-is.
        self.status_stat = {
            200: 'OK', 300: 'NOT FOUND', 301: 'MOVED PERMANENTLY',
            302: 'FOUND', 303: 'SEE OTHER', 304: 'NOT MODIFIED',
            307: 'Temporary Redirect', 308: 'Permanent Redirect',
            400: 'Bad Request', 401: 'Unathourized',
            402: 'Payment Required', 403: 'Forbidden', 404: 'NOT FOUND',
            405: 'Method Not Allowed', 406: 'Not Acceptable',
            407: 'Proxy Authentication Required', 408: 'Request Timeout',
            409: 'Conflict', 410: 'Gone', 411: 'Length Required',
            412: 'Precondition Failed', 413: 'Payload Too Large',
            414: 'URI Too Long', 415: 'Unsupported Media Type',
            416: 'Requested Range Not Satisfiable',
            417: 'Expectation Failed', 418: "I'm a teapot",
            421: 'Misdirected Request', 422: 'Unprocessable Entity',
            423: 'Locked', 424: 'Failed Dependency', 425: 'Too Early',
            426: 'Upgrade Required', 428: 'Precondition Required',
            429: 'Too Many Requests',
            431: 'Request Header Fields Too Large',
            451: 'Unavailable For Legal Reasons',
            500: 'Internal Server Error', 501: 'Not Implemented',
            502: 'Bad Gateway', 503: 'Service Unavailable',
            504: 'Gateway Timeout', 505: 'HTTP Version Not Supported',
            506: 'Variant Also Negotiates', 507: 'Insufficient Storage',
            508: 'Loop Detected', 510: 'Not Extended',
            511: 'Network Authentication Required'
        }
        # Default response headers; Content-Length and Content-Type are
        # overwritten per-request.
        self.send_headers = {
            'Server': 'Peter (Python/3.7)',
            'X-Frame-Options': 'SAMEORIGIN',
            'Accept-Ranges': 'bytes',
            'Content-Length': '0',
            'Keep-Alive': 'timeout=5, max=99',
            'Connection': 'Keep-Alive',
            'Content-Type': 'text/html'
        }
        self.data = ''  # response body (str or bytes)
        # File-extension -> MIME-type map.
        self._extMap = {
            'html': 'text/html', 'htm': 'text/html', 'php': 'text/html',
            'css': 'text/css', 'py': 'text/html',
            'js': 'application/javascript', 'json': 'application/json',
            'png': 'image/png', 'jpeg': 'image/jpeg', 'gif': 'image/gif',
            'svg': 'image/svg+xml', 'tiff': 'image/tiff',
            'aces': 'image/aces', 'avci': 'image/avci',
            'avcs': 'image/avcs', 'bmp': 'image/bmp', 'cgm': 'image/cgm',
            'dicom-rle': 'image/dicom-rle', 'emf': 'image/emf',
            'example': 'image/example', 'fits': 'image/fits',
            'g3fax': 'image/g3fax', 'heic': 'image/heic',
            'heic-sequence': 'image/heic-sequence', 'heif': 'image/heif',
            'heif-sequence': 'image/heif-sequence', 'hej2k': 'image/hej2k',
            'hsj2': 'image/hsj2', 'ief': 'image/ief', 'jls': 'image/jls',
            'jp2': 'image/jp2', 'jph': 'image/jph', 'jphc': 'image/jphc',
            'jpm': 'image/jpm', 'jpx': 'image/jpx', 'jxr': 'image/jxr',
            'jxrA': 'image/jxrA', 'jxrS': 'image/jxrS', 'jxs': 'image/jxs',
            'jxsc': 'image/jxsc', 'jxsi': 'image/jxsi',
            'jxss': 'image/jxss', 'ktx': 'image/ktx',
            'naplps': 'image/naplps', 'prs.btif': 'image/prs.btif',
            'prs.pti': 'image/prs.pti', 'pwg-raster': 'image/pwg-raster',
            't38': 'image/t38', 'tiff-fx': 'image/tiff-fx',
            'wmf': 'image/wmf', 'ico': 'image/ico'
        }
        # Request-header name -> handler.  Each handler takes exactly one
        # string argument (see getRequest's dispatch loop).
        # NOTE(review): _powered formats an X-Powered-By response line
        # rather than storing request state like the other handlers —
        # confirm it belongs in this table.
        self.functions = {
            'Host': self._getHost,
            'X-Powered-By': self._powered,
            'User-Agent': self._getUserAgent,
            'Content-Type': self._getContentType,
            'Cookie': self._getCookies
        }
        self.cookies = {}      # parsed request cookies: name -> value
        self.cookie_str = ""   # raw Cookie header value
        self.user_agent_str = ""

    def computeResponse(self):
        """Build the complete HTTP response: status line, headers, body.

        Returns bytes on success.  NOTE(review): returns the empty str
        (not bytes) when no file was requested — confirm callers handle
        both types.
        """
        string = ""
        # Nothing was requested, so there is nothing to respond with.
        if self.requested_file == '':
            return ''
        # Rebuild the FileSystem resolver for this request and copy the
        # request state into it before performing the lookup.
        self.Files = FileSystem(self.parent_folder, self.host)
        self.Files.request_method = self.request_method
        self.Files.content_type = self.content_type
        self._extension = self.Files._file_extension
        self.Files.post_data = self.requested_body
        self.Files.cookies = self.cookies
        self.Files.cookie_str = self.cookie_str
        self.Files.user_agent_str = self.user_agent_str
        self.Files.search(self.requested_file)
        # Pull the lookup results back out of the resolver.
        self.data = self.Files.data
        self._encoding = self.Files.encoding
        self._content_length = self.Files.contentLength
        self._extension = self.Files._file_extension
        self._contentType()
        self.send_headers['Content-Length'] = str(self._contentLength())
        self.status_code = self.Files.status_code
        self.status_str = self.Files.status_str
        # Status line, e.g. "HTTP/1.1 200 OK".
        string += self._status(self.status_str, self.status_code)
        # Date header for the moment the response was assembled.
        string += self._date()
        # Extra headers produced by the file handler (coming from PHP).
        self.send_headers.update(self.Files.additional_head_str)
        # Serialise every header as "Name: value\r\n".
        for header in self.send_headers:
            string += header
            string += ': '
            string += self.send_headers[header]
            string += '\r\n'
            # Set-Cookie lines are injected right after X-Powered-By.
            if header == 'X-Powered-By':
                string += self.add_set_cookies()
        # Blank line terminates the header section.
        string += '\r\n'
        # Append the body, encoding the headers (and body, if text).
        if type(self.data) == str:
            total = bytes(string + self.data, self._encoding)
        else:
            total = bytes(string, self._encoding) + self.data
        return total

    def getRequest(self, header):
        """Break the raw request (bytes) into header pairs and dispatch.

        Splits headers from body, records both, parses the request line,
        then routes each known header to its handler in self.functions.
        Returns 1 when the request is empty.
        """
        if header == '':
            return 1
        # Headers end at the first blank line; anything after is body.
        splited = header.split(b'\r\n\r\n', 1)
        # This is the request body that came.
        # If it was a POST we will use it.
        self.raw_headers = str(splited[0], 'ascii')
        if len(splited) > 1:
            self.requested_body = splited[-1]
        else:
            self.requested_body = ''
        # Clear it to save RAM.
        splited.clear()
        # Break into individual lines.
        lines = self.raw_headers.split('\r\n')
        # The first line is the request line (GET/POST ...); it does not
        # follow the "Name: value" pairing of the rest.
        self._getFile(lines[0])
        # Break the remaining lines into key-value pairs.
        for pair in lines:
            # Split on ': ' (colon+space) to avoid splitting the port
            # number, e.g. localhost:9999.
            splits = pair.split(": ")
            # Only lines that actually form a key-value pair are kept.
            if len(splits) > 1:
                self.headerPair[splits[0]] = splits[1]
        # Dispatch: every handler accepts exactly one string value.
        for func in self.functions:
            # Key exists in the headers that were sent by the client.
            if func in self.headerPair:
                function = self.functions[func]
                function(self.headerPair[func])

    def _getContentType(self, content_type_str):
        # Store the request's Content-Type verbatim.
        self.content_type = content_type_str

    def _getHost(self, hostname_str):
        """Record the Host header.

        For now the whole value (host:port) is stored as the hostname;
        splitting out the port is left for later.
        """
        self.host = hostname_str

    def _getUserAgent(self, user_agent_str):
        # Store the request's User-Agent verbatim.
        self.user_agent_str = user_agent_str

    def _getFile(self, req_str):
        """Parse the request line into method and requested file.

        NOTE(review): assumes the line ends with the 9-char
        ' HTTP/x.y' protocol suffix, stripped via req_str[:-8] —
        confirm against real request lines.
        """
        # Strip the HTTP protocol suffix off.
        parsed = req_str[:-8]
        # Split by space; there will be up to three entries and the
        # last may just be empty.
        splits = parsed.split(' ')
        # The request method (e.g. GET or POST).
        self.request_method = splits[0]
        # Guard against an index error on malformed lines.
        if len(splits) > 1:
            # The file requested for.
            self.requested_file = splits[1]

    def _getCookies(self, cookie_str):
        """Split the Cookie header into individual name=value cookies
        and store them in self.cookies."""
        self.cookie_str = cookie_str
        # Cookies are separated by '; '.
        splits = cookie_str.split('; ')
        for pair in splits:
            # Split each cookie into its key-value pair.
            pairs = pair.split('=')
            self.cookies[pairs[0]] = pairs[1]

    def _status(self, status=None, digit=None):
        """Format the status line, preferring the ready-made string.

        NOTE(review): implicitly returns None when both arguments are
        falsy — confirm callers never hit that case.
        """
        string = 'HTTP/1.1 '
        if status:
            string += status + "\r\n"
            return string
        elif digit:
            # Look the reason phrase up from the status table.
            string += str(digit) + " " + self.status_stat[digit] + "\r\n"
            return string

    def _date(self):
        """Format the Date response header for the current time."""
        string_time = "Date: "
        string_time += time.strftime('%a, %d %b %Y %H:%M:%S %Z')
        return string_time + "\r\n"

    def _contentLength(self):
        """Return the body length in bytes, computing and caching it on
        first use (str bodies are measured after encoding)."""
        if self._content_length:
            return self._content_length
        else:
            if type(self.data) == bytes:
                self._content_length = len(self.data)
            else:
                ddata = bytes(self.data, self._encoding)
                # Length of data from outside.
                self._content_length = len(ddata)
            """string = 'Content-Length: '
            # Now we are just continuing with the content length string
            if 'Content-Disposition' in self.send_headers:
                string = ''
            else:
                string += str(self._content_length) + '\r\n'"""
            return self._content_length

    def _cookie(self, cookies=None):
        """Format Set-Cookie lines (HttpOnly) from a dict of cookie
        dicts; returns the empty string when no cookies are given."""
        string = ""
        if cookies:
            # Cookie's name in the cookies multi-dimensional mapping.
            for name in cookies:
                string += "Set-Cookie: "
                # The actual cookie is itself a mapping of attributes.
                cookie = cookies[name]
                for val in cookie:
                    string += val + "=" + str(cookie[val]) + "; "
                # Always mark the cookie HttpOnly.
                string += "HttpOnly\r\n"
        return string

    def _powered(self, statement):
        """Format an X-Powered-By response header line."""
        string = 'X-Powered-By: ' + statement
        return string + "\r\n"

    def add_set_cookies(self):
        """Concatenate the resolver's extra Set-Cookie lines, each
        terminated with CRLF."""
        string = ""
        for l in self.Files.additional_set_cookie:
            string += l + "\r\n"
        return string

    def _contentType(self):
        """Fill in the Content-Type response header.

        Prefers the resolver's detected mime type, then falls back to
        the extension map.  NOTE(review): the final elif indexes
        self._extMap with an extension already known to be absent from
        it (the earlier branch failed), so it raises KeyError for any
        unmapped extension — confirm and fix.
        """
        # If content type has already been set by the resolver:
        if self.Files.mime_type:
            self.send_headers['Content-Type'] = self.Files.mime_type
            if self.send_headers['Content-Type'] == 'text/html':
                self.send_headers[
                    'Content-Type'] += '; charset=' + self._encoding
        # Find the extension in the extension map.
        elif self._extension in self._extMap:
            # Add the corresponding format to the string.
            self.send_headers['Content-Type'] = self._extMap[self._extension]
        # If it's a css/js file, leave the default untouched.
        elif self._extension in ['css', 'js']:
            pass
        elif 'image' in self._extMap[self._extension]:
            pass
        else:
            # It is not a css file: tag the charset onto the default.
            self.send_headers['Content-Type'] += '; charset=' + self._encoding
        return
def __init__(self, parent_folder, url):
    """Initialise all request/response state for one connection.

    parent_folder is the document root; url is the request host.
    """
    # NOTE(review): `super.__self__` is a no-op attribute lookup on the
    # built-in super type — presumably meant to be `super().__init__()`;
    # confirm and clean up.
    super.__self__
    self.parent_folder = parent_folder  # document root on disk
    self.host = url                     # request host (reset by _getHost)
    self.port = 0
    self.content_type = ""
    self.request_method = ''
    self.requested_file = ''
    self.requested_body = ''
    self._encoding = 'UTF-8'
    self._extension = ''
    # File resolver; rebuilt per-request in computeResponse.
    self.Files = FileSystem(self.parent_folder, self.host)
    self._content_length = 0
    self.raw_headers = ""
    self.status_code = 0
    self.status_str = ""
    self.headerPair = {}  # parsed request headers: name -> value
    # Status-code -> reason-phrase map.
    # NOTE(review): 300 is labelled 'NOT FOUND' (should be Multiple
    # Choices) and 401 is misspelled — runtime strings left as-is.
    self.status_stat = {
        200: 'OK', 300: 'NOT FOUND', 301: 'MOVED PERMANENTLY',
        302: 'FOUND', 303: 'SEE OTHER', 304: 'NOT MODIFIED',
        307: 'Temporary Redirect', 308: 'Permanent Redirect',
        400: 'Bad Request', 401: 'Unathourized',
        402: 'Payment Required', 403: 'Forbidden', 404: 'NOT FOUND',
        405: 'Method Not Allowed', 406: 'Not Acceptable',
        407: 'Proxy Authentication Required', 408: 'Request Timeout',
        409: 'Conflict', 410: 'Gone', 411: 'Length Required',
        412: 'Precondition Failed', 413: 'Payload Too Large',
        414: 'URI Too Long', 415: 'Unsupported Media Type',
        416: 'Requested Range Not Satisfiable',
        417: 'Expectation Failed', 418: "I'm a teapot",
        421: 'Misdirected Request', 422: 'Unprocessable Entity',
        423: 'Locked', 424: 'Failed Dependency', 425: 'Too Early',
        426: 'Upgrade Required', 428: 'Precondition Required',
        429: 'Too Many Requests',
        431: 'Request Header Fields Too Large',
        451: 'Unavailable For Legal Reasons',
        500: 'Internal Server Error', 501: 'Not Implemented',
        502: 'Bad Gateway', 503: 'Service Unavailable',
        504: 'Gateway Timeout', 505: 'HTTP Version Not Supported',
        506: 'Variant Also Negotiates', 507: 'Insufficient Storage',
        508: 'Loop Detected', 510: 'Not Extended',
        511: 'Network Authentication Required'
    }
    # Default response headers; Content-Length and Content-Type are
    # overwritten per-request.
    self.send_headers = {
        'Server': 'Peter (Python/3.7)',
        'X-Frame-Options': 'SAMEORIGIN',
        'Accept-Ranges': 'bytes',
        'Content-Length': '0',
        'Keep-Alive': 'timeout=5, max=99',
        'Connection': 'Keep-Alive',
        'Content-Type': 'text/html'
    }
    self.data = ''  # response body (str or bytes)
    # File-extension -> MIME-type map.
    self._extMap = {
        'html': 'text/html', 'htm': 'text/html', 'php': 'text/html',
        'css': 'text/css', 'py': 'text/html',
        'js': 'application/javascript', 'json': 'application/json',
        'png': 'image/png', 'jpeg': 'image/jpeg', 'gif': 'image/gif',
        'svg': 'image/svg+xml', 'tiff': 'image/tiff',
        'aces': 'image/aces', 'avci': 'image/avci',
        'avcs': 'image/avcs', 'bmp': 'image/bmp', 'cgm': 'image/cgm',
        'dicom-rle': 'image/dicom-rle', 'emf': 'image/emf',
        'example': 'image/example', 'fits': 'image/fits',
        'g3fax': 'image/g3fax', 'heic': 'image/heic',
        'heic-sequence': 'image/heic-sequence', 'heif': 'image/heif',
        'heif-sequence': 'image/heif-sequence', 'hej2k': 'image/hej2k',
        'hsj2': 'image/hsj2', 'ief': 'image/ief', 'jls': 'image/jls',
        'jp2': 'image/jp2', 'jph': 'image/jph', 'jphc': 'image/jphc',
        'jpm': 'image/jpm', 'jpx': 'image/jpx', 'jxr': 'image/jxr',
        'jxrA': 'image/jxrA', 'jxrS': 'image/jxrS', 'jxs': 'image/jxs',
        'jxsc': 'image/jxsc', 'jxsi': 'image/jxsi',
        'jxss': 'image/jxss', 'ktx': 'image/ktx',
        'naplps': 'image/naplps', 'prs.btif': 'image/prs.btif',
        'prs.pti': 'image/prs.pti', 'pwg-raster': 'image/pwg-raster',
        't38': 'image/t38', 'tiff-fx': 'image/tiff-fx',
        'wmf': 'image/wmf', 'ico': 'image/ico'
    }
    # Request-header name -> handler; each handler takes one string.
    self.functions = {
        'Host': self._getHost,
        'X-Powered-By': self._powered,
        'User-Agent': self._getUserAgent,
        'Content-Type': self._getContentType,
        'Cookie': self._getCookies
    }
    self.cookies = {}      # parsed request cookies: name -> value
    self.cookie_str = ""   # raw Cookie header value
    self.user_agent_str = ""
def test_check_that_parent_of_file_does_not_exist(self):
    """exists() is False when the file's parent folder was never created."""
    filesystem = FileSystem()
    self.assertFalse(filesystem.exists("home\\test_folder\\test1.txt"))
def test_cannot_create_entity_if_no_drives_exist(self):
    """Creating any entity on an empty filesystem raises PathNotFoundException."""
    filesystem = FileSystem()
    with self.assertRaises(PathNotFoundException):
        filesystem.create(EntityType.ZIP_FILE, "file.zip", "")
def test_cannot_create_file_if_parent_does_not_exist(self):
    """Creating a file under a missing folder raises PathNotFoundException."""
    filesystem = FileSystem()
    filesystem.create_drive("home")
    with self.assertRaises(PathNotFoundException):
        filesystem.create(EntityType.FILE, "file1.txt", "home\\does_not_exist")
def test_cannnot_create_drives_with_same_name(self):
    """A duplicate drive name raises PathAlreadyExistsException.

    (Method name typo "cannnot" kept: renaming would change the test id.)
    """
    filesystem = FileSystem()
    filesystem.create_drive("home")
    with self.assertRaises(PathAlreadyExistsException):
        filesystem.create_drive("home")
def test_cannot_delete_non_existent_file(self):
    """Deleting a path that was never created raises PathNotFoundException."""
    filesystem = FileSystem()
    with self.assertRaises(PathNotFoundException):
        filesystem.delete("home\\test")
if __name__ == "__main__": source_root, dest_root = sys.argv[1:] # TODO: What else can we use for peerids when there are no peers? source_peerid = source_root dest_peerid = dest_root # TODO: implement reading .psync. source_groupids = Groupids({"": source_root}) dest_groupids = Groupids({"": dest_root}) conf = Config() clock = Clock() slog = StatusLog(clock) fs = FileSystem(slog) source_db_path = os.path.join(source_root, conf.db_path) dest_db_path = os.path.join(dest_root, conf.db_path) revisions_root = os.path.join(dest_root, conf.revisions_path) fs.create_parent_dirs(source_db_path) fs.create_parent_dirs(dest_db_path) with sqlite3.connect(source_db_path) as source_db: with sqlite3.connect(dest_db_path) as dest_db: source_history_store = HistoryStore(SqlDb(source_db), slog) dest_history_store = HistoryStore(SqlDb(dest_db), slog) revisions = RevisionStore(fs, revisions_root) merge_log = MergeLog(SqlDb(source_db), clock) source_history = scan_and_update_history(
def import_photos(iphoto_dir, shotwell_db, photos_dir, force_copy):
    """Import an iPhoto library into a Shotwell database.

    Reads AlbumData.xml from iphoto_dir, backs up and then inserts
    events and photos into the Shotwell sqlite DB at shotwell_db,
    queueing image files for copy/link into photos_dir.

    NOTE(review): this function uses Python 2 `print >> sys.stderr`
    statements — it cannot run on Python 3 as-is.
    """
    _log.debug("Arguments")
    _log.debug("\t- iPhoto dir : %s", iphoto_dir)
    _log.debug("\t- Shotwell db : %s", shotwell_db)
    _log.debug("\t- Shotwell dir : %s", photos_dir)
    _log.debug("\t- force copy : %s", force_copy)
    fs = FileSystem(force_copy)
    # Sanity check the iPhoto dir and Shotwell DB.
    _log.debug("Performing sanity checks on iPhoto and Shotwell DBs.")
    now = int(time.time())
    album_data_filename = join_path(iphoto_dir, "AlbumData.xml")
    if not os.path.exists(album_data_filename):
        _log.error("Failed to find expected file inside iPhoto library: %s",
                   album_data_filename)
        sys.exit(1)
    if not os.path.exists(shotwell_db):
        _log.error("Shotwell DB not found at %s", shotwell_db)
        sys.exit(2)
    db = sqlite3.connect(shotwell_db)  #@UndefinedVariable
    backingPhotoTable = BackingPhotoTable(db)
    with db:
        cursor = db.execute("SELECT schema_version from VersionTable;")
        schema_version = cursor.fetchone()[0]
        # Refuse to touch schemas we haven't been tested against.
        if schema_version not in SUPPORTED_SHOTWELL_SCHEMAS:
            _log.error(
                "Shotwell DB uses unsupported schema version %s. "
                "Giving up, just to be safe.", schema_version)
            sys.exit(3)
    _log.debug("Sanity checks passed.")
    # Back up the Shotwell DB before we modify anything.
    fmt_now = time.strftime('%Y-%m-%d_%H%M%S')
    db_backup = "%s.iphotobak_%s" % (shotwell_db, fmt_now)
    _log.debug("Backing up shotwell DB to %s", db_backup)
    shutil.copy(shotwell_db, db_backup)
    _log.debug("Backup complete")
    # Load and parse the iPhoto DB (a plist file).
    _log.debug(
        "Loading the iPhoto library file. Might take a while for a large DB!"
    )
    album_data = plistlib.readPlist(album_data_filename)
    _log.debug("Finished loading the iPhoto library.")
    path_prefix = album_data["Archive Path"]

    def fix_prefix(path, new_prefix=iphoto_dir):
        # Re-root an iPhoto archive path under new_prefix; passes
        # falsy paths through unchanged.
        if path:
            if path[:len(path_prefix)] != path_prefix:
                raise AssertionError("Path %s didn't begin with %s" %
                                     (path, path_prefix))
            path = path[len(path_prefix):]
            path = join_path(new_prefix, path.strip(os.path.sep))
        return path

    photos = {}  # Map from photo ID to photo info.
    copy_queue = []  # (source, destination) pairs to link/copy at the end.
    # (A sample Shotwell PhotoTable row previously listed here for
    # reference: filename, width/height, filesize, timestamps, md5s,
    # orientation, event_id, editable_id, developer, etc.)
    skipped = []  # source paths we could not import
    for key, i_photo in album_data["Master Image List"].items():
        # "ImagePath" is the (possibly modified) image; "OriginalPath"
        # is the untouched original when a modified version exists.
        mod_image_path = fix_prefix(i_photo.get("ImagePath", None))
        orig_image_path = fix_prefix(i_photo.get("OriginalPath", None))
        new_mod_path = fix_prefix(i_photo.get("ImagePath"),
                                  new_prefix=photos_dir)
        new_orig_path = fix_prefix(i_photo.get("OriginalPath", None),
                                   new_prefix=photos_dir)
        if not orig_image_path or not os.path.exists(mod_image_path):
            # No separate original: treat the modified file as original.
            orig_image_path = mod_image_path
            new_orig_path = new_mod_path
            new_mod_path = None
            mod_image_path = None
            mod_file_size = None
        else:
            mod_file_size = os.path.getsize(mod_image_path)
        if not os.path.exists(orig_image_path):
            _log.error("Original file not found %s", orig_image_path)
            skipped.append(orig_image_path)
            continue
        copy_queue.append((orig_image_path, new_orig_path))
        if mod_image_path:
            copy_queue.append((mod_image_path, new_mod_path))
        mime, _ = mimetypes.guess_type(orig_image_path)
        # Progress dot per photo.
        sys.stdout.write('.')
        sys.stdout.flush()
        if mime not in ("image/jpeg", "image/png", "image/x-ms-bmp",
                        "image/tiff"):
            print
            _log.error("Skipping %s, it's not an image, it's a %s",
                       orig_image_path, mime)
            skipped.append(orig_image_path)
            continue
        caption = i_photo.get("Caption", "")
        img = Image.open(orig_image_path)
        w, h = img.size
        md5 = fs.md5_for_file(orig_image_path)
        orig_timestamp = int(os.path.getmtime(orig_image_path))
        mod_w, mod_h, mod_md5, mod_timestamp = None, None, None, None
        if mod_image_path:
            try:
                mod_img = Image.open(mod_image_path)
            except Exception:
                # Unreadable modified image: fall back to treating the
                # modified path as the original.
                _log.error("Failed to open modified image %s, skipping",
                           mod_image_path)
                orig_image_path = mod_image_path
                new_orig_path = new_mod_path
                new_mod_path = None
                mod_image_path = None
                mod_file_size = None
            else:
                mod_w, mod_h = mod_img.size
                mod_md5 = fs.md5_for_file(mod_image_path)
                mod_timestamp = int(os.path.getmtime(mod_image_path))
        file_format = FILE_FORMAT.get(mime, -1)
        if file_format == -1:
            raise Exception("Unknown image type %s" % mime)
        # NOTE(review): "mod_md5" is set to the ORIGINAL file's md5;
        # the mod_md5 computed above is never stored — likely a bug.
        photo = {
            "orig_image_path": orig_image_path,
            "mod_image_path": mod_image_path,
            "new_mod_path": new_mod_path,
            "new_orig_path": new_orig_path,
            "orig_file_size": os.path.getsize(orig_image_path),
            "mod_file_size": mod_file_size,
            "mod_timestamp": mod_timestamp,
            "orig_timestamp": orig_timestamp,
            "caption": caption,
            "rating": i_photo["Rating"],
            "event": i_photo["Roll"],
            "orig_exposure_time":
                int(parse_date(i_photo["DateAsTimerInterval"])),
            "width": w,
            "height": h,
            "mod_width": mod_w,
            "mod_height": mod_h,
            "orig_md5": md5,
            "mod_md5": md5,
            "file_format": file_format,
            "time_created": now,
            "import_id": now,
        }
        # Maybe it's not available in previous schema versions.
        if schema_version >= 20:
            photo['comment'] = i_photo["Comment"]

        def read_metadata(path, photo, prefix="orig_"):
            # Read EXIF orientation and exposure time into photo under
            # the given key prefix; defaults to orientation 1.
            photo[prefix + "orientation"] = 1
            photo[prefix + "original_orientation"] = 1
            try:
                meta = ImageMetadata(path)
                meta.read()
                try:
                    photo[prefix + "orientation"] = meta[
                        "Exif.Image.Orientation"].value
                    photo[prefix + "original_orientation"] = meta[
                        "Exif.Image.Orientation"].value
                except KeyError:
                    print
                    _log.debug("Failed to read the orientation from %s" %
                               path)
                exposure_dt = meta["Exif.Image.DateTime"].value
                photo[prefix + "exposure_time"] = exif_datetime_to_time(
                    exposure_dt)
            except KeyError:
                # No EXIF date: leave exposure_time unset.
                pass
            except Exception:
                print
                _log.exception("Failed to read date from %s", path)
                raise

        try:
            read_metadata(orig_image_path, photo, "orig_")
            photo["orientation"] = photo["orig_orientation"]
            if mod_image_path:
                read_metadata(mod_image_path, photo, "mod_")
                photo["orientation"] = photo["mod_orientation"]
        except Exception:
            _log.error("**** Skipping %s" % orig_image_path)
            skipped.append(orig_image_path)
            continue
        photos[key] = photo
    # Build event records from iPhoto "rolls".
    events = {}
    for event in album_data["List of Rolls"]:
        key = event["RollID"]
        events[key] = {
            "date": parse_date(event["RollDateAsTimerInterval"]),
            "key_photo": event["KeyPhotoKey"],
            "photos": event["KeyList"],
            "name": event["RollName"]
        }
        for photo_key in event["KeyList"]:
            # Each photo belongs to at most one roll.
            assert photo_key not in photos or photos[photo_key][
                "event"] == key
    # Insert into the Shotwell DB.
    for _, event in events.items():
        c = db.execute(
            """
            INSERT INTO EventTable (time_created, name)
            VALUES (?, ?)
            """, (event["date"], event["name"]))
        assert c.lastrowid is not None
        event["row_id"] = c.lastrowid
        # Link each of the event's photos to the new event row.
        for photo_key in event["photos"]:
            if photo_key in photos:
                photos[photo_key]["event_id"] = event["row_id"]
    for key, photo in photos.items():
        if "event_id" not in photo:
            _log.error("Photo didn't have an event: %s", photo)
            skipped.append(photo["orig_image_path"])
            continue
        editable_id = -1
        if photo["mod_image_path"] is not None:
            # This photo has a backing (modified) image.
            editable_id = backingPhotoTable.insert(photo)
        photo["editable_id"] = editable_id
        try:
            # NOTE(review): the :comment parameter is only present in
            # `photo` when schema_version >= 20 — confirm older schemas
            # never reach this INSERT.
            c = db.execute(
                """
                INSERT INTO PhotoTable (filename, width, height, filesize,
                    timestamp, exposure_time, orientation,
                    original_orientation, import_id, event_id, md5,
                    time_created, flags, rating, file_format, title,
                    editable_id, metadata_dirty, developer,
                    develop_shotwell_id, develop_camera_id,
                    develop_embedded_id, comment)
                VALUES (:new_orig_path, :width, :height, :orig_file_size,
                    :orig_timestamp, :orig_exposure_time, :orientation,
                    :orig_original_orientation, :import_id, :event_id,
                    :orig_md5, :time_created, 0, :rating, :file_format,
                    :caption, :editable_id, 1, 'SHOTWELL', -1, -1, -1,
                    :comment);
                """, photo)
        except Exception:
            _log.exception("Failed to insert photo %s" % photo)
            raise
    print >> sys.stderr, "Skipped importing these files:\n", "\n".join(
        skipped)
    print >> sys.stderr, "%s file skipped (they will still be copied)" % len(
        skipped)
    # Finally, link/copy every queued file into the Shotwell photo dir.
    for src, dst in copy_queue:
        fs.safe_link_file(src, dst)
    db.commit()