def generate_key(*args, **kwargs):
    """Helper function to generate the md5 hash of all the passed in args.

    Positional values are hashed in the order given. Keyword pairs are
    hashed in sorted key order, so the digest no longer depends on the
    order in which keyword arguments happen to be supplied at the call
    site (dict iteration order would otherwise leak into the key).

    :returns: hex digest string of the combined arguments
    """
    md5 = hashlib.md5()
    for data in args:
        md5.update(to_bytes(data))
    # sorted() makes the key deterministic for logically identical
    # keyword arguments passed in any order
    for key, value in sorted(kwargs.items()):
        md5.update(to_bytes(key) + to_bytes(value))
    return md5.hexdigest()
def import_content(srcdir, destdir, meta_filenames, fsal, notifications,
                   notifications_db):
    """Discover content directories under ``srcdir`` using the first
    generation folder structure and copy them into ``destdir``, while
    dropping the old nested structure and putting them into a single
    folder which name is generated from the slugified title of the
    content.

    :param srcdir: root directory scanned for legacy content folders
    :param destdir: library root the content is transferred into
    :param meta_filenames: candidate metadata file names; the first entry
        is the canonical name the upgraded metadata is rewritten under
    :param fsal: filesystem abstraction providing ``exists`` and
        ``transfer``; database updates happen later, on fsal's events
    :param notifications: used to send the final import summary message
    :param notifications_db: database handle forwarded to
        ``notifications.send``
    """
    srcdir = os.path.abspath(srcdir)
    # nothing to do if the legacy content root is missing entirely
    if not os.path.exists(srcdir):
        logging.info(u"Content directory: {0} does not exist.".format(srcdir))
        return
    logging.info(u"Starting content import of {0}".format(srcdir))
    added = 0  # count of successfully initiated transfers
    for src_path in find_content_dirs(srcdir, meta_filenames):
        meta = read_meta(src_path, meta_filenames)
        if not meta:
            logging.error(u"Content import of {0} skipped. No valid metadata "
                          "was found.".format(src_path))
            continue  # metadata couldn't be found or read, skip this item
        # process and save the found metadata under the canonical name
        upgrade_meta(meta)
        meta_path = os.path.join(src_path, meta_filenames[0])
        with open(meta_path, 'w') as meta_file:
            json.dump(meta, meta_file)
        # delete any other meta files
        delete_old_meta(src_path, meta_filenames)
        # move content folder into library; destination folder name is
        # derived from the first usable title source, truncated byte-wise
        # to MAX_TITLE_LENGTH before being decoded back to unicode
        title = to_unicode(
            to_bytes(
                safe_title(meta['title']) or
                safe_title(meta['url']) or
                get_random_title())[:MAX_TITLE_LENGTH])
        # bucket content under its first alphanumeric character
        # (falls back to the raw first character if the regex misses)
        match = FIRST_CHAR.search(title)
        first_letter = (match.group() if match else None) or title[0]
        dest_path = os.path.join(destdir, first_letter.upper(), title)
        # unindexed=True: check the real filesystem, not just fsal's index
        # -- TODO confirm against fsal documentation
        if not fsal.exists(dest_path, unindexed=True):
            (success, error) = fsal.transfer(src_path, dest_path)
            if not success:
                logging.error(u"Content import of {0} failed with "
                              "{1}".format(src_path, error))
                continue
            # adding to database will happen when we're notified by fsal about
            # the event
            added += 1
    success_msg = "{0} content items imported from {1}.".format(added, srcdir)
    logging.info(success_msg)
    notifications.send(success_msg, db=notifications_db)
def import_content(srcdir, destdir, meta_filenames, fsal, notifications,
                   notifications_db):
    """Discover content directories under ``srcdir`` using the first
    generation folder structure and copy them into ``destdir``, while
    dropping the old nested structure and putting them into a single
    folder which name is generated from the slugified title of the
    content.

    NOTE(review): this is a duplicate definition of ``import_content``;
    an identical earlier definition exists in this file and is shadowed
    by this one. Confirm which copy is intended and remove the other.

    :param srcdir: root directory scanned for legacy content folders
    :param destdir: library root the content is transferred into
    :param meta_filenames: candidate metadata file names; the first entry
        is the canonical name the upgraded metadata is rewritten under
    :param fsal: filesystem abstraction providing ``exists`` and
        ``transfer``; database updates happen later, on fsal's events
    :param notifications: used to send the final import summary message
    :param notifications_db: database handle forwarded to
        ``notifications.send``
    """
    srcdir = os.path.abspath(srcdir)
    # nothing to do if the legacy content root is missing entirely
    if not os.path.exists(srcdir):
        logging.info(u"Content directory: {0} does not exist.".format(srcdir))
        return
    logging.info(u"Starting content import of {0}".format(srcdir))
    added = 0  # count of successfully initiated transfers
    for src_path in find_content_dirs(srcdir, meta_filenames):
        meta = read_meta(src_path, meta_filenames)
        if not meta:
            logging.error(u"Content import of {0} skipped. No valid metadata "
                          "was found.".format(src_path))
            continue  # metadata couldn't be found or read, skip this item
        # process and save the found metadata under the canonical name
        upgrade_meta(meta)
        meta_path = os.path.join(src_path, meta_filenames[0])
        with open(meta_path, 'w') as meta_file:
            json.dump(meta, meta_file)
        # delete any other meta files
        delete_old_meta(src_path, meta_filenames)
        # move content folder into library; destination folder name is
        # derived from the first usable title source, truncated byte-wise
        # to MAX_TITLE_LENGTH before being decoded back to unicode
        title = to_unicode(to_bytes(safe_title(meta['title']) or
                                    safe_title(meta['url']) or
                                    get_random_title())[:MAX_TITLE_LENGTH])
        # bucket content under its first alphanumeric character
        # (falls back to the raw first character if the regex misses)
        match = FIRST_CHAR.search(title)
        first_letter = (match.group() if match else None) or title[0]
        dest_path = os.path.join(destdir, first_letter.upper(), title)
        # unindexed=True: check the real filesystem, not just fsal's index
        # -- TODO confirm against fsal documentation
        if not fsal.exists(dest_path, unindexed=True):
            (success, error) = fsal.transfer(src_path, dest_path)
            if not success:
                logging.error(u"Content import of {0} failed with "
                              "{1}".format(src_path, error))
                continue
            # adding to database will happen when we're notified by fsal about
            # the event
            added += 1
    success_msg = "{0} content items imported from {1}.".format(added, srcdir)
    logging.info(success_msg)
    notifications.send(success_msg, db=notifications_db)
def test_to_bytes():
    # latin1-encoding an ASCII string must yield the same bytes
    expected = b'foobar'
    assert mod.to_bytes('foobar', 'latin1') == expected
def preprocess_path(self, value):
    """Return the hex MD5 digest of ``value``, encoded to bytes first."""
    digest = hashlib.md5(to_bytes(value))
    return digest.hexdigest()
def get_cache_key(cls, path):
    """Build the byte-string cache key for ``path``.

    The path is hashed via ``generate_key`` and interpolated into the
    class-level ``CACHE_KEY_TEMPLATE``.
    """
    hashed = generate_key(path)
    key = cls.CACHE_KEY_TEMPLATE.format(hashed)
    return to_bytes(key)
def action_map(cls):
    """ Return JSON serialized form of :py:attr:`~OverlayForm.ACTIONS` """
    # encode each action label to bytes before serializing the mapping
    actions = {name: to_bytes(label) for (name, label) in cls.ACTIONS}
    return json.dumps(actions)
def test_to_bytes():
    # ASCII input round-trips through latin1 unchanged
    result = mod.to_bytes("foobar", "latin1")
    assert result == b"foobar"