def index_content(root_dir, file_types, content_type):
    """Scan the media directory, creating an index of file properties
    for display and serving."""
    logger.debug('indexing')
    content_dir = os.path.join(root_dir, app.config['CONTENT_DIR'])
    files = file_paths(filtered_walk(
        content_dir,
        included_files=file_types,
    ))
    for contentfile in files:
        filename = os.path.split(contentfile)[1]
        local_path = os.path.relpath(contentfile, root_dir)
        filepath = os.path.join(root_dir, local_path)
        # Folder art, if present, lives next to the media file.
        folder_img = os.path.join(os.path.dirname(filepath), 'folder.jpg')
        img = folder_img if os.path.exists(folder_img) else ''
        # Derive a stable key from the path relative to the root; a fresh
        # hasher per file keeps the key reproducible across runs.
        file_key = sha1(local_path.encode('utf-8')).hexdigest()
        tags = _get_tags(filepath)

        media = Media()
        media.type = content_type
        media.path = filepath
        media.filename = filename
        media.file_id = file_key
        media.tags = tags
        media.img = img
        media.save()
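# A minimal usage sketch, assuming filtered_walk and file_paths come from the
# walkdir package (so file_types are fnmatch-style glob patterns) and that
# app.config['CONTENT_DIR'] names a subdirectory of root_dir; the path and
# patterns below are only illustrative:
index_content('/srv/media', ['*.mp3', '*.flac', '*.ogg'], 'music')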
def mediamgnt(filename=None):
    if request.method == "GET":
        if "fileid" not in request.args:
            abort(400)
        media = Media.get_by_id(request.args["fileid"])
        if not media or media.filename != filename:
            abort(404)
        return send_from_directory(
            current_app.config["UPLOAD_FOLDER"], media.local_filename)
    elif request.method == "POST":
        f = request.files["files[]"]
        if f:
            filename = f.filename
            version = Media.get_version(filename)
            local_filename = Media.new_local_filename(filename, version)
            filepath = os.path.join(
                current_app.config['UPLOAD_FOLDER'], local_filename)
            f.save(filepath)
            filesize = os.stat(filepath).st_size
            now = datetime.now()
            hashstr = local_filename + now.strftime("%Y-%m-%d %H:%M:%S")
            hashstr = hashstr.encode("utf8")
            media = Media(
                fileid=hashlib.sha256(hashstr).hexdigest(),
                filename=filename,
                version=version,
                content_type=f.content_type,
                size=filesize,
                create_time=now,
                display=True
            )
            media.save()
        return json.dumps({"files": []})
    elif request.method == "DELETE":
        removelist = request.json
        for eachfile in removelist:
            fileid = eachfile["fileid"]
            filename = eachfile["filename"]
            onemedia = Media.get_by_id(fileid)
            if not onemedia or onemedia.filename != filename:
                continue
            onemedia.delete()
        return jsonify(success=True, message="success")
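# Media.new_local_filename is used above but not shown. A plausible sketch,
# assuming it only has to keep every uploaded version on disk without name
# collisions; the ".v<N>" naming scheme is an assumption, not the project's
# actual convention:
import os

def new_local_filename(filename, version):
    stem, ext = os.path.splitext(filename)
    return "{0}.v{1}{2}".format(stem, version, ext)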
def medias(filename=None):
    if request.method == "GET":
        media = Media.get_media(filename=filename)
        if not media:
            abort(404)
        return send_from_directory(
            current_app.config["UPLOAD_FOLDER"], media.filename)
    elif request.method == "POST":
        f = request.files["files[]"]
        if not f:
            return jsonify({"files": []})
        filename = f.filename
        # Save the file to the upload folder, replacing any existing copy.
        filepath = os.path.join(current_app.config['UPLOAD_FOLDER'], filename)
        if os.path.exists(filepath):
            os.remove(filepath)
        f.save(filepath)
        filesize = os.stat(filepath).st_size
        now = datetime.now()
        # If a record with the same name exists, update it in place.
        media = Media.get_one(Media.filename == filename)
        if not media:
            media = Media(filename=filename)
            media.fileid = hashlib.sha256(
                filename.encode("utf-8")).hexdigest()
        media.content_type = f.content_type
        media.size = filesize
        media.create_time = now
        media.save()
        return jsonify({"files": []})
    elif request.method == "DELETE":
        removelist = request.json
        for eachfile in removelist:
            fileid = eachfile["fileid"]
            filename = eachfile["filename"]
            onemedia = Media.get_one(Media.fileid == fileid)
            if not onemedia or onemedia.filename != filename:
                continue
            # Remove the file from the upload folder; ignore a missing file.
            try:
                os.remove(
                    onemedia.filepath(current_app.config["UPLOAD_FOLDER"]))
            except OSError:
                pass
            # Remove the record from the database.
            onemedia.delete_instance()
        return jsonify(success=True, message="success")
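# Both views above are Flask request handlers that multiplex GET (serve),
# POST (upload) and DELETE (remove) on one function. A minimal wiring sketch,
# assuming a standard Flask app; the URL rules and UPLOAD_FOLDER path are
# illustrative, not taken from the original project:
from flask import Flask

app = Flask(__name__)
app.config["UPLOAD_FOLDER"] = "/srv/uploads"

app.add_url_rule("/media/<filename>", view_func=medias, methods=["GET"])
app.add_url_rule("/media/", view_func=medias,
                 methods=["POST", "DELETE"])
app.add_url_rule("/mediamgnt/<filename>", view_func=mediamgnt,
                 methods=["GET"])
app.add_url_rule("/mediamgnt/", view_func=mediamgnt,
                 methods=["POST", "DELETE"])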
def setUp(self):
    # Set TEMPLATE_DEBUG to True to ensure {% include %} will raise
    # exceptions, since that is how inlines are rendered and #9498 will
    # bubble up if it is an issue.
    self.original_template_debug = settings.TEMPLATE_DEBUG
    settings.TEMPLATE_DEBUG = True

    self.client.login(username='******', password='******')

    # Can't load content via a fixture (since the GenericForeignKey
    # relies on content type IDs, which will vary depending on what
    # other tests have been run), thus we do it here.
    e = Episode.objects.create(name='This Week in Django')
    m = Media(content_object=e, url='http://example.com/podcast.mp3')
    m.save()
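# setUp saves the original TEMPLATE_DEBUG value, which implies the test case
# restores it afterwards; a minimal tearDown sketch under that assumption,
# belonging to the same TestCase class:
def tearDown(self):
    settings.TEMPLATE_DEBUG = self.original_template_debug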
def job_finished_cb(patero, job):
    tmp = []
    for filename in job['output']['files']:
        copy_or_link(filename, common.output_dir)
        os.unlink(filename)
        tmp.append(
            os.path.join(common.output_dir, os.path.basename(filename)))
    job['output']['files'] = tmp
    job.save()

    # Here, tell Caspa the file is ready.
    media = Media()
    for key in ['metadata', 'stat']:
        media.update(job['output'][key])
    media['_id'] = job['output']['checksum']
    media['checksum'] = job['output']['checksum']
    media['files'] = job['output']['files']
    media['file'] = job['output']['transcoded']
    media['stat'] = job['output']['stat']
    media.save()
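# copy_or_link is referenced above but not defined in this snippet. A plausible
# sketch, assuming the intent is to hard-link into the output directory when
# source and destination share a filesystem and to fall back to a plain copy
# otherwise (the name matches the call above; the body is an assumption):
import os
import shutil

def copy_or_link(src, dst_dir):
    dst = os.path.join(dst_dir, os.path.basename(src))
    try:
        os.link(src, dst)       # cheap hard link on the same filesystem
    except OSError:
        shutil.copy2(src, dst)  # cross-device or unsupported: do a real copy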
def import_blog():
    f = request.files["file"]
    try:
        data = f.stream.read().decode("utf-8")
        data = json.loads(data)
        links = data.pop("links", [])
        medias = data.pop("medias", [])
        posts = data.pop("posts", [])

        for link in links:
            new_link = Link.get_by_href(link["href"])
            if new_link:
                continue
            new_link = Link()
            for item in link:
                new_link.__dict__[item] = link[item]
            new_link.link_id = None
            new_link.create_time = \
                datetime.fromtimestamp(new_link.create_time)
            new_link.save()

        for media in medias:
            new_media = Media.get_by_fileid(media["fileid"])
            if new_media:
                continue
            new_media = Media()
            for item in media:
                new_media.__dict__[item] = media[item]
            # Notice: the media fileid should not be set to None.
            new_media.media_id = None
            new_media.create_time = \
                datetime.fromtimestamp(new_media.create_time)
            new_media.save()

        for post in posts:
            # If the post already exists, skip it.
            new_post = Post.get_by_url(post["url"], public_only=False)
            if new_post:
                continue
            new_post = Post()
            for item in post:
                new_post.__dict__[item] = post[item]
            new_post.post_id = None
            new_post.create_time = \
                datetime.fromtimestamp(new_post.create_time)
            new_post.update_time = \
                datetime.fromtimestamp(new_post.update_time)
            new_post.raw_content = re.sub('<[^<]+?>', "", new_post.content)
            newtags = new_post.tags
            new_post.tags = ""
            new_post.update_tags(newtags)
            new_post.save()

            # Restore all of the post's comments.
            comments = post["commentlist"]
            for comment in comments:
                new_comment = Comment()
                for item in comment:
                    new_comment.__dict__[item] = comment[item]
                new_comment.post_id = new_post.post_id
                new_comment.comment_id = None
                new_comment.create_time = \
                    datetime.fromtimestamp(new_comment.create_time)
                new_comment.save()
    except Exception as e:
        return str(e)
    return "Done"
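# A sketch of the export payload shape this importer expects: top-level
# "links", "medias", and "posts" arrays, each post carrying its own
# "commentlist". Only the fields the code reads directly (href, fileid, url,
# create_time, update_time, content, tags, commentlist) are grounded in the
# code above; the other field names and values are illustrative, and the
# timestamps are Unix epoch seconds:
example_payload = {
    "links": [{"href": "https://example.com", "name": "Example",
               "create_time": 1500000000}],
    "medias": [{"fileid": "abc123", "filename": "cover.jpg",
                "create_time": 1500000000}],
    "posts": [{"url": "hello-world", "title": "Hello, world",
               "content": "<p>First post</p>", "tags": "misc",
               "create_time": 1500000000, "update_time": 1500000000,
               "commentlist": [{"author": "alice", "content": "Nice!",
                                "create_time": 1500000000}]}],
}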