def fetch_sub(location, directory=None, token=None):
    """Fetch subtitles for the video file(s) found under location.

    location is the full path of a movie file or of a movie directory;
    fetch_sub will search it for video files.
    directory is the directory where subtitles should be stored; it
    defaults to the directory of each movie file.
    """
    my_open_sub = MyOpenSub(OPENSUB_URL, token)
    if not token:
        my_open_sub.login(USERNAME, PASSWORD, LANGUAGE, USER_AGENT)
    filenames = find_video(location)
    if not filenames:
        print "No video file found"
        return
    for filename in filenames:
        print "Fetching subtitle for %s" % filename
        file_hash, file_bytes = hashfile(filename)
        dl_links = my_open_sub.search_sub_links(file_hash, file_bytes)
        if not dl_links:
            print "Sorry, no subtitle found"
            continue
        # Store next to the movie file unless a directory was given.
        out_dir = directory or os.path.dirname(filename)
        for dl_link in dl_links:
            try:
                sub_file = wget.download(dl_link, out=out_dir)
                print "Download finished: %s" % sub_file
                sub_file = unzip(sub_file, out_dir)
                if sub_file:
                    print "Unzipped to %s" % sub_file
            except IOError as io_error:
                print io_error
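# Hedged sketch, not part of the original module: fetch_sub assumes a
# hashfile(path) helper returning (file_hash, file_bytes) in the format the
# OpenSubtitles API expects, i.e. the file size plus a 64-bit checksum of the
# first and last 64 KiB of the file. hashfile_opensub is a hypothetical name.
import os
import struct

def hashfile_opensub(path):
    longlong_fmt = "<q"  # little-endian 64-bit
    bytesize = struct.calcsize(longlong_fmt)
    filesize = os.path.getsize(path)
    if filesize < 65536 * 2:
        raise ValueError("file too small to hash: %s" % path)
    file_hash = filesize
    with open(path, "rb") as f:
        # Sum the first and the last 64 KiB as 64-bit little-endian integers.
        for offset in (0, filesize - 65536):
            f.seek(offset)
            for _ in range(65536 // bytesize):
                (value,) = struct.unpack(longlong_fmt, f.read(bytesize))
                file_hash = (file_hash + value) & 0xFFFFFFFFFFFFFFFF
    return "%016x" % file_hash, filesize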
def file_download(uuid, url, filename, expires_at):
    print "task executing", url
    print "filename", filename
    server_uuid = get_setting("server_uuid")
    file_dir = create_file_directories(uuid)
    print "file_dir", file_dir
    # Download the file into the per-uuid directory and record its hash.
    file_path = file_dir + "/" + filename
    urllib.urlretrieve(url, file_path)
    sha256_hash = hashfile(file_path)
    f = File.objects.create(uuid=uuid, name=filename, file_sha256=sha256_hash)
    f.save()
    # username and clientserver_ip are expected to be available at module
    # level (e.g. loaded from settings).
    data = {
        "username": username,
        "ip_address": clientserver_ip,
        "file_uuid": uuid,
        "file_hash": sha256_hash,
    }
    # Sign the notification so the API can verify it came from this server.
    to_hash = "FILE_DOWNLOADED%s%s%s%s" % (
        data["ip_address"], data["file_uuid"], data["file_hash"], server_uuid)
    test_hash = hashlib.sha256(to_hash).hexdigest()
    print (data["ip_address"], data["file_uuid"], data["file_hash"], server_uuid)
    print to_hash, test_hash
    data["sign_key_clientserver"] = test_hash
    results = call_api("/clientserver/file_downloaded", urllib.urlencode(data))
    print results
    return uuid + url
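# Hedged sketch, not part of the original module: file_download and
# generate_test_file assume a hashfile(path) helper that returns the SHA-256
# hex digest of a file on disk. A chunked version avoids loading large files
# into memory; hashfile_path is a hypothetical name.
import hashlib

def hashfile_path(path, blocksize=65536):
    hasher = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            hasher.update(block)
    return hasher.hexdigest()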
def generate_test_file():
    filename = "test.dat"
    uuid = b58encode(os.urandom(16))
    file_dir = create_file_directories(uuid)
    file_path = file_dir + "/" + filename
    file_size = 1024
    # Write random data until the file is at least file_size bytes long.
    fh = open(file_path, 'wb')
    written = 0
    while written <= file_size:
        fh.write(os.urandom(16))
        written += 16
    fh.close()
    sha256_hash = hashfile(file_path)
    f = File.objects.create(uuid=uuid, name=filename, file_sha256=sha256_hash)
    f.save()
    set_setting("test_file", str(f.id))
def newsong(self, filename):
    """Create a new song in the database."""
    datas = None
    try:
        unicode(filename)
    except UnicodeDecodeError:
        return
    cur = self.conn.cursor()
    try:
        datas = mutagen.File(filename, easy=True)
    except Exception:
        query = """INSERT INTO caro_logs (filename, message, date_import)
                   VALUES (%s, 'ERROR 01', now());"""
        cur.execute(query, (filename,))
    if datas is not None:
        artist = None
        album = None
        title = None
        genre = None
        try:
            artist = datas['artist'][0]
            album = datas['album'][0]
            title = datas['title'][0]
            genre = datas['genre'][0]
        except KeyError as e:
            msg = "KeyError: %s is not in the list." % e
            query = """INSERT INTO caro_logs (filename, message, date_import)
                       VALUES (%s, %s, now());"""
            cur.execute(query, (filename, msg, ))
        if artist and album and genre and title:
            fsig = hashfile(filename)
            chk = self.checkfile(fsig)
            if chk == 0:
                self.insertfile([filename, artist, album, title, genre, fsig])
            else:
                self.update_path(filename, fsig)
        else:
            print "Missing tag"
    self.conn.commit()
def upload_file():
    # init the response object
    response = {"success": False, "imageId": None}
    # get file from request
    file = request.files['file']
    # validate
    if not file or not allowed_file(file.filename):
        raise FileNotAllowedException(
            "File not allowed to be uploaded | transfer error", 400, response)
    filename = secure_filename(file.filename)
    # compute the SHA-256 digest of the file
    file_hash = hashfile(file, hashlib.sha256())
    app.logger.info("SHA-256 of file %s is %s", filename, file_hash)
    success = False
    failure_counter = 0
    # retry up to 3 more times if the upload fails
    while not success and failure_counter <= 3:
        # create unique key
        candidate = generate_unique_filename(filename, file_hash)
        app.logger.info("Unique filename proposed %s", candidate)
        # attempt upload to s3
        success = upload_file_to_s3(file, candidate, file_hash)
        failure_counter += 1
    if not success:
        # logically this exception should never be thrown: the entropy should
        # be good enough (and object versioning could still mitigate ID
        # collisions, as in a hash table)
        raise UploadToS3Exception(
            "Couldn't find a unique filename after 3 tries, upload failed",
            500, response)
    response["imageId"] = candidate
    app.logger.info("imageIdUrlEncoded %s", quote_plus(candidate))
    response["imageIdUrlEncoded"] = quote_plus(candidate)
    response["success"] = success
    return jsonify(response)
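# Hedged sketch, not part of the original module: upload_file assumes a
# hashfile(fileobj, hasher) helper that digests an uploaded file-like object
# (e.g. a werkzeug FileStorage) in chunks and returns the hex digest. Rewinding
# the stream afterwards matters, otherwise the subsequent S3 upload would see
# an empty file. hashfile_stream is a hypothetical name.
def hashfile_stream(fileobj, hasher, blocksize=65536):
    for block in iter(lambda: fileobj.read(blocksize), b""):
        hasher.update(block)
    fileobj.seek(0)  # rewind so the file can still be uploaded after hashing
    return hasher.hexdigest()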