def delete_block(self):
    """Evict cached blocks until the cache fits under the configured maximum.

    Sums the sizes of cached, non-upload-pending blocks; if the total exceeds
    ``cache_max_size``, deletes cache rows in ascending priority order until
    the cache is small enough.  Blocks already downloaded to disk
    (upload == 0) also have their local file removed.
    """
    result = RDateBasePool().execute(
        "SELECT SUM(t.block_length) AS size FROM"
        "(SELECT DISTINCT ON(id) id,block_length FROM block WHERE id IN "
        "(SELECT block_id FROM cache WHERE upload<1)) AS t", ())
    size = result[0]['size']
    if not size:
        # Nothing cached at all (SUM returns NULL on empty set).
        return
    if size < self.config.cache_max_size:
        return
    # Lowest-priority blocks are evicted first.
    blocks = RDateBasePool().execute(
        "SELECT DISTINCT ON(id) id,block_length,upload FROM block JOIN cache ON block_id "
        "WHERE upload<1 ORDER BY priority", ())
    for block in blocks:
        if size < self.config.cache_max_size:
            break
        print("Delete block %s." % block['id'])
        RDateBasePool().execute("DELETE FROM cache WHERE block_id=%s",
                                (block['id'], ))
        if block['upload'] == 0:
            # Fully-downloaded block: remove the on-disk copy too.
            path = self.config.work_dir + self.config.block_path + block['id']
            os.remove(path)
        size -= block['block_length']
        # BUG FIX: removed the original's trailing ``blocks = blocks[1:]`` --
        # rebinding the name inside a for-loop never affects the iterator,
        # so it was dead code.
class RAuth(Singleton):
    """Singleton accessor for the ``auth`` table.

    Hands out Google Drive service objects / PyDrive auth handles built from
    stored credential and settings files, picking a random auth record when
    several are available.
    """

    def __init__(self):
        # Singleton guard: only the first construction initialises state.
        if hasattr(self, '_init'):
            return
        self._init = True
        self.db = RDateBasePool()

    def get_credential(self, id):
        """Return a Drive v3 service for auth record ``id``.

        Falls back to regenerating credentials when the stored ones are
        missing or invalid.
        """
        rows = self.db.execute("SELECT * FROM auth WHERE id = %s", (id, ))
        row = random.choice(rows)
        store = Storage(row['credential_file'])
        credentials = store.get()
        if not credentials or credentials.invalid:
            return self.generate_credential(id)
        http = credentials.authorize(httplib2.Http())
        return discovery.build('drive', 'v3', http=http)

    def get_auth(self, id=None):
        """Return (GoogleAuth, auth id, Drive folder id).

        With ``id`` given, restrict to that record; otherwise choose a random
        record from the whole table.
        """
        if id:
            rows = self.db.execute("SELECT * FROM auth WHERE id = %s", (id, ))
        else:
            rows = self.db.execute("SELECT * FROM auth;", ())
        row = random.choice(rows)
        gauth = GoogleAuth(settings_file=row['setting_file'])
        return gauth, row['id'], row['folder']
def generate(self):
    """Full packing pipeline: load pending files, pack every queued split
    into blocks, commit, remove the source files, and upload the blocks."""
    self.db = RDateBasePool().begin()
    self.load_file()
    # Keep packing until generate_a_block reports the split queue is empty.
    more = True
    while more:
        more = self.generate_a_block()
    self.db.commit()
    self.delete_file()
    self.upload_blocks()
def generate_credentials(id, secret, folder):
    """Run the OAuth flow for a client secret and register the auth record.

    :param id: id under which to store the auth record.
    :param secret: file name of the client-secret JSON under ``secret/``.
    :param folder: Drive folder id that uploads for this auth should go into.

    Writes a credential file and a PyDrive settings YAML, then inserts the
    record into the ``auth`` table.
    """
    path = RConfig().work_dir + RConfig().credential_path
    output = str(uuid.uuid4())
    output_file = path + "credential/" + output + ".json"
    secret_file = path + "secret/" + secret
    store = Storage(output_file)
    credentials = store.get()
    # BUG FIX: the original tested ``credentials and credentials.invalid``
    # and returned -- i.e. it bailed out exactly when the stored credentials
    # were broken and needed regenerating, while re-running the flow for
    # perfectly valid ones.  Skip only when usable credentials already exist.
    if credentials and not credentials.invalid:
        return
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args(
        ['--noauth_local_webserver'])
    flow = client.flow_from_clientsecrets(
        secret_file, 'https://www.googleapis.com/auth/drive')
    flow.user_agent = "GDWeb"
    tools.run_flow(flow, store, flags)
    setting_file = path + "setting/" + output + ".yaml"
    setting_content = """client_config_file: %s
save_credentials: True
save_credentials_backend: file
save_credentials_file: %s
get_refresh_token: True
oauth_scope:
  - https://www.googleapis.com/auth/drive
""" % (secret_file, output_file)
    with open(setting_file, "w") as f:
        f.write(setting_content)
    RDateBasePool().execute(
        "INSERT INTO auth(id, secret_file, credential_file, setting_file, folder) VALUES (%s, %s, %s, %s, %s);",
        (id, secret_file, output_file, setting_file, folder))
def upload_blocks(self):
    """Upload every cached block marked for upload (cache.upload == 1).

    Each block gets its own transaction; auth or IO failures roll back and
    move on to the next block.
    """
    results = RDateBasePool().execute(
        "SELECT * FROM cache WHERE upload = 1", ())
    for result in results:
        self.db = RDateBasePool().begin()
        try:
            # BUG FIX: the original called upload_a_block twice in a row,
            # pushing every block to Drive twice and inserting a duplicate
            # block record.
            self.upload_a_block(result)
        except AuthError:
            self.db.rollback()
            print("Auth Error when upload %s." % (result['block_id']))
            continue
        except IOError as e:
            self.db.rollback()
            print("IO Error when upload %s, %s." % (result['block_id'],
                                                    str(e)))
            continue
        self.db.commit()
def test():
    """Smoke test: list the root-folder files visible to the first auth record."""
    rows = RDateBasePool().execute("SELECT * FROM auth", ())
    first = rows[0]
    gauth = GoogleAuth(settings_file=first['setting_file'])
    drive = GoogleDrive(auth=gauth)
    query = {'q': "'root' in parents and trashed=false"}
    for item in drive.ListFile(query).GetList():
        print('title: %s, id: %s' % (item['title'], item['id']))
def re_upload_block(self):
    """Repair blocks whose remote copy has gone bad (block.status <= 0).

    For each broken block: drop the bad remote record, make sure a cache row
    marks the block for re-upload (upload = 1), re-download the data from a
    healthy copy when it isn't cached locally, and commit per block.
    """
    blocks = RDateBasePool().execute(
        "SELECT DISTINCT ON(id) id,file_id FROM block WHERE status<=0", ())
    for block in blocks:
        self.db = RDateBasePool().begin()
        # BUG FIX: SQL params must be a sequence of values -- the original
        # passed the bare string block['id'] instead of (block['id'], ).
        result = self.db.execute("SELECT * FROM cache WHERE block_id = %s",
                                 (block['id'], ))
        if result and result[0]['upload'] >= 0:
            # Block data already on local disk: just flag it for upload and
            # delete the broken remote record.
            # NOTE(review): original used "WHERE id = %s" with a block id;
            # cache rows are addressed by block_id everywhere else in this
            # file, so this matched nothing -- confirm against the schema.
            self.db.execute(
                "UPDATE cache SET upload=1 WHERE block_id = %s;"
                "DELETE FROM block WHERE file_id=%s",
                (block['id'], block['file_id']))
            self.db.commit()
            continue
        elif not result:
            self.db.execute(
                "INSERT INTO cache(block_id, priority, upload) VALUES (%s,'%s',1);"
                "DELETE FROM block WHERE file_id=%s",
                (block['id'], self.config.default_priority,
                 block['file_id']))
        else:
            # Cache row exists but block is still download-pending.
            self.db.execute(
                "UPDATE cache SET upload=1 WHERE block_id = %s;"
                "DELETE FROM block WHERE file_id=%s",
                (block['id'], block['file_id']))
        try:
            self.download_one_block(block['id'])
            self.delete_block()
        except AuthError:
            self.db.rollback()
            print("Auth Error when download %s。" % (block['id']))
            continue
        except IOError as e:
            self.db.rollback()
            print("IO Error when download %s, %s。" % (block['id'], str(e)))
            continue
        except BlockBroken:
            self.db.rollback()
            print("Block Broken at %s." % (block['id']))
            continue
        self.db.commit()
        print("Re Download block %s." % block['id'])
def __init__(self, file_id: str, db: RDataBaseConnection, start: int = 0,
             length: int = -1):
    """Open a readable stream over the stored file ``file_id``.

    :param file_id: id of the file in the ``file`` table.
    :param db: database connection used for split lookups.
    :param start: byte offset the stream begins at.
    :param length: bytes to expose; -1 presumably means "to the end"
        (consumed by the read methods, not here).
    :raises RError: 404 when no splits exist for ``file_id``.
    """
    io.RawIOBase.__init__(self)
    self.config = RConfig()
    self.db = db
    self.start = start
    self.length = length
    rows = db.execute(
        "SELECT * FROM file WHERE id =%s ORDER BY split DESC", (file_id,))
    self.splits = list(rows)
    if not self.splits:
        raise RError(404)
    # Already-cached blocks of this file get a priority boost...
    RDateBasePool().execute(
        "UPDATE cache SET priority=priority+%s "
        "WHERE upload>-1 AND block_id IN (SELECT block_id FROM file WHERE id = %s)",
        (self.config.add_priority, file_id))
    # ...and uncached blocks are queued for download (upload = -1).
    RDateBasePool().execute(
        "INSERT INTO cache(block_id, priority, upload) "
        "SELECT block_id, '%s',-1 FROM file WHERE id = %s "
        "AND block_id NOT IN(SELECT block_id FROM cache) GROUP BY block_id;",
        (self.config.default_priority, file_id))
    # splits are ordered by split DESC, so splits[0] is the last row; every
    # row carries the file's total size.
    self.size = self.splits[0]['size']
    self.split_id = -1
    self.split_data = io.BytesIO()
    self.split = None
def download(self):
    """Fetch every block queued for download (cache.upload == -1).

    Evicts stale cache entries first, then downloads each pending block in
    its own transaction; failures roll back and move on.
    """
    self.delete_block()
    pending = RDateBasePool().execute("SELECT * FROM cache WHERE upload=-1",
                                      ())
    for entry in pending:
        self.db = RDateBasePool().begin()
        block_id = entry['block_id']
        try:
            self.download_one_block(block_id)
        except AuthError:
            self.db.rollback()
            print("Auth Error when download %s。" % (block_id))
            continue
        except IOError as e:
            self.db.rollback()
            print("IO Error when download %s, %s。" % (block_id, str(e)))
            continue
        except BlockBroken:
            self.db.rollback()
            print("Block Broken at %s." % (block_id))
            continue
        self.db.commit()
        print("Downloaded block %s." % block_id)
def load_split(self, split):
    """Load one split's bytes into ``self.split_data``.

    Tries the local block cache first; on a cache miss, downloads the byte
    range from Google Drive with a ranged GET.

    :param split: a row from the ``file`` table describing the split.
    :raises RError: code 2 on CRC mismatch in the cached-block path.
    :raises DownloadError: when the remote fetch fails (any exception,
        including the CRC RError, is converted -- see the except below).
    """
    self.split_id = split['split']
    self.split = DSplit()
    self.split.offset = split['split_offset']
    self.split.length = split['split_length']
    self.split.crc = split['split_crc']
    self.split.block_id = split['block_id']
    self.split.block_offset = split['block_offset']
    # upload > -1 means the block is locally cached (not merely queued).
    result = self.db.execute("SELECT * FROM cache WHERE block_id=%s AND upload>-1;", (self.split.block_id,))
    if result:
        # Cache hit: slice the split straight out of the local block file.
        self.split_data = io.BytesIO()
        path = self.config.work_dir + self.config.block_path + result[0]['block_id']
        with open(path, "rb") as block_data:
            block_data.seek(self.split.block_offset)
            tmp = block_data.read(self.split.length)
            # Integrity check before accepting cached bytes.
            if zlib.crc32(tmp) != self.split.crc:
                raise RError(2)
            if not tmp:
                return
            self.split_data.write(tmp)
            self.split_data.seek(0)
    else:
        # Cache miss: fetch the range from Drive, if a healthy copy exists.
        result = self.db.execute("SELECT * FROM block WHERE id =%s AND status>0; ", (self.split.block_id,))
        self.split_data = io.BytesIO()
        if not result:
            # No healthy remote copy; leave split_data empty.
            return
        result = result[0]
        file_id = result['file_id']
        service = RAuth().get_credential(result['auth_id'])
        request = service.files().get_media(fileId=file_id)
        # Ranged GET: download only this split's bytes, not the whole block.
        request.headers['Range'] = "bytes=%s-%s" % \
            (self.split.block_offset, self.split.block_offset + self.split.length - 1)
        try:
            print("Get Split %s at Block %s in %s." % (self.split_id, self.split.block_id, str(request.headers['Range'])))
            tmp = request.execute()
            if zlib.crc32(tmp) != self.split.crc:
                raise RError(2)
            self.split_data.write(tmp)
            self.split_data.seek(0)
            # Successful read: reset the remote copy's health counter.
            self.db.execute("UPDATE block SET status='%s' WHERE file_id = %s", (self.config.re_upload_limit, file_id))
        except Exception:
            # Any failure (network error OR the CRC RError raised above)
            # penalises the remote copy; status <= 0 later triggers
            # re-upload elsewhere.  Deliberately broad.
            RDateBasePool().execute("UPDATE block SET status=status-1 WHERE file_id = %s", (file_id,))
            raise DownloadError(self.split.block_id)
def __init__(self):
    """Initialise once; subsequent constructions of the singleton are no-ops."""
    if getattr(self, '_init', False):
        return
    self._init = True
    self.db = RDateBasePool()
def down_priority(self):
    """Decay every cache entry's priority, at most once per hour."""
    if time.time() <= self.time + 3600:
        return
    RDateBasePool().execute(
        "UPDATE cache SET priority=priority-'%s';",
        (self.config.minus_priority, ))
    self.time = time.time()
class RManage:
    """Background manager: downloads queued blocks, decays cache priorities,
    and repairs blocks whose remote copies have gone bad."""

    def __init__(self):
        self.config = RConfig()
        self.db = None  # per-operation transaction handle
        self.auth = RAuth()
        self.time = time.time()  # timestamp of the last priority decay

    def run(self):
        """Main service loop; never returns."""
        while True:
            time.sleep(1)
            self.download()
            self.down_priority()
            self.re_upload_block()

    def download(self):
        """Fetch every block queued for download (cache.upload == -1)."""
        self.delete_block()
        blocks = RDateBasePool().execute("SELECT * FROM cache WHERE upload=-1", ())
        for block in blocks:
            self.db = RDateBasePool().begin()
            try:
                self.download_one_block(block['block_id'])
            except AuthError:
                self.db.rollback()
                print("Auth Error when download %s。" % (block['block_id']))
                continue
            except IOError as e:
                self.db.rollback()
                print("IO Error when download %s, %s。" % (block['block_id'],
                                                          str(e)))
                continue
            except BlockBroken:
                self.db.rollback()
                print("Block Broken at %s." % (block['block_id']))
                continue
            self.db.commit()
            print("Downloaded block %s." % block['block_id'])

    def download_one_block(self, block_id):
        """Download one block from a randomly chosen healthy remote copy.

        :raises BlockBroken: when no copy with status > 0 exists.
        """
        blocks = self.db.execute(
            "SELECT * FROM block WHERE id =%s AND status>0", (block_id, ))
        if not blocks:
            raise BlockBroken()
        block = random.choice(blocks)
        print("Download block %s from file %s." % (block['id'],
                                                   block['file_id']))
        auth, _, _ = self.auth.get_auth(block['auth_id'])
        drive = GoogleDrive(auth)
        f = drive.CreateFile({'id': block['file_id']})
        path = self.config.work_dir + self.config.block_path + block_id
        f.GetContentFile(path)
        # Mark the block as present on local disk.
        self.db.execute("UPDATE cache SET upload=0 WHERE block_id = %s",
                        (block_id, ))

    def down_priority(self):
        """Decay every cache entry's priority, at most once per hour."""
        if self.time + 3600 < time.time():
            RDateBasePool().execute(
                "UPDATE cache SET priority=priority-'%s';",
                (self.config.minus_priority, ))
            self.time = time.time()

    def delete_block(self):
        """Evict lowest-priority cached blocks until under cache_max_size."""
        result = RDateBasePool().execute(
            "SELECT SUM(t.block_length) AS size FROM"
            "(SELECT DISTINCT ON(id) id,block_length FROM block WHERE id IN "
            "(SELECT block_id FROM cache WHERE upload<1)) AS t", ())
        size = result[0]['size']
        if not size:
            # Nothing cached (SUM over empty set is NULL).
            return
        if size < self.config.cache_max_size:
            return
        blocks = RDateBasePool().execute(
            "SELECT DISTINCT ON(id) id,block_length,upload FROM block JOIN cache ON block_id "
            "WHERE upload<1 ORDER BY priority", ())
        for block in blocks:
            if size < self.config.cache_max_size:
                break
            print("Delete block %s." % block['id'])
            RDateBasePool().execute("DELETE FROM cache WHERE block_id=%s",
                                    (block['id'], ))
            if block['upload'] == 0:
                # Fully-downloaded block: remove the on-disk copy too.
                path = (self.config.work_dir + self.config.block_path +
                        block['id'])
                os.remove(path)
            size -= block['block_length']
            # BUG FIX: dropped the original's dead ``blocks = blocks[1:]`` --
            # rebinding the name never affects the running for-loop.

    def re_upload_block(self):
        """Repair blocks whose remote copy has gone bad (block.status <= 0).

        Drops the broken remote record, ensures a cache row marks the block
        for re-upload, re-downloads the data when not cached locally, and
        commits per block.
        """
        blocks = RDateBasePool().execute(
            "SELECT DISTINCT ON(id) id,file_id FROM block WHERE status<=0",
            ())
        for block in blocks:
            self.db = RDateBasePool().begin()
            # BUG FIX: SQL params must be a sequence -- the original passed
            # the bare string block['id'] instead of (block['id'], ).
            result = self.db.execute(
                "SELECT * FROM cache WHERE block_id = %s", (block['id'], ))
            if result and result[0]['upload'] >= 0:
                # Data already on disk: flag for upload, drop bad record.
                # NOTE(review): original said "WHERE id = %s" with a block
                # id; cache is addressed by block_id everywhere else --
                # confirm against the schema.
                self.db.execute(
                    "UPDATE cache SET upload=1 WHERE block_id = %s;"
                    "DELETE FROM block WHERE file_id=%s",
                    (block['id'], block['file_id']))
                self.db.commit()
                continue
            elif not result:
                self.db.execute(
                    "INSERT INTO cache(block_id, priority, upload) VALUES (%s,'%s',1);"
                    "DELETE FROM block WHERE file_id=%s",
                    (block['id'], self.config.default_priority,
                     block['file_id']))
            else:
                # Cache row exists but block is still download-pending.
                self.db.execute(
                    "UPDATE cache SET upload=1 WHERE block_id = %s;"
                    "DELETE FROM block WHERE file_id=%s",
                    (block['id'], block['file_id']))
            try:
                self.download_one_block(block['id'])
                self.delete_block()
            except AuthError:
                self.db.rollback()
                print("Auth Error when download %s。" % (block['id']))
                continue
            except IOError as e:
                self.db.rollback()
                print("IO Error when download %s, %s。" % (block['id'],
                                                          str(e)))
                continue
            except BlockBroken:
                self.db.rollback()
                print("Block Broken at %s." % (block['id']))
                continue
            self.db.commit()
            print("Re Download block %s." % block['id'])
class GDBlock:
    """Packs pending upload files into fixed-size blocks and pushes them
    to Google Drive."""

    def __init__(self):
        self.config = RConfig()
        self.db = None  # transaction handle, opened per operation
        # Priority queue of (split_size - length, SplitData): larger splits
        # come out first so blocks pack tightly.
        self.splits = queue.PriorityQueue()
        self.files = {}  # file_id -> file row dict (+ computed 'size')

    def generate(self):
        """Full pipeline: load files, pack blocks, clean up, upload."""
        self.db = RDateBasePool().begin()
        self.load_file()
        while self.generate_a_block():
            pass
        self.db.commit()
        self.delete_file()
        self.upload_blocks()

    def delete_file(self):
        """Remove source files once their splits have been packed."""
        for k, file in self.files.items():
            file_path = self.config.work_dir + self.config.upload_path + k
            os.remove(file_path)

    def load_file(self):
        """Read each pending file (path.type=1, status=1), split it into
        CRC-tagged chunks, and queue the splits for packing."""
        results = self.db.execute(
            "SELECT * FROM path WHERE type=1 AND status=1;", ())
        for result in results:
            file_id = result['id']
            self.files[file_id] = dict(result)
            file_path = (self.config.work_dir + self.config.upload_path +
                         result['id'])
            with open(file_path, 'rb') as input_file:
                split_id = 0
                size = 0
                while True:
                    data = input_file.read(self.config.split_size)
                    if not data:
                        break
                    split_id += 1
                    split = SplitData()
                    split.id = split_id
                    split.file_id = file_id
                    split.data = data
                    split.offset = size
                    split.length = len(data)
                    size += split.length
                    split.crc = zlib.crc32(data)
                    self.splits.put(
                        (self.config.split_size - split.length, split))
                self.files[file_id]['size'] = size
            self.db.execute("UPDATE path SET status=0 WHERE id=%s",
                            (file_id, ))

    def generate_a_block(self):
        """Pack queued splits into one new block file.

        :returns: False when the split queue is empty, else True.
        """
        block_id = str(uuid.uuid4())
        path = self.config.work_dir + self.config.block_path + block_id
        if self.splits.empty():
            return False
        with open(path, "wb") as block:
            length = 0
            while True:
                if self.splits.empty():
                    break
                split = self.splits.get()[1]
                if split.length + length > self.config.max_block_size:
                    # Doesn't fit in this block; requeue for the next one.
                    self.splits.put(
                        (self.config.split_size - split.length, split))
                    break
                block.write(split.data)
                self.db.execute(
                    "INSERT INTO file(id, size, split, split_offset, split_length, split_crc, block_id, block_offset)"
                    "VALUES (%s,'%s','%s','%s','%s','%s',%s,'%s');",
                    (split.file_id, self.files[split.file_id]['size'],
                     split.id, split.offset, split.length, split.crc,
                     block_id, length))
                length += split.length
        self.db.execute(
            "INSERT INTO cache(block_id, priority, upload) VALUES (%s, '%s', 1);",
            (block_id, 1))
        return True

    def upload_blocks(self):
        """Upload every cached block marked for upload (cache.upload == 1)."""
        results = RDateBasePool().execute(
            "SELECT * FROM cache WHERE upload = 1", ())
        for result in results:
            self.db = RDateBasePool().begin()
            try:
                # BUG FIX: the original called upload_a_block twice here,
                # uploading every block to Drive twice and inserting a
                # duplicate block record.
                self.upload_a_block(result)
            except AuthError:
                self.db.rollback()
                print("Auth Error when upload %s." % (result['block_id']))
                continue
            except IOError as e:
                self.db.rollback()
                print("IO Error when upload %s, %s." % (result['block_id'],
                                                        str(e)))
                continue
            self.db.commit()

    def upload_a_block(self, block):
        """Upload one block file to Drive and record it in the block table."""
        auth, user_id, folder = RAuth().get_auth()
        drive = GoogleDrive(auth)
        path = (self.config.work_dir + self.config.block_path +
                block['block_id'])
        f = drive.CreateFile({
            "title": block['block_id'],
            "parents": [{
                "kind": "drive#fileLink",
                "id": folder
            }]
        })
        f.SetContentFile(path)
        f.Upload()
        print("Block %s Uploaded" % block['block_id'])
        self.db.execute(
            "INSERT INTO block(id, block_length, file_id, status, auth_id) VALUES (%s,%s,%s,%s,%s);"
            "UPDATE cache SET upload=0 WHERE block_id= %s;",
            (block['block_id'], f['fileSize'], f['id'],
             self.config.re_upload_limit, user_id, block['block_id']))
def process_request(self, req, resp):
    """Middleware hook: attach a fresh DB transaction to the request context."""
    connection = RDateBasePool().begin()
    req.context['sql'] = connection