def test_read_returns_line1(self):
    """read() returns the line previously written to the backing file."""
    buffer = "ABC\n"
    # Use a context manager so the handle is closed (and the data flushed
    # to disk) before File reads it back; the original leaked the handle.
    with open(self.m_filepath, "w") as f:
        f.write(buffer)
    self.m_class = File(self.m_filepath, self.m_mode)
    # assertEqual: assertEquals is a long-deprecated alias.
    self.assertEqual(buffer, self.m_class.read())
def run(self):
    """Download self.url with youtube-dl, streaming progress into the DB.

    Creates a File record, tails the downloader's output line by line,
    writes parsed name/progress updates to the DB, and marks the record
    complete or errored based on the process exit code.
    """
    cmd = f"youtube-dl --no-mtime {self.url}"
    cwd = self._get_download_directory()
    # Merge stderr into stdout: with stderr=PIPE and nobody draining it,
    # a chatty downloader can fill the pipe buffer and deadlock.
    p = subprocess.Popen(
        cmd.split(),
        cwd=cwd,
        universal_newlines=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    self.file = File.new_file(self.url, Type.TYPE_YOUTUBE_ID, cwd)
    self.id = self.file.id
    while True:
        output = p.stdout.readline()
        if output == "" and p.poll() is not None:
            # Stream drained and process exited: record the final status.
            if p.poll() == 0:
                self.file.complete()
            else:
                self.file.error()
            break
        if output:
            updates = {}
            updates.update(self.parse_name(output, self.id))
            updates.update(self.parse_stats(output))
            self.file.update_db(updates)
        sleep(0.1)  # Sleep between log entries to prevent DB locks
def formatting(self):
    """Create file of FS and fill it.

    Writes, in order: the padded super block, two copies of the FAT,
    the main directory (padded to its reserved size), and the data
    area padded out to the end of the disk image.
    """
    sb = self.mk_super_block()
    # Pad the super block out to a whole cluster.
    sb = sb + b' ' * (self.__cluster_size - len(sb))
    # Initial 'root' user record: name, md5 password hash, two 1-byte flags.
    data = b'root' + b' ' * 10 + hashlib.md5(b'314ton').digest() + \
        (1).to_bytes(1, byteorder='big') + (1).to_bytes(1, byteorder='big')
    clust, fat = self.mk_fat()
    f = File('users', '', '0110100', clust, 1, data, '111')
    try:
        with open('../os.txt', 'wb') as file:
            file.write(sb)
            file.write(fat * 2)  # primary FAT + its copy
            file.write(f.get_file_bytes()
                       + b' ' * (self.__main_dir_size - len(f.get_file_bytes())))
            file.write(data
                       + b' ' * (self.__hd_size - self.__data_area_offset - len(data)))
    except Exception as err:
        # Surface the actual failure instead of swallowing it silently.
        print('Something wrong!', err)
def get_downloads():
    """Return a JSON list of files, optionally filtered and paginated.

    Query params: ``status`` (comma-separated values become a list),
    ``page`` and ``limit`` for pagination.
    """
    status = request.args.get("status", None)
    if status is not None and "," in status:
        status = status.split(",")
    page = request.args.get("page", None)
    limit = request.args.get("limit", None)
    files = File.get_all_files(status=status, page=page, limit=limit)
    return json_response(files, 200)
class FileTest(unittest.TestCase):
    """Tests for the File wrapper over /tmp/ls.d."""

    def setUp(self):
        self.m_filepath = r"/tmp/ls.d"
        self.m_mode = "rw"
        self.m_class = File(self.m_filepath, self.m_mode)

    def test_read_returns_line1(self):
        """read() returns the line previously written to the backing file."""
        buffer = "ABC\n"
        # Close (and flush) the handle before reading back; the original
        # leaked it, so the write may not have reached disk yet.
        with open(self.m_filepath, "w") as f:
            f.write(buffer)
        self.m_class = File(self.m_filepath, self.m_mode)
        # assertEqual: assertEquals is a long-deprecated alias.
        self.assertEqual(buffer, self.m_class.read())

    def test_self_params(self):
        self.m_class.params(1)
def run(self):
    """Fetch self.url, create its File record, and run the download.

    Marks the record complete on success; on any failure marks it
    errored (when it exists) and re-raises the exception.
    """
    try:
        html = requests.get(self.url)
        self.soup = BeautifulSoup(html.text, "html.parser")
        cwd = self._get_download_directory()
        self.file = File.new_file(self.url, Type.TYPE_SW_ID, cwd)
        self.id = self.file.id
        self.sw_download(cwd)
        self.file.complete()
    except Exception as err:
        # self.file only exists once new_file() succeeded; guard so an
        # early failure (e.g. the HTTP GET) raises the real error instead
        # of an AttributeError on self.file.
        if getattr(self, "file", None) is not None:
            self.file.error()
        raise err
def __init__(self, os_path):
    """Read params of super block from file and init super block object.

    Parses the fixed-layout header at the start of *os_path*, in order:
    4-byte name, 2-byte cluster size, 4-byte cluster count, 4-byte disk
    size, 2-byte FAT offset, 4-byte FAT-copy offset, then one directory
    record for the main directory. All integers are big-endian.
    """
    self.__record_size = 64
    with open(os_path, 'rb') as file:
        # NOTE(review): 'ansi' is a Windows-only codec alias — confirm
        # this project only targets Windows.
        self.__name = str(file.read(4), 'ansi')
        self.__cluster_size = int.from_bytes(file.read(2), byteorder='big')
        self.__clusters_count = int.from_bytes(file.read(4), byteorder='big')
        self.__hd_size = int.from_bytes(file.read(4), byteorder='big')
        self.__fat_offset = int.from_bytes(file.read(2), byteorder='big')
        self.__fat_copy_offset = int.from_bytes(file.read(4), byteorder='big')
        # NOTE(review): self.record_size (not self.__record_size) —
        # presumably exposed as a property on this class; verify it exists.
        self.__main_dir = File(file_bytes=file.read(self.record_size))
class FileTests(TestCase):
    """Unit tests for File parsing and saving, with mocked I/O."""

    def setUp(self):
        self.file = File()

    @patch(
        "builtins.open",
        new_callable=mock_open,
        read_data="10|1|SELL|toaster_1|10.00|20",
    )
    def test_parse(self, mock_file):
        """parse() opens the given path as UTF-8 text."""
        input_path = f"{Path(__file__).parent}/test_input.txt"
        self.file.parse(input_path)
        mock_file.assert_called_with(input_path, encoding="utf-8")

    @patch("app.file.csv.writer")
    def test_save(self, mock_writer):
        """save() delegates row output to csv.writer."""
        output_path = f"{Path(__file__).parent}/test_output.txt"
        rows = ["23", "45"]
        self.file.save(rows, output_path)
        self.assertTrue(mock_writer.called)
def create_new_file(
    db,
    name="Filename",
    directory="/downloads",
    url="http://url.com",
    status_id=1,
    type_id=1,
    percent=50.0,
):
    """Build a File from the given (defaulted) fields and stage it.

    The File is added to ``db.session`` but not committed — that is
    the caller's responsibility. Returns the new File.
    """
    new_file = File(
        name=name,
        directory=directory,
        url=url,
        status_id=status_id,
        type_id=type_id,
        percent=percent,
    )
    db.session.add(new_file)
    return new_file
def __rewrite_record(self, f, directory):
    """Rewrite a file record.

    Walks the cluster chain of *directory* record by record, looking
    for the entry whose name matches *f*; overwrites that record in
    place with f's bytes. Returns True on success, None when the
    record is not found anywhere in the chain.
    """
    clusters = self.__get_clusters_seq(directory.first_cluster)
    with open('os.txt', 'r+b') as file:
        # Cluster numbers are 1-based; convert to a byte offset.
        offset = (clusters.pop(0) - 1) * self.super_block.cluster_size
        file.seek(offset)
        while True:
            if file.tell() == offset + self.super_block.cluster_size:
                # Reached the end of the current cluster: follow the
                # chain, or give up when it is exhausted.
                if clusters:
                    offset = (clusters.pop(0) - 1) * self.super_block.cluster_size
                    file.seek(offset)
                else:
                    return None
            fil = File(file_bytes=file.read(self.super_block.record_size))
            if fil.name.strip() == f.name.strip():
                # Step back over the record just read and overwrite it.
                file.seek(-self.super_block.record_size, 1)
                file.write(f.get_file_bytes())
                return True
def make_file(self, path, mod='0110100', data='', attr='000'):
    """Make a new file.

    Splits *path* into directory and name, verifies write permission
    and name uniqueness, allocates enough free clusters for *data*,
    writes the directory record, then writes the data itself.
    Raises FSExeption when a file with the same name already exists.
    """
    path, name = self.__slice_path(path)  # slice path and name
    directory = self.__check_dir_w_permission(path)
    if self.__read_directory(directory).get(name):  # check existed files
        raise FSExeption(
            'File with name "{}" is already exist!'.format(name))
    name, ext = self.__parse_file_name(name)  # parse file name
    count = self.__get_cluster_count(data)  # get required cluster's count
    clusters = self.__get_free_clusters(
        count)  # get numbers of free clusters
    self.__set_cluster_engaged(clusters)
    # mod[0] == '0' presumably means "regular file": store the exact byte
    # length; otherwise the size is the full span of allocated clusters.
    size = len(
        data
    ) if mod[0] == '0' else self.super_block.cluster_size * len(clusters)
    self.__write_record(
        File(name, ext, mod, clusters[0], self.user.id, attr, size),
        directory, path)
    self.__write_data(clusters, bytes(data, 'ansi'))
def __init__(self, hd_size=256, cluster_size=4096):
    """Init params of super block.

    *hd_size* is the disk size in MiB, *cluster_size* in bytes.
    Computes the on-disk layout offsets (FAT, FAT copy, main
    directory, data area) and builds the main-directory File record.
    """
    self.__record_size = 64
    self.__name = 'ATOS'
    self.__hd_size = hd_size * 1024 * 1024
    self.__cluster_size = cluster_size
    self.__clusters_count = self.__hd_size // self.__cluster_size
    # Reserve room for 1000 records, rounded up to whole clusters.
    records_per_cluster = self.__cluster_size / self.__record_size
    self.__main_dir_size = math.ceil(1000 / records_per_cluster) * self.__cluster_size
    # Layout: [super block][FAT][FAT copy][main dir][data area].
    self.__fat_offset = self.__cluster_size
    self.__fat_copy_offset = self.__fat_offset + self.__clusters_count * 4
    self.__main_dir_offset = self.__fat_copy_offset + self.__clusters_count * 4
    self.__data_area_offset = self.__main_dir_offset + self.__main_dir_size
    # Cluster numbering starts at 1.
    self.__main_dir = File(
        name='main_dir',
        mod='1111111',
        first_cluster=self.__main_dir_offset // self.__cluster_size + 1,
        size=self.__main_dir_size,
    )
def __read_directory(self, f):
    """Returns a dict of files.

    Walks the cluster chain starting at f.first_cluster, reading one
    fixed-size record at a time, and maps each file's full name to its
    File object. A blank record terminates the scan; a record starting
    with a space byte is skipped (presumably marks a deleted entry —
    confirm against the writer).
    """
    result = dict()
    clusters = self.__get_clusters_seq(f.first_cluster)
    with open('os.txt', 'rb') as file:
        # Cluster numbers are 1-based; convert to a byte offset.
        offset = (clusters.pop(0) - 1) * self.super_block.cluster_size
        file.seek(offset)
        while True:
            if file.tell() >= offset + self.super_block.cluster_size:
                # End of the current cluster: follow the chain or stop.
                if not clusters:
                    break
                offset = (clusters.pop(0) - 1) * self.super_block.cluster_size
                file.seek(offset)
            record = file.read(self.super_block.record_size)
            if not record.rstrip():
                # All-blank record: end of the directory listing.
                break
            if record[:1] != b' ':
                f = File(file_bytes=record)
                result[f.full_name] = f
    return result
def run(self):
    """Download a gallery at self.url with gallery-dl and track status.

    Creates a File record, waits for the process to finish while
    draining its output, and marks the record complete or errored
    based on the exit code.
    """
    cmd = f"gallery-dl {self.url} -d ."
    cwd = self._get_download_directory()
    # Merge stderr into stdout: with stderr=PIPE left unread, a chatty
    # process can fill the pipe buffer and block forever.
    p = subprocess.Popen(
        cmd.split(),
        cwd=cwd,
        universal_newlines=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    self.file = File.new_file(self.url, Type.TYPE_GALLERY_ID, cwd, name=self.url)
    self.id = self.file.id
    while True:
        output = p.stdout.readline()
        if output == "" and p.poll() is not None:
            # Stream drained and process exited: record the final status.
            if p.poll() == 0:
                self.file.complete()
            else:
                self.file.error()
            break
from app.auction import Auction
from app.file import File


def main():
    """Parse bids/sells from input.txt, run the auction, save results."""
    file = File()
    bids, sells = file.parse("./input.txt")
    auction = Auction(bids, sells)
    sold_items = auction.get_sold_items()
    file.save(sold_items, "./output.txt")


if __name__ == "__main__":
    # Guard so importing this module doesn't trigger file I/O.
    main()
def setUp(self):
    """Create a fresh File over /tmp/ls.d in rw mode before each test."""
    self.m_mode = "rw"
    self.m_filepath = r"/tmp/ls.d"
    self.m_class = File(self.m_filepath, self.m_mode)
def setUp(self):
    """Create a fresh File instance before each test."""
    self.file = File()
def download(file_id):
    """Return the JSON representation of one file, or 404 when unknown."""
    record = File.get_file(file_id)
    if record is None:
        raise NotFound
    return json_response(record.marshal(), 200)