def make_watch(cache, machine=1):
    global query
    data_obj = data_layer.DataLayer()
    while 1:
        time.sleep(2)
        if not cache.empty():
            with sem:
                number = data_obj.get_max_id(machine)
                generation = data_obj.get_max_generation() + 1
                while 1:
                    try:
                        if not query:
                            x = cache.get(timeout=1)
                            if not data_obj:
                                data_obj = data_layer.DataLayer()
                                data_obj.cursor = data_obj.database.cursor()
                            number += 1
                            data_layer.edit_status('watch', [])
                            data_layer.edit_status('watch', [x[1]])
                            if x[0] == 'created':
                                data_obj.insert_data(number, x[1], x[2], x[3], generation,
                                                     machine, real_path=x[6])
                            elif x[0] == 'deleted':
                                g = data_obj.delete_data(x[1], x[2], machine)
                                data_obj.add_action(str(x), g)
                            else:
                                g = data_obj.update_data(x[1:], machine)
                                data_obj.add_action(str(x), g)
                        if query:
                            # a query is running: release the database until it finishes
                            data_obj.database.commit()
                            data_obj.close()
                            data_obj = None
                            while query:
                                time.sleep(0.5)
                    except Empty:
                        break
                    except Exception as e:
                        raise e
                while query:
                    time.sleep(0.5)
                if data_obj:
                    data_obj.edit_date(machine)
                    data_obj.database.commit()
                    data_layer.edit_status('watch', [])
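
# Hedged usage sketch, not part of the original module: it only illustrates how a
# make_watch worker is wired to a Queue of filesystem events, mirroring the
# Thread/Queue pattern that execute() and _add_device_ use elsewhere in this code.
# The helper name `_example_start_watch` and the daemon flag are assumptions.
def _example_start_watch(machine=1):
    from queue import Queue        # local imports keep the sketch self-contained
    from threading import Thread
    events = Queue()               # a watcher callback is expected to push event tuples here
    worker = Thread(target=make_watch, args=(events, machine))
    worker.daemon = True           # assumption: let the process exit without joining the loop
    worker.start()
    return events, worker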
def device_added_callback(*args):
    global collection
    try:
        values = args[1]['org.freedesktop.UDisks2.Job']
    except KeyError:
        return
    try:
        operation = values['Operation']
        if operation == 'filesystem-mount':
            block = values['Objects'][0]
            _id, block, name = get_mount_point(block)
            data = data_layer_py.DataLayer()
            exist = data.get_id_from_device(_id)
            data.close()
            if exist:
                execute(exist, block, collection[block], False)
        elif operation == 'filesystem-unmount':
            block = values['Objects'][0]
            try:
                if collection[block][4] and collection[block][4].is_alive():
                    try:
                        collection[block][4]._stop()
                    except Exception:
                        pass
                del collection[block]
            except Exception:
                return
        elif operation == 'cleanup':
            pass
    except KeyError:
        return
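
# Hedged sketch (assumption, not taken from the original sources): one way
# device_added_callback could be registered with dbus-python so that it receives
# UDisks2 job objects via org.freedesktop.DBus.ObjectManager.InterfacesAdded,
# which matches the (object_path, properties) arguments the callback unpacks.
# The helper name `_example_subscribe_udisks` is hypothetical.
def _example_subscribe_udisks(callback=device_added_callback):
    import dbus
    from dbus.mainloop.glib import DBusGMainLoop
    DBusGMainLoop(set_as_default=True)            # needed so signals are delivered
    bus = dbus.SystemBus()
    bus.add_signal_receiver(callback,
                            signal_name='InterfacesAdded',
                            dbus_interface='org.freedesktop.DBus.ObjectManager',
                            bus_name='org.freedesktop.UDisks2')
    return bus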
def execute(exist, block, device_name, re_index):
    if exist and re_index:
        size = extra_functions.convert_to_human_readable(collection[block][-1])
        data = data_layer_py.DataLayer()
        with data_layer_py.semaphore:
            data.delete_drive(exist)
        data.close()
        machine = _add_device_(device_name[0], device_name[2], device_name[1], size)
        queue = Queue()
        collection[block][3] = add_watch(device_name[0], queue)
        t = Thread(target=watch_layer.make_watch, args=(queue, machine))
        t.start()
        collection[block][4] = t
    if exist:
        queue = Queue()
        collection[block][3] = add_watch(device_name[0], queue)
        t = Thread(target=watch_layer.make_watch, args=(queue, exist))
        t.start()
        collection[block][4] = t
    else:
        queue = Queue()
        size = extra_functions.convert_to_human_readable(collection[block][-1])
        machine = _add_device_(device_name[0], device_name[2], device_name[1], size)
        collection[block][3] = add_watch(device_name[0], queue)
        t = Thread(target=watch_layer.make_watch, args=(queue, machine))
        t.start()
        collection[block][4] = t
def create(path=None):
    # TODO: handle being called with path=None (path is dereferenced below)
    data_layer = data_layer_py.DataLayer()
    paths = []
    if not path:
        paths = ef.get_initials_paths()
    else:
        paths = [path]
    _queue = Queue()
    path2 = path.split(os.sep)
    path2 = path2[len(path2) - 1]
    data_layer.insert_peer()
    peer = data_layer.get_uuid_from_peer()
    data_layer.insert_data(id=1, file_name=path2, file_type='Folder', parent=path,
                           generation=0, first=True, peer=peer)
    for x in paths:
        path = x
        # walk the tree in one thread while a second thread flushes results to the database
        t = Thread(target=dfs, args=(path, _queue))
        t.start()
        t2 = Thread(target=save_to_disk, args=(data_layer, _queue, peer))
        t2.start()
        t.join()
        t2.join()
    start([path])
def start():
    data_obj = data_layer.DataLayer()
    while 1:
        try:
            password = data_obj.get_password()
        except sqlite3.OperationalError:
            time.sleep(10)
            continue
        if not password:
            time.sleep(10)
        else:
            break
    t = Thread(target=receive_broadcast, args=(data_obj,))
    t.start()
    network_main(data_obj=data_obj)
def check_paths(list_parents, real_path, peer):
    data_obj = data_layer.DataLayer('database.db')
    for path in real_path:
        tmp = []
        # keep only the candidates whose name matches this path component and
        # collect the next round of candidates from the File table
        for parent in list(list_parents):
            if path != parent:
                list_parents.remove(parent)
            else:
                for x in data_obj.cursor.execute(
                        'SELECT * FROM File WHERE name_ext=? AND machine=?',
                        (parent, peer)):
                    tmp.append(x[1])
        if len(list_parents) == 1:
            return list_parents[0]
        list_parents = tmp
def receive_broadcast(data_obj):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(('', PORT))
    s.listen(1)
    threads = []
    while 1:
        try:
            sock, address = s.accept()
            data_layer.edit_status('network', [])
            data_layer.edit_status('network', [address[0]])
            threads.append(Thread(target=checking_client,
                                  args=(sock, address, data_layer.DataLayer())))
            threads[-1].start()
        except socket.error:
            continue
        except socket.timeout:
            continue
def checking_client(sock, address, data_obj):
    password = data_obj.get_password()
    cipher = ef.get_cipher(password)
    ran_str = ef.random_string()
    sol = cipher.encrypt(ran_str)
    sock.settimeout(2)
    sock.send(sol)
    value = sock.recv(1000)
    sock.settimeout(15)
    if value.decode('LATIN-1') == ran_str:
        sock.send(b'OK')
        machine = data_obj.get_id_from_peer()
        name_machine = data_obj.get_peer_from_uuid(1)
        sock.send(json.dumps({'machine': machine, 'name_machine': name_machine}).encode())
        generation = sock.recv(1024)
        sender(sock, address, int(generation), data_layer.DataLayer())
    else:
        data_layer.edit_status('network', [])
        sock.close()
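
# Hedged sketch of the opposite end of the handshake (assumed from what
# checking_client expects, not copied from the project): decrypt the challenge
# with the shared password-derived cipher, echo the plaintext back, read the
# b'OK' acknowledgement and the peer description, then send our generation
# number so the server can start sender(). `connect_to_peer` is a hypothetical name.
def connect_to_peer(host, port, password, generation):
    import json
    import socket
    cipher = ef.get_cipher(password)              # same helper the server side uses
    sock = socket.create_connection((host, port), timeout=15)
    challenge = sock.recv(1000)
    answer = cipher.decrypt(challenge)
    sock.send(answer if isinstance(answer, bytes) else answer.encode('LATIN-1'))
    if sock.recv(2) != b'OK':
        sock.close()
        return None
    peer_info = json.loads(sock.recv(1024).decode())
    sock.send(str(generation).encode())
    return sock, peer_info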
def _add_device_(path, device_name, device_id, device_size=0):
    global collection
    with data_layer_py.semaphore:
        data_layer = data_layer_py.DataLayer()
        data_layer.insert_peer(uuid=device_id, pc_name=device_name, memory=1,
                               size=device_size, date=datetime.datetime.now().timestamp())
        peer = data_layer.get_id_from_uuid(device_id)
        data_layer.insert_data(id=1, file_name='', file_type='Folder', parent=path,
                               generation=0, first=True, peer=peer)
        data_layer.database.commit()
        _queue = Queue()
        t = Thread(target=dfs, args=(path, _queue))
        t.start()
        t2 = Thread(target=save_to_disk, args=(data_layer, _queue, peer))
        t2.start()
        t.join()
        t2.join()
        data_layer.database.close()
    return peer
def add_device(name, re_index):
    global collection
    device_name = None
    block = None
    for x in collection.keys():
        if name.strip() == collection[x][0].strip():
            device_name = collection[x]
            block = x
    if device_name:
        if collection[block][4] and collection[block][4].is_alive():
            try:
                collection[block][4]._stop()
            except Exception:
                pass
            collection[block][4] = None
        data = data_layer_py.DataLayer()
        exist = data.get_id_from_device(device_name[1])
        data.close()
        thread = Thread(target=execute, args=(exist, block, device_name, True))
        thread.start()
    return device_name
def receiver(sock, uuid, data_obj):
    global query
    sock.send(data_obj.get_id_from_peer().encode())
    password = data_obj.get_password()
    cipher = ef.get_cipher(password)
    devices = data_obj.get_memory_devices()
    a = json.dumps({'devices': devices})
    time.sleep(2)
    sock.send(a.encode())
    _dict = {'add': [], 'delete': [], 'generation': ''}
    with sem:
        cont = 0
        test = b''
        balance = 0
        sock.settimeout(60)
        # read until the braces of the incoming JSON document balance out
        while 1:
            data = sock.recv(1000)
            if not data:
                break
            test += data
            if data:
                for x in data.decode():
                    if x == '{':
                        balance += 1
                    if x == '}':
                        balance -= 1
                if not balance:
                    break
        try:
            _dict = json.loads(test.decode())
        except ValueError:
            data_layer.edit_status('network', [])
            return
        # records arrive base64-encoded and encrypted; fields are separated by '?' runs
        for data in _dict['add']:
            value = cipher.decrypt(base64.b64decode(data))
            try:
                elements = re.split('\\?+', value.decode(encoding='LATIN-1'))
            except UnicodeDecodeError:
                elements = re.split('\\?+', value.decode(encoding='utf_8'))
            if not data_obj:
                data_obj = data_layer.DataLayer()
            elements[0] = elements[0][1:]
            elements[len(elements) - 1] = ef.unpad(elements[len(elements) - 1])
            elements = [x.strip() for x in elements]
            elements[len(elements) - 2] = data_obj.get_id_from_uuid(elements[len(elements) - 2])
            if elements[4] == '-1':
                data_obj.insert_data(id=elements[0], file_name=elements[1], parent=elements[2],
                                     file_type=elements[3], generation=elements[5],
                                     peer=elements[6], first=True,
                                     date=elements[len(elements) - 1])
            else:
                data_obj.insert_data(id=elements[0], file_name=elements[1], parent=elements[4],
                                     file_type=elements[3], generation=elements[5],
                                     peer=elements[6], first=False,
                                     date=elements[len(elements) - 1])
            if cont > 10000:
                data_obj.database.commit()
                cont = 0
            if query:
                data_obj.database.commit()
                data_obj.close()
                data_obj = None
                while query:
                    time.sleep(0.5)
        data_obj.database.commit()
        if _dict['generation']:
            data_obj.edit_generation(uuid, _dict['generation'])
    with sem:
        cont = 0
        for data in _dict['delete']:
            value = cipher.decrypt(base64.b64decode(data))
            try:
                elements = re.split('\\?+', value.decode(encoding='LATIN-1'))
            except UnicodeDecodeError:
                elements = re.split('\\?+', value.decode(encoding='utf_8'))
            if not data_obj:
                data_obj = data_layer.DataLayer()
            elements[0] = elements[0][1:]
            elements[len(elements) - 1] = ef.unpad(elements[len(elements) - 1], 0)
            elements = ef.convert_to_tuple(elements[0])
            elements = [x.strip()[1:-1] for x in elements]
            if elements[0] == 'deleted':
                data_obj.delete_data(elements[1], elements[2], data_obj.get_id_from_uuid(uuid))
            elif elements[0] == 'updated':
                data_obj.update_data(elements[1:], data_obj.get_id_from_uuid(uuid))
            if cont > 10000:
                data_obj.database.commit()
                cont = 0
            if query:
                data_obj.database.commit()
                data_obj.close()
                data_obj = None
                while query:
                    time.sleep(0.5)
        data_obj.database.commit()
        cont = 0
        for key in _dict['devices'].keys():
            for data in _dict['devices'][key]:
                _id = data_obj.get_id_from_uuid(key)
                description = _dict['devices_description'][key][0]
                if _id:
                    data_obj.delete_files_from_drive(description[0])
                else:
                    data_obj.insert_peer(description[0], description[2], 1, description[3],
                                         datetime.datetime.now().timestamp())
                value = cipher.decrypt(base64.b64decode(data))
                try:
                    elements = re.split('\\?+', value.decode(encoding='LATIN-1'))
                except UnicodeDecodeError:
                    elements = re.split('\\?+', value.decode(encoding='utf_8'))
                if not data_obj:
                    data_obj = data_layer.DataLayer()
                elements[0] = elements[0][1:]
                elements[len(elements) - 1] = ef.unpad(elements[len(elements) - 1])
                elements = [x.strip() for x in elements]
                peer = data_obj.get_id_from_uuid(description[0])
                if elements[4] == '-1':
                    data_obj.insert_data(id=elements[0], file_name=elements[1],
                                         parent=elements[2], file_type=elements[3],
                                         generation=elements[5], peer=peer, first=True,
                                         date=elements[len(elements) - 1])
                else:
                    data_obj.insert_data(id=elements[0], file_name=elements[1],
                                         parent=elements[4], file_type=elements[3],
                                         generation=elements[5], peer=peer, first=False,
                                         date=elements[len(elements) - 1])
                if cont > 10000:
                    data_obj.database.commit()
                    cont = 0
                if query:
                    data_obj.database.commit()
                    data_obj.close()
                    data_obj = None
                    while query:
                        time.sleep(0.5)
        data_obj.database.commit()
    data_layer.edit_status('network', [])
    sock.close()
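
# Hedged helper (hypothetical name, not in the original code): the brace-balancing
# framing used by receiver() above, factored into a standalone function -- keep
# reading until the braces of the top-level JSON document balance out, then decode
# it in one go. Like the inline loop, it also counts braces that appear inside
# string values, which is tolerable here because the record payloads are base64 strings.
def recv_json(sock, chunk_size=1000):
    import json
    buf = b''
    balance = 0
    while True:
        data = sock.recv(chunk_size)
        if not data:
            break
        buf += data
        for ch in data.decode():
            if ch == '{':
                balance += 1
            elif ch == '}':
                balance -= 1
        if balance == 0:
            break
    return json.loads(buf.decode())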
    # body of the interface-lookup helper (its def line precedes this excerpt):
    # issue ioctl 0x891b against a throwaway UDP socket for `ifname` and format
    # the returned address bytes as a dotted quad
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(
        fcntl.ioctl(s.fileno(), 0x891b, struct.pack('256s', ifname.encode()))[20:24])


def a():
    print('I am A')
    time.sleep(1)
    print('I am leaving A')


import data_layer

if __name__ == '__main__':
    d = data_layer.DataLayer()
    res = []
    q = d.get_files(int(b'-1'), int(1))
    for x in q:
        print(x[2])
        res.append(x[2])
    print(len(res))
    q.close()
    # print(netifaces.interfaces())
    # for x in netifaces.interfaces():
    #     addrs = netifaces.ifaddresses(x)
    #     try:
    #         q = addrs[netifaces.AF_INET]
    #     except KeyError:
    #         continue
    paths = []
    if not path:
        paths = ef.get_initials_paths()
    else:
        paths = [path]
    password = getpass.getpass()
    while len(password) > 32:
        print('Password must be at most 32 characters')
        password = getpass.getpass()
    jump = 1
    data_layer = None
    if not path:
        path = '/'
    if not os.path.exists('./database.db'):
        # first run: create the database, store the password hash and register the root folder
        jump = 0
        data_layer = data_layer_py.DataLayer('database.db')
        data_layer.create_databases()
        sha = hashlib.md5(password.encode())
        data_layer.insert_password(sha.hexdigest())
        _queue = Queue()
        path2 = path.split(os.sep)
        path2 = path2[len(path2) - 1]
        data_layer.insert_peer()
        peer = data_layer.get_uuid_from_peer()
        data_layer.insert_data(id=1, file_name=path2, file_type='Folder', parent=path,
                               generation=0, first=True, peer=peer)