async def upload_to_hoster(hoster, data, file, _logger=None):
    """Upload a file's metadata, then its body, to a single hoster.

    Args:
        hoster: host record exposing ``.ip`` and ``.address``.
        data: JSON-serializable metadata payload for the POST request.
        file: file record exposing ``.hash`` and ``.get_filelike()``.
        _logger: optional logger override; defaults to the module logger.

    Returns:
        Tuple ``(hoster, ok)`` where ``ok`` is True on full success.
    """
    if not _logger:
        _logger = logger
    ip = hoster.ip
    _logger.info(f'Uploading file metadata to hoster... | file: {file.hash} | hoster ip: {ip}')
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(
                    f'http://{ip}/files/',
                    json=data,
                    timeout=10) as resp1:
                # ToDo: handle 402
                if resp1.status != 201:
                    # Metadata was rejected; nothing to clean up yet.
                    return hoster, False
                _logger.info(f'Uploading file body to hoster... | file: {file.hash} | hoster ip: {ip}')
                async with session.put(
                        f'http://{ip}/files/{file.hash}/',
                        data=file.get_filelike()) as resp2:
                    if resp2.status != 200:
                        # Body upload failed — remove the metadata created above
                        # so the hoster is not left with a dangling record.
                        await session.delete(f'http://{ip}/files/{file.hash}/')
                        return hoster, False
            _logger.info(f'File is uploaded to hoster | file: {file.hash} | hoster ip: {ip}')
            return hoster, True
    except (ClientConnectorError, asyncio.TimeoutError) as err:
        # Expected transient network failures: warn, do not report to Sentry.
        _logger.warning(f'Uploading to hoster failed | file: {file.hash} | hoster: {hoster.address} '
                        f'| message: {err.__class__.__name__} {str(err)}')
        return hoster, False
    except Exception as err:
        raven_client.captureException()
        _logger.error(f'Uploading to hoster failed | file: {file.hash} | hoster: {hoster.address} '
                      f'| message: {err.__class__.__name__} {str(err)}')
        return hoster, False
async def error_middleware(request, handler):
    """Translate exceptions raised by downstream handlers into JSON error responses.

    Known application exceptions map to specific statuses; anything else is
    logged, reported to Sentry, and returned as a 500.
    """
    try:
        return await handler(request)
    except web.HTTPException as ex:
        # aiohttp HTTP errors keep their own status code and reason text.
        return web.json_response({"status": "error", "message": ex.reason}, status=ex.status)
    except settings.Locked:
        return web.json_response({"status": "error", "message": 'locked'}, status=403)
    except settings.InvalidPassword:
        return web.json_response({"status": "error", "message": 'invalid_password'}, status=403)
    except Exception as ex:
        # Unexpected failure: print locally and report to Sentry.
        traceback.print_exc()
        raven_client.captureException()
        return web.json_response(
            {"status": "error", "message": f'{ex.__class__.__name__}: {ex}'},
            status=500,
        )
async def create_metadata(cls, file_hash, owner_key, signature, client_address, size,
                          hosts=None, replacing=None):
    """Create and persist a file-metadata record.

    When ``hosts`` is given, this node's address is merged into the list
    (replacing ``replacing`` in place when supplied) and the instance is
    flagged to push its data to the contract after the body upload.

    Raises:
        InvalidSignature: if ``signature`` is not exactly 128 characters.
        cls.AlreadyExists: if a record with this hash already exists.
    """
    if len(signature) != 128:
        raise InvalidSignature
    try:
        instance = cls(
            hash_=file_hash,
            owner_key=owner_key,
            signature=signature,
            client_address=client_address,
            size=size,
        )
        instance.save()
        if hosts:
            if replacing:
                # Swap in place so the monitoring number of the replaced
                # host is preserved.
                hosts[hosts.index(replacing)] = settings.address
            else:
                hosts.append(settings.address)
            instance.add_hosts(hosts)
            instance.send_data_to_contract_after_uploading_body = True
            if replacing:
                instance.replacing_host_address = replacing
            instance.save()
    except IntegrityError:
        # Duplicate primary key/hash: report, undo, and surface as AlreadyExists.
        raven_client.captureException()
        session.rollback()
        raise cls.AlreadyExists
    return instance
def current_version(self):
    """Return the deployed contract's version, or 0 for legacy contracts.

    Contracts deployed before the ``version()`` accessor existed raise
    ``BadFunctionCallOutput``; that case is reported to Sentry with the
    contract's identity and treated as version 0.
    """
    try:
        return self.contract.version()
    except BadFunctionCallOutput:
        raven_client.captureException(
            extra={
                "contract_name": self.contract_name,
                "contract_address": self.address,
            }
        )
        return 0  # old contract; version not specified
async def upload_to_hoster(hoster, data, file, _logger=None):  # ToDo: mv to hoster
    """Upload a file's metadata, then its body, to a single hoster.

    Failures inside the HTTP exchange raise ``Exit`` with diagnostic detail;
    all exceptions are converted into a ``(hoster, False)`` result by the
    handlers below.

    Args:
        hoster: host record exposing ``.ip`` and ``.address``.
        data: JSON-serializable metadata payload for the POST request.
        file: file record exposing ``.hash`` and ``.get_filelike()``.
        _logger: optional logger override; defaults to the module logger.

    Returns:
        Tuple ``(hoster, ok)`` where ``ok`` is True on full success.
    """
    import json  # hoisted from the error branch so the dependency is visible up front
    if not _logger:
        _logger = logger
    ip = hoster.ip
    _logger.info(
        f'Uploading file metadata to hoster... | file: {file.hash} | hoster ip: {ip}'
    )
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(f'http://{ip}/files/', json=data, timeout=10) as resp1:
                resp_data: dict = await resp1.json()
                if resp_data.get('status') != 'success':
                    raise Exit(
                        f'Uploading metadata failed: {resp_data.get("message")}'
                    )
                _logger.info(
                    f'Uploading file body to hoster... | file: {file.hash} | hoster ip: {ip}'
                )
                # timeout=None: body uploads of large files must not be cut
                # off by the default client timeout.
                async with session.put(f'http://{ip}/files/{file.hash}/',
                                       data=file.get_filelike(),
                                       timeout=None) as resp2:
                    if resp2.status != 200:
                        raise Exit(
                            json.dumps({
                                "status": resp2.status,
                                "response": await resp2.text(),
                                "ip": ip,
                                "hash": file.hash
                            }))
            _logger.info(
                f'File is uploaded to hoster | file: {file.hash} | hoster ip: {ip}'
            )
            # NOTE(review): notify_user is not awaited here — confirm it is a
            # synchronous helper in this module.
            notify_user(f'Uploaded to {hoster.address}')
            return hoster, True
    except (ClientConnectorError, asyncio.TimeoutError) as err:
        # Expected transient network failures: warn, do not report to Sentry.
        _logger.warning(
            f'Uploading to hoster failed | file: {file.hash} | hoster: {hoster.address} '
            f'| message: {err.__class__.__name__} {str(err)}')
        return hoster, False
    except Exception as err:
        raven_client.captureException()
        _logger.error(
            f'Uploading to hoster failed | file: {file.hash} | hoster: {hoster.address} '
            f'| message: {err.__class__.__name__} {str(err)}')
        return hoster, False
def run(self):
    """Prepare the application and spin the event loop until it stops.

    Ctrl+C is treated as a normal shutdown; any other exception is printed
    and reported to Sentry. Cleanup always runs, whatever the exit path.
    """
    try:
        self.prepare()
        self.event_loop.run_forever()
    except KeyboardInterrupt:
        # Normal interactive shutdown — nothing to report.
        pass
    except Exception:
        traceback.print_exc()
        raven_client.captureException()
    finally:
        self.cleanup()
async def _unlock_account():
    """Unlock the configured account on the node using an interactively asked password.

    Logs and reports any failure to Sentry, then re-raises so callers can
    abort startup.
    """
    try:
        logger.info(f'Unlocking account | address: {settings.address}')
        pw = await ask_for_password()
        acct = settings.address
        w3.personal.unlockAccount(acct, pw)
    except Exception as err:
        raven_client.captureException()
        logger.error(
            f'Account unlocking failed | address: {settings.address} '
            f'| exception: {err.__class__.__name__} | message: {str(err)}')
        raise
def run():
    """Bootstrap the Qt application: check ports, obtain a password, start the core.

    Order of operations: probe both app ports, wire the Qt event loop into
    asyncio, build the core, take the password from settings or from a dialog
    loop, then run the core with stdout/stderr redirected into the GUI logger.
    """
    app = QApplication(sys.argv)
    try:
        # Bind-and-release probes: fail fast if another instance holds the ports.
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        probe.bind(('127.0.0.1', settings.renter_app_port))
        probe.close()
        del probe
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        probe.bind(('127.0.0.1', settings.hoster_app_port))
        probe.close()
        del probe
    except OSError as err:
        if err.errno == errno.EADDRINUSE:
            error(
                'Ports are already in use!\n'
                'Seems like Memority Core is already running or another application uses them.'
            )
            sys.exit(0)
        else:
            raven_client.captureException()
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    core = MemorityCore(
        event_loop=loop,
        _run_geth=True
    )
    stored_pw = settings.load_locals().get('password')
    if stored_pw:
        try:
            core.set_password(stored_pw)
            # NOTE(review): returning here skips core.run() entirely when the
            # password comes from the settings file — confirm this is intended.
            return
        except settings.InvalidPassword:
            error('Invalid password in settings file!')
    # No usable stored password: keep asking via dialog until one is accepted.
    while True:
        try:
            dlg: QDialog = uic.loadUi(ui_settings.ui_enter_password)
            dlg.password_input.setFocus()
            if not dlg.exec_():
                sys.exit(0)
            core.set_password(dlg.password_input.text())
            break
        except settings.InvalidPassword:
            error('Invalid password!')
            continue
    window = LoggerWindow(core)
    # Route all console output into the GUI log widget while the core runs.
    with redirect_stdout(R(window.logger)):
        with redirect_stderr(R(window.logger)):
            core.run()
async def error_middleware(request, handler):
    """Translate exceptions raised by downstream handlers into JSON error responses.

    Known application exceptions map to fixed statuses; unexpected ones are
    printed, sampled into Sentry, and returned as a 500.
    """

    def _err(message, status):
        # Uniform error payload shared by every branch below.
        return web.json_response({"status": "error", "message": message}, status=status)

    try:
        return await handler(request)
    except web.HTTPException as ex:
        return _err(ex.reason, ex.status)
    except settings.Locked:
        return _err('locked', 403)
    except settings.InvalidPassword:
        return _err('invalid_password', 403)
    except Exit as err:
        return _err(str(err), 400)
    except json.JSONDecodeError as err:
        return _err(str(err), 400)
    except Exception as ex:
        traceback.print_exc()
        # Downsample the very frequent "no client contract" noise to 20%.
        sample_rate = 1.0
        if 'No client contract for address' in str(ex):
            sample_rate = .2
        raven_client.captureException(sample_rate=sample_rate)
        return _err(f'{ex.__class__.__name__}: {ex}', 500)
async def error_middleware(request, handler):
    """Convert handler exceptions into JSON error responses.

    HTTP exceptions keep their own status/reason; everything else is
    reported to Sentry and answered with a 500.
    """
    try:
        return await handler(request)
    except web.HTTPException as ex:
        return web.json_response(
            {"status": "error", "message": ex.reason},
            status=ex.status,
        )
    except Exception as ex:
        raven_client.captureException()
        return web.json_response(
            {"status": "error", "message": f'{ex.__class__.__name__}: {ex}'},
            status=500,
        )
def ensure_addr_not_in_use(self):
    """Verify both daemon ports are free by bind-and-release probing.

    Exits the process with a user-facing message when the ports are taken;
    any other OS error is reported to Sentry and otherwise ignored.
    """
    try:
        for port in (daemon_settings.renter_app_port, daemon_settings.hoster_app_port):
            probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                probe.bind(('127.0.0.1', port))
            finally:
                # Close even when bind raises, so the probe socket never leaks.
                probe.close()
    except OSError as err:
        if err.errno == errno.EADDRINUSE:
            self.error(
                'Ports are already in use!\n'
                'Seems like Memority is already running or another application uses them.'
            )
            sys.exit(0)
        else:
            raven_client.captureException()
async def upload_file_host_list_to_hoster(hoster, data, file):
    """Push the file's host list to one hoster's metadata endpoint.

    Returns:
        Tuple ``(hoster, ok)`` where ``ok`` is True when the hoster
        acknowledged the update with HTTP 200.
    """
    ip = hoster.ip
    logger.info(f'Uploading file host list | file: {file.hash} | hoster ip: {ip}')
    try:
        async with aiohttp.ClientSession() as session:
            async with session.put(
                    f'http://{ip}/files/{file.hash}/metadata/',
                    json=data) as resp:
                if resp.status != 200:
                    body = await resp.read()
                    logger.warning(f'Uploading host list to hoster failed | file: {file.hash} '
                                   f'| hoster: {hoster.address} '
                                   f'| message: {body}')
                    return hoster, False
        logger.info(f'File host list is uploaded | file: {file.hash} | hoster ip: {ip}')
        return hoster, True
    except Exception as err:
        raven_client.captureException()
        logger.warning(f'Uploading host list to hoster failed | file: {file.hash} | hoster: {hoster.address} '
                       f'| message: {err.__class__.__name__} {str(err)}')
        return hoster, False
async def upload_file(**kwargs):
    """Upload a local file into the network: deposit, hosters, contract, host lists.

    Pipeline: prepare the file, ensure a token deposit exists (asking the
    user for the amount when needed), upload to up to 10 hosters, record the
    metadata in the client contract (rolling back hoster uploads on failure),
    then distribute the final host list to every successful hoster.

    Args:
        **kwargs: expects ``path`` — filesystem path of the file to upload.

    Returns:
        A success dict with the serialized file, or ``_error_response(...)``
        describing the failure.
    """
    path = kwargs.get('path', None)
    logger.info(f'Started file uploading | path: {path}')
    await notify_user(f'Started file uploading | path: {path}')
    if not path:
        logger.warning('Path is not specified')
        return _error_response("path is not specified")
    file = await RenterFile.open(path)
    try:
        logger.info(f'Preparing file for uploading | path: {path}')
        await notify_user(f'Preparing file for uploading | path: {path}')
        file.prepare_to_uploading()
    except IntegrityError:
        if file.hash in client_contract.get_files():
            logger.warning(f'The file is already uploaded | path: {path} | hash: {file.hash}')
            return _error_response("The file is already uploaded!")
        # NOTE(review): when the hash is unknown to the contract the
        # IntegrityError is swallowed and the upload proceeds — confirm intended.
    if not await token_contract.get_deposit(file_hash=file.hash):
        # No deposit yet: ask the user for an amount and create one.
        token_balance = token_contract.get_mmr_balance()
        tokens_to_deposit = await ask_user_for__(
            'tokens_to_deposit',
            'Choose token amount for file deposit\n'
            f'({token_contract.wmmr_to_mmr(token_contract.tokens_per_byte_hour*file.size*10*24*14)} MMR for 2 weeks)',
            type_='float'
        )
        if not tokens_to_deposit:
            return _error_response('Invalid value')
        tokens_to_deposit = float(tokens_to_deposit)
        if tokens_to_deposit > token_balance:
            return _error_response(f'Deposit can not be bigger than your balance.'
                                   f'| mmr balance: {token_balance}')
        # Fix: the two concatenated f-strings previously ran together with no
        # separator; insert '\n' to match the other two-line notify messages.
        await notify_user(f'Creating deposit for file {file.hash}, value: {tokens_to_deposit} MMR...\n'
                          f'This can take up to 60 seconds, as transaction is being written in blockchain.')
        await client_contract.make_deposit(value=tokens_to_deposit, file_hash=file.hash)
        if not await token_contract.get_deposit(file_hash=file.hash):
            file.delete()
            return _error_response(f'Failed deposit creation | file: {file.hash}')
        await notify_user('Deposit successfully created.')
    file.update_status(RenterFile.UPLOADING)
    # region Upload to 10 hosters
    data = {
        "file_hash": file.hash,
        "owner_key": settings.public_key,
        "signature": file.signature,
        "client_contract_address": settings.client_contract_address,
        "size": file.size
    }
    logger.info('Trying to get hoster list')
    hosters = set(Host.get_n(n=10))
    if not hosters:
        logger.error(f'No hosters available | file: {file.hash}')
        file.delete()
        return _error_response("No hosters available!")
    logger.info(f'Uploading to hosters | file: {file.hash} '
                f'| hosters: {", ".join([hoster.address for hoster in hosters])}')
    await notify_user('Uploading file to hosters')
    hosts_success = set()
    hosts_error = set()
    while True:
        done, _ = await asyncio.wait(
            [
                asyncio.ensure_future(
                    upload_to_hoster(
                        hoster=hoster,
                        data=data,
                        file=file
                    )
                )
                for hoster in hosters
            ]
        )
        for task in done:
            hoster, ok = task.result()
            if ok:
                hosts_success.add(hoster)
            else:
                hosts_error.add(hoster)
        if len(hosts_success) >= 10:
            break
        else:
            # Retry with fresh hosters, excluding both the already-successful
            # and the already-failed ones.
            logger.info(f'Failed uploading to some hosters | file: {file.hash} '
                        f'| hosters: {", ".join([hoster.address for hoster in hosts_error])}')
            hosters = set(Host.get_n(n=10 - len(hosts_success))) \
                .difference(hosts_success) \
                .difference(hosts_error)
            if not hosters:
                if hosts_success:
                    # Fewer than 10 copies, but at least one succeeded — accept.
                    break
                logger.error(f'No hosters available | file: {file.hash}')
                file.delete()
                return _error_response("No hosters available!")
    hosters = hosts_success
    logger.info(f'Uploaded to hosters | file: {file.hash} '
                f'| hosters: {", ".join([hoster.address for hoster in hosters])}')
    await notify_user('Uploaded.')
    # endregion
    file.update_status(RenterFile.UPLOADED)
    # region Save file metadata to contract
    file_metadata_for_contract = {
        "file_name": file.name,
        "file_size": file.size,
        "signature": file.signature,
        "file_hash": file.hash,
        "hosts": [hoster.address for hoster in hosters]
    }
    try:
        logger.info(f'Sending file metadata to contract | file: {file.hash}')
        await notify_user(f'Sending file metadata to contract | file: {file.hash}...\n'
                          f'This can take up to 60 seconds, as transaction is being written in blockchain.')
        await client_contract.add_hosts(**file_metadata_for_contract)
    except Exception as err:
        # Contract write failed: roll back every hoster upload and the local record.
        raven_client.captureException()
        async with aiohttp.ClientSession() as session:
            for hoster in hosters:
                await session.delete(f'http://{hoster.ip}/files/{file.hash}/')
                logger.info(f'Deleted from hoster | file: {file.hash} | hoster ip: {hoster.ip}')
        file.delete()
        logger.warning(f'Saving data to contract failed | file: {file.hash} '
                       f'| message: {err.__class__.__name__} {str(err)}')
        return _error_response(f'Saving data to contract failed | file: {file.hash} '
                               f'| message: {err.__class__.__name__} {str(err)}')
    # endregion
    # region Upload metadata to all hosters
    logger.info(f'Uploading host list to hosters | file: {file.hash} '
                f'| hosters: {", ".join([hoster.address for hoster in hosters])}')
    await notify_user(f'Uploading host list to hosters | file: {file.hash} '
                      f'| hosters: {", ".join([hoster.address for hoster in hosters])}')
    file_hosters = {
        "hosts": [hoster.address for hoster in hosters]
    }
    done, _ = await asyncio.wait(
        [
            asyncio.ensure_future(
                upload_file_host_list_to_hoster(
                    hoster=hoster,
                    data=file_hosters,
                    file=file
                )
            )
            for hoster in hosters
        ]
    )
    for task in done:
        hoster, ok = task.result()
        # Fix: per-hoster results previously went to stdout via print();
        # route them through the module logger like the rest of this function.
        if ok:
            logger.info(f'Success: {hoster}')
        else:
            logger.warning(f'Error: {hoster}')
    logger.info(f'Uploaded host list to hosters | file: {file.hash} '
                f'| hosters: {", ".join([hoster.address for hoster in hosters])}')
    # endregion
    file.add_hosters(hosters)
    logger.info(f'Finished file uploading | path: {path} | hash: {file.hash}')
    await notify_user(f'Finished file uploading | path: {path} | hash: {file.hash}')
    return {
        "status": "success",
        "details": "uploaded",
        "data": {
            "file": await file.to_json()
        }
    }