from contextlib import suppress
from time import sleep
from uuid import UUID

import urllib3
from requests_toolbelt.sessions import BaseUrlSession

# ``plugged_usbs`` and ``NoUSBFound`` are assumed to come from the
# surrounding project's USB helpers; they are not defined in this excerpt.


class USBSneaky:
    """
    Detects plugged-in USBs, gets their info and sends it to a
    WorkbenchServer.

    USBSneaky constantly sends the info about the USB it has plugged in
    and notifies when it has been removed. If USBSneaky doesn't update
    for some time, WorkbenchServer interprets the silence as the
    computer being off and unplugs the USB. This is done because the
    computer can die or be shut down at any moment.

    USBSneaky is meant to be executed as a worker in a single process.
    """

    def __init__(self, uuid: UUID, workbench_server: str):
        self.uuid = str(uuid)
        self.session = BaseUrlSession(base_url=workbench_server)
        self.session.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        while True:
            sleep(1)
            try:
                # We keep sending this so Workbench Server will notice
                # our silence if we die and remove the USB from its
                # list of plugged-in USBs.
                pen = plugged_usbs(multiple=False)
            except NoUSBFound:
                with suppress(NameError):
                    # ``pen`` was defined, therefore we had a pen before.
                    self.send_unplug(pen['hid'])
                    # Remove it so we are not sending it all the time.
                    del pen
            else:
                # We have found a USB.
                pen['_uuid'] = self.uuid
                self.send_plug(pen)
            sleep(2.25)  # Don't stress Workbench Server

    def send_plug(self, pen: dict):
        self.session.post('/usbs/plugged/{}'.format(pen['hid']), json=pen)

    def send_unplug(self, hid: str):
        self.session.delete('/usbs/plugged/{}'.format(hid))
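# A minimal usage sketch, not part of the original excerpt: since __init__
# never returns, USBSneaky is launched as its own worker process, as its
# docstring suggests. The uuid4() identity and the server URL here are
# illustrative assumptions.
if __name__ == '__main__':
    from multiprocessing import Process
    from uuid import uuid4

    worker = Process(target=USBSneaky, args=(uuid4(), 'https://localhost:8091'))
    worker.daemon = True  # let the worker die together with the main process
    worker.start()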
import base64
import json
import os
import pickle
from json import JSONDecodeError

import requests
from requests.auth import HTTPBasicAuth
from requests_toolbelt.sessions import BaseUrlSession


class AppConnect:
    """App connection object.

    A wrapper around requests' BaseUrlSession that holds Atlassian keys
    across command runs.

    Parameters
    ----------
    server:
        base url of the app server.
    username:
        username for the connection.
    password:
        password for the connection.
    cookie_store:
        path to the cookie store file.
    session_headers:
        default headers added to every call.
    """
    _server: str
    username: str
    _password: bytes
    session: BaseUrlSession = None
    auth: HTTPBasicAuth = None
    _response: requests.Response = None
    cookie_store: str = None

    def __init__(self, server: str, username: str = None,
                 password: str = None, cookie_store: str = None,
                 session_headers: dict = None) -> None:
        self.server = server
        self.session = BaseUrlSession(base_url=server)

        if username:
            self.username = username
        if password:
            self.password = password
        if cookie_store:
            self.cookie_store = cookie_store
        if username and password:
            self.auth = HTTPBasicAuth(self.username, self.password)
        if session_headers:
            self.session.headers.update(session_headers)

        self.reload_cookies()

    @property
    def server(self):
        """server baseUrl for the connection."""
        return self._server

    @server.setter
    def server(self, server: str):
        self._server = server
        if self.session:
            self.session.base_url = server

    @property
    def password(self):
        """password for the connection."""
        return base64.decodebytes(self._password).decode()

    @password.setter
    def password(self, password: str):
        # Obfuscated, not encrypted: base64 only keeps the password from
        # sitting in memory as plain text.
        self._password = base64.encodebytes(password.encode())

    def get(self, api: str, headers: dict = None, params: dict = None,
            data: dict = None, auth: bool = False,
            allow_redirects: bool = True):
        """Send an HTTP GET request.

        Parameters
        ----------
        api: str
            url path appended to the base url.
        headers: dict
            headers to send.
        params: dict
            url query parameters.
        data: dict
            data to send.
        auth: bool (False)
            send BasicAuth.
        allow_redirects: bool (True)
            follow redirects.

        Returns
        -------
        json
        """
        # BaseUrlSession joins ``api`` onto the base url, so no urljoin
        # is needed here.
        try:
            self._response = self.session.get(
                api, headers=headers, params=params, data=data,
                auth=self.auth if auth else None,
                allow_redirects=allow_redirects)
            self._response.raise_for_status()
        except requests.exceptions.RequestException as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def delete(self, api: str, headers: dict = None, params: dict = None,
               auth: bool = False):
        """Send an HTTP DELETE request.

        Parameters
        ----------
        api: str
            url path appended to the base url.
        headers: dict
            headers to send.
        params: dict
            url query parameters.
        auth: bool (False)
            send BasicAuth.

        Returns
        -------
        json
        """
        try:
            self._response = self.session.delete(
                api, headers=headers, params=params,
                auth=self.auth if auth else None)
            self._response.raise_for_status()
        except requests.exceptions.RequestException as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def post(self, api: str, headers: dict = None, params: dict = None,
             data: dict = None, auth: bool = False,
             allow_redirects: bool = True):
        """Send an HTTP POST request.

        Parameters
        ----------
        api: str
            url path appended to the base url.
        headers: dict
            headers to send.
        params: dict
            url query parameters.
        data: dict
            data to send.
        auth: bool (False)
            send BasicAuth.
        allow_redirects: bool (True)
            follow redirects.

        Returns
        -------
        json
        """
        try:
            self._response = self.session.post(
                api, headers=headers, params=params, data=data,
                auth=self.auth if auth else None,
                allow_redirects=allow_redirects)
            # Deliberately no raise_for_status() here: error details for
            # failed POSTs are reported through json_response() instead.
        except requests.exceptions.RequestException as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def put(self, api: str, headers: dict = None, params: dict = None,
            data: dict = None, auth: bool = False):
        """Send an HTTP PUT request.

        Parameters
        ----------
        api: str
            url path appended to the base url.
        headers: dict
            headers to send.
        params: dict
            url query parameters.
        data: dict
            data to send.
        auth: bool (False)
            send BasicAuth.

        Returns
        -------
        json
        """
        try:
            self._response = self.session.put(
                api, headers=headers, params=params, data=data,
                auth=self.auth if auth else None)
            self._response.raise_for_status()
        except requests.exceptions.RequestException as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def json_response(self, res: requests.Response):
        """Always return a json response.

        Parameters
        ----------
        res: requests.Response
            response to serialize.

        Returns
        -------
        json
        """
        _json = None

        if res.ok:
            if res.cookies:
                self.session.cookies.update(res.cookies)
                self.cache_cookies()
            try:
                _json = res.json()
            except JSONDecodeError:
                # The body isn't JSON; fall through to the summary below.
                _json = None

        if not _json:
            if res.ok:
                _json = json.dumps({
                    'success': res.ok,
                    'status code': res.status_code,
                    'elapsed seconds': res.elapsed.seconds
                })
            else:
                _json = json.dumps({
                    'ok': res.ok,
                    'status_code': res.status_code,
                    'reason': res.text,
                    'request-url': res.request.url,
                    'request-method': res.request.method,
                    'text': res.text,
                    'redirect': res.is_redirect,
                    'elapsed': res.elapsed.seconds
                })

        return _json

    def update_cookies(self, cookies: dict = None):
        """Add cookie(s) to the cookie jar.

        Parameters
        ----------
        cookies: dict
            cookies to add.
        """
        self.session.cookies.update(cookies)
        self.cache_cookies()

    def cache_cookies(self):
        """Cache cookies to file."""
        if self.session.cookies and self.cookie_store:
            with open(self.cookie_store, 'wb') as f:
                pickle.dump(self.session.cookies, f)

    def reload_cookies(self):
        """Reload cookies from file."""
        if self.cookie_store and os.path.isfile(self.cookie_store):
            with open(self.cookie_store, 'rb') as f:
                self.session.cookies.update(pickle.load(f))
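# A minimal usage sketch, not part of the original. The server URL,
# credentials, cookie path, and the Jira "rest/api/2/myself" endpoint are
# illustrative assumptions.
if __name__ == '__main__':
    jira = AppConnect(
        'https://jira.example.com',
        username='admin',
        password='secret',
        cookie_store='/tmp/appconnect.cookies',
        session_headers={'Accept': 'application/json'},
    )
    print(jira.get('rest/api/2/myself', auth=True))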
from base64 import b64encode
from typing import Any, Dict, List, Optional
from urllib.parse import quote, urljoin

import requests
from requests_toolbelt.sessions import BaseUrlSession


class RCTFAdminV1:
    session: requests.Session

    def __init__(self, endpoint: str, login_token: Optional[str]):
        self.session = BaseUrlSession(urljoin(endpoint, "api/v1/admin/"))

        if login_token is not None:
            login_resp = requests.post(
                urljoin(endpoint, "api/v1/auth/login"),
                json={"teamToken": login_token},
            ).json()
            if login_resp["kind"] == "goodLogin":
                auth_token = login_resp["data"]["authToken"]
                self.session.headers["Authorization"] = f"Bearer {auth_token}"
            else:
                raise ValueError(
                    f"Invalid login_token provided (reason: {login_resp['kind']})"
                )

    @staticmethod
    def assertResponseKind(response: Any, kind: str) -> None:
        if response["kind"] != kind:
            raise RuntimeError(f"Server error: {response['kind']}")

    def list_challenges(self) -> List[Dict[str, Any]]:
        r = self.session.get("challs").json()
        self.assertResponseKind(r, "goodChallenges")
        return r["data"]

    def put_challenge(self, chall_id: str, data: Dict[str, Any]) -> None:
        r = self.session.put("challs/" + quote(chall_id), json={"data": data}).json()
        self.assertResponseKind(r, "goodChallengeUpdate")

    def delete_challenge(self, chall_id: str) -> None:
        r = self.session.delete("challs/" + quote(chall_id)).json()
        self.assertResponseKind(r, "goodChallengeDelete")

    def create_upload(self, uploads: Dict[str, bytes]) -> Dict[str, str]:
        """
        :param uploads: uploads {name: data}
        :return: urls {name: url}
        """
        if len(uploads) == 0:
            return {}

        payload = [
            {"name": name, "data": "data:;base64," + b64encode(data).decode()}
            for name, data in uploads.items()
        ]
        r = self.session.post("upload", json={"files": payload}).json()
        self.assertResponseKind(r, "goodFilesUpload")
        return {f["name"]: f["url"] for f in r["data"]}

    def get_url_for_files(self, files: Dict[str, str]) -> Dict[str, Optional[str]]:
        """
        :param files: files to get {name: sha256}
        :return: urls {name: url}
        """
        payload = [{"name": name, "sha256": sha256} for name, sha256 in files.items()]
        r = self.session.post("upload/query", json={"uploads": payload}).json()
        self.assertResponseKind(r, "goodUploadsQuery")
        return {f["name"]: f["url"] for f in r["data"]}
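# A minimal usage sketch, not part of the original. The endpoint, team token,
# and the "id"/"name" fields on each challenge are illustrative assumptions
# about rCTF's responses.
if __name__ == "__main__":
    admin = RCTFAdminV1("https://ctf.example.com/", login_token="<team-token>")
    for chall in admin.list_challenges():
        print(chall.get("id"), chall.get("name"))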
import sys
from datetime import datetime

import pytz
from requests_toolbelt.sessions import BaseUrlSession
from s3_file_field_client import S3FileFieldClient

# ``raise_for_status`` and ``await_tasks_finished`` are assumed to be helpers
# defined elsewhere in this script; they are not part of this excerpt.


def main():
    if len(sys.argv) != 5:
        print(
            "usage: multinet.py <instance-url> <workspace> <api-token> <volume>",
            file=sys.stderr,
        )
        return 1

    # Extract args
    _, base_url, workspace, api_token, volume = sys.argv

    # Inject the auth token into every request
    api_client = BaseUrlSession(base_url=base_url)
    api_client.headers.update({"Authorization": f"Bearer {api_token}"})

    print("Uploading files...")

    # Upload all files to S3
    s3ff_client = S3FileFieldClient("/api/s3-upload/", api_client)

    # Upload nodes.csv
    with open("artifacts/nodes.csv", "rb") as file_stream:
        nodes_field_value = s3ff_client.upload_file(
            file_stream, "nodes.csv", "api.Upload.blob"
        )["field_value"]

    # Upload links.csv
    with open("artifacts/links.csv", "rb") as file_stream:
        links_field_value = s3ff_client.upload_file(
            file_stream, "links.csv", "api.Upload.blob"
        )["field_value"]

    # Update the base url, since only workspace endpoints are needed now
    api_client.base_url = f"{base_url}/api/workspaces/{workspace}/"

    # Get the names of all networks and tables
    networks = [x["name"] for x in api_client.get("networks/").json().get("results")]
    tables = [x["name"] for x in api_client.get("tables/").json().get("results")]

    # Keep only the names we want to remove (those that mention the volume)
    networks = [x for x in networks if volume in x]
    tables = [x for x in tables if volume in x]

    # Delete the network and tables if they exist
    for network in networks:
        api_client.delete(f"networks/{network}/")
    for table in tables:
        api_client.delete(f"tables/{table}/")

    # Generate new network and table names
    NODE_TABLE_NAME = f"{volume}_nodes"
    EDGE_TABLE_NAME = f"{volume}_links"
    NETWORK_NAME = (
        f"{volume}_"
        f"{datetime.now(pytz.timezone('America/Denver')).strftime('%Y-%m-%d_%H-%M')}"
    )

    # Create the nodes table
    r = api_client.post(
        "uploads/csv/",
        json={
            "field_value": nodes_field_value,
            "edge": False,
            "table_name": NODE_TABLE_NAME,
            "columns": {
                "TypeID": "category",
                "Verified": "boolean",
                "Confidence": "number",
                "ParentID": "category",
                "Created": "date",
                "LastModified": "date",
                "TypeLabel": "category",
                "Volume (nm^3)": "number",
                "MaxDimension": "number",
                "MinZ": "number",
                "MaxZ": "number",
            },
        },
    )
    raise_for_status(r)
    nodes_upload = r.json()

    # Create the links table
    r = api_client.post(
        "uploads/csv/",
        json={
            "field_value": links_field_value,
            "edge": True,
            "table_name": EDGE_TABLE_NAME,
            "columns": {
                "TotalChildren": "number",
                "LastModified": "date",
                "Bidirectional": "boolean",
                "Type": "category",
                "TotalSourceArea(nm^2)": "number",
                "TotalTargetArea(nm^2)": "number",
            },
        },
    )
    raise_for_status(r)
    links_upload = r.json()

    print("Processing files...")

    # Wait for the nodes and links tables to be created
    await_tasks_finished(api_client, [nodes_upload, links_upload])

    # Create the network
    raise_for_status(
        api_client.post(
            "networks/",
            json={"name": NETWORK_NAME, "edge_table": EDGE_TABLE_NAME},
        )
    )

    print("Network created.")
    print("Synchronization finished.")
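# Standard script entry point, an assumption: the original excerpt defines
# main() with usage-error handling but does not show how it is invoked.
if __name__ == "__main__":
    sys.exit(main())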