def update_chain():
    chain = request.form["chain"]
    publisher = request.form["publisher"]
    session = FuturesSession()
    for (id, address) in randomized_nodes():
        session.put(address + "/chain",
                    data={"chain": chain, "publisher": publisher})
    return success("PUBLISHING updated chain")
def update_transactions(self, budget_id, transactions):
    """Updates multiple transactions.

    Pass transactions as a dictionary with transaction ids for keys,
    and the transaction's update for values. For example, updating the
    payee for multiple transactions:

    {
        'transaction-id-1': {'payee_id': 'new-payee-id'},
        'transaction-id-2': {'payee_id': 'new-payee-id'},
        'etc'
    }
    """
    fs = FuturesSession()
    futures = []
    for tx_id in transactions:
        path = '/budgets/{}/transactions/{}'.format(budget_id, tx_id)
        url = '{base_url}{path}'.format(base_url=self.__base_url, path=path)
        futures.append(
            fs.put(url,
                   json={'transaction': transactions[tx_id]},
                   headers=self._headers()))
    while len(futures) > 0:
        print('\r{} remaining...'.format(len(futures)), end='')
        future = futures.pop(0)
        r = future.result()
        r.raise_for_status()
    print('\rNumber of transactions updated: {}'.format(len(transactions)),
          end='')
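The drain loop above resolves futures in submission order, so one slow request stalls the progress display. A minimal sketch of the same PUT fan-out using concurrent.futures.as_completed, which yields responses as they finish; the endpoint URL and payloads are placeholders, not the API above:

    from concurrent.futures import as_completed
    from requests_futures.sessions import FuturesSession

    session = FuturesSession(max_workers=8)
    updates = {'tx-1': {'payee_id': 'p-1'},
               'tx-2': {'payee_id': 'p-2'}}  # placeholder payloads
    futures = [session.put('https://api.example.com/transactions/' + tx_id,
                           json={'transaction': body})
               for tx_id, body in updates.items()]
    for future in as_completed(futures):  # completion order, not submission order
        response = future.result()
        response.raise_for_status()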
def replace_fixversion(issue, fv,
                       search_url="http://cavcops01:9081/rest/api/2/issue/",
                       username="******", password="******"):
    session = FuturesSession()
    headers = {"content-type": "application/json"}
    status = 1
    # format required for jira API
    data_dict = {"fields": {"fixVersions": [{"name": fv}]}}
    data_json = json.dumps(data_dict)
    log(f"Attempting to update {issue['key']} to fixVersion {fv}")
    update_future = session.put(search_url + issue["key"], data=data_json,
                                auth=(username, password), headers=headers)
    update_result = update_future.result()
    if update_result.status_code < 300:
        log(f"Successfully updated {issue['key']} to fixVersion {fv}")
        status = 0
    else:
        log(f"Warning: Could not update {issue['key']} to fixVersion {fv}")
        log(f"\tStatus Code: {update_result.status_code}")
        log(f"\tError: {update_result.text}")
        status = 1
    return status
def set(self, obj, id, payload, action='', isasync=False):
    """ Function set
    Set an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @param action: specific action of an object ('power'...)
    @param payload: the dict of the payload
    @param isasync: should this request be async; if true, use
                    return.result() to get the response
    @return RETURN: the server response
    """
    self.url = '{}{}/{}'.format(self.base_url, obj, id)
    self.method = 'PUT'
    if action:
        self.url += '/{}'.format(action)
    self.payload = json.dumps(payload)
    if isasync:
        session = FuturesSession()
        return session.put(url=self.url, auth=self.auth,
                           headers=self.headers, data=self.payload,
                           cert=self.ca_cert)
    else:
        self.resp = requests.put(url=self.url, auth=self.auth,
                                 headers=self.headers, data=self.payload,
                                 cert=self.ca_cert)
        if self.__process_resp__(obj):
            return self.res
        return False
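A short usage sketch of the isasync branch above; 'api' stands in for an instance of the surrounding class, and the object name and payload are made up for illustration:

    # 'api' is assumed to be an instance of the class defining set() above.
    future = api.set('hosts', 'server01', {'host': {'comment': 'managed'}},
                     isasync=True)
    response = future.result()  # block until the PUT completes
    print(response.status_code)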
def _post_hook_server(sender, instance, created, **kwargs):
    from Sensors import serializers
    requests = FuturesSession(max_workers=10)
    put_url = 'http://{url}/{type}/{name}/{id}/'
    post_url = 'http://{url}/{type}/{name}/'
    url = str.format(post_url if created else put_url,
                     url=instance.peer.address,
                     type='sensors',
                     name=instance.__class__.__name__.lower(),
                     id=instance.id)
    context = create_request_context()
    Serializer = getattr(serializers, instance.__class__.__name__ + 'Serializer')
    serializer_instance = Serializer(instance, context=context)
    if created:
        response = requests.post(url,
                                 json.dumps(serializer_instance.data),
                                 headers={'Content-Type': 'application/json'})
    else:
        response = requests.put(url,
                                json.dumps(serializer_instance.data),
                                headers={'Content-Type': 'application/json'})
class Connection:
    ''' Connects to league client and communicates with it '''

    def __init__(self):
        self.kwargs = None
        self.url = None
        self.session = FuturesSession()

    def get_connection(self, settings):
        ''' Parses connection url and port from lockfile '''
        raise NotImplementedError('Please implement this method')

    def get(self, url, *args, **kwargs):
        ''' Wrapper around requests get method '''
        return requests.get('{}{}'.format(self.url, url),
                            *args, **kwargs, **self.kwargs)

    def post(self, url, *args, **kwargs):
        ''' Wrapper around requests post method '''
        return requests.post('{}{}'.format(self.url, url),
                             *args, **kwargs, **self.kwargs)

    def patch(self, url, *args, **kwargs):
        ''' Wrapper around requests patch method '''
        return requests.patch('{}{}'.format(self.url, url),
                              *args, **kwargs, **self.kwargs)

    def put(self, url, *args, **kwargs):
        ''' Wrapper around requests put method '''
        return requests.put('{}{}'.format(self.url, url),
                            *args, **kwargs, **self.kwargs)

    def delete(self, url, *args, **kwargs):
        ''' Wrapper around requests delete method '''
        return requests.delete('{}{}'.format(self.url, url),
                               *args, **kwargs, **self.kwargs)

    def async_get(self, url, *args, **kwargs):
        ''' Wrapper around requests get method '''
        return self.session.get('{}{}'.format(self.url, url),
                                *args, **kwargs, **self.kwargs)

    def async_post(self, url, *args, **kwargs):
        ''' Wrapper around requests post method '''
        return self.session.post('{}{}'.format(self.url, url),
                                 *args, **kwargs, **self.kwargs)

    def async_patch(self, url, *args, **kwargs):
        ''' Wrapper around requests patch method '''
        return self.session.patch('{}{}'.format(self.url, url),
                                  *args, **kwargs, **self.kwargs)

    def async_put(self, url, *args, **kwargs):
        ''' Wrapper around requests put method '''
        return self.session.put('{}{}'.format(self.url, url),
                                *args, **kwargs, **self.kwargs)

    def async_delete(self, url, *args, **kwargs):
        ''' Wrapper around requests delete method '''
        return self.session.delete('{}{}'.format(self.url, url),
                                   *args, **kwargs, **self.kwargs)
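A hedged sketch of a concrete subclass; get_connection is left unimplemented above, so the url and kwargs values below are invented placeholders, not the real lockfile contents:

    class LockfileConnection(Connection):
        def get_connection(self, settings):
            # Placeholder values standing in for whatever the lockfile holds.
            self.url = 'https://127.0.0.1:2999'
            self.kwargs = {'verify': False, 'auth': ('riot', 'lockfile-password')}

    conn = LockfileConnection()
    conn.get_connection(settings=None)
    future = conn.async_get('/liveclientdata/allgamedata')
    print(future.result().status_code)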
def get_api_call_future(api_call: ApiCall):
    if api_call.method:
        session = FuturesSession()
        if api_call.method == 'GET':
            return session.get(url=api_call.url)
        elif api_call.method == 'POST':
            return session.post(url=api_call.url, data=api_call.body)
        elif api_call.method == 'PUT':
            return session.put(url=api_call.url, data=api_call.body)
        elif api_call.method == 'DELETE':
            return session.delete(url=api_call.url)
        else:
            raise ValueError('Invalid method type: {}'.format(api_call.method))
    else:
        raise ValueError('No API method defined')
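A usage sketch for the dispatcher above. The original only shows that ApiCall carries url, method, and body attributes; the dataclass definition here is an assumption for illustration:

    from dataclasses import dataclass

    # Hypothetical ApiCall definition matching the attributes used above.
    @dataclass
    class ApiCall:
        url: str
        method: str = 'GET'
        body: dict = None

    future = get_api_call_future(ApiCall(url='https://httpbin.org/put',
                                         method='PUT', body={'k': 'v'}))
    print(future.result().status_code)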
def __upload(self, payload):
    """Asynchronously upload a datapackage to Open Spending.
    """
    # Prepare
    session = FuturesSession()
    responses = []
    futures = []
    files = []
    # Start uploading
    for path, metadata in payload['filedata'].items():
        fullpath = os.path.join(self.path, path)
        headers = {
            'Content-Length': metadata['length'],
            'Content-MD5': metadata['md5'],
        }
        file = io.open(fullpath, mode='rb')
        files.append(file)
        future = session.put(
            metadata['upload_url'],
            data=file,
            headers=headers,
            params=metadata['upload_query'],
            background_callback=self.__notify)
        futures.append(future)
    # Wait uploading
    for future in futures:
        exception = future.exception()
        if exception:
            raise exception
        response = future.result()
        responses.append(response)
    # Raise if errors
    for response in responses:
        if response.status_code != 200:
            url = self.__clean_url(response.url)
            message = (
                'Something went wrong with "%s" file.\n\n'
                'Here is the response we\'ve received:\n\n%s'
                % (url, response.text))
            raise RuntimeError(message)
    # Close files
    for file in files:
        file.close()
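The __notify callback passed as background_callback above is not shown; a minimal sketch of the (session, response) shape requests-futures expects, with a made-up body:

    def __notify(self, session, response):
        # Runs on the worker thread as soon as each PUT completes.
        print('Uploaded %s -> HTTP %s' % (response.url, response.status_code))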
def monitor():
    command_sub = r.pubsub()
    command_sub.subscribe('http-commands')
    # print("Activate x coordinator...")
    # try:
    #     serialConnection = serial.Serial(SERIAL_PORT, 9600, timeout=0.15)
    #     xbee = ZigBee(serialConnection)
    #     print "Serial xbee connection...OK"
    # except:
    #     logging.warning('Error serial/xbee')
    #     print "Error serial/xbee"
    print('Starting loop')
    session = FuturesSession(max_workers=20)
    while True:
        message = command_sub.get_message()
        if message and message['type'] == 'message':
            # print(message['data'])
            message_in = json.loads(message['data'])
            device_name = message_in['device_name']
            if message_in['type'] == 'get':
                # print(message_in)
                try:
                    url_initial = 'http://'
                    if message_in['address'][:4] == "http":
                        url_initial = ''
                    req = session.get(
                        url_initial + message_in['address'],
                        # data=message_in['payload'],
                        params=message_in['pars'],
                        background_callback=process_response,
                        timeout=4)
                except Exception as ex:
                    print('Error http request get')
                    print(format(ex))
            if message_in['type'] == 'put':
                # print('http://' + message_in['address'])
                # print(message_in['payload'])
                try:
                    req = session.put('http://' + message_in['address'],
                                      data=message_in['payload'],
                                      background_callback=process_response,
                                      timeout=2)
                except:
                    print('Error http request put')
                title='New Snipe!',
                description=f'Successfully sniped name `{wantedName}`!',
                color=0x72FF33)
            webhook.send()
            print(f'{swiftOutput}Sent message in #Snipes!')
        except:
            print(f'{swiftOutput}Seems like an invalid webhook ¯\_(ツ)_/¯')
        try:
            files = {
                'model': 'slim',
                'file': ('Skin.png', open('Skin.png', 'rb'))
            }
            response = session.put(
                'https://api.mojang.com/user/profile/' +
                UUIDs[int(i / 20)] + '/skin',
                headers={"Authorization": Tokens[int((i + 1) / 20)]},
                files=files)
            response = response.result()
            print(f'{swiftOutput}Attempted to change skin | '
                  f'Status code: {response.status_code}')
        except:
            print(f"{swiftOutput}Couldn't find a skin file, so not uploading skin")
            print(f'{swiftOutput}If you want to always upload a custom skin '
                  f'at snipe, create a file named Skin.png')
class HTTPDriver(BaseDriver):
    """HTTPDriver

    The :class:`HTTPDriver` class reads SBP messages from an HTTP service
    for a device and writes out to a stream. This driver is like a
    file-handle with reads and writes over two separate HTTP connections,
    but can also be enabled and disabled by its consumer.

    Parameters
    ----------
    device_uid : uid
      Device unique id
    url : str
      HTTP endpoint
    retries : tuple
      Configure connect and read retry count. Defaults to
      (MAX_CONNECT_RETRIES, MAX_READ_RETRIES).
    timeout : tuple
      Configure connect and read timeouts. Defaults to
      (DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT).

    """

    def __init__(self,
                 device_uid=None,
                 url="https://broker.staging.skylark.swiftnav.com",
                 retries=DEFAULT_RETRIES,
                 timeout=DEFAULT_TIMEOUT):
        self._retry = Retry(connect=DEFAULT_RETRIES[0],
                            read=DEFAULT_RETRIES[1],
                            redirect=MAX_REDIRECTS,
                            status_forcelist=[500],
                            backoff_factor=DEFAULT_BACKOFF_FACTOR)
        self.url = url
        self.read_session = requests.Session()
        self.read_session.mount(
            "http://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.read_session.mount(
            "https://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.write_session = None
        self.device_uid = device_uid
        self.timeout = timeout
        self.read_response = None
        self.write_response = None
        self.source = None

    def flush(self):
        """File-flush wrapper (noop).
        """
        pass

    def close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.read_close()
            self.write_close()
        except:
            pass

    @property
    def write_ok(self):
        """
        Are we connected for writes?
        """
        # Note that self.write_response is either None or a Response
        # object, which casts to False for 4xx and 5xx HTTP codes.
        return bool(self.write_response)

    def connect_write(self, source, whitelist, device_uid=None, pragma=None):
        """Initialize a streaming write HTTP response. Manually connects the
        underlying file-handle. In the event of a network disconnection,
        use to manually reinitiate an HTTP session.

        Parameters
        ----------
        source : sbp.client.handler.Handler
          Iterable source of SBP messages.
        whitelist : [int]
          Whitelist of messages to write

        """
        header_device_uid = device_uid or self.device_uid
        headers = {
            'Device-Uid': header_device_uid,
            'Content-Type': BROKER_SBP_TYPE,
            'Pragma': pragma
        }
        if not pragma:
            del headers['Pragma']
        try:
            self.executor = ThreadPoolExecutor(max_workers=DEFAULT_POOLSIZE)
            self.write_session = FuturesSession(executor=self.executor)
            self.write_session.mount(
                "http://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.write_session.mount(
                "https://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.source = source.filter(whitelist)
            gen = (msg.pack() for msg, _ in self.source)
            self.write_session.put(self.url, data=gen, headers=headers)
            self.write_response = True
        except requests.exceptions.ConnectionError:
            msg = "Client connection error to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout:
            msg = "Client connection timeout to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.RetryError:
            msg = "Client retry error to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout:
            msg = "Client read timeout to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        return self.write_ok

    def write(self, data):
        """Write wrapper (noop). Actual stream is initiated by the write
        connection.

        Parameters
        ----------
        data : object
          Data to write.

        """
        pass

    def write_close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.write_session.close()
            self.executor.shutdown(wait=False)
            self.source.breakiter()
            self.source = None
            self.executor = None
            self.write_session = None
        except:
            pass

    @property
    def read_ok(self):
        """
        Are we connected for reads?
        """
        return bool(self.read_response)

    def connect_read(self, device_uid=None, pragma=None):
        """Initialize a streaming read/write HTTP response. Manually connects
        the underlying file-handle. In the event of a network disconnection,
        use to manually reinitiate an HTTP session.

        """
        header_device_uid = device_uid or self.device_uid
        headers = {
            'Device-Uid': header_device_uid,
            'Accept': BROKER_SBP_TYPE,
            'Pragma': pragma
        }
        if not pragma:
            del headers['Pragma']
        try:
            self.read_response = self.read_session.get(self.url,
                                                       stream=True,
                                                       headers=headers,
                                                       timeout=self.timeout)
        except requests.exceptions.ConnectionError:
            msg = "Client connection error to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout:
            msg = "Client connection timeout to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.RetryError:
            msg = "Client retry error to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout:
            msg = "Client read timeout to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        return self.read_ok

    def read(self, size):
        """Read wrapper. If the client connection is closed or some other
        exception is thrown, raises an IOError.

        Parameters
        ----------
        size : int
          Size to read (in bytes).

        Returns
        ----------
        bytearray, or None

        """
        if self.read_response is None or not self.device_uid:
            raise ValueError("Invalid/insufficient HTTP request parameters!")
        elif not self.read_ok or self.read_response.raw.closed:
            raise IOError("HTTP read closed?!")
        try:
            return self.read_response.raw.read(size)
        except:
            raise IOError("HTTP read error!")

    def read_close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.read_response.close()
            self.read_response = None
        except:
            pass
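A hedged usage sketch for the driver above; the device UID is a placeholder:

    driver = HTTPDriver(device_uid='00000000-0000-0000-0000-000000000000')
    if driver.connect_read():
        chunk = driver.read(4096)  # raw SBP bytes from the streaming response
    driver.close()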
class HTTPDriver(BaseDriver):
    """HTTPDriver

    The :class:`HTTPDriver` class reads SBP messages from an HTTP service
    for a device and writes out to a stream. This driver is like a
    file-handle with reads and writes over two separate HTTP connections,
    but can also be enabled and disabled by its consumer.

    Parameters
    ----------
    device_uid : uid
      Device unique id
    url : str
      HTTP endpoint
    retries : tuple
      Configure connect and read retry count. Defaults to
      (MAX_CONNECT_RETRIES, MAX_READ_RETRIES).
    timeout : tuple
      Configure connect and read timeouts. Defaults to
      (DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT).

    """

    def __init__(self,
                 device_uid=None,
                 url="https://broker.staging.skylark.swiftnav.com",
                 retries=DEFAULT_RETRIES,
                 timeout=DEFAULT_TIMEOUT):
        self._retry = Retry(connect=DEFAULT_RETRIES[0],
                            read=DEFAULT_RETRIES[1],
                            redirect=MAX_REDIRECTS,
                            status_forcelist=[500],
                            backoff_factor=DEFAULT_BACKOFF_FACTOR)
        self.url = url
        self.read_session = requests.Session()
        self.read_session.mount(
            "http://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.read_session.mount(
            "https://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.write_session = None
        self.device_uid = device_uid
        self.timeout = timeout
        self.read_response = None
        self.write_response = None
        self.source = None

    def flush(self):
        """File-flush wrapper (noop).
        """
        pass

    def close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.read_close()
            self.write_close()
        except:
            pass

    @property
    def write_ok(self):
        """
        Are we connected for writes?
        """
        # Note that self.write_response is either None or a Response
        # object, which casts to False for 4xx and 5xx HTTP codes.
        return bool(self.write_response)

    def connect_write(self, source, whitelist, device_uid=None, pragma=None):
        """Initialize a streaming write HTTP response. Manually connects the
        underlying file-handle. In the event of a network disconnection,
        use to manually reinitiate an HTTP session.

        Parameters
        ----------
        source : sbp.client.handler.Handler
          Iterable source of SBP messages.
        whitelist : [int]
          Whitelist of messages to write

        """
        header_device_uid = device_uid or self.device_uid
        headers = {'Device-Uid': header_device_uid,
                   'Content-Type': BROKER_SBP_TYPE,
                   'Pragma': pragma}
        if not pragma:
            del headers['Pragma']
        try:
            self.executor = ThreadPoolExecutor(max_workers=DEFAULT_POOLSIZE)
            self.write_session = FuturesSession(executor=self.executor)
            self.write_session.mount(
                "http://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.write_session.mount(
                "https://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.source = source.filter(whitelist)
            gen = (msg.pack() for msg, _ in self.source)
            self.write_session.put(self.url, data=gen, headers=headers)
            self.write_response = True
        except requests.exceptions.ConnectionError as err:
            # Bind the exception and format it directly; err.message is
            # Python 2 only and the original left err unbound in two handlers.
            msg = "Client connection error to %s with [PUT] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout as err:
            msg = "Client connection timeout to %s with [PUT] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.RetryError as err:
            msg = "Client retry error to %s with [PUT] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout as err:
            msg = "Client read timeout to %s with [PUT] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        return self.write_ok

    def write(self, data):
        """Write wrapper (noop). Actual stream is initiated by the write
        connection.

        Parameters
        ----------
        data : object
          Data to write.

        """
        pass

    def write_close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.write_session.close()
            self.executor.shutdown(wait=False)
            self.source.breakiter()
            self.source = None
            self.executor = None
            self.write_session = None
        except:
            pass

    @property
    def read_ok(self):
        """
        Are we connected for reads?
        """
        return bool(self.read_response)

    def connect_read(self, device_uid=None, pragma=None):
        """Initialize a streaming read/write HTTP response. Manually connects
        the underlying file-handle. In the event of a network disconnection,
        use to manually reinitiate an HTTP session.

        """
        header_device_uid = device_uid or self.device_uid
        headers = {'Device-Uid': header_device_uid,
                   'Accept': BROKER_SBP_TYPE,
                   'Pragma': pragma}
        if not pragma:
            del headers['Pragma']
        try:
            self.read_response = self.read_session.get(self.url,
                                                       stream=True,
                                                       headers=headers,
                                                       timeout=self.timeout)
        except requests.exceptions.ConnectionError as err:
            msg = "Client connection error to %s with [GET] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout as err:
            msg = "Client connection timeout to %s with [GET] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.RetryError as err:
            msg = "Client retry error to %s with [GET] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout as err:
            msg = "Client read timeout to %s with [GET] headers %s: msg=%s" \
                  % (self.url, headers, err)
            warnings.warn(msg)
        return self.read_ok

    def read(self, size):
        """Read wrapper. If the client connection is closed or some other
        exception is thrown, raises an IOError.

        Parameters
        ----------
        size : int
          Size to read (in bytes).

        Returns
        ----------
        bytearray, or None

        """
        if self.read_response is None or not self.device_uid:
            raise ValueError("Invalid/insufficient HTTP request parameters!")
        elif not self.read_ok or self.read_response.raw.closed:
            raise IOError("HTTP read closed?!")
        try:
            return self.read_response.raw.read(size)
        except:
            raise IOError("HTTP read error!")

    def read_close(self):
        """File-handle close wrapper (noop).
        """
        try:
            self.read_response.close()
            self.read_response = None
        except:
            pass
class Controller:
    """Interact with a UniFi controller.

    Uses the JSON interface on port 8443 (HTTPS) to communicate with a
    UniFi controller. Operations will raise unifi.controller.APIError on
    obvious problems (such as login failure), but many errors (such as
    disconnecting a nonexistent client) will go unreported.

    >>> from unifi.controller import Controller
    >>> c = Controller('192.168.1.99', 'admin', 'p4ssw0rd')
    >>> for ap in c.get_aps():
    ...     print 'AP named %s with MAC %s' % (ap.get('name'), ap['mac'])
    ...
    AP named Study with MAC dc:9f:db:1a:59:07
    AP named Living Room with MAC dc:9f:db:1a:59:08
    AP named Garage with MAC dc:9f:db:1a:59:0b
    """

    def __init__(self, host, username, password, port=8443, version='v5',
                 site_id='default', ssl_verify=True, sync=True):
        """
        :param host: the address of the controller host; IP or name
        :param username: the username to log in with
        :param password: the password to log in with
        :param port: the port of the controller host
        :param version: the base version of the controller API [v4|v5]
        :param site_id: the site ID to connect to
        :param ssl_verify: Verify the controller's SSL certificate,
            can also be "path/to/custom_cert.pem"
        :param sync: if False, use a requests-futures async session
        """
        if float(version[1:]) < 4:
            raise APIError("%s controllers no longer supported" % version)
        self.log = logging.getLogger(__name__)
        self.host = host
        self.port = port
        self.version = version
        self.username = username
        self.password = password
        self.site_id = site_id
        self.url = 'https://' + host + ':' + str(port) + '/'
        self.ssl_verify = ssl_verify
        if ssl_verify is False:
            warnings.simplefilter(
                "default",
                category=requests.packages.urllib3.exceptions.
                InsecureRequestWarning)
        if sync:
            self.session = requests.Session()
        else:
            from requests_futures.sessions import FuturesSession
            self.session = FuturesSession()
        self.session.verify = ssl_verify
        self.log.debug('Controller for %s', self.url)
        self._login()

    @staticmethod
    def _jsondec(data):
        obj = json.loads(data)
        if 'meta' in obj:
            if obj['meta']['rc'] != 'ok':
                raise APIError(obj['meta']['msg'])
        if 'data' in obj:
            return obj['data']
        return obj

    def _api_url(self):
        return self.url + 'api/s/' + self.site_id + '/'

    @retry_login
    def _read(self, url, params=None):
        # Try block to handle the unifi server being offline.
        res = self.session.get(url, params=params)
        return self._jsondec(res.text)

    def _api_read(self, url, params=None):
        return self._read(self._api_url() + url, params)

    @retry_login
    def _write(self, url, params=None):
        res = self.session.post(url, json=params)
        return self._jsondec(res.text)

    def _api_write(self, url, params=None):
        return self._write(self._api_url() + url, params)

    @retry_login
    def _update(self, url, params=None):
        res = self.session.put(url, json=params)
        return self._jsondec(res.text)

    def _api_update(self, url, params=None):
        return self._update(self._api_url() + url, params)

    def _login(self):
        self.log.debug('login() as %s', self.username)
        # XXX Why doesn't passing in the dict work?
        params = {'username': self.username, 'password': self.password}
        login_url = self.url + 'api/login'
        res = self.session.post(login_url, json=params)
        if res.status_code != 200:
            raise APIError("Login failed - status code: %i" % res.status_code)

    def _logout(self):
        self.log.debug('logout()')
        self._api_write('logout')

    def switch_site(self, name):
        """
        Switch to another site

        :param name: Site Name
        :return: True or APIError
        """
        for site in self.get_sites():
            if site['desc'] == name:
                self.site_id = site['name']
                return True
        raise APIError("No site %s found" % name)

    def get_alerts(self):
        """Return a list of all Alerts."""
        return self._api_write('stat/alarm')

    def get_alerts_unarchived(self):
        """Return a list of Alerts unarchived."""
        return self._api_write('stat/alarm', params={'archived': False})

    def get_statistics_last_24h(self):
        """Returns statistical data of the last 24h"""
        return self.get_statistics_24h(time())

    def get_statistics_24h(self, endtime):
        """Return statistical data last 24h from time"""
        params = {
            'attrs': ["bytes", "num_sta", "time"],
            'start': int(endtime - 86400) * 1000,
            'end': int(endtime - 3600) * 1000
        }
        return self._write(self._api_url() + 'stat/report/hourly.site', params)

    def get_events(self):
        """Return a list of all Events."""
        return self._api_read('stat/event')

    def get_aps(self):
        """Return a list of all APs, with significant information about
        each.
        """
        # Set test to 0 instead of NULL
        params = {'_depth': 2, 'test': 0}
        return self._api_read('stat/device', params)

    def get_client(self, mac):
        """Get details about a specific client"""
        # stat/user/<mac> works better than stat/sta/<mac>
        # stat/sta seems to be only active clients
        # stat/user includes known but offline clients
        return self._api_read('stat/user/' + mac)[0]

    def get_clients(self):
        """Return a list of all active clients, with significant
        information about each.
        """
        return self._api_read('stat/sta')

    def get_users(self):
        """Return a list of all known clients, with significant
        information about each.
        """
        return self._api_read('list/user')

    def get_user_groups(self):
        """Return a list of user groups with its rate limiting settings."""
        return self._api_read('list/usergroup')

    def get_sysinfo(self):
        """Return basic system information."""
        return self._api_read('stat/sysinfo')

    def get_healthinfo(self):
        """Return health information."""
        return self._api_read('stat/health')

    def get_sites(self):
        """Return a list of all sites, with their UID and description"""
        return self._read(self.url + 'api/self/sites')

    def get_wlan_conf(self):
        """Return a list of configured WLANs with their configuration
        parameters.
        """
        return self._api_read('list/wlanconf')

    def _run_command(self, command, params=None, mgr='stamgr'):
        params = params or {}
        self.log.debug('_run_command(%s)', command)
        params.update({'cmd': command})
        return self._write(self._api_url() + 'cmd/' + mgr, params=params)

    def _mac_cmd(self, target_mac, command, mgr='stamgr', params=None):
        params = params or {}
        self.log.debug('_mac_cmd(%s, %s)', target_mac, command)
        params['mac'] = target_mac
        return self._run_command(command, params, mgr)

    def create_site(self, desc='desc'):
        """Create a new site.

        :param desc: Name of the site to be created.
        """
        return self._run_command('add-site', params={"desc": desc},
                                 mgr='sitemgr')

    def block_client(self, mac):
        """Add a client to the block list.

        :param mac: the MAC address of the client to block.
        """
        return self._mac_cmd(mac, 'block-sta')

    def unblock_client(self, mac):
        """Remove a client from the block list.

        :param mac: the MAC address of the client to unblock.
        """
        return self._mac_cmd(mac, 'unblock-sta')

    def disconnect_client(self, mac):
        """Disconnect a client.

        Disconnects a client, forcing them to reassociate. Useful when the
        connection is of bad quality to force a rescan.

        :param mac: the MAC address of the client to disconnect.
        """
        return self._mac_cmd(mac, 'kick-sta')

    def restart_ap(self, mac):
        """Restart an access point (by MAC).

        :param mac: the MAC address of the AP to restart.
        """
        return self._mac_cmd(mac, 'restart', 'devmgr')

    def restart_ap_name(self, name):
        """Restart an access point (by name).

        :param name: the name of the AP to restart.
        """
        if not name:
            raise APIError('%s is not a valid name' % str(name))
        res = None
        for ap in self.get_aps():
            if ap.get('state', 0) == 1 and ap.get('name', None) == name:
                res = self.restart_ap(ap['mac'])
                break
        return res

    def archive_all_alerts(self):
        """Archive all Alerts"""
        return self._run_command('archive-all-alarms', mgr='evtmgr')

    def create_backup(self):
        """Ask controller to create a backup archive file

        .. warning:: This process puts significant load on the controller
            and may render it partially unresponsive for other requests.

        :return: URL path to backup file
        """
        res = self._run_command('backup', mgr='system')
        return res[0]['url']

    def get_backup(self, download_path=None, target_file='unifi-backup.unf'):
        """
        :param download_path: path to backup; if None is given
            one will be created
        :param target_file: Filename or full path to download the
            backup archive to, should have .unf extension for restore.
        """
        if not download_path:
            download_path = self.create_backup()
        res = self.session.get(self.url + download_path, stream=True)
        with open(target_file, 'wb') as _backfh:
            return shutil.copyfileobj(res.raw, _backfh)

    def authorize_guest(self, guest_mac, minutes, up_bandwidth=None,
                        down_bandwidth=None, byte_quota=None, ap_mac=None):
        """
        Authorize a guest based on his MAC address.

        :param guest_mac: the guest MAC address: 'aa:bb:cc:dd:ee:ff'
        :param minutes: duration of the authorization in minutes
        :param up_bandwidth: up speed allowed in kbps
        :param down_bandwidth: down speed allowed in kbps
        :param byte_quota: quantity of bytes allowed in MB
        :param ap_mac: access point MAC address
        """
        cmd = 'authorize-guest'
        params = {'mac': guest_mac, 'minutes': minutes}
        if up_bandwidth:
            params['up'] = up_bandwidth
        if down_bandwidth:
            params['down'] = down_bandwidth
        if byte_quota:
            params['bytes'] = byte_quota
        if ap_mac:
            params['ap_mac'] = ap_mac
        return self._run_command(cmd, params=params)

    def unauthorize_guest(self, guest_mac):
        """
        Unauthorize a guest based on his MAC address.

        :param guest_mac: the guest MAC address: 'aa:bb:cc:dd:ee:ff'
        """
        cmd = 'unauthorize-guest'
        params = {'mac': guest_mac}
        return self._run_command(cmd, params=params)

    def get_firmware(self, cached=True, available=True, known=False,
                     site=False):
        """
        Return a list of available/cached firmware versions

        :param cached: Return cached firmwares
        :param available: Return available (and not cached) firmwares
        :param known: Return only firmwares for known devices
        :param site: Return only firmwares for on-site devices
        :return: List of firmware dicts
        """
        res = []
        if cached:
            res.extend(self._run_command('list-cached', mgr='firmware'))
        if available:
            res.extend(self._run_command('list-available', mgr='firmware'))
        if known:
            res = [fw for fw in res if fw['knownDevice']]
        if site:
            res = [fw for fw in res if fw['siteDevice']]
        return res

    def cache_firmware(self, version, device):
        """
        Cache the firmware on the UniFi Controller

        .. warning:: Caching one device might very well cache others,
            as they're on shared platforms

        :param version: version to cache
        :param device: device model to cache (e.g. BZ2)
        :return: True/False
        """
        return self._run_command(
            'download', mgr='firmware',
            params={'device': device, 'version': version})[0]['result']

    def remove_firmware(self, version, device):
        """
        Remove cached firmware from the UniFi Controller

        .. warning:: Removing one device's firmware might very well remove
            others, as they're on shared platforms

        :param version: version to cache
        :param device: device model to cache (e.g. BZ2)
        :return: True/False
        """
        return self._run_command(
            'remove', mgr='firmware',
            params={'device': device, 'version': version})[0]['result']

    def get_tag(self):
        """Get all tags and their member MACs"""
        return self._api_read('rest/tag')

    def upgrade_device(self, mac, version):
        """
        Upgrade a device's firmware to version

        :param mac: MAC of dev
        :param version: version to upgrade to
        """
        self._mac_cmd(mac, 'upgrade', mgr='devmgr',
                      params={'upgrade_to_firmware': version})

    def provision(self, mac):
        """
        Force provisioning of a device

        :param mac: MAC of device
        """
        self._mac_cmd(mac, 'force-provision', mgr='devmgr')

    def get_setting(self, section=None, controller=False):
        """
        Return settings for this site or controller

        :param controller: Return only controller-wide settings
        :param section: Only return this/these section(s)
        :return: {section:settings}
        """
        res = {}
        settings = self._api_read('get/setting')
        if section and not isinstance(section, (list, tuple)):
            section = [section]
        for setting in settings:
            s_sect = setting['key']
            if (controller and 'site_id' in setting) or \
               (not controller and 'site_id' not in setting) or \
               (section and s_sect not in section):
                continue
            for k in ('_id', 'site_id', 'key'):
                setting.pop(k, None)
            res[s_sect] = setting
        return res

    def update_setting(self, settings):
        """
        Update settings

        :param settings: {section:{settings}}
        :return: resulting settings
        """
        res = []
        for sect, setting in settings.items():
            res.extend(self._api_write('set/setting/' + sect, setting))
        return res

    def update_user_group(self, group_id, down_kbps=-1, up_kbps=-1):
        """
        Update user group bandwidth settings

        :param group_id: Group ID to modify
        :param down_kbps: New bandwidth in KBPS for download
        :param up_kbps: New bandwidth in KBPS for upload
        """
        res = None
        groups = self.get_user_groups()
        for group in groups:
            if group["_id"] == group_id:
                # Apply setting change
                res = self._api_update(
                    "rest/usergroup/{0}".format(group_id),
                    {
                        "qos_rate_max_down": down_kbps,
                        "qos_rate_max_up": up_kbps,
                        "name": group["name"],
                        "_id": group_id,
                        "site_id": self.site_id
                    })
                return res
        raise ValueError("Group ID {0} is not valid.".format(group_id))

    def set_client_alias(self, mac, alias):
        """
        Set the client alias. Set to "" to reset to default

        :param mac: The MAC of the client to rename
        :param alias: The alias to set
        """
        client = self.get_client(mac)['_id']
        return self._api_update('rest/user/' + client, {'name': alias})
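A usage sketch following the class docstring above; host, credentials, and the MAC are placeholders:

    c = Controller('192.168.1.99', 'admin', 'p4ssw0rd', ssl_verify=False)
    for ap in c.get_aps():
        print('AP named %s with MAC %s' % (ap.get('name'), ap['mac']))
    c.block_client('aa:bb:cc:dd:ee:ff')  # placeholder client MAC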
class CayenneApiClient:
    def __init__(self, host):
        self.host = host
        self.auth = None
        self.session = FuturesSession(
            executor=ThreadPoolExecutor(max_workers=1))

    def sendRequest(self, method, uri, body=None):
        if self.session is not None:
            headers = {}
            request_url = self.host + uri
            future = None
            self.session.headers['Content-Type'] = 'application/json'
            self.session.headers['Accept'] = 'application/json'
            if self.auth is not None:
                self.session.headers['Authorization'] = self.auth
            try:
                if method == 'GET':
                    future = self.session.get(request_url)
                if method == 'POST':
                    future = self.session.post(request_url, data=body)
                if method == 'PUT':
                    future = self.session.put(request_url, data=body)
                if method == 'DELETE':
                    future = self.session.delete(request_url)
            except Exception as ex:
                error('sendRequest exception: ' + str(ex))
                return None
            try:
                response = future.result()
            except:
                return None
            return response
        exception("No data received")

    def getMessageBody(self, inviteCode):
        body = {'id': inviteCode}
        hardware = Hardware()
        if hardware.Serial and hardware.isRaspberryPi():
            body['type'] = 'rpi'
            body['hardware_id'] = hardware.Serial
        else:
            hardware_id = hardware.getMac()
            if hardware_id:
                body['type'] = 'mac'
                body['hardware_id'] = hardware_id
        try:
            system_data = []
            cayennemqtt.DataChannel.add(system_data,
                                        cayennemqtt.SYS_HARDWARE_MAKE,
                                        value=hardware.getManufacturer(),
                                        type='string', unit='utf8')
            cayennemqtt.DataChannel.add(system_data,
                                        cayennemqtt.SYS_HARDWARE_MODEL,
                                        value=hardware.getModel(),
                                        type='string', unit='utf8')
            config = Config(APP_SETTINGS)
            cayennemqtt.DataChannel.add(
                system_data, cayennemqtt.AGENT_VERSION,
                value=config.get('Agent', 'Version', __version__))
            system_info = SystemInfo()
            capacity_data = system_info.getMemoryInfo((cayennemqtt.CAPACITY,))
            capacity_data += system_info.getDiskInfo((cayennemqtt.CAPACITY,))
            for item in capacity_data:
                system_data.append(item)
            body['properties'] = {}
            # body['properties']['pinmap'] = NativeGPIO().MAPPING
            if system_data:
                body['properties']['sysinfo'] = system_data
        except:
            exception('Error getting system info')
        return json.dumps(body)

    def authenticate(self, inviteCode):
        body = self.getMessageBody(inviteCode)
        url = '/things/key/authenticate'
        return self.sendRequest('POST', url, body)

    def activate(self, inviteCode):
        body = self.getMessageBody(inviteCode)
        url = '/things/key/activate'
        return self.sendRequest('POST', url, body)

    def getCredentials(self, content):
        if content is None:
            return None
        body = content.decode("utf-8")
        if body is None or body == "":
            return None
        return json.loads(body)

    def loginDevice(self, inviteCode):
        response = self.activate(inviteCode)
        if response and response.status_code == 200:
            return self.getCredentials(response.content)
        return None

    def getDataTypes(self):
        url = '/ui/datatypes'
        return self.sendRequest('GET', url)
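A short sketch mirroring the loginDevice flow above; the host URL and invite code are placeholders:

    client = CayenneApiClient('https://api.example.com')  # placeholder host
    credentials = client.loginDevice('ABCDEFG')           # placeholder invite code
    if credentials is None:
        print('Activation failed or returned no credentials')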
place_id_iter = unique_place_id()
activities = []

# Generate activities for each city
for city in config.ROUTER_NAMES:
    router = common.OtpRouter(city)
    for i in range(0, config.ACTIVITY_NUM_PER_CITY):
        activities.append(common.make_activity(router))

# max workers set to 10, default is 2
session = FuturesSession(max_workers=10)
# headers = {'Authorization': 'Token {}'.format(config.AUTH_TOKEN)}
session.headers['Authorization'] = 'Token {}'.format(config.AUTH_TOKEN)

futures = []
for activity in activities:
    start = datetime.now()
    from_id = next(place_id_iter)
    to_id = next(place_id_iter)
    url = config.LEADGEN_URL + 'activity/{}/{}/'.format(from_id, to_id)
    f = session.put(url, json=activity, background_callback=bg_cb)
    futures.append((f, start))

# wait for requests to complete
index = 1
for f in futures:
    res = f[0].result()
    start = f[1]
    delta = res.end - start
    print(index, res.status_code, delta.microseconds / 1e3)
    index += 1
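The bg_cb callback used above is not shown; a plausible sketch, assuming it only stamps the completion time that the timing loop reads back via res.end:

    from datetime import datetime

    def bg_cb(session, response):
        # Runs on the worker thread; stamp the arrival time so the main
        # loop can compute per-request latency as res.end - start.
        response.end = datetime.now()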
"order_id": str(order_id), "email": user_data['email'], "name": user_data['name'], "service_type": data["service_type"]}) print "sms block....", is_sms_block, type(is_sms_block) # SMS BLOCKING if not is_sms_block: sms.sms_order_placed.delay({ "order_id": str(order_id), "name": user_data['name']}) else: return jsonify({"status": 'failure', "error": "Failed to order"}), 403 # Assign hub for the order asyncronously by using its address try: arequests.put(app.config['API_SERVER']['private_dashboard'] + "/api/address/" + data["address_id"], data={'refresh_hub': 'true'}) except Exception, e: pass return jsonify(result) else: return jsonify({'status': 'failure', 'error': 'unauthorized access.'}), 403 def partner_get_order(customer_id=None, order_id=None): result = copy.deepcopy(session.get("partner", {})) if not ('partner' in result): return redirect("/partner") order = ordersDAO.get_order(order_id) if order.get("status", 1) == 5: return redirect("/partner/getreceipt/" + str(customer_id) + "/" + str(order_id)) else:
class BugcrowdClient(object):
    def __init__(self, api_token, **kwargs):
        """ Creates a Bugcrowd api client. """
        self._api_token = api_token
        self.session = FuturesSession(max_workers=5)
        self.base_uri = 'https://api.bugcrowd.com/'
        self.session.headers.update({
            'Accept': 'application/vnd.bugcrowd.v3+json',
            'Authorization': 'Token %s' % self._api_token,
            'user-agent': 'Bugcrowd Python Client',
        })

    def get_bounties(self):
        """ Returns bounties. """
        resp = self.session.get(self.get_api_uri('bounties')).result()
        resp.raise_for_status()
        return resp.json()['bounties']

    def get_submissions(self, bounty, **kwargs):
        """ Yields submissions for the given bounty or bounty uuid.
            By providing a params parameter submissions can be filtered
            as per https://docs.bugcrowd.com/v1.0/docs/submission .
        """
        params = kwargs.get('params', None)
        submissions_uri = self.get_api_uri_for_bounty_submissions(bounty)
        submissions = []
        step = 25
        if params is None:
            params = {'sort': 'newest', 'offset': 0}
        initial_response = self.session.get(submissions_uri,
                                            params=params).result()
        initial_response.raise_for_status()
        data = initial_response.json()
        submissions += data['submissions']
        total = data['meta']['count']
        total_hits = data['meta']['total_hits']
        for submission in submissions:
            yield submission
        if total < total_hits:
            async_fetches = []
            for offset in range(step, total_hits, step):
                request_params = params.copy()
                request_params.update({'offset': offset})
                async_fetches.append(
                    self.session.get(submissions_uri, params=request_params))
            for future_fetch in async_fetches:
                fetch = future_fetch.result()
                fetch.raise_for_status()
                data = fetch.json()
                for submission in data['submissions']:
                    yield submission

    def get_comments_for_submission(self, submission):
        """ Yields comments for the given submission or submission uuid. """
        comments_uri = self.get_api_uri_for_submission_comments(submission)
        resp = self.session.get(comments_uri).result()
        resp.raise_for_status()
        return resp.json()

    def get_api_uri(self, path):
        """ Returns the full api uri for the given path. """
        return self.base_uri + url_quote(path)

    def get_api_uri_for_bounty_submissions(self, bounty):
        """ Returns the submissions uri for the provided bounty or
            bounty uuid. """
        bounty_uuid = _get_uuid(bounty)
        return self.get_api_uri('bounties/%s/submissions' % bounty_uuid)

    def get_api_uri_for_submission(self, submission):
        """ Returns the uri for the given submission or submission uuid. """
        submission_uuid = _get_uuid(submission)
        return self.get_api_uri('submissions/%s' % submission_uuid)

    def get_api_uri_for_submission_comments(self, submission):
        """ Returns the uri for comments on the given submission or
            submission uuid. """
        return self.get_api_uri_for_submission(submission) + '/comments'

    def create_submission(self, bounty, submission_fields):
        """ Returns a future request creating a submission in the given
            bounty or bounty uuid. """
        uri = self.get_api_uri_for_bounty_submissions(_get_uuid(bounty))
        required_fields = {'title', 'submitted_at'}
        has_req_fields = required_fields & set(submission_fields.keys())
        if len(has_req_fields) != 2:
            raise ValueError('The %s field is required' %
                             (required_fields - has_req_fields))
        submitted_at = submission_fields['submitted_at']
        if hasattr(submitted_at, 'isoformat'):
            submission_fields = submission_fields.copy()
            submission_fields['submitted_at'] = submitted_at.isoformat()
        return self.session.post(uri, json={'submission': submission_fields})

    def update_submission(self, submission, **kwargs):
        """ Returns a future request updating the given submission. """
        uri = self.get_api_uri_for_submission(submission)
        fields = {}
        for key in ['title', 'vrt_id', 'custom_fields', 'bug_url']:
            val = kwargs.get(key, None)
            if val:
                fields[key] = val
        payload = {'submission': fields}
        return self.session.put(uri, json=payload)

    def comment_on_submission(self, submission, comment_text,
                              comment_type='note'):
        """ Returns a future request commenting on the given submission. """
        uri = self.get_api_uri_for_submission_comments(submission)
        payload = {
            'comment': {
                'body_markdown': comment_text,
                'type': comment_type,
            }
        }
        return self.session.post(uri, json=payload)

    def transition_submission(self, submission, state, **kwargs):
        """ Returns a future request transitioning the given submission or
            submission uuid to a different state. """
        uri = self.get_api_uri_for_submission(submission) + '/transition'
        payload = {'substate': state}
        duplicate_of = kwargs.get('duplicate_of', None)
        if duplicate_of:
            payload['duplicate_of'] = duplicate_of
        if state == 'duplicate' and duplicate_of is None:
            raise ValueError(
                'The duplicate_of field is required when transitioning '
                'a submission to a duplicate status.')
        return self.session.post(uri, json=payload)
class Gateway:
    def __init__(self, gateway_url, config, log=None):
        self.gateway_url = gateway_url
        self.config = config
        self.log = log or logging.getLogger(__name__)
        self.headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        jwt = config.get('jwt')
        if jwt:
            self.set_jwt(jwt)
        self.session = Session()
        self.fsession = FuturesSession()

    def __del__(self):
        self.session.close()

    def bind(self, deviceId, endpointNum, clusterId):
        url = '/debug/device/{}/cmd/bind'.format(deviceId)
        params = {
            'srcEndpoint': endpointNum,
            'clusterId': clusterId,
        }
        self.put(url, data=params)

    def bindings(self, deviceId):
        url = '/debug/device/{}/cmd/bindings'.format(deviceId)
        self.put(url, data={})

    def debugCmd(self, deviceId, cmd, params):
        url = '/debug/device/{}/cmd/{}'.format(deviceId, cmd)
        r = self.put(url, data=params)
        print('r.status_code =', r.status_code)
        print('r.text =', r.text)

    def device(self, name):
        r = self.get('/debug/device/' + name)
        if r is None:
            return
        return r.json()

    def devices(self):
        r = self.get('/debug/devices')
        if r is None:
            return
        # returns an array of objects. We just want the ids
        return [device['id'] for device in r.json()]

    def addThing(self, thing):
        r = self.post('/things', data=thing)
        if r is None:
            return
        return r

    def deleteThing(self, thingid):
        r = self.delete('/things/%s' % thingid)
        if r is None:
            return
        return r

    def newThingsWebsocket(self, cb=None):
        url = self.url('/new_things?jwt={}'.format(
            self.config.get('jwt'))).replace('http', 'ws', 1)
        return websocket_connect(url, on_message_callback=cb)

    def discoverAttr(self, deviceId, endpointNum, clusterId):
        url = '/debug/device/{}/cmd/discoverAttr'.format(deviceId)
        params = {}
        if endpointNum:
            params['endpoint'] = endpointNum
        if clusterId:
            params['clusterId'] = clusterId
        r = self.put(url, data=params)
        print('r.status_code =', r.status_code)
        print('r.text =', r.text)

    def get(self, path):
        while True:
            try:
                url = self.url(path)
                r = self.session.get(url, verify=False, headers=self.headers)
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            if r.status_code == 200:
                return r
            if r.status_code == 404:
                return
            if r.status_code != 401:
                self.log.error('GET failed: %s - %s', r.status_code, r.text)
                return
            # Unauthorized - need to get a valid JWT
            self.login()

    def login(self, email=None, password=None):
        while True:
            try:
                if email is None:
                    email = input('Enter email: ')
            except EOFError:
                # Control-D
                print('')
                return
            try:
                if password is None:
                    # Reconstructed from redaction residue in the source,
                    # by analogy with the email prompt above.
                    password = getpass.getpass(prompt='Enter password: ')
            except EOFError:
                # Control-D
                print('')
                return
            try:
                url = self.url('/login')
                r = self.session.post(url, verify=False,
                                      headers=self.headers,
                                      data=json.dumps({
                                          'email': email,
                                          'password': password}))
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            if r.status_code == 200:
                jwt = r.json()['jwt']
                self.set_jwt(jwt)
                return jwt
            self.log.error('Login failed: %s', r.text)

    def properties(self, id):
        url = '/things/{}/properties'.format(id)
        r = self.get(url)
        if r is None:
            return
        return r.json()

    def property(self, id_, propertyName, data=None, futures=False):
        url = '/things/{}/properties/{}'.format(id_, propertyName)
        if data is not None:
            r = self.put(url, data=data, futures=futures)
        else:
            r = self.get(url)
        if futures:
            return r
        if r is None:
            return
        return r.json()

    def thingWebsocket(self, cb=None):
        url = self.url('/things?jwt={}'.format(
            self.config.get('jwt'))).replace('http', 'ws', 1)
        return websocket_connect(url, on_message_callback=cb)

    def delete(self, path):
        while True:
            try:
                url = self.url(path)
                r = self.session.delete(url, verify=False,
                                        headers=self.headers)
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            if r.status_code not in [200, 204]:
                self.log.error('DELETE failed: %s - %s',
                               r.status_code, r.text)
            return r

    def post(self, path, data=None):
        while True:
            try:
                url = self.url(path)
                r = self.session.post(url, verify=False,
                                      headers=self.headers,
                                      data=json.dumps(data))
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            if r.status_code not in [200, 201]:
                self.log.error('POST failed: %s - %s', r.status_code, r.text)
            return r

    def put(self, path, data=None, futures=False):
        if futures:
            try:
                url = self.url(path)
                f = self.fsession.put(url, verify=False,
                                      headers=self.headers,
                                      data=json.dumps(data))
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            return f
        while True:
            try:
                url = self.url(path)
                r = self.session.put(url, verify=False,
                                     headers=self.headers,
                                     data=json.dumps(data))
            except requests.exceptions.ConnectionError:
                self.log.error('Unable to connect to server: %s', url)
                return
            if r.status_code == 200:
                return r
            if r.status_code == 404:
                self.log.error('PUT %s failed: %s - %s',
                               path, r.status_code, r.text)
                return
            if r.status_code != 401:
                self.log.error('PUT %s failed: %s - %s',
                               path, r.status_code, r.text)
                return
            # Unauthorized - need to get a valid JWT
            self.login()

    def readAttr(self, deviceId, endpointNum, profileId, clusterId, attrIds):
        url = '/debug/device/{}/cmd/readAttr'.format(deviceId)
        params = {
            'endpoint': endpointNum,
            'profileId': profileId,
            'clusterId': clusterId,
            'attrId': attrIds
        }
        print('url =', url)
        print('params =', params)
        print('json.dumps(params) =', json.dumps(params))
        r = self.put(url, data=params)
        print('r.status_code =', r.status_code)
        print('r.text =', r.text)

    def set_jwt(self, jwt):
        self.config.set('jwt', jwt)
        self.headers['Authorization'] = 'Bearer ' + jwt

    def thing(self, id):
        r = self.get('/things/' + id)
        if r is None:
            return
        return r.json()

    def things(self, info=False):
        r = self.get('/things')
        if r is None:
            return
        # returns an array of objects. We just want the names
        if info:
            return r.json()
        return [os.path.basename(thing['href']) for thing in r.json()]

    def url(self, path=''):
        return self.gateway_url + path

    def setAdapterConfig(self, port_start, num_things):
        ip = get_ip()
        data = {
            'config': {
                'pollInterval': 30,
                'urls': ['http://{}:{}/'.format(ip, port)
                         for port in range(port_start,
                                           port_start + num_things)],
            },
        }
        self.put('/addons/thing-url-adapter/config', data=data)

    def clearAdapterConfig(self):
        data = {
            'config': {
                'pollInterval': 30,
                'urls': [],
            },
        }
        self.put('/addons/thing-url-adapter/config', data=data)
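A hedged usage sketch; the URL, credentials, and my_config are placeholders (config must expose the get()/set() methods used by set_jwt above):

    gw = Gateway('https://gateway.local', config=my_config)
    gw.login('user@example.com', 'hunter2')  # placeholder credentials
    for thing_id in gw.things() or []:
        print(thing_id, gw.properties(thing_id))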
class FiscalDataPackage(DataPackage):
    """This class represents a fiscal data package.

    The class is a subclass of the :class:`datapackage.DataPackage` class.
    The constructor takes the same arguments as its parent class, except
    that the schema is "fiscal".

    :param target: The target is the full path to the fiscal datapackage
        JSON descriptor, but it can also be a dictionary representing the
        schema itself or a url pointing to a descriptor (for more
        information please refer to the documentation for the
        :class:`datapackage.DataPackage` class).
    :param user: a `gobble.user.user` object.
    """

    def __init__(self, filepath, user=None, **kw):
        if not isfile(filepath):
            raise NotImplementedError('%s is not a local path' % filepath)

        super(FiscalDataPackage, self).__init__(filepath, schema='fiscal', **kw)
        self._check_file_formats()

        self._streams = []
        self._session = FuturesSession()
        self._futures = []
        self._responses = []

        self.user = user
        self.name = self.descriptor.get('name')
        self.path = basename(filepath)
        self.filepath = filepath

    def validate(self, raise_error=True):
        """Validate a datapackage schema.

        :param raise_error: raise error on failure or not (default: True)
        :raise: :class:`ValidationError` if the schema is invalid
        :return: an empty list or a list of error messages
            (if `raise_error` is False).
        """
        if raise_error:
            super(FiscalDataPackage, self).validate()
        else:
            try:
                super(FiscalDataPackage, self).validate()
                message = '%s (%s) is a valid fiscal datapackage descriptor'
                log.info(message, self, self.path)
                return []
            except ValidationError:
                messages = []
                for error in self.iter_errors():
                    messages.append(error.message)
                    log.warn('%s ValidationError: %s', self, error.message)
                return messages

    def upload(self, publish=False):
        """Upload a fiscal datapackage to Open-Spending.

        It does this in 3 steps:
            * request upload urls for AWS S3 storage
            * upload all files to the owner's S3 bucket
            * insert the data into the Open-Spending datastore (PostgreSQL)

        By default, newly uploaded packages are kept private, but you can
        change that with the `publish` flag. Also note that if you upload
        the same fiscal data package again, the previous version will be
        overwritten.

        For now, the only valid datafile format is CSV.

        :param publish: toggle the datapackage to "published" after upload
        """
        self.validate()
        log.info('Starting uploading process for %s', self)

        for s3_target in self._request_s3_upload():
            self._push_to_s3(*s3_target)

        self._handle_promises()
        self._insert_into_datastore()

        while self.in_progress:
            sleep(POLL_PERIOD)

        if publish:
            self.toggle('public')

        return self.url

    @property
    def url(self):
        return join(settings.OS_URL, self.user.id + ':' + self.name)

    @property
    def in_progress(self):
        """Return True while the upload is still in progress."""
        query = dict(datapackage=self._descriptor_s3_url)
        answer = upload_status(params=query).json()
        args = self, answer['status'], answer['progress'], len(self)
        log.debug('%s is loading (%s) %s/%s', *args)
        return answer['status'] != 'done'

    def toggle(self, to_state):
        """Toggle public access to a fiscal datapackage.

        Change the status of a fiscal data package from public to private
        or vice-versa. If something went wrong whilst changing the status,
        you will get a :class:`upload.ToggleError`.

        :param to_state: the new state, i.e. "public" or "private"
        :return: the new state of the package
        """
        publish = True if to_state == 'public' else False
        package_id = self.user.id + ':' + self.name
        query = dict(jwt=self.user.token, id=package_id, publish=publish)
        answer = handle(toggle_publish(params=query))

        if not answer['success']:
            message = 'Unable to toggle datapackage to %s'
            raise ToggleError(message, to_state)

        log.info('%s is now %s', package_id, to_state)
        return to_state

    def _check_file_formats(self):
        for resource in self:
            if resource.descriptor['mediatype'] != 'text/csv':
                message = 'Unsupported format: %s, valid formats are %s'
                raise NotImplementedError(message % (resource.path, OS_DATA_FORMATS))

    @property
    def filedata(self):
        filedata = {
            resource.descriptor['path']: {
                'name': resource.descriptor['name'],
                'length': getsize(resource.local_data_path),
                'md5': compute_hash(resource.local_data_path),
                'type': resource.descriptor['mediatype'],
            } for resource in self
        }
        descriptor_file = {
            basename(self.filepath): {
                'name': self.name,
                'length': getsize(self.filepath),
                'md5': compute_hash(self.filepath),
                'type': 'application/octet-stream',
            }
        }
        filedata.update(descriptor_file)
        return {
            'filedata': filedata,
            'metadata': {
                'owner': self.user.id,
                'name': self.name
            }
        }

    def _get_header(self, path, content_type):
        filepath = join(self.base_path, path)
        return {'Content-Length': str(getsize(filepath)),
                'Content-MD5': compute_hash(filepath),
                'Content-Type': content_type}

    @property
    def _descriptor_s3_url(self):
        return join(settings.S3_BUCKET_URL, self.user.id, self.name, self.path)

    def _request_s3_upload(self):
        """Request AWS S3 upload urls for all files."""
        response = request_upload(params=dict(jwt=self.user.token),
                                  json=self.filedata)
        files = handle(response)['filedata']

        for path, info in files.items():
            message = '%s is ready for upload to %s'
            log.info(message, path, info['upload_url'])
            query = {k: v[0] for k, v in info['upload_query'].items()}
            yield info['upload_url'], path, query, self._get_header(path, info['type'])

    def _push_to_s3(self, url, path, query, headers):
        """Send data files for upload to the S3 bucket."""
        log.debug('Started uploading %s to %s', path, url)
        log.debug('Headers: %s', headers)
        log.debug('Query parameters: %s', query)

        absolute_path = join(self.base_path, path)
        stream = io.open(absolute_path, mode='rb')
        future = self._session.put(url,
                                   headers=headers,
                                   data=stream,
                                   params=query,
                                   background_callback=self._s3_callback)
        self._streams.append(stream)
        self._futures.append(future)

    @staticmethod
    def _s3_callback(_, response):
        handle(response)
        log.info('Successful S3 upload: %s', response.url)

    def _handle_promises(self):
        """Collect all promises from S3 uploads."""
        for stream, future in zip(self._streams, self._futures):
            exception = future.exception()
            if exception:
                raise exception
            response = future.result()

            if response.status_code != 200:
                message = 'Something went wrong uploading %s to S3: %s'
                log.error(message, response.url, response.text)
                raise HTTPError(message % (response.url, response.text))

            self._responses.append(response)
            stream.close()

    def _insert_into_datastore(self):
        """Transfer datafiles from S3 into the postgres datastore.

        :return: the url of the fiscal datapackage on Open-Spending
        """
        query = dict(jwt=self.user.token, datapackage=self._descriptor_s3_url)
        response = upload_package(params=query)
        handle(response)
        log.info('Congratulations, %s was uploaded successfully!', self)
        log.info('You can find your fiscal datapackage here: %s', self.url)
        return self.url

    def __len__(self):
        return len(self.resources)

    def __repr__(self):
        return '<FiscalDataPackage [%s files]: %s>' % (len(self), self.name)

    def __str__(self):
        return self.name

    def __iter__(self):
        for resource in self.resources:
            yield resource

    def __getitem__(self, index):
        return self.resources[index]
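The upload path above fans every file out through a single FuturesSession and only reconciles the futures afterwards. Below is a minimal standalone sketch of that put-then-collect pattern; the target tuples are placeholders, not the Open-Spending API.

import io
from requests_futures.sessions import FuturesSession

def parallel_put(targets):
    """targets: iterable of (url, local_path, headers, query) tuples (hypothetical)."""
    session = FuturesSession()
    streams, futures = [], []
    for url, path, headers, query in targets:
        # keep each file handle open until its future resolves
        stream = io.open(path, mode='rb')
        futures.append(session.put(url, data=stream, headers=headers, params=query))
        streams.append(stream)
    for stream, future in zip(streams, futures):
        response = future.result()   # re-raises any transport exception
        response.raise_for_status()  # surface non-2xx uploads
        stream.close()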
class WaterlinkedGPS:
    def __init__(self):
        rospy.loginfo('Waterlinked GPS object created...\n')

        ip = rospy.get_param("~ip", "192.168.2.94")
        port = rospy.get_param("~port", "80")

        """ Default TF Behaviour: If no datum point given, then waterlinked
        node sends utm->map transform referenced to the master GPS position
        and a utm->waterlinked transform referenced to the master GPS
        position and heading. Pose data is referenced to waterlinked frame.

        Datum TF Behaviour: Datum [latitude, longitude] overrides default
        behaviour. utm->map transform referenced to the datum point given,
        ENU alignment. utm->waterlinked transform referenced to the master
        GPS position and heading. Pose data is referenced to waterlinked
        frame.
        """

        self._map_frame_id = rospy.get_param("~map_frame_id", "map")
        self._waterlinked_frame_id = rospy.get_param("~waterlinked_frame_id", "waterlinked")
        self._send_tf = rospy.get_param("~send_tf", False)
        self._datum = rospy.get_param("~datum", None)  # if no datum specified
        self._master_gps_ns = rospy.get_param("~master_gps_ns", None)  # None
        self._master_orientation_topic = rospy.get_param("~master_imu_topic", None)

        self._tf_buffer = Buffer()
        self._tf_bcast = TransformBroadcaster()

        # The base URL is the one specified through: http://192.168.2.2:2770/waterlinked
        self._base_url = 'http://' + ip + ':' + port + '/api/v1'
        # self._base_url = 'http://192.168.2.94:80/api/v1'

        # The complete API can be found here: http://192.168.2.94/swagger/
        # Divide the messages into a slow and a fast group.
        self._api_endpoints_slow = ['/about',
                                    '/about/status',
                                    '/about/temperature',
                                    '/config/generic',
                                    '/config/receivers']

        self._api_endpoints_fast = ['/external/orientation',
                                    '/position/acoustic/filtered',
                                    '/position/acoustic/raw',
                                    '/position/global',
                                    '/position/master']

        # Create lists of the full APIs (base URL + endpoint)
        self._urls_slow = [self._base_url + api_endpoint for api_endpoint in self._api_endpoints_slow]
        self._urls_fast = [self._base_url + api_endpoint for api_endpoint in self._api_endpoints_fast]

        # Specify the frequencies for the two groups
        self._rate_slow_hz = 0.25
        self._rate_fast_hz = 4.0

        # Print the URLs and their specified frequencies
        self.print_urls()

        # HTTP request session
        self._session = FuturesSession(max_workers=10)

        # If a master gps topic has been specified, then forward that to waterlinked for use
        if self._master_gps_ns is not None:
            self._gps_msg = {
                "cog": 0,
                "fix_quality": 1,
                "hdop": 0,
                "lat": 0.0,
                "lon": 0.0,
                "numsats": 11,
                "orientation": 0.0,
                "sog": 0.0
            }
            self._gps_url = self._base_url + "/external/master"
            rospy.Subscriber(self._master_gps_ns + "/fix", NavSatFix, self._handle_master_gps)
            rospy.Subscriber(self._master_gps_ns + "/vel", TwistStamped, self._handle_master_vel)
            rospy.Timer(rospy.Duration.from_sec(1.0), self._forward_master_position)

        # Configure the slow and fast timer callbacks
        rospy.Timer(rospy.Duration.from_sec(1.0 / self._rate_slow_hz), self.slow_callback)
        rospy.Timer(rospy.Duration.from_sec(1.0 / self._rate_fast_hz), self.fast_callback)

        # Time logging variables
        self._show_loop_timing = False
        self._is_first_slow_loop = True
        self._is_first_fast_loop = True

        # Slow publishers
        self._pub_about = rospy.Publisher('waterlinked/about', About, queue_size=5)
        self._pub_about_status = rospy.Publisher('waterlinked/about/status', AboutStatus, queue_size=5)
        self._pub_about_temperature = rospy.Publisher('waterlinked/about/temperature', AboutTemperature, queue_size=5)
        self._pub_config_generic = rospy.Publisher('waterlinked/config/generic', ConfigGeneric, queue_size=5)
        self._pub_config_receivers = rospy.Publisher('waterlinked/config/receivers', ConfigReceivers, queue_size=5)

        # Fast publishers
        self._pub_external_orientation = rospy.Publisher('waterlinked/external/orientation', ExternalOrientation, queue_size=5)
        self._pub_position_acoustic_filtered = rospy.Publisher('waterlinked/position/acoustic/filtered', PositionAcousticFiltered, queue_size=5)
        self._pub_position_acoustic_raw = rospy.Publisher('waterlinked/position/acoustic/raw', PositionAcousticRaw, queue_size=5)
        self._pub_position_global = rospy.Publisher('waterlinked/position/global', PositionGlobal, queue_size=5)
        self._pub_position_master = rospy.Publisher('waterlinked/position/master', PositionMaster, queue_size=5)

        # Prepare sensor data for the robot_localization package
        self._pub_pos_with_covariance_stamped = rospy.Publisher('waterlinked/pose_with_cov_stamped', PoseWithCovarianceStamped, queue_size=5)

        # Enter infinite spinning
        rospy.spin()

    def connection_error(self):
        rospy.logerr_throttle(10, "{} | Unable to connect to Waterlinked GPS on: {}".format(
            rospy.get_name(), self._base_url))

    def print_urls(self):
        message = 'Waterlinked APIs to be requested (see https://demo.waterlinked.com/swagger/#/):\n'
        message += 'Slow (f = ' + str(self._rate_slow_hz) + ' Hz)\n'
        for url_str in self._urls_slow:
            message += '- ' + url_str + '\n'
        message += 'Fast (f = ' + str(self._rate_fast_hz) + ' Hz)\n'
        for url_str in self._urls_fast:
            message += '- ' + url_str + '\n'
        rospy.loginfo('{} | {}'.format(rospy.get_name(), message))

    def _forward_master_position(self, event):
        """If an external gps topic is subscribed, forward the latitude and
        longitude over to waterlinked."""
        try:
            r = self._session.put(self._gps_url, json=self._gps_msg, timeout=2)
            r = r.result(10)
        except Exception as e:
            # exceptions have no `.message` attribute in Python 3
            rospy.logerr_throttle(10.0, "{} | {}".format(rospy.get_name(), e))
            return
        if r.status_code != 200:
            rospy.logerr("Error setting master position: {} {}".format(r.status_code, r.text))

    def _handle_master_gps(self, msg):
        """Fill in GPS information for message"""
        self._gps_msg["lat"] = msg.latitude
        self._gps_msg["lon"] = msg.longitude
        self._gps_msg["hdop"] = msg.position_covariance[0]

    def _handle_master_vel(self, msg):
        """Fill in GPS cog/sog for message"""
        self._gps_msg["sog"] = sqrt(msg.twist.linear.x**2 + msg.twist.linear.y**2)
        val = -1 * (atan2(msg.twist.linear.y, msg.twist.linear.x) * 180.0 / pi - 90)
        val = 360 + val if val < 0 else val
        self._gps_msg["cog"] = val

    def slow_callback(self, event):
        """Callback function that requests Waterlinked status and config
        settings at a low rate."""
        # Request current time and use it for all messages
        tnow = rospy.Time.now()

        if self._is_first_slow_loop:
            self._is_first_slow_loop = False
            self.f_cum_slow = 0
            self.n_slow = 0
            self._slow_t0 = tnow.to_sec()
        else:
            f = 1 / (tnow.to_sec() - self._slow_t0)
            self.f_cum_slow += f
            self.n_slow += 1
            f_avg = self.f_cum_slow / self.n_slow
            rospy.logdebug("slow loop (n = %d): f_avg = %.3f Hz" % (self.n_slow, f_avg))

        # Initiate HTTP request to all URLs
        future_list = [self._session.get(url, timeout=2.0) for url in self._urls_slow]

        try:
            # waterlinked/about
            res_about = future_list[0].result()
            if res_about.ok:
                data = res_about.json()
                msg_about = About()
                msg_about.header.stamp = tnow
                msg_about.chipid = data['chipid']
                msg_about.version = data['version']
                self._pub_about.publish(msg_about)

            # waterlinked/about/status
            res_about_status = future_list[1].result()
            if res_about_status.ok:
                data = res_about_status.json()
                msg_about_status = AboutStatus()
                msg_about_status.header.stamp = tnow
                msg_about_status.gps = data['gps']
                msg_about_status.imu = data['imu']
                self._pub_about_status.publish(msg_about_status)

            # waterlinked/about/temperature
            res_about_temperature = future_list[2].result()
            if res_about_temperature.ok:
                data = res_about_temperature.json()
                msg_about_temperature = AboutTemperature()
                msg_about_temperature.header.stamp = tnow
                msg_about_temperature.board = data['board']
                self._pub_about_temperature.publish(msg_about_temperature)

            # waterlinked/config/generic
            res_config_generic = future_list[3].result()
            if res_config_generic.ok:
                data = res_config_generic.json()
                msg_config_generic = ConfigGeneric()
                msg_config_generic.header.stamp = tnow
                # msg_config_generic.carrier_frequency = data['carrier_frequency']
                msg_config_generic.compass = data['compass'].encode('ascii', 'ignore')
                msg_config_generic.gps = data['gps'].encode('ascii', 'ignore')
                msg_config_generic.range_max_x = data['range_max_x']
                msg_config_generic.range_max_y = data['range_max_y']
                msg_config_generic.range_max_z = data['range_max_z']
                msg_config_generic.range_min_x = data['range_min_x']
                msg_config_generic.range_min_y = data['range_min_y']
                msg_config_generic.static_lat = data['static_lat']
                msg_config_generic.static_lon = data['static_lon']
                msg_config_generic.static_orientation = data['static_orientation']
                # msg_config_generic.use_external_depth = data['use_external_depth']
                self._pub_config_generic.publish(msg_config_generic)

            # waterlinked/config/receivers
            res_config_receivers = future_list[4].result()
            if res_config_receivers.ok:
                data = res_config_receivers.json()
                msg_config_receivers = ConfigReceivers()
                msg_config_receivers.header.stamp = tnow
                msg_config_receivers.receivers = []
                for i in range(len(data)):
                    rec = Receiver()
                    rec.id = data[i]['id']
                    rec.x = data[i]['x']
                    rec.y = data[i]['y']
                    rec.z = data[i]['z']
                    msg_config_receivers.receivers.append(rec)
                self._pub_config_receivers.publish(msg_config_receivers)

        except ConnectionError:
            self.connection_error()

    def fast_callback(self, event):
        """Callback function that requests Waterlinked position and
        orientation information at a fast rate."""
        # Request current time and use it for all messages
        tnow = rospy.Time.now()

        if self._is_first_fast_loop:
            self._is_first_fast_loop = False
            self.f_cum_fast = 0
            self.n_fast = 0
            self._fast_t0 = tnow.to_sec()
        else:
            f = 1 / (tnow.to_sec() - self._fast_t0)
            self.f_cum_fast += f
            self.n_fast += 1
            f_avg = self.f_cum_fast / self.n_fast
            rospy.logdebug("fast loop (n = %d): f_avg = %.3f Hz" % (self.n_fast, f_avg))

        # Initiate HTTP request to all URLs
        future_list = [self._session.get(url, timeout=2.0) for url in self._urls_fast]

        try:
            # WARN: ORIENTATION IS CLOCKWISE REFERENCED FROM MAGNETIC NORTH
            # /waterlinked/external/orientation
            res_external_orientation = future_list[0].result()
            if res_external_orientation.ok:
                data = res_external_orientation.json()
                msg_external_orientation = ExternalOrientation()
                msg_external_orientation.header.stamp = tnow
                msg_external_orientation.orientation = data['orientation']
                self._pub_external_orientation.publish(msg_external_orientation)

            # /waterlinked/position/acoustic/filtered
            res_position_acoustic_filtered = future_list[1].result()

            # WARN: WATERLINKED POSITION IS LEFT HANDED RFD -> X: RIGHT, Y: FORWARDS, Z: DOWN
            # DO NOT USE ACOUSTIC_FILTERED FOR NAVIGATION!
            if res_position_acoustic_filtered.ok:
                data = res_position_acoustic_filtered.json()
                msg_position_acoustic_filtered = PositionAcousticFiltered()
                msg_position_acoustic_filtered.header.stamp = tnow
                msg_position_acoustic_filtered.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_filtered.std = data['std']
                msg_position_acoustic_filtered.temp = data['temp']
                msg_position_acoustic_filtered.x = data['x']
                msg_position_acoustic_filtered.y = data['y']
                msg_position_acoustic_filtered.z = data['z']
                if self._pub_position_acoustic_filtered.get_num_connections() > 0:
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_filtered is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead.".format(rospy.get_name()))
                self._pub_position_acoustic_filtered.publish(msg_position_acoustic_filtered)

                # Create message of the type geometry_msgs/PoseWithCovariance
                msg_pose_with_cov_stamped = PoseWithCovarianceStamped()
                var_xyz = pow(data['std'], 2)  # calculate variance from standard deviation
                msg_pose_with_cov_stamped.header.stamp = tnow
                msg_pose_with_cov_stamped.header.frame_id = self._waterlinked_frame_id
                msg_pose_with_cov_stamped.pose.pose.position.x = data['y']
                msg_pose_with_cov_stamped.pose.pose.position.y = -data['x']
                msg_pose_with_cov_stamped.pose.pose.position.z = -data['z']
                msg_pose_with_cov_stamped.pose.pose.orientation = Quaternion(0, 0, 0, 1)
                msg_pose_with_cov_stamped.pose.covariance = [var_xyz, 0, 0, 0, 0, 0,
                                                             0, var_xyz, 0, 0, 0, 0,
                                                             0, 0, var_xyz, 0, 0, 0,
                                                             0, 0, 0, 0, 0, 0,
                                                             0, 0, 0, 0, 0, 0,
                                                             0, 0, 0, 0, 0, 0]
                self._pub_pos_with_covariance_stamped.publish(msg_pose_with_cov_stamped)

            # /waterlinked/position/acoustic/raw
            res_position_acoustic_raw = future_list[2].result()
            if res_position_acoustic_raw.ok:
                data = res_position_acoustic_raw.json()
                msg_position_acoustic_raw = PositionAcousticRaw()
                msg_position_acoustic_raw.header.stamp = tnow
                msg_position_acoustic_raw.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_raw.std = data['std']
                msg_position_acoustic_raw.temp = data['temp']
                msg_position_acoustic_raw.x = data['x']
                msg_position_acoustic_raw.y = data['y']
                msg_position_acoustic_raw.z = data['z']
                if self._pub_position_acoustic_raw.get_num_connections() > 0:
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_raw is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead.".format(rospy.get_name()))
                self._pub_position_acoustic_raw.publish(msg_position_acoustic_raw)

            # /waterlinked/position/global
            res_position_global = future_list[3].result()
            if res_position_global.ok:
                data = res_position_global.json()
                msg_position_global = PositionGlobal()
                msg_position_global.header.stamp = tnow
                msg_position_global.lat = data['lat']
                msg_position_global.lon = data['lon']
                self._pub_position_global.publish(msg_position_global)

            # /waterlinked/position/master
            res_position_master = future_list[4].result()
            msg_position_master = None
            if res_position_master.ok:
                data = res_position_master.json()
                msg_position_master = PositionMaster()
                msg_position_master.header.stamp = tnow
                msg_position_master.cog = data['cog']
                msg_position_master.hdop = data['hdop']
                msg_position_master.lat = data['lat']
                msg_position_master.lon = data['lon']
                msg_position_master.numsats = data['numsats']
                msg_position_master.orientation = data['orientation']
                msg_position_master.sog = data['sog']
                self._pub_position_master.publish(msg_position_master)

            # CONVENTION: UTM -> WATERLINKED IS DEFINED BY UTM POSITION OF MASTER, ROTATED ACCORDING TO MASTER ORIENTATION
            # CONVENTION: UTM -> MAP IS DEFINED BY UTM POSITION OF MASTER, WITHOUT ANY ROTATION (ALIGNED WITH NORTH)
            # CONVENTION: UTM -> MAP CAN ALSO BE DEFINED BY AN EXTERNAL DATUM [LATITUDE, LONGITUDE]
            if self._send_tf and msg_position_master is not None:
                tf_map = TransformStamped()

                # Map transformation
                tf_map.header.stamp = tnow
                tf_map.header.frame_id = self._map_frame_id
                tf_map.child_frame_id = self._waterlinked_frame_id

                # ORIENTATION IS PROVIDED AS NORTH REFERENCED CW
                # NEEDS TO BE CONVERTED TO EAST REFERENCED CCW
                q = Rotation.from_euler('xyz', [0, 0, 90 - msg_position_master.orientation],
                                        degrees=True).as_quat()
                tf_map.transform.rotation = Quaternion(*q)
                self._tf_bcast.sendTransform(tf_map)

        except ConnectionError:
            self.connection_error()
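Both timer callbacks above share one pattern: fire every GET at once, then drain the futures in a known order so each response maps to its publisher. A reduced sketch of that batch-poll loop follows; the endpoint list is a placeholder and the ROS plumbing is omitted.

from requests_futures.sessions import FuturesSession
from requests.exceptions import ConnectionError

session = FuturesSession(max_workers=10)
urls = ['http://192.168.2.94:80/api/v1' + endpoint
        for endpoint in ('/about', '/position/master')]  # placeholder subset

def poll_once():
    # All requests go on the wire before the first .result() blocks,
    # so one cycle costs roughly the latency of the slowest endpoint.
    futures = [session.get(url, timeout=2.0) for url in urls]
    results = []
    try:
        for future in futures:
            response = future.result()
            results.append(response.json() if response.ok else None)
    except ConnectionError:
        return None  # the node above logs (throttled) and skips the cycle
    return results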
class SubjectAdmin:
    def __init__(self, *, username, password, devilry_url):
        self.devilry_url = devilry_url
        self.rest_url = f'{devilry_url}/devilry_subjectadmin/rest'
        self.period = None
        self.session = FuturesSession(max_workers=24)
        self.auth(username, password)

    def auth(self, username, password):
        self.creds = (username, password)
        login_url = f'{self.devilry_url}/authenticate/login'
        r = self.session.post(login_url,
                              {'username': username, 'password': password},
                              allow_redirects=False).result()
        if not r.ok:
            raise ConnectionError('Auth failed')

    @staticmethod
    def _json_cb(sess, resp):
        resp.data = resp.json()

    def get(self, url, *, cb=None, **kwargs):
        cb = cb if cb is not None else self._json_cb
        return self.session.get(f'{self.rest_url}/{url}', **kwargs,
                                background_callback=cb)

    def post(self, url, **kwargs):
        return self.session.post(f'{self.rest_url}/{url}', **kwargs,
                                 auth=self.creds)

    def put(self, url, **kwargs):
        return self.session.put(f'{self.rest_url}/{url}', **kwargs,
                                auth=self.creds)

    def delete(self, url, **kwargs):
        return self.session.delete(f'{self.rest_url}/{url}', **kwargs,
                                   auth=self.creds)

    def periods(self):
        courses = self.get('allwhereisadmin').result().data
        periods = []
        for course in courses:
            for period in course['periods']:
                periods.append({'course': course, 'period': period})
        return periods

    def set_period(self, period):
        self.period = period

    @needs_period
    def create_assignment(self, *, short_name, long_name, first_deadline,
                          publishing_time, setupstudents_mode,
                          delivery_types=0, anonymous=False):
        post_data = locals()
        del post_data['self']
        post_data['first_deadline'] = first_deadline.strftime('%F %T')
        post_data['publishing_time'] = publishing_time.strftime('%F %T')
        post_data['period_id'] = self.period['id']
        return self.post('createnewassignment/', json=post_data,
                         background_callback=self._json_cb)

    def set_hard_deadlines(self, assignment_id):
        def task():
            r = self.get(f'assignment/{assignment_id}').result()
            assignment = r.data
            assignment['deadline_handling'] = 1
            r = self.put(f'assignment/{assignment_id}', json=assignment).result()
        return self.session.executor.submit(task)

    def set_points_assignment(self, assignment_id, min_points=0, *,
                              max_points, display_points=True):
        def task():
            r = self.get(f'assignment/{assignment_id}').result()
            points2grade = 'raw-points' if display_points else 'passed-failed'
            assignment = r.data
            assignment['max_points'] = max_points
            assignment['passing_grade_min_points'] = min_points
            assignment['points_to_grade_mapper'] = points2grade
            assignment['grading_system_plugin_id'] = \
                'devilry_gradingsystemplugin_points'
            r = self.put(f'assignment/{assignment_id}', json=assignment).result()
        return self.session.executor.submit(task)

    def examiner_stats(self, assignment_id):
        return self.get(f'examinerstats/{assignment_id}')

    def set_examiner(self, student, examiner, assignment):
        return self.post(f'group/{assignment}/',
                         json={'candidates': [{'user': {'id': student}}],
                               'examiners': [{'user': examiner['user']}],
                               'is_open': True})

    @lru_cache(maxsize=64)
    def find_person(self, username):
        r = self.session.get(f'{self.devilry_url}/devilry_usersearch/search'
                             f'?query={username}').result()
        if not r.ok:
            warn(f'Search could not be completed: {r.text}\n{r.reason}')
            return
        for user in r.json():
            if user['username'] == username:
                return user

    def set_tags(self, assignment):
        groups = self.get(f'group/{assignment}/').result().data
        students = self.get(f'relatedstudent_assignment_ro/{assignment}/')\
            .result().data

        def get_tags(student):
            for st in students:
                if student == st['user']['id']:
                    return [{'tag': t} for t in st['tags'].split(',')]

        futures = []
        for group in groups:
            # TODO: Copy ALL tags
            f = self.put(f'group/{assignment}/',
                         json={'id': group['id'],
                               'candidates': [group['candidates'][0]],
                               'examiners': group['examiners'],
                               'is_open': True,
                               'tags': get_tags(group['candidates'][0]['user']['id'])})
            futures.append(f)
        return futures

    def get_group(self, username, assignment):
        def cb(sess, resp):
            for group in resp.json():
                if group['candidates'][0]['user']['username'] == username:
                    resp.data = group
                    return
        return self.get(f'group/{assignment}/?query={username}', cb=cb)

    def update_examiner(self, group, examiner, assignment):
        examiners = [] if examiner is None else [{'user': examiner['user']}]
        return self.put(f'group/{assignment}/',
                        json={'id': group['id'],
                              'candidates': [group['candidates'][0]],
                              'examiners': examiners,
                              'is_open': True,
                              'tags': group['tags']})

    def remove_students(self, students, assignment):
        def remove(student):
            r = self.get(f'group/{assignment}/?query={student}').result()
            for group in r.data:
                if group['candidates'][0]['user']['username'] == student:
                    break
            else:
                return
            return self.delete(f'group/{assignment}/',
                               json={'id': group['id']}).result()
        return [self.session.executor.submit(remove, student)
                for student in students]

    def remove_students_by_tag(self, tag, assignment):
        r = self.get(f'group/{assignment}/?query={tag}').result()
        futures = []
        for group in r.data:
            if tag in map(lambda x: x['tag'], group['tags']):
                futures.append(self.delete(f'group/{assignment}/',
                                           json={'id': group['id']}))
        return futures

    def add_students(self, students, assignment):
        async def add(student):
            r = await asyncio.wrap_future(
                self.get(f"relatedstudent/{self.period['id']}?query={student}"))
            if not r.ok:
                warn(f'Student {student} could not be found:\n{r.text}')
            for stud in r.json():
                if stud['user']['username'] == student:
                    break
            stud_id = stud['user']['id']
            r = await asyncio.wrap_future(
                self.post(f'group/{assignment}/',
                          json={'candidates': [{'user': {'id': stud_id}}],
                                'is_open': True}))
            if not r.ok:
                warn(f'Student {student} could not be added:\n{r.text}')
        loop = asyncio.get_event_loop()
        g = asyncio.gather(*[add(student) for student in students])
        loop.run_until_complete(g)

    def setup_examiners_by_tags(self, assignment):
        r = self.get(f"relatedexaminer/{self.period['id']}").result()
        emap = {exr['tags']: exr['user']['id'] for exr in r.data}
        r = self.get(f'group/{assignment}/').result()
        futures = []
        for group in r.data:
            for tag in group['tags']:
                t = tag['tag']
                if t not in emap:
                    continue
                futures.append(self.update_examiner(group,
                                                    {'user': {'id': emap[t]}},
                                                    assignment))
        return futures

    def close_groups_without_deliveries(self, assignment):
        r = self.get(f'group/{assignment}/').result()
        futures = []
        for group in r.data:
            if group['num_deliveries'] == 0:
                futures.append(self.put(f'group/{assignment}/',
                                        json={'id': group['id'],
                                              'is_open': False,
                                              'candidates': group['candidates'],
                                              'examiners': group['examiners'],
                                              'tags': group['tags']}))
        return futures

    def set_deadline_text(self, assignment, text):
        dls = self.get(f'deadlinesbulk/{assignment}').result().data
        futures = []
        for dl in dls:
            if dl['text'] is None:
                futures.append(
                    self.put(f"deadlinesbulk/{assignment}/{dl['bulkdeadline_id']}",
                             json={'text': text, 'deadline': dl['deadline']}))
        return futures

    def remove_examiner_no_delivery(self, assignment):
        r = self.get(f'group/{assignment}/').result()
        futures = []
        for group in r.data:
            if not group['num_deliveries']:
                futures.append(self.update_examiner(group, None, assignment))
        return futures  # was missing: callers expect the list of futures

    @needs_period
    def points(self):
        import pandas as pd
        ov = {}
        r = self.get(f"detailedperiodoverview/{self.period['id']}")
        data = r.result().data
        assignments = {a['id']: a['short_name'] for a in data['assignments']}
        for student in data['relatedstudents']:
            name = student['user']['username']
            stdict = {}
            for assignment in student['groups_by_assignment']:
                a_name = assignments[assignment['assignmentid']]
                if assignment['grouplist'] and assignment['grouplist'][0]['feedback']:
                    stdict[a_name] = assignment['grouplist'][0]['feedback']['points']
            tag = student['relatedstudent']['tags'].split(',')[-1]
            stdict['group'] = tag.replace('gruppe', '') if 'gruppe' in tag else None
            ov[name] = stdict
        df = pd.DataFrame(ov).T
        df.set_index([df.index, 'group'], inplace=True)
        return df
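set_hard_deadlines and set_points_assignment reuse the session's own thread pool to chain dependent requests behind a single future. A stripped-down sketch of that read-modify-write pattern follows; the URL and field names are placeholders. Note the pool must be large enough for the outer task and its inner requests to run concurrently, which is one reason the class asks for 24 workers.

from requests_futures.sessions import FuturesSession

session = FuturesSession(max_workers=24)

def patch_resource(url, **changes):
    """Return one future covering a GET-modify-PUT sequence (hypothetical helper)."""
    def task():
        resource = session.get(url).result().json()  # read current state
        resource.update(changes)                     # apply the edits
        return session.put(url, json=resource).result()
    # task() itself blocks on inner futures, so it needs spare workers
    return session.executor.submit(task)

# Hypothetical usage:
# future = patch_resource('https://devilry.example.org/rest/assignment/1',
#                         deadline_handling=1)
# future.result().raise_for_status()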
class ManagedPersister(Persister):
    def __init__(self, workflow_name: str, wf_start_time: float, service_url: str,
                 wf_exec_id=None, context: str = None, with_validation: bool = False,
                 db_name: str = None, bag_size: int = 1,
                 should_send_to_file: bool = False,
                 should_send_to_service: bool = True):
        super().__init__(workflow_name, wf_start_time, wf_exec_id)
        self.retrospective_url = urljoin(service_url, "retrospective-provenance")
        self.prospective_url = urljoin(service_url, "prospective-provenance")
        self.context = context
        self.with_validation = with_validation
        self.db_name = db_name
        self.requests_queue = list()
        self.bag_size = bag_size
        self.should_send_to_service = should_send_to_service
        self.should_send_to_file = should_send_to_file
        self.session = None
        if self.should_send_to_service:
            logger.debug("You are using the Service URL: " + service_url)
            self.session = FuturesSession()

    def add_request(self, persistence_request: ProvRequestObj):
        try:
            request_data = persistence_request.as_dict()
            if self.context:
                request_data["context"] = self.context
            self.requests_queue.append(request_data)
            if len(self.requests_queue) >= self.bag_size:
                self._flush()
            # if `configuration` is present this object should be persisted synchronously
            # if "configuration" in prov_obj:
            #     self.__flush__(True)
        except Exception:
            logger.error("[Prov] Unexpected exception")
            traceback.print_exc()

    def _close(self):
        if self.session:
            logger.info("Waiting to get response from all submitted provenance tasks...")
            while not self.session.executor._work_queue.empty():
                # wait to guarantee that all provenance requests have been sent (fired) to collector service
                sleep(0.1)
        # Persist remaining tasks synchronously
        self._flush(all_and_wait=True)
        if self.session:
            self.session.close()

    def _flush(self, all_and_wait: bool = False):
        if len(self.requests_queue) > 0:
            if all_and_wait:
                logger.debug("Going to flush everything. Flushing " +
                             str(len(self.requests_queue)))
                if self.should_send_to_file:
                    offline_prov_log.debug(json.dumps(self.requests_queue))
                if self.should_send_to_service:
                    self._send_to_service(self.requests_queue)
                self.requests_queue = list()
            else:
                to_flush = self.requests_queue[:self.bag_size]
                del self.requests_queue[:self.bag_size]
                logger.debug("Going to flush a part. Flushing " + str(len(to_flush)) +
                             " out of " + str(len(self.requests_queue)))
                if self.should_send_to_file:
                    offline_prov_log.debug(json.dumps(to_flush))
                if self.should_send_to_service:
                    self._send_to_service(to_flush)

    def _send_to_service(self, to_flush: List[dict]):
        params = {"with_validation": str(self.with_validation),
                  "db_name": self.db_name}
        try:
            logger.debug("[Prov-Persistence]" + json.dumps(to_flush))
            # TODO: check whether we need this result() below
            r = self.session.post(self.retrospective_url, json=to_flush,
                                  params=params, verify=False).result()
        except ConnectionError as ex:
            logger.error("[Prov][ConnectionError] There is a communication error "
                         "between client and server -> " + str(ex))
            r = None
        except Exception as ex:
            traceback.print_exc()
            logger.error("[Prov] Unexpected exception while adding retrospective "
                         "provenance: " + type(ex).__name__ + "->" + str(ex))
            r = None
        # If requests were validated, check for errors
        if r and self.with_validation:
            self._log_validation_message(r)

    def persist_prospective(self, json_data: dict):
        try:
            if self.should_send_to_file:
                offline_prov_log.debug(json.dumps(self.requests_queue))
            if self.should_send_to_service:
                logger.debug("[Prov-Persistence][Prospective]" + json.dumps(json_data))
                try:
                    r = self.session.post(self.prospective_url, json=json_data,
                                          params={'overwrite': True},
                                          verify=False).result()
                    if 200 <= r.status_code <= 209:
                        logger.debug("Prospective provenance inserted successfully.")
                    elif r.status_code == 406:
                        error_parsed = json.loads(r.text)
                        error_obj = error_parsed['error'].replace("'", '"')
                        logger.error(error_obj)
                    elif r.status_code == 500:
                        # retry once with a PUT before giving up
                        r = self.session.put(self.prospective_url,
                                             json=json_data).result()
                        if not 200 <= r.status_code <= 209:
                            logger.error("Prospective provenance was not inserted "
                                         "correctly. Status code = " + str(r.status_code))
                    elif r.status_code > 300:
                        logger.error("Prospective provenance was not inserted "
                                     "correctly. Status code = " + str(r.status_code))
                except ConnectionError as ex:
                    traceback.print_exc()
                    logger.error("[Prov][ConnectionError] There is a communication "
                                 "error between client and server -> " + str(ex))
                except Exception as ex:
                    logger.error("[Prov] Unexpected exception while adding "
                                 "prospective provenance: " + type(ex).__name__)
        except Exception as ex:
            logger.error("[Prov] Unexpected exception " + type(ex).__name__)
            traceback.print_exc()

    @staticmethod
    def _log_validation_message(response):
        error_obj = json.loads(response.text)
        if len(error_obj['error']) > 0:
            for error_list in error_obj['error']:
                for error in error_list:
                    if error['code'][0] == 'W':
                        logger.warning('{} {}\n'.format(error['type'],
                                                        error['explanation']))
                    else:
                        logger.error('{} {}\n'.format(error['type'],
                                                      error['explanation']))
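The queueing logic above decouples producers from the network: records accumulate until bag_size is reached, then one POST carries the whole bag. A minimal sketch of that batching idea, with a placeholder endpoint and none of the validation or file-logging branches:

from requests_futures.sessions import FuturesSession

class BatchSender:
    """Buffer records and POST them in bags of `bag_size` (illustrative only)."""

    def __init__(self, url, bag_size=10):
        self.url = url
        self.bag_size = bag_size
        self.queue = []
        self.session = FuturesSession()

    def add(self, record):
        self.queue.append(record)
        if len(self.queue) >= self.bag_size:
            self.flush()

    def flush(self):
        batch, self.queue = self.queue[:self.bag_size], self.queue[self.bag_size:]
        if batch:
            # fire-and-forget; keep the future and call .result() when
            # delivery must be confirmed, as _send_to_service does above
            self.session.post(self.url, json=batch)

# sender = BatchSender('http://localhost:5000/retrospective-provenance')  # placeholder URL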
class FiscalDataPackage(DataPackage):
    """This class represents a fiscal data package.

    The class is a subclass of the :class:`datapackage.DataPackage` class.
    The constructor takes the same arguments as its parent class, except
    that the schema is "fiscal".

    :param target: The target is the full path to the fiscal datapackage
        JSON descriptor, but it can also be a dictionary representing the
        schema itself or a url pointing to a descriptor (for more
        information please refer to the documentation for the
        :class:`datapackage.DataPackage` class).
    :param user: a `gobble.user.user` object.
    """

    def __init__(self, filepath, user=None, **kw):
        if not isfile(filepath):
            raise NotImplementedError('%s is not a local path' % filepath)

        super(FiscalDataPackage, self).__init__(filepath, **kw)
        self._check_file_formats()

        self._streams = []
        self._session = FuturesSession()
        self._futures = []
        self._responses = []

        self.user = user
        self.name = self.descriptor.get('name')
        self.path = basename(filepath)
        self.filepath = filepath

    def validate(self, raise_on_error=True, schema_only=True):
        """Validate a datapackage schema.

        By default, only the data-package schema is validated. To validate
        the data files too, set `schema_only` to `False`. The method fails
        if an error is found, unless `raise_on_error` is explicitly set to
        `False`.

        :param raise_on_error: raise error on failure or not (default: True)
        :param schema_only: only validate the schema (default: True)
        :raise: :class:`ValidationError` if the schema is invalid
        :return: A list of error messages or an empty list.
        """
        messages = []
        profile = Profile('fiscal-data-package')

        if raise_on_error:
            profile.validate(self.descriptor)
        else:
            try:
                profile.validate(self.descriptor)
                message = '%s (%s) is a valid fiscal data-package schema'
                log.info(message, self.path, self)
            except ValidationError as e:
                for error in e.errors:
                    message = 'SCHEMA ERROR in %s: %s'
                    args = self.path, error
                    messages.append(message % args)
                    log.warn(message, *args)

        if messages:
            messages.append('Aborting data validation due to invalid schema')
            return messages

        if not schema_only:
            return self._validate_data(raise_on_error)
        else:
            return messages

    def upload(self, publish=True, skip_validation=False):
        """Upload a fiscal datapackage to Open-Spending.

        It does this in 3 steps:
            * request upload urls for AWS S3 storage
            * upload all files to the owner's S3 bucket
            * insert the data into the Open-Spending datastore (PostgreSQL)

        By default, newly uploaded packages are kept private, but you can
        change that with the `publish` flag. Also note that if you upload
        the same fiscal data package again, the previous version will be
        overwritten.

        For now, the only valid datafile format is CSV.

        :param skip_validation: use only if you have already done so
        :param publish: toggle the datapackage to "published" after upload
        """
        self.descriptor['author'] = self.user.name
        self.descriptor['owner'] = self.user.id

        with io.open(self.filepath, 'w') as descriptor:
            descriptor.write(self.to_json())

        if not skip_validation:
            self.validate()

        log.info('Starting uploading process for %s', self)

        for s3_target in self._request_s3_upload():
            self._push_to_s3(*s3_target)

        self._handle_promises()
        self._insert_into_datastore()

        sleep(POLL_PERIOD)
        while self.in_progress:
            sleep(POLL_PERIOD)

        if publish:
            self.toggle('public')

        return self.url

    @property
    def url(self):
        return join(settings.OS_URL, self.user.id + ':' + self.name)

    @property
    def in_progress(self):
        """Return True while the upload is still in progress."""
        query = dict(datapackage=self._descriptor_s3_url)
        try:
            answer = loads(upload_status(params=query).text)
        except JSONDecodeError:
            return True
        args = self, answer['status'], answer['progress'], len(self)
        log.debug('%s is loading (%s) %s/%s', *args)
        if answer['status'] == 'fail':
            raise UploadError(answer.get('error'))
        return answer['status'] not in {'done', 'fail'}

    def toggle(self, to_state):
        """Toggle public access to a fiscal datapackage.

        Change the status of a fiscal data package from public to private
        or vice-versa. If something went wrong whilst changing the status,
        you will get a :class:`upload.ToggleError`.

        :param to_state: the new state, i.e. "public" or "private"
        :return: the new state of the package
        """
        publish = True if to_state == 'public' else False
        package_id = self.user.id + ':' + self.name
        query = dict(
            jwt=self.user.permissions['os.datastore']['token'],
            id=package_id,
            publish=publish
        )
        answer = handle(toggle_publish(params=query))
        if not answer['success']:
            message = 'Unable to toggle datapackage to %s'
            raise ToggleError(message, to_state)
        log.info('%s is now %s', package_id, to_state)
        return to_state

    def _check_file_formats(self):
        pass

    def _validate_data(self, raise_on_error):
        """Validate the package resources with GoodTables."""

        def summarize(feedback_, path_):
            intro = 'GoodTables has detected some errors in %s.' % path_
            hint = 'Please check out the full report: %s.' % REPORT_FILENAME
            info = feedback_['meta']
            summary = (
                'There are {bad_rows} (out of {total_rows}) bad rows '
                'and {bad_cols} (out of {total_cols}) bad columns.'
            ).format(
                bad_rows=info['bad_row_count'],
                total_rows=info['row_count'],
                bad_cols=info['bad_column_count'],
                total_cols=len(info['columns'])
            )
            log.debug(intro + summary)
            return [intro, summary, hint]

        for resource in self:
            schema = resource.descriptor['schema']
            path = resource.descriptor['path']
            filepath = join(self._base_path, path)

            pipeline = Pipeline(filepath, report_stream=StringIO())
            pipeline.register_processor('schema', options={'schema': schema})
            is_valid, report = pipeline.run()

            if is_valid:
                return []
            if raise_on_error:
                raise ValidationError('%s is invalid' % filepath)
            else:
                feedback = report.generate()
                # named `report_file` so the json module is not shadowed
                with open(REPORT_FILENAME, 'w+') as report_file:
                    report_file.write(dumps(feedback, indent=4, ensure_ascii=False))
                return summarize(feedback, path)

    @property
    def filedata(self):
        filedata = {
            resource.descriptor['path']: {
                'name': resource.descriptor['name'],
                'length': getsize(resource.source),
                'md5': compute_hash(resource.source),
                'type': resource.descriptor.get(
                    'mediatype',
                    'text/' + resource.descriptor['path'].split('.')[-1]),
            } for resource in self
        }
        descriptor_file = {
            basename(self.filepath): {
                'name': self.name,
                'length': getsize(self.filepath),
                'md5': compute_hash(self.filepath),
                'type': 'application/octet-stream',
            }
        }
        filedata.update(descriptor_file)
        return {
            'filedata': filedata,
            'metadata': {
                'owner': self.user.id,
                'name': self.name
            }
        }

    @property
    def bytes(self):
        return sum([file['length'] for file in self.filedata['filedata'].values()])

    def _get_header(self, path, content_type):
        filepath = join(self.base_path, path)
        return {'Content-Length': str(getsize(filepath)),
                'Content-MD5': compute_hash(filepath),
                'Content-Type': content_type}

    @property
    def _descriptor_s3_url(self):
        return join(settings.S3_BUCKET_URL, self.user.id, self.name, self.path)

    def _request_s3_upload(self):
        """Request AWS S3 upload urls for all files."""
        response = request_upload(
            params=dict(jwt=self.user.permissions['os.datastore']['token']),
            json=self.filedata
        )
        files = handle(response)['filedata']

        for path, info in files.items():
            message = '%s is ready for upload to %s'
            log.info(message, path, info['upload_url'])
            query = {k: v[0] for k, v in info['upload_query'].items()}
            yield (info['upload_url'], path, query,
                   self._get_header(path, info['type']))

    def _push_to_s3(self, url, path, query, headers):
        """Send data files for upload to the S3 bucket."""
        log.debug('Started uploading %s to %s', path, url)
        log.debug('Headers: %s', headers)
        log.debug('Query parameters: %s', query)

        absolute_path = join(self.base_path, path)
        stream = io.open(absolute_path, mode='rb')
        future = self._session.put(url,
                                   headers=headers,
                                   data=stream,
                                   params=query,
                                   background_callback=self._s3_callback)
        self._streams.append(stream)
        self._futures.append(future)

    @staticmethod
    def _s3_callback(_, response):
        handle(response)
        log.info('Successful S3 upload: %s', response.url)

    def _handle_promises(self):
        """Collect all promises from S3 uploads."""
        for stream, future in zip(self._streams, self._futures):
            exception = future.exception()
            if exception:
                raise exception
            response = future.result()

            if response.status_code != 200:
                message = 'Something went wrong uploading %s to S3: %s'
                log.error(message, response.url, response.text)
                raise HTTPError(message % (response.url, response.text))

            self._responses.append(response)
            stream.close()

    def _insert_into_datastore(self):
        """Transfer datafiles from S3 into the postgres datastore.

        :return: the url of the fiscal datapackage on Open-Spending
        """
        query = {
            'jwt': self.user.permissions['os.datastore']['token'],
            'datapackage': self._descriptor_s3_url
        }
        response = upload_package(params=query)
        handle(response)
        log.info('Congratulations, %s was uploaded successfully!', self)
        log.info('You can find your fiscal datapackage here: %s', self.url)
        return self.url

    def __len__(self):
        return len(self.resources)

    def __repr__(self):
        return '<FiscalDataPackage [%s files]: %s>' % (len(self), self.name)

    def __str__(self):
        return self.name

    def __iter__(self):
        for resource in self.resources:
            yield resource

    def __getitem__(self, index):
        return self.resources[index]
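For reference, a hypothetical end-to-end use of the class above, assuming a descriptor on disk and an authenticated gobble user; the import, path, and User name are illustrative only.

# Hypothetical usage sketch; 'User' and the path are placeholders.
# from gobble.user import User
#
# user = User()
# package = FiscalDataPackage('my-budget/datapackage.json', user=user)
# errors = package.validate(raise_on_error=False, schema_only=False)
# if not errors:
#     url = package.upload(publish=False)   # upload privately first
#     package.toggle('public')              # publish once reviewed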