def devices_import():
    try:
        file_prefix = 'device_import_' + g.tenant_uid
        file_name = excels.save(request.files['file'], name=file_prefix + '.')
    except UploadNotAllowed:
        error = {'Upload': 'Upload file format error'}
        raise APIException(errors=error)
    file_path = excels.path(file_name)
    import_url = current_app.config.get('IMPORT_EXCEL_TASK_URL')
    request_json = {
        'filePath': file_path,
        'tenantID': g.tenant_uid,
        'userIntID': g.user_id,
        'language': g.language
    }
    # Hand the uploaded file off to the task scheduler for asynchronous import
    with SyncHttp() as sync_http:
        response = sync_http.post(import_url, json=request_json)

    handled_response = handle_task_scheduler_response(response)
    if handled_response.get('status') == 3:
        # [7:] drops the fixed URL prefix so the client gets a relative status path
        query_status_url = url_for('base.get_task_scheduler_status')[7:]
        task_id = handled_response['taskID']
        record = {
            'status': 3,
            'taskID': task_id,
            'message': 'Devices import is in progress',
            'result': {
                'statusUrl': f"{query_status_url}?taskID={task_id}"
            }
        }
    else:
        record = {
            'status': 4,
            'message': handled_response.get('error') or 'Devices import failed'
        }
    return jsonify(record)
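# NOTE: handle_task_scheduler_response() is defined elsewhere in the codebase;
# the sketch below is only an assumption about its shape, inferred from how the
# import and export views consume its return value: a dict with 'status'
# (3 = task accepted and in progress), 'taskID' on success, and 'error' on failure.
def handle_task_scheduler_response_sketch(response):
    """Hypothetical normalizer for the task scheduler's HTTP response."""
    try:
        payload = json.loads(response.responseContent)
    except (TypeError, ValueError):
        payload = {}
    if response.responseCode not in (200, 201) or not payload.get('taskID'):
        # Surface whatever error text the scheduler returned, if any
        return {'status': 4, 'error': payload.get('error')}
    return {'status': 3, 'taskID': payload['taskID']}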
def export_devices():
    device_count = db.session.query(func.count(Device.id)) \
        .filter(Device.tenantID == g.tenant_uid).scalar()
    # Guard against oversized or empty exports before scheduling the task
    if device_count and device_count > 10000:
        raise ResourceLimited(field='devices')
    if device_count == 0:
        raise DataNotFound(field='devices')
    export_url = current_app.config.get('EXPORT_EXCEL_TASK_URL')
    request_json = {
        'tenantID': g.tenant_uid,
        'language': g.language
    }
    with SyncHttp() as sync_http:
        response = sync_http.post(export_url, json=request_json)

    handled_response = handle_task_scheduler_response(response)
    if handled_response.get('status') == 3:
        # [7:] drops the fixed URL prefix so the client gets a relative status path
        query_status_url = url_for('base.get_task_scheduler_status')[7:]
        task_id = handled_response['taskID']
        record = {
            'status': 3,
            'taskID': task_id,
            'message': 'Devices export is in progress',
            'result': {
                'statusUrl': f"{query_status_url}?taskID={task_id}"
            }
        }
    else:
        record = {
            'status': 4,
            'message': handled_response.get('error') or 'Devices export failed'
        }
    return jsonify(record)
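# NOTE: illustrative caller-side usage, not part of this module. A client that
# receives the record above can poll the returned statusUrl until the task
# leaves the in-progress state. The 5-second interval, the attempt limit and the
# reuse of SyncHttp are assumptions for the sketch.
def poll_task_status_sketch(status_url, interval=5, max_attempts=60):
    """Hypothetical polling loop against base.get_task_scheduler_status."""
    for _ in range(max_attempts):
        with SyncHttp() as sync_http:
            response = sync_http.get(url=status_url)
        status = json.loads(response.responseContent).get('status')
        if status != 3:  # anything other than 3 means the task has finished
            return status
        time.sleep(interval)
    return None  # gave up waiting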
def validate_webhook(self, data):
    url = data.get('url')
    token = data.get('token')
    if not all([url, token]):
        return
    # Build a signed challenge: sha1(token + timestamp + nonce)
    timestamp = int(time.time())
    nonce = generate_uuid(size=10)
    hash_str = f"{token}{timestamp}{nonce}".encode('utf-8')
    signature = hashlib.sha1(hash_str).hexdigest()
    validate_status = True
    params = dict(signature=signature, timestamp=timestamp, nonce=nonce)
    with SyncHttp() as sync_http:
        response = sync_http.get(url=url, params=params)
    if response.responseCode != 200:
        validate_status = False
    try:
        # The webhook endpoint must echo the nonce back to prove it owns the URL
        response_dict = json.loads(response.responseContent)
        if response_dict.get('nonce') != params.get('nonce'):
            validate_status = False
    except Exception as e:
        logger.error(f"Webhook {e}", exc_info=True)
        validate_status = False
    if not validate_status:
        raise FormInvalid(field='Webhook url')
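# NOTE: illustrative only -- a minimal sketch of the receiving side that would
# pass the validation above. It assumes the consumer shares the same token and
# simply echoes the nonce back as JSON with HTTP 200; verifying the signature is
# optional hardening. It is written as a plain handler (args = query-string dict)
# rather than being tied to any particular web framework.
def webhook_challenge_handler_sketch(args, shared_token):
    """Hypothetical GET handler for the challenge sent by validate_webhook()."""
    expected = hashlib.sha1(
        f"{shared_token}{args.get('timestamp')}{args.get('nonce')}".encode('utf-8')
    ).hexdigest()
    if args.get('signature') != expected:
        return 403, {'error': 'bad signature'}
    # Echoing the nonce with a 200 response is what validate_webhook() requires
    return 200, {'nonce': args.get('nonce')}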
def stream_rule_http(method: AnyStr, **kwargs):
    with SyncHttp() as sync_http:
        # Dispatch to the SyncHttp verb named by `method` (e.g. 'post', 'delete')
        response = getattr(sync_http, method)(**kwargs)
    if response.responseCode not in [200, 201]:
        # Keep the database consistent with the rule engine on failure
        db.session.rollback()
        raise InternalError(field='stream')
    return response
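# NOTE: illustrative usage only; the URL and payload are placeholders. Because
# stream_rule_http() dispatches on the method name, callers pass the verb plus
# whatever keyword arguments the corresponding SyncHttp method accepts.
def create_stream_rule_sketch(rule_url, rule_payload):
    """Hypothetical caller that publishes a rule and parses the reply."""
    response = stream_rule_http('post', url=rule_url, json=rule_payload)
    return json.loads(response.responseContent)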
def run_code():
    """
    Codec API response format:

    {
        "error": {
            "decode": "error message"
        },
        "output": {
            "status_code": 0,
            "result": {
                "data_type": "event",
                "data": {
                    "humidity": {
                        "time": 1547660823,
                        "value": 34
                    },
                    "temperature": {
                        "time": 1547660823,
                        "value": -3.7
                    }
                }
            }
        }
    }
    """
    request_json = CodeRunSchema.validate_request()
    analog_type = request_json.get('analogType')
    protocol = db.session.query(Product.cloudProtocol) \
        .filter(Product.productID == request_json.get('productID')) \
        .scalar()
    if protocol is None:
        raise DataNotFound(field='productID')

    # Forward the code and test input to the codec node for execution
    request_url = f"http://{current_app.config['CODEC_NODE']}/api/v1/codec"
    with SyncHttp() as sync_http:
        response = sync_http.post(request_url, json=request_json)
    if response.responseCode != 200:
        try:
            errors = json.loads(response.responseContent)
        except Exception:
            errors = {
                'codec': response.responseContent
            }
        raise APIException(errors=errors)

    response_json = json.loads(response.responseContent)
    # Return the response as-is if the codec reported an error
    if 'error' in response_json:
        return jsonify(response_json)

    output_data = response_json.get('output')
    status_code = output_data.get('status_code')
    # If the status code is 1 (ERROR) or the analog type is 2 (encode),
    # return the response without validation
    if status_code == 1 or analog_type == 2:
        return jsonify(response_json)

    # Validate the decoded result and match it against the product's data stream
    result = output_data.get('result')
    error_dict = {}
    validate_data, validate_error = DecodeSchema().load(result)
    for key, value in validate_error.items():
        error_dict[key] = value[0][:-1]
    data_stream = DataStream.query \
        .filter(DataStream.productID == request_json.get('productID'),
                DataStream.tenantID == g.tenant_uid,
                DataStream.topic == request_json.get('topic'),
                DataStream.streamID == validate_data.get('stream_id')) \
        .first()
    if not data_stream:
        raise DataNotFound(field='data_stream')
    error, passed_data = validate_decode_response(data_stream, validate_data)
    error_dict.update(error)
    record = {
        'output': {
            'status_code': status_code,
            'result': passed_data
        }
    }
    if error_dict:
        record['error'] = error_dict
    return jsonify(record)
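# NOTE: DecodeSchema is defined elsewhere in the codebase; the sketch below is
# only an assumption about its minimal shape, inferred from how run_code() uses
# it. It presumes a marshmallow 2.x-style load() that returns a (data, errors)
# tuple and a decoded result carrying stream_id, data_type and a data dict keyed
# by data point, as in the docstring example above.
from marshmallow import Schema, fields


class DecodeSchemaSketch(Schema):
    stream_id = fields.Str(required=True)
    data_type = fields.Str(required=True)  # e.g. 'event' in the docstring example
    data = fields.Dict(required=True)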