def login(self, session: Session, url: str, login_data: dict):
    """Submit *login_data* as a JSON body to the device's password endpoint.

    Returns the raw response from the PUT so the caller can inspect status.
    """
    endpoint = f"{url}/php/ajaxSet_Password.php"
    request_headers = {
        "Content-Type": "application/json",
        "csrfNonce": "undefined",
    }
    payload = json.dumps(login_data)
    return session.put(endpoint, headers=request_headers, data=payload)
async def markfinish(app, s: Session, group, member, msg: str):
    """Mark a Canvas calendar event as complete for the requesting member.

    Expects *msg* of the form "<command> <event title>". Looks the title up
    among the user's planner items (from today onwards), then creates or
    updates a planner override with marked_complete=True and reports the
    outcome to the group chat.
    """
    text = msg.split(' ')
    if len(text) == 1:
        await app.sendGroupMessage(group,
                                   MessageChain.create([Plain('请输入标题!')]))
        # Bug fix: previously fell through and raised IndexError on text[1].
        return
    elif len(text) != 2:
        return
    event_title = text[1]
    url = 'http://canvas.tongji.edu.cn/api/v1/planner/items?per_page=50&start_date=' + \
        datetime.now().strftime('%Y-%m-%d')
    r = s.get(url)
    # Canvas prefixes JSON responses with an anti-hijacking guard; strip it.
    data = json.loads(r.text.replace('while(1);', ''))
    plannable_id = ''
    event_id = None
    for i in data:
        if i['context_type'] == 'User' and i['plannable'][
                'title'] == event_title:
            plannable_id = i['plannable_id']
            if i['planner_override'] is not None:
                event_id = i['planner_override']['id']
            break
    else:
        await app.sendGroupMessage(
            group, MessageChain.create([Plain('查无此事件,请检查标题是否输入正确')]))
        # Bug fix: previously continued and posted an override with an
        # empty plannable_id, then sent a second (spurious) status message.
        return
    data = {
        'id': event_id,
        'marked_complete': True,
        'plannable_id': plannable_id,
        'plannable_type': 'calendar_event',
        'user_id': get_id(member.id),
        'authenticity_token': parse.unquote(
            requests.utils.dict_from_cookiejar(s.cookies)['_csrf_token'])
    }
    if event_id is None:
        # No override exists yet: create one.
        url = 'http://canvas.tongji.edu.cn/api/v1/planner/overrides'
        r = s.post(url, data=data)
    else:
        # Override exists: update it in place.
        url = 'http://canvas.tongji.edu.cn/api/v1/planner/overrides/' + \
            str(event_id)
        r = s.put(url, data=data)
    if not is_json(r.text) or 'errors' in r.json():
        await app.sendGroupMessage(group,
                                   MessageChain.create([Plain('标记为完成失败!')]))
    else:
        await app.sendGroupMessage(group,
                                   MessageChain.create([Plain('标记为完成成功!')]))
def put(self, url, data=None, **kwargs):
    """PUT *data* to the resource at *url*.

    Expands *url* to a full resource URI and fills in the default timeout
    before delegating to the base Session implementation.
    """
    resolved_uri = self._get_resource_uri(url)
    timeout_kwargs = self._set_default_timeout(**kwargs)
    return Session.put(self, resolved_uri, data, **timeout_kwargs)
def update(session: Session, user: User) -> Response:
    """Push the user's profile (names keyed by email) to the engine API."""
    app.logger.debug("UserApi#update_user: {} {}".format(user.last_name, user.first_name))
    profile = {
        'id': user.email,
        'firstName': user.first_name,
        'lastName': user.last_name,
    }
    endpoint = URL_ENGINE_API + '/user/' + user.email + '/profile'
    return session.put(url=endpoint, json=profile)
class HTTPRequest(object):
    """Keeps cookies in one Session so they carry over to the next request."""

    def __init__(self):
        # One Session instance records cookies across consecutive calls.
        self.session = Session()

    def request(self, method, url, params=None, data=None, headers=None,
                cookies=None, json=None):
        """Dispatch an HTTP request by method name (case-insensitive).

        POST prefers the *json* payload when given, otherwise form *data*.
        Unknown methods fall through and return None, as before.
        """
        method = method.lower()
        if method == "post":
            # json takes priority over form data for APIs that expect JSON.
            if json:
                output_log.info("正在发送请求,请求地址:{}, 请求参数:{}".format(url, json))
                return self.session.post(url=url, json=json, headers=headers,
                                         cookies=cookies)
            output_log.info("正在发送请求,请求地址:{}, 请求参数:{}".format(url, data))
            return self.session.post(url=url, data=data, headers=headers,
                                     cookies=cookies)
        if method == "get":
            output_log.info("正在发送请求,请求地址:{}, 请求参数:{}".format(url, params))
            return self.session.get(url=url, params=params, headers=headers,
                                    cookies=cookies)
        if method == 'put':
            output_log.info("正在发送请求,请求地址:{}, 请求参数:{}".format(url, data))
            return self.session.put(url=url, data=data, headers=headers,
                                    cookies=cookies)
        if method == 'delete':
            # NOTE(review): data is logged but not forwarded to the request —
            # preserved as in the original; confirm this is intentional.
            output_log.info("正在发送请求,请求地址:{}, 请求参数:{}".format(url, data))
            return self.session.delete(url=url, headers=headers,
                                       cookies=cookies)

    def close(self):
        self.session.close()
class HTTPRequest(object):
    """Wrap a requests Session so cookies persist across requests."""

    def __init__(self):
        # One Session records cookies for reuse by subsequent requests.
        self.session = Session()

    def request(self, method, url, params=None, data=None, headers=None,
                cookies=None, json=None):
        """Send an HTTP request chosen by *method* (case-insensitive).

        POST uses the *json* payload when provided (for JSON APIs),
        otherwise form *data*. Returns the requests Response, or None
        for unsupported methods.
        """
        # Normalize the method name to lowercase.
        method = method.lower()
        if method == "post":
            # Prefer json for projects whose interfaces take JSON bodies.
            if json:
                logging.info("正在发送请求,请求地址:{},请求参数:{}".format(url, json))
                response = self.session.post(url=url, json=json, headers=headers, cookies=cookies)
                return response
            else:
                logging.info("正在发送请求,请求地址:{},请求参数:{}".format(url, data))
                response = self.session.post(url=url, data=data, headers=headers, cookies=cookies)
                return response
        elif method == "get":
            logging.info("正在发送请求,请求地址:{},请求参数:{}".format(url, params))
            response = self.session.get(url=url, params=params, headers=headers, cookies=cookies)
            return response
        elif method == "put":
            logging.info("正在发送请求,请求地址:{},请求参数:{}".format(url, data))
            response = self.session.put(url=url, data=data, headers=headers, cookies=cookies)
            return response
        elif method == "delete":
            # Bug fix: DELETE previously fell through and returned None;
            # handle it like the other verbs (headers/cookies only, no body).
            logging.info("正在发送请求,请求地址:{},请求参数:{}".format(url, data))
            response = self.session.delete(url=url, headers=headers, cookies=cookies)
            return response

    def close(self):
        """Release the underlying Session and its connections."""
        self.session.close()
class UploadDialog(QDialog, FORM_CLASS):
    """
    Modal dialog allowing to upload the data to the OQ-Platform.

    Logs in to the platform, hands the actual upload off to an UploadWorker
    in a separate thread, then (for rule-based renderers) pushes a matching
    SLD style to the platform's GeoServer and shows the resulting layer page
    in an embedded web view.
    """
    # Emitted with the uploaded layer's URL once the upload succeeds.
    upload_successful = pyqtSignal(str)

    def __init__(self, iface, file_stem):
        self.iface = iface
        QDialog.__init__(self)
        # Set up the user interface from Designer.
        self.setupUi(self)
        self.message_bar = QgsMessageBar()
        self.message_bar.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
        self.layout().insertWidget(0, self.message_bar)
        self.message_bar_item = None
        self.button_box = self.buttonBox
        self.hostname, self.username, self.password = get_credentials(
            'platform')
        self.web_view = self.web_view
        self.page = self.web_view.page()
        self.frame = self.page.mainFrame()
        # requests Session shared with the worker; also the source of the
        # sessionid cookie mirrored into the web view's cookie jar.
        self.session = Session()
        self.network_access_manager = self.page.networkAccessManager()
        self.cookie_jar = QNetworkCookieJar()
        self.network_access_manager.setCookieJar(self.cookie_jar)
        self._setup_context_menu()
        self.frame.javaScriptWindowObjectCleared.connect(self._setup_js)
        self.file_stem = file_stem
        self.layout().setContentsMargins(0, 0, 0, 0)
        self.web_view.loadFinished.connect(self.load_finished)
        self.layer_url = None

    def showEvent(self, event):
        # Start the upload as soon as the dialog becomes visible.
        super(UploadDialog, self).showEvent(event)
        self.upload()

    def upload(self):
        """Log in to the platform and launch the upload in a worker thread."""
        try:
            self._login_to_platform()
        except SvNetworkError as e:
            error_msg = ('Unable to login to the platform: ' + e.message)
            self.message_bar.pushMessage('Error', error_msg, duration=0,
                                         level=Qgis.Critical)
            return
        # adding by emitting signal in different thread
        worker = UploadWorker(self.hostname, self.session, self.file_stem,
                              self.username, self.iface.activeLayer())
        worker.successfully_finished.connect(self.upload_done)
        start_worker(worker, self.message_bar, 'Uploading data')

    def _update_layer_style(self):
        """Export the active layer's SLD and attach it to the uploaded layer."""
        # The file stem contains the full path. We get just the basename
        layer_name = os.path.basename(self.file_stem)
        # Since the style name is set by default substituting '-' with '_',
        # to get the right style we need to do the same substitution
        style_name = layer_name.replace('-', '_')
        try:
            sld = getGsCompatibleSld(self.iface.activeLayer(), style_name)
        except Exception as e:
            error_msg = ('Unable to export the styled layer descriptor: ' +
                         str(e))
            log_msg(error_msg, level='C', message_bar=self.message_bar,
                    exception=e)
            return
        if DEBUG:
            # Dump the SLD to a temp file and pretty-print it for inspection.
            import tempfile
            fd, fname = tempfile.mkstemp(suffix=".sld")
            os.close(fd)
            with open(fname, 'w', newline='') as f:
                f.write(sld)
            os.system('tidy -xml -i %s' % fname)
        headers = {'content-type': 'application/vnd.ogc.sld+xml'}
        # NOTE: to save the style, we actually need to do both the post and the
        # put in this sequence, otherwise it doesn't work. We still haven't
        # found any better way to obtain the same correct behavior
        resp = self.session.post(
            self.hostname + '/gs/rest/styles/%s.sld' % style_name,
            data=sld, headers=headers)
        resp = self.session.put(
            self.hostname + '/gs/rest/styles/%s.sld' % style_name,
            data=sld, headers=headers)
        if DEBUG:
            log_msg('Style upload response: %s' % resp)
        if not resp.ok:
            error_msg = ('Error while styling the uploaded layer: ' +
                         resp.reason)
            self.message_bar.pushMessage('Style error', error_msg, duration=0,
                                         level=Qgis.Critical)
        select_style_xml = """
<layer>
    <name>oqplatform:%s</name>
    <defaultStyle>
        <name>%s</name>
        <atom:link xmlns:atom="http://www.w3.org/2005/Atom" rel="alternate" href="http://127.0.0.1:8080/geoserver/rest/styles/%s.xml" type="application/xml"/>
    </defaultStyle>
</layer>""" % (style_name, style_name, style_name)
        headers = {'content-type': 'text/xml'}
        # Make the freshly uploaded style the layer's default style.
        resp = self.session.put(
            self.hostname + '/gs/rest/layers/%s.xml' % style_name,
            data=select_style_xml, headers=headers)
        if DEBUG:
            log_msg('Style selection response: %s' % resp)
        if not resp.ok:
            error_msg = (
                'Error while selecting the style of the loaded layer: ' +
                resp.reason)
            # NOTE(review): uses QgsMessageBar.CRITICAL while the rest of this
            # class uses Qgis.Critical — confirm which enum is intended here.
            self.message_bar.pushMessage('Style error', error_msg, duration=0,
                                         level=QgsMessageBar.CRITICAL)

    def upload_done(self, result):
        """Worker callback: show the uploaded layer page or report the error."""
        layer_url, success = result
        # In case success == 'False', layer_url contains the error message
        if success:
            # so far, we implemented the style-converter only for the
            # rule-based styles. Only in those cases, we should add a style to
            # the layer to be uploaded. Otherwise, it's fine to use the default
            # basic geonode style.
            if isinstance(self.iface.activeLayer().renderer(),
                          QgsRuleBasedRenderer):
                self._update_layer_style()
            else:
                self.message_bar.pushMessage('Info',
                                             'Using the basic default style',
                                             level=Qgis.Info)
            self.message_bar_item, _ = create_progress_message_bar(
                self.message_bar, 'Loading page......', no_percentage=True)
            self.web_view.load(QUrl(layer_url))
            self.layer_url = layer_url
            self.upload_successful.emit(layer_url)
        else:
            error_msg = layer_url
            clear_progress_message_bar(self.message_bar)
            self.message_bar.pushMessage('Upload error', error_msg, duration=0,
                                         level=Qgis.Critical)

    def load_finished(self):
        """Web-view callback: clear the progress bar and enable the buttons."""
        clear_progress_message_bar(self.message_bar, self.message_bar_item)
        if not self.button_box.isEnabled():
            self.button_box.setEnabled(True)
            self.button_box.addButton('Close', QDialogButtonBox.NoRole)
            self.button_box.addButton('Close and show in browser',
                                      QDialogButtonBox.YesRole)

    def _login_to_platform(self):
        """Authenticate and mirror the sessionid cookie into the web view."""
        self.hostname, self.username, self.password = get_credentials(
            'platform')
        platform_login(self.hostname, self.username, self.password,
                       self.session)
        sessionid = dict_from_cookiejar(self.session.cookies)['sessionid']
        sessionid_cookie = QNetworkCookie(b'sessionid',
                                          sessionid.encode('utf8'))
        self.cookie_jar.setCookiesFromUrl([sessionid_cookie],
                                          QUrl(self.hostname))

    def _setup_context_menu(self):
        # Enable the web inspector only when developer mode is turned on.
        settings = QSettings()
        developer_mode = settings.value('/irmt/developer_mode', True,
                                        type=bool)
        if developer_mode is True:
            self.web_view.page().settings().setAttribute(
                QWebSettings.DeveloperExtrasEnabled, True)
        else:
            self.web_view.setContextMenuPolicy(Qt.NoContextMenu)

    def _setup_js(self):
        # pass a reference (called qt_page) of self to the JS world
        # to expose a member of self to js you need to declare it as property
        # see for example self.json_str()
        self.frame.addToJavaScriptWindowObject('qt_page', self)
class Server:
    """Thin wrapper around a requests Session with shared default headers."""

    def __init__(self):
        self._session = Session()
        # Default headers attached to most requests when none are supplied.
        self.headers = {}
        self.timelineHeaders = {}

    def additionalHeaders(self, source, newSource):
        """Return a new dict merging *source* and *newSource* (newSource wins)."""
        headerList = {}
        headerList.update(source)
        headerList.update(newSource)
        return headerList

    def urlEncode(self, url, path, params=None):
        """Build ``url + path + '?' + urlencoded(params)``.

        Bug fix: *params* previously defaulted to a shared mutable list
        (``params=[]``); a None sentinel gives identical results without
        the mutable-default pitfall.
        """
        if params is None:
            params = []
        return url + path + '?' + urllib.parse.urlencode(params)

    def getJson(self, url, allowHeader=False):
        """GET *url* and parse the response body as JSON.

        The identity check against False is preserved from the original:
        any value other than the False singleton sends the default headers.
        """
        if allowHeader is False:
            return json.loads(self._session.get(url).text)
        else:
            return json.loads(
                self._session.get(url, headers=self.headers).text)

    def optionsContent(self, url, headers=None, data=None):
        """OPTIONS request; falls back to the default headers."""
        if headers is None:
            headers = self.headers
        return self._session.options(url, headers=headers, data=data)

    def postContent(self, url, headers=None, data=None, files=None):
        """POST request with optional multipart *files*; default headers."""
        if headers is None:
            headers = self.headers
        return self._session.post(url, headers=headers, data=data,
                                  files=files)

    def getContent(self, url, headers=None):
        """Streaming GET request (response body not pre-loaded)."""
        if headers is None:
            headers = self.headers
        return self._session.get(url, headers=headers, stream=True)

    def deleteContent(self, url, data=None, headers=None):
        """DELETE request with an optional body; default headers."""
        if headers is None:
            headers = self.headers
        return self._session.delete(url, headers=headers, data=data)

    def putContent(self, url, headers=None, data=None):
        """PUT request; falls back to the default headers."""
        if headers is None:
            headers = self.headers
        return self._session.put(url, headers=headers, data=data)

    def saveFile(self, path, raw):
        """Stream *raw* (a file-like object) to *path* in binary mode."""
        with open(path, 'wb') as f:
            shutil.copyfileobj(raw, f)

    def deleteFile(self, path):
        """Remove *path* if it exists; return True on removal, else False."""
        if os.path.exists(path):
            os.remove(path)
            return True
        else:
            return False
def restart(self, session: Session, url: str):
    """Ask the device to restart via its status endpoint.

    Raises requests.HTTPError (via raise_for_status) when the device
    answers with a non-OK status.
    """
    payload = json.dumps({"RestartReset": "Restart"})
    response = session.put(f"{url}/php/ajaxSet_status_restart.php",
                           data=payload)
    if not response.ok:
        response.raise_for_status()
def put(self, url, data=None, **kwargs):
    """Issue a PUT against the resolved resource URI.

    Any missing timeout in *kwargs* is filled in with the default before
    delegating to the base Session.
    """
    kwargs = self._set_default_timeout(**kwargs)
    return Session.put(self, self._get_resource_uri(url), data, **kwargs)
class AzureDataFactoryDeployer(object):
    """
    Deployment tool for Azure Data Factories.

    Diffs the ADF objects found in a local repository folder against the
    objects currently deployed in the factory, then deletes, creates, and
    updates objects through the ARM REST API to make the factory match.
    """

    # ARM REST API version used for every request.
    API_VERSION = '2018-06-01'

    def __init__(
        self,
        credentials,
        subscription_id,
        resource_group_name,
        name,
        path,
    ):
        """
        AzureDataFactoryDeployer class constructor. The main (and only)
        way to create an AzureDataFactoryDeployer instance.

        Parameters
        ----------
        credentials
            A valid form of Azure credentials. In most cases this will be
            a valid instance of the `ServicePrincipalCredentials` class.
            This class is imported from the `azure.common.credentials`
            module.
        subscription_id
            Identifier of the subscription in which the Azure Data Factory
            instance is located.
        resource_group_name
            Name of the resource group in which the Azure Data Factory
            instance is located.
        name
            Name of the data factory instance to create or update.
        path
            Path to folder containing the Azure Data Factory resources.
            This must be the root folder that contains the folders called
            "dataset", "linkedService", "pipeline", and "trigger".
        """
        self._path = path
        self._adf_url = 'https://management.azure.com/{}/{}'.format(
            'subscriptions/{}/resourceGroups/{}'.format(
                subscription_id,
                resource_group_name,
            ),
            'providers/Microsoft.DataFactory/factories/{}'.format(name),
        )
        # All ARM calls share one session carrying the bearer token.
        self.session = Session()
        self.session.headers.update({
            'Authorization': 'bearer {}'.format(
                credentials.token['access_token'],
            ),
        })

    def deploy(self):
        """Deploy every ADF object type, in the order defined by ADFObjectType."""
        print('Starting deployment...')
        for object_type in ADFObjectType:
            self._deploy(object_type)
        print('Completed deployment.')

    def _deploy(self, object_type):
        """Diff repo state against deployed state for one object type and apply it."""
        old_objects = self._get(object_type)
        old_objects = {o['name']: o for o in old_objects}
        # Get updated list of objects according to the current state of the
        # repository.
        new_objects = self._get_objects_from_repo(object_type)
        new_objects = {o['name']: o for o in new_objects}
        # The objects that were in the existing list but not in the list of
        # updated objects are up for deletion. Note that there is a
        # possibility that they were renamed, but in this case a new object
        # will be made with the new name instead.
        objects_to_be_deleted = [
            o for o in old_objects
            if o not in new_objects
        ]
        # All objects that have their names in both the old and updated list
        # of objects are to be updated, but only if their contents are no
        # longer the same.
        objects_to_be_updated = [
            o for o in new_objects
            if o in old_objects
            and new_objects[o]['properties'] != old_objects[o]['properties']
        ]
        # The only objects that are new, are the ones that do not appear in
        # the existing list of objects. Note that some of these might be
        # renamed versions of existing objects, but it does little harm to
        # create a new one as the previous name will then be up for deletion.
        objects_to_be_created = [
            o for o in new_objects
            if o not in old_objects
        ]
        if object_type is ADFObjectType.LinkedService:
            # Linked services may reference each other; deploy referenced
            # services before their dependents.
            objects_to_be_created = self.get_deploy_order([
                new_objects[ls] for ls in objects_to_be_created
            ])
            objects_to_be_updated = self.get_deploy_order([
                new_objects[ls] for ls in objects_to_be_updated
            ])
        for o in objects_to_be_deleted:
            print('Deleting {}: {}...'.format(object_type, o))
            self._delete(object_type, o)
            print('Deleted {}: {}.'.format(object_type, o))
        for o in objects_to_be_created:
            print('Creating {}: {}'.format(object_type, o))
            self._create(object_type, o, new_objects[o])
            print('Created {}: {}'.format(object_type, o))
        for o in objects_to_be_updated:
            print('Updating {}: {}'.format(object_type, o))
            self._update(object_type, o, new_objects[o],
                         old_objects[o]['etag'])
            print('Updated {}: {}'.format(object_type, o))
        if object_type is ADFObjectType.Trigger:
            # Wait for triggers to be processed before starting them.
            sleep(5)
            for o in objects_to_be_created + objects_to_be_updated:
                self._start_trigger(o)

    def _create(self, object_type, name, data):
        """Create an ADF object (ARM uses PUT for create); return status code."""
        response = self.session.put(
            '{}/{}s/{}?api-version={}'.format(
                self._adf_url,
                object_type,
                name,
                self.API_VERSION,
            ),
            json=data,
        )
        return response.status_code

    def _get(self, object_type):
        """List deployed objects of *object_type*; return the 'value' array."""
        response = self.session.get(
            '{}/{}s?api-version={}'.format(
                self._adf_url,
                object_type,
                self.API_VERSION,
            ),
        )
        return response.json()['value']

    def _update(self, object_type, name, data, etag):
        """Update an existing object, guarded by its etag via If-Match."""
        response = self.session.put(
            '{}/{}s/{}?api-version={}'.format(
                self._adf_url,
                object_type,
                name,
                self.API_VERSION,
            ),
            json=data,
            headers={
                'If-Match': etag,
            },
        )
        return response.status_code

    def _delete(self, object_type, name):
        """Delete the named object; return the HTTP status code."""
        response = self.session.delete(
            '{}/{}s/{}?api-version={}'.format(
                self._adf_url,
                object_type,
                name,
                self.API_VERSION,
            ),
        )
        return response.status_code

    def _start_trigger(self, trigger_name):
        """Start a deployed trigger; return the HTTP status code."""
        print('Starting trigger: {}'.format(trigger_name))
        response = self.session.post(
            '{}/triggers/{}/start?api-version={}'.format(
                self._adf_url,
                trigger_name,
                self.API_VERSION,
            ),
        )
        print('Started trigger: {}'.format(trigger_name))
        return response.status_code

    def _get_objects_from_repo(self, object_type):
        """Load every JSON object definition from the repo folder for *object_type*."""
        object_dir_path = '{}/{}'.format(self._path, object_type)
        objects = []
        for object_file_name in listdir(object_dir_path):
            object_file_path = '{}/{}'.format(object_dir_path,
                                              object_file_name)
            with open(object_file_path, 'r') as object_file:
                object_json = json_load(object_file)
                objects.append(object_json)
        return objects

    @classmethod
    def get_reference_name(cls, linked_service):
        """Depth-first search for a 'referenceName' key; None if absent."""
        if 'referenceName' in linked_service:
            return linked_service['referenceName']
        for value in linked_service.values():
            if isinstance(value, dict):
                reference_name = cls.get_reference_name(value)
                if reference_name:
                    return reference_name

    @classmethod
    def get_deploy_order(cls, linked_services):
        """Order linked-service names so referenced services come first."""
        references = [cls.get_reference_name(ls) for ls in linked_services]
        references = [ref for ref in references if ref is not None]
        dependents = [
            ls['name']
            for ls in linked_services
            if ls['name'] not in references
        ]
        return references + dependents
class Barbora:
    """Polls barbora.lt for free delivery slots and can notify an MS Teams
    webhook when slots become available."""

    def __init__(self, username: str, password: str,
                 msteams_webhook: str = None):
        self._username = username
        self._password = password
        self._msteams_webhook = msteams_webhook
        self.api_base = 'https://barbora.lt/api/eshop/v1'
        # Delivery addresses fetched after login; filled by _get_locations().
        self.locations = []
        # Initialize Cookie Jar
        self.cookie_jar = RequestsCookieJar()
        self.cookie_jar.set("region", "barbora.lt")
        # Create session
        self.session = Session()
        self.session.auth = HTTPBasicAuth('apikey', 'SecretKey')
        self.session.cookies = self.cookie_jar

    def _debug_status_code(self, response):
        # Prints status, reason and the calling method's name (via the stack).
        print(
            f"{'.'*80}{response.status_code} - {response.reason} - {inspect.stack()[1][3]}"
        )

    def _get_cookie(self) -> str:
        """Log in and return the auth cookie value ('.BRBAUTH')."""
        endpoint = f"{self.api_base}/user/login"
        payload = dict(email=self._username,
                       password=self._password,
                       rememberMe='true')
        response = self.session.post(url=endpoint, data=payload)
        self._debug_status_code(response)
        return response.cookies['.BRBAUTH']

    def _get_locations(self) -> None:
        """Fetch the account's delivery addresses into self.locations."""
        response = self.session.get(url=f"{self.api_base}/user/address")
        self._debug_status_code(response)
        self.locations = response.json()['address']

    def _set_location(self, location_id: str) -> None:
        """Switch the cart's delivery address to *location_id*."""
        endpoint = f"{self.api_base}/cart/changeDeliveryAddress"
        payload = dict(deliveryAddressId=location_id,
                       isWebRequest='true',
                       forceToChangeAddressOnFirstTry='false')
        response = self.session.put(url=endpoint, data=payload)
        self._debug_status_code(response)

    def _get_time_table(self) -> list:
        """Return the delivery-slot matrix for the current address."""
        endpoint = f"{self.api_base}/cart/deliveries"
        response = self.session.get(url=endpoint)
        self._debug_status_code(response)
        response_in_json = json.loads(response.text)
        timetable = response_in_json['deliveries'][0]['params']['matrix']
        return timetable

    def _send_message_to_msteams(self, facts):
        """Post a MessageCard listing the free slots to the Teams webhook."""
        payload = {
            "@type": "MessageCard",
            "@context": "http://schema.org/extensions",
            "themeColor": "0076D7",
            "summary": "Available slots for delivery @ Barbora",
            "sections": [{
                "activityTitle": "Available slots for delivery @ Barbora",
                "facts": facts,
                "markdown": True
            }],
        }
        response = requests.post(url=self._msteams_webhook, json=payload)
        self._debug_status_code(response)
        print("**************************************************")
        print(f"Sending MS Teams message: {response.status_code}")
        print("**************************************************")

    def run_once(self):
        """Log in, scan every address for available slots, notify if any."""
        self._get_cookie()
        self._get_locations()
        free_slots = list()
        for location in self.locations:
            self._set_location(location_id=location['id'])
            timetable = self._get_time_table()
            for day in timetable:
                for hour in day['hours']:
                    if hour['available']:
                        print(
                            f"{hour['deliveryTime']} - Go go go! - {location['address']}"
                        )
                        free_slots.append(
                            dict(name=str(hour['deliveryTime']),
                                 value=location['address']))
        if free_slots and self._msteams_webhook:
            self._send_message_to_msteams(facts=free_slots)

    def run_loop(self, delay_in_seconds=300):
        """Run run_once() forever, swallowing errors, pausing between runs."""
        while True:
            print("==================================================")
            print(f"Starting job - {datetime.datetime.now()}")
            print("==================================================")
            try:
                self.run_once()
            except Exception as e:
                # Best-effort loop: log the error and keep polling.
                print(e)
            finally:
                time.sleep(delay_in_seconds)
def create_user_member(session: Session, user: User) -> Response:
    """Register *user* as a member of its tenant via the engine API."""
    app.logger.debug("TenantApi#create_user_member: {} {} {}".format(user.last_name, user.first_name, user.tenant.name))
    endpoint = "{base}/tenant/{tenant_id}/user-members/{user_id}".format(
        base=URL_ENGINE_API,
        tenant_id=user.tenant.name,
        user_id=user.email)
    return session.put(url=endpoint)