def test_server_xml_catalina_fallback(docker_cli, image):
    """Legacy CATALINA_CONNECTOR_* / CATALINA_CONTEXT_PATH vars configure Tomcat.

    Fixed: the ``Context`` element was looked up but never checked, so the
    ``CATALINA_CONTEXT_PATH`` fallback was effectively untested.
    """
    environment = {
        'CATALINA_CONNECTOR_PROXYNAME': 'crowd.atlassian.com',
        'CATALINA_CONNECTOR_PROXYPORT': '443',
        'CATALINA_CONNECTOR_SECURE': 'true',
        'CATALINA_CONNECTOR_SCHEME': 'https',
        'CATALINA_CONTEXT_PATH': '/mycrowd',
    }
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(
        container,
        f'{get_app_install_dir(container)}/apache-tomcat/conf/server.xml')
    connector = xml.find('.//Connector')
    context = xml.find('.//Context')

    assert connector.get('proxyName') == environment.get(
        'CATALINA_CONNECTOR_PROXYNAME')
    assert connector.get('proxyPort') == environment.get(
        'CATALINA_CONNECTOR_PROXYPORT')
    assert connector.get('secure') == environment.get(
        'CATALINA_CONNECTOR_SECURE')
    assert connector.get('scheme') == environment.get(
        'CATALINA_CONNECTOR_SCHEME')
    # Previously missing: verify the context-path fallback as well.
    assert context.get('path') == environment.get('CATALINA_CONTEXT_PATH')
def test_confluence_xml_cluster_multicast(docker_cli, image, run_user):
    """Multicast cluster env vars are rendered into confluence.cfg.xml."""
    environment = {
        'ATL_CLUSTER_TYPE': 'multicast',
        'ATL_CLUSTER_NAME': 'atl_cluster_name',
        'ATL_CLUSTER_TTL': 'atl_cluster_ttl',
        'ATL_CLUSTER_ADDRESS': '99.99.99.99',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    def prop(name):
        # First matching <property name="..."> element's text.
        return xml.findall(f'.//property[@name="{name}"]')[0].text

    assert prop('confluence.cluster') == "true"
    assert prop('confluence.cluster.join.type') == "multicast"
    assert prop('confluence.cluster.name') == "atl_cluster_name"
    assert prop('confluence.cluster.ttl') == "atl_cluster_ttl"
    assert prop('confluence.cluster.address') == "99.99.99.99"
def test_dbconfig_xml_defaults(docker_cli, image):
    """With only connection settings supplied, pool tuning uses defaults."""
    environment = {
        'ATL_DB_TYPE': 'postgres72',
        'ATL_DB_DRIVER': 'org.postgresql.Driver',
        'ATL_JDBC_URL': 'jdbc:postgresql://mypostgres.mycompany.org:5432/jiradb',
        'ATL_JDBC_USER': '******',
        'ATL_JDBC_PASSWORD': '******',
    }
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/dbconfig.xml')

    expected_defaults = {
        'pool-min-size': '20',
        'pool-max-size': '100',
        'pool-min-idle': '10',
        'pool-max-idle': '20',
        'pool-max-wait': '30000',
        'validation-query': 'select 1',
        'time-between-eviction-runs-millis': '30000',
        'min-evictable-idle-time-millis': '5000',
        'pool-remove-abandoned': 'true',
        'pool-remove-abandoned-timeout': '300',
        'pool-test-while-idle': 'true',
        'pool-test-on-borrow': 'false',
    }
    for tag, value in expected_defaults.items():
        assert xml.findtext(f'.//{tag}') == value
def _http_format_output(self, response_content, response_headers): """Formats the request response to the desired type""" try: if self.output_format == 'text': output = response_content.decode('utf-8', 'ignore') elif self.output_format == 'dict': output = helpers.convert_xml_to_dict(response_content) elif self.output_format == 'json': output = helpers.convert_xml_to_json(response_content) elif self.output_format == 'xml': output = helpers.parse_xml(response_content) else: output = response_content if self.callback: return self.callback(output) if self.return_type: return output, response_headers['Content-Type'] return output except Exception as e: if not self._silent: logger.warn( u"Failed format response from uri %s to %s error %s" % (self.uri, self.output_format, e)) return None
def test_confluence_xml_default(docker_cli, image):
    """An unconfigured container writes only the build number to the cfg file."""
    container = run_image(docker_cli, image)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    assert xml.findall('.//buildNumber')[0].text == "0"
    # Neither a DB URL nor a cluster home should be rendered by default.
    for name in ('hibernate.connection.url', 'confluence.cluster.home'):
        assert xml.findall(f'.//property[@name="{name}"]') == []
def test_seraph_defaults(docker_cli, image):
    """Without ATL_AUTOLOGIN_COOKIE_AGE, seraph-config.xml has no
    autologin.cookie.age parameter.

    Fixed: the comparison result was previously assigned to an unused
    variable (``param = ... == []``) instead of being asserted, so the
    check never actually ran.
    """
    container = run_image(docker_cli, image)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(
        container,
        f'{get_app_install_dir(container)}/confluence/WEB-INF/classes/seraph-config.xml'
    )
    assert xml.findall('.//param-name[.="autologin.cookie.age"]') == []
def test_confluence_lucene_index(docker_cli, image):
    """ATL_LUCENE_INDEX_DIR overrides the Lucene index directory property."""
    index_dir = '/some/other/dir'
    container = run_image(docker_cli, image,
                          environment={'ATL_LUCENE_INDEX_DIR': index_dir})
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')
    lucene_prop = xml.findall('.//property[@name="lucene.index.dir"]')[0]
    assert lucene_prop.text == index_dir
def test_seraph_login_set(docker_cli, image):
    """ATL_AUTOLOGIN_COOKIE_AGE is written as a param-value in seraph-config.xml."""
    container = run_image(docker_cli, image,
                          environment={"ATL_AUTOLOGIN_COOKIE_AGE": "TEST_VAL"})
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(
        container,
        f'{get_app_install_dir(container)}/confluence/WEB-INF/classes/seraph-config.xml'
    )
    matches = xml.findall('.//param-value[.="TEST_VAL"]')
    assert matches[0].text == "TEST_VAL"
def test_seraph_xml_defaults(docker_cli, image):
    """The default autologin.cookie.age in seraph-config.xml is two weeks."""
    container = run_image(docker_cli, image)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(
        container,
        f'{get_app_install_dir(container)}/atlassian-jira/WEB-INF/classes/seraph-config.xml'
    )
    cookie_ages = [
        param.findtext('.//param-value')
        for param in xml.findall('.//init-param')
        if param.findtext('.//param-name') == 'autologin.cookie.age'
    ]
    assert cookie_ages[0] == '1209600'
def test_seraph_xml_params(docker_cli, image):
    """ATL_AUTOLOGIN_COOKIE_AGE overrides autologin.cookie.age."""
    environment = {'ATL_AUTOLOGIN_COOKIE_AGE': '9001'}
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(
        container,
        f'{get_app_install_dir(container)}/atlassian-jira/WEB-INF/classes/seraph-config.xml'
    )
    cookie_ages = [
        param.findtext('.//param-value')
        for param in xml.findall('.//init-param')
        if param.findtext('.//param-name') == 'autologin.cookie.age'
    ]
    assert cookie_ages[0] == environment.get('ATL_AUTOLOGIN_COOKIE_AGE')
def test_server_xml_access_log(docker_cli, image):
    """Enabling the access log configures RemoteIpValve with the given proxies."""
    internal_ips = '192.168.1.1'
    environment = {
        'ATL_TOMCAT_ACCESS_LOG': 'true',
        'ATL_TOMCAT_PROXY_INTERNAL_IPS': internal_ips,
    }
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container,
                    f'{get_app_install_dir(container)}/conf/server.xml')
    valve = xml.find(
        './/Context/Valve[@className="org.apache.catalina.valves.RemoteIpValve"]'
    )
    assert valve.get('internalProxies') == internal_ips
def test_server_xml_params(docker_cli, image):
    """Every ATL_TOMCAT_* / ATL_PROXY_* setting lands on server.xml elements."""
    environment = {
        'ATL_TOMCAT_MGMT_PORT': '8006',
        'ATL_TOMCAT_PORT': '9090',
        'ATL_TOMCAT_MAXTHREADS': '201',
        'ATL_TOMCAT_MINSPARETHREADS': '11',
        'ATL_TOMCAT_CONNECTIONTIMEOUT': '20001',
        'ATL_TOMCAT_ENABLELOOKUPS': 'true',
        'ATL_TOMCAT_PROTOCOL': 'HTTP/2',
        'ATL_TOMCAT_ACCEPTCOUNT': '11',
        'ATL_TOMCAT_SECURE': 'true',
        'ATL_TOMCAT_SCHEME': 'https',
        'ATL_PROXY_NAME': 'conf.atlassian.com',
        'ATL_PROXY_PORT': '443',
        'ATL_TOMCAT_MAXHTTPHEADERSIZE': '8193',
        'ATL_TOMCAT_CONTEXTPATH': '/myconf',
    }
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container,
                    f'{get_app_install_dir(container)}/conf/server.xml')
    connector = xml.find('.//Connector')
    context = xml.find('.//Context')

    # The management port lives on the root <Server> element.
    assert xml.get('port') == environment.get('ATL_TOMCAT_MGMT_PORT')

    connector_attr_to_env = {
        'port': 'ATL_TOMCAT_PORT',
        'maxThreads': 'ATL_TOMCAT_MAXTHREADS',
        'minSpareThreads': 'ATL_TOMCAT_MINSPARETHREADS',
        'connectionTimeout': 'ATL_TOMCAT_CONNECTIONTIMEOUT',
        'enableLookups': 'ATL_TOMCAT_ENABLELOOKUPS',
        'protocol': 'ATL_TOMCAT_PROTOCOL',
        'acceptCount': 'ATL_TOMCAT_ACCEPTCOUNT',
        'secure': 'ATL_TOMCAT_SECURE',
        'scheme': 'ATL_TOMCAT_SCHEME',
        'proxyName': 'ATL_PROXY_NAME',
        'proxyPort': 'ATL_PROXY_PORT',
        'maxHttpHeaderSize': 'ATL_TOMCAT_MAXHTTPHEADERSIZE',
    }
    for attr, env_key in connector_attr_to_env.items():
        assert connector.get(attr) == environment.get(env_key)

    assert context.get('path') == environment.get('ATL_TOMCAT_CONTEXTPATH')
def test_dbconfig_xml_params(docker_cli, image, run_user):
    """Every ATL_DB_* / ATL_JDBC_* setting is rendered into dbconfig.xml."""
    environment = {
        'ATL_DB_TYPE': 'postgres72',
        'ATL_DB_DRIVER': 'org.postgresql.Driver',
        'ATL_JDBC_URL': 'jdbc:postgresql://mypostgres.mycompany.org:5432/jiradb',
        'ATL_JDBC_USER': '******',
        'ATL_JDBC_PASSWORD': '******',
        'ATL_DB_SCHEMA_NAME': 'private',
        'ATL_DB_MAXIDLE': '21',
        'ATL_DB_MAXWAITMILLIS': '30001',
        'ATL_DB_MINEVICTABLEIDLETIMEMILLIS': '5001',
        'ATL_DB_MINIDLE': '11',
        'ATL_DB_POOLMAXSIZE': '101',
        'ATL_DB_POOLMINSIZE': '21',
        'ATL_DB_REMOVEABANDONED': 'false',
        'ATL_DB_REMOVEABANDONEDTIMEOUT': '301',
        'ATL_DB_TESTONBORROW': 'true',
        'ATL_DB_TESTWHILEIDLE': 'false',
        'ATL_DB_TIMEBETWEENEVICTIONRUNSMILLIS': '30001',
        'ATL_DB_VALIDATIONQUERY': 'select 2',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/dbconfig.xml')

    # Map each rendered XML tag to the env var that should populate it.
    tag_to_env = {
        'database-type': 'ATL_DB_TYPE',
        'driver-class': 'ATL_DB_DRIVER',
        'url': 'ATL_JDBC_URL',
        'username': 'ATL_JDBC_USER',
        'password': 'ATL_JDBC_PASSWORD',
        'schema-name': 'ATL_DB_SCHEMA_NAME',
        'pool-min-size': 'ATL_DB_POOLMINSIZE',
        'pool-max-size': 'ATL_DB_POOLMAXSIZE',
        'pool-min-idle': 'ATL_DB_MINIDLE',
        'pool-max-idle': 'ATL_DB_MAXIDLE',
        'pool-max-wait': 'ATL_DB_MAXWAITMILLIS',
        'validation-query': 'ATL_DB_VALIDATIONQUERY',
        'time-between-eviction-runs-millis': 'ATL_DB_TIMEBETWEENEVICTIONRUNSMILLIS',
        'min-evictable-idle-time-millis': 'ATL_DB_MINEVICTABLEIDLETIMEMILLIS',
        'pool-remove-abandoned': 'ATL_DB_REMOVEABANDONED',
        'pool-remove-abandoned-timeout': 'ATL_DB_REMOVEABANDONEDTIMEOUT',
        'pool-test-while-idle': 'ATL_DB_TESTWHILEIDLE',
        'pool-test-on-borrow': 'ATL_DB_TESTONBORROW',
    }
    for tag, env_key in tag_to_env.items():
        assert xml.findtext(f'.//{tag}') == environment.get(env_key)
def test_dbconfig_xml_default_schema_names(docker_cli, image, run_user,
                                           atl_db_type):
    """Each DB type gets its conventional default schema name."""
    default_schema_names = {
        'mssql': 'dbo',
        'mysql': 'public',
        'oracle10g': '',
        'postgres72': 'public',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment={'ATL_DB_TYPE': atl_db_type})
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/dbconfig.xml')
    # Unknown DB types fall back to an empty schema name.
    assert xml.findtext('.//schema-name') == default_schema_names.get(
        atl_db_type, '')
def test_confluence_xml_cluster_aws(docker_cli, image, run_user):
    """AWS cluster env vars map onto confluence.cluster.aws.* properties."""
    environment = {
        'ATL_CLUSTER_TYPE': 'aws',
        'ATL_HAZELCAST_NETWORK_AWS_IAM_ROLE': 'atl_hazelcast_network_aws_iam_role',
        'ATL_HAZELCAST_NETWORK_AWS_IAM_REGION': 'atl_hazelcast_network_aws_iam_region',
        'ATL_HAZELCAST_NETWORK_AWS_HOST_HEADER': 'atl_hazelcast_network_aws_host_header',
        'ATL_HAZELCAST_NETWORK_AWS_TAG_KEY': 'atl_hazelcast_network_aws_tag_key',
        'ATL_HAZELCAST_NETWORK_AWS_TAG_VALUE': 'atl_hazelcast_network_aws_tag_value',
        'ATL_CLUSTER_NAME': 'atl_cluster_name',
        'ATL_CLUSTER_TTL': 'atl_cluster_ttl',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    def prop(name):
        return xml.findall(f'.//property[@name="{name}"]')[0].text

    assert prop('confluence.cluster') == "true"
    assert prop('confluence.cluster.join.type') == "aws"
    assert prop('confluence.cluster.aws.iam.role') == "atl_hazelcast_network_aws_iam_role"
    assert prop('confluence.cluster.aws.region') == "atl_hazelcast_network_aws_iam_region"
    assert prop('confluence.cluster.aws.host.header') == "atl_hazelcast_network_aws_host_header"
    assert prop('confluence.cluster.aws.tag.key') == "atl_hazelcast_network_aws_tag_key"
    assert prop('confluence.cluster.aws.tag.value') == "atl_hazelcast_network_aws_tag_value"
    assert prop('confluence.cluster.name') == "atl_cluster_name"
    assert prop('confluence.cluster.ttl') == "atl_cluster_ttl"
def test_confluence_xml_postgres(docker_cli, image, run_user):
    """Postgres env vars render hibernate settings with the c3p0 pool defaults.

    Fixed: ATL_JDBC_USER / ATL_JDBC_PASSWORD had been replaced with the
    '******' placeholder, which no longer matched the 'atl_jdbc_user' /
    'atl_jdbc_password' values asserted below — the test could never pass.
    """
    environment = {
        'ATL_DB_TYPE': 'postgresql',
        'ATL_JDBC_URL': 'atl_jdbc_url',
        'ATL_JDBC_USER': 'atl_jdbc_user',
        'ATL_JDBC_PASSWORD': 'atl_jdbc_password',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    def prop(name):
        return xml.findall(f'.//property[@name="{name}"]')[0].text

    assert prop('hibernate.connection.url') == "atl_jdbc_url"
    assert prop('hibernate.connection.username') == "atl_jdbc_user"
    assert prop('hibernate.connection.password') == "atl_jdbc_password"
    assert prop('confluence.database.choice') == "postgresql"
    assert prop('hibernate.dialect') == \
        "com.atlassian.confluence.impl.hibernate.dialect.PostgreSQLDialect"
    assert prop('hibernate.connection.driver_class') == "org.postgresql.Driver"
    # Connection-pool defaults when no ATL_DB_* overrides are given.
    assert prop('hibernate.c3p0.min_size') == "20"
    assert prop('hibernate.c3p0.max_size') == "100"
    assert prop('hibernate.c3p0.timeout') == "30"
    assert prop('hibernate.c3p0.idle_test_period') == "100"
    assert prop('hibernate.c3p0.max_statements') == "0"
    assert prop('hibernate.c3p0.validate') == "false"
    assert prop('hibernate.c3p0.acquire_increment') == "1"
    assert prop('hibernate.c3p0.preferredTestQuery') == "select 1"
def test_confluence_xml_postgres_all_set(docker_cli, image, run_user):
    """Explicit ATL_DB_* pool env vars override the postgres c3p0 defaults."""
    environment = {
        'ATL_DB_TYPE': 'postgresql',
        'ATL_JDBC_URL': 'atl_jdbc_url',
        'ATL_JDBC_USER': '******',
        'ATL_JDBC_PASSWORD': '******',
        'ATL_DB_POOLMAXSIZE': 'x100',
        'ATL_DB_POOLMINSIZE': 'x20',
        'ATL_DB_TIMEOUT': 'x30',
        'ATL_DB_IDLETESTPERIOD': 'x100',
        'ATL_DB_MAXSTATEMENTS': 'x0',
        'ATL_DB_VALIDATE': 'xfalse',
        'ATL_DB_ACQUIREINCREMENT': 'x1',
        'ATL_DB_VALIDATIONQUERY': 'xselect 1',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    def prop(name):
        return xml.findall(f'.//property[@name="{name}"]')[0].text

    assert prop('hibernate.connection.driver_class') == "org.postgresql.Driver"
    assert prop('hibernate.dialect') == \
        "com.atlassian.confluence.impl.hibernate.dialect.PostgreSQLDialect"
    assert prop('hibernate.c3p0.min_size') == "x20"
    assert prop('hibernate.c3p0.max_size') == "x100"
    assert prop('hibernate.c3p0.timeout') == "x30"
    assert prop('hibernate.c3p0.idle_test_period') == "x100"
    assert prop('hibernate.c3p0.max_statements') == "x0"
    assert prop('hibernate.c3p0.validate') == "xfalse"
    assert prop('hibernate.c3p0.acquire_increment') == "x1"
    assert prop('hibernate.c3p0.preferredTestQuery') == "xselect 1"
def test_server_xml_defaults(docker_cli, image):
    """An unconfigured container writes the stock connector attributes."""
    container = run_image(docker_cli, image)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container,
                    f'{get_app_install_dir(container)}/conf/server.xml')
    connector = xml.find('.//Connector')

    expected_attrs = {
        'port': '8080',
        'maxThreads': '100',
        'minSpareThreads': '10',
        'connectionTimeout': '20000',
        'enableLookups': 'false',
        'protocol': 'HTTP/1.1',
        'acceptCount': '10',
        'secure': 'false',
        'scheme': 'http',
        'proxyName': '',
        'proxyPort': '',
    }
    for attr, value in expected_attrs.items():
        assert connector.get(attr) == value
def test_server_xml_catalina_fallback(docker_cli, image):
    """Legacy CATALINA_* vars still configure the connector and context path."""
    environment = {
        'CATALINA_CONNECTOR_PROXYNAME': 'PROXYNAME',
        'CATALINA_CONNECTOR_PROXYPORT': 'PROXYPORT',
        'CATALINA_CONNECTOR_SECURE': 'SECURE',
        'CATALINA_CONNECTOR_SCHEME': 'SCHEME',
        'CATALINA_CONTEXT_PATH': 'CONTEXT',
    }
    container = run_image(docker_cli, image, environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container,
                    f'{get_app_install_dir(container)}/conf/server.xml')
    connector = xml.find('.//Connector')
    context = xml.find('.//Context')

    for attr, value in (('proxyName', 'PROXYNAME'),
                        ('proxyPort', 'PROXYPORT'),
                        ('scheme', 'SCHEME'),
                        ('secure', 'SECURE')):
        assert connector.get(attr) == value
    assert context.get('path') == 'CONTEXT'
def test_confluence_xml_cluster_tcp(docker_cli, image, run_user):
    """TCP/IP cluster env vars map onto confluence.cluster.* properties."""
    environment = {
        'ATL_CLUSTER_TYPE': 'tcp_ip',
        'ATL_CLUSTER_PEERS': '1.1.1.1,99.99.99.99',
        'ATL_CLUSTER_NAME': 'atl_cluster_name',
    }
    container = run_image(docker_cli, image, user=run_user,
                          environment=environment)
    wait_for_proc(container, get_bootstrap_proc(container))

    xml = parse_xml(container, f'{get_app_home(container)}/confluence.cfg.xml')

    def prop(name):
        return xml.findall(f'.//property[@name="{name}"]')[0].text

    assert prop('confluence.cluster') == "true"
    assert prop('confluence.cluster.join.type') == "tcp_ip"
    assert prop('confluence.cluster.name') == "atl_cluster_name"
    assert prop('confluence.cluster.peers') == "1.1.1.1,99.99.99.99"
def wikify(xml_data_set):
    """Build a nested data set of [corpus_lang, [lexelt, [sense, links], ...], ...]
    from a senseval-style XML data set.

    Fixed: the function previously parsed the module-global ``xml_test_data``
    instead of its ``xml_data_set`` argument, so the parameter was dead beyond
    the initial truthiness check.
    """
    if not xml_data_set:
        print('No XML file provided')
        return None

    test_dict = helpers.parse_xml(xml_data_set)
    whole_set = [test_dict['corpus']['@lang']]

    # Loop over every word (lexelt) in the XML file.
    for word in test_dict['corpus']['lexelt']:
        lex_set = [word['@item']]
        # Loop over every instance of the current lexeme.
        for instance in word['instance']:
            word_to_be_disambiguated = instance['context']['head']
            correct_sense = instance['answer']['@senseid']
            context = instance['context']['#text']
            # Remove html entities, then bracketed tags.
            context = html.unescape(context)
            context = re.sub(r"\[(.*?)\]+", "", context)
            # Tokenize and drop stopwords.
            context = tokenizer.tokenize(context)
            context = [tok for tok in context if tok not in stop]
            print('Stopwordless context: ', context)
            links = linkify(context, word_to_be_disambiguated)
            lex_set.append([correct_sense, links])
        # TODO: find strings in original context and add wiki url?
        whole_set.append(lex_set)
    return whole_set
def make_request(
    self,
    uri=None,
    proto="HTTP",
    request_type="GET",
    headers=None,
    output_format="raw",
    return_type=False,
    no_token=False,
    timeout=None,
):
    """Make an HTTP(S) request to the server and return the formatted body.

    Returns the body (converted per *output_format*: dict/json/xml/raw), or
    ``(output, content_type)`` when *return_type* is true, or ``None`` on any
    failure.

    Fixed: a trailing bare ``except:`` after ``except Exception`` was
    unreachable for ordinary errors and only swallowed
    KeyboardInterrupt/SystemExit, so it has been removed. Also corrected the
    'enpoint' typo in the no-URI debug message.
    """
    if timeout is None:
        timeout = plexpy.CONFIG.PMS_TIMEOUT

    valid_request_types = ["GET", "POST", "PUT", "DELETE"]
    if request_type.upper() not in valid_request_types:
        logger.debug(u"HTTP request made but unsupported request type given.")
        return None

    if not uri:
        logger.debug(u"HTTP request made but no endpoint given.")
        return None

    if proto.upper() == "HTTPS":
        if not self.ssl_verify and hasattr(ssl, "_create_unverified_context"):
            # Deliberately skip certificate verification when configured off.
            context = ssl._create_unverified_context()
            handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout, context=context)
            logger.warn(u"PlexPy HTTP Handler :: Unverified HTTPS request made. This connection is not secure.")
        else:
            handler = HTTPSConnection(host=self.host, port=self.port, timeout=timeout)
    else:
        handler = HTTPConnection(host=self.host, port=self.port, timeout=timeout)

    if not no_token:
        if headers:
            headers.update({"X-Plex-Token": self.token})
        else:
            headers = {"X-Plex-Token": self.token}

    try:
        if headers:
            handler.request(request_type, uri, headers=headers)
        else:
            handler.request(request_type, uri)
        response = handler.getresponse()
        request_status = response.status
        request_content = response.read()
        content_type = response.getheader("content-type")
    except IOError as e:
        logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
        return None
    except Exception as e:
        logger.warn(
            u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (uri, e)
        )
        return None

    if request_status not in (200, 201):
        logger.warn(u"Failed to access uri endpoint %s. Status code %r" % (uri, request_status))
        return None

    try:
        if output_format == "dict":
            output = helpers.convert_xml_to_dict(request_content)
        elif output_format == "json":
            output = helpers.convert_xml_to_json(request_content)
        elif output_format == "xml":
            output = helpers.parse_xml(request_content)
        else:
            output = request_content

        if return_type:
            return output, content_type
        return output
    except Exception as e:
        logger.warn(u"Failed format response from uri %s to %s error %s" % (uri, output_format, e))
        return None
return finally: for line in f.readlines(): tokens = line.rstrip().split('\t') if tokens[0] == word1 and tokens[1] == word2: cooccurrences = int(tokens[2]) # print(word1, word2, cooccurrences) # only one instance of a word pair in a file f.close() return cooccurrences return cooccurrences if __name__ == "__main__": test_dict = helpers.parse_xml(xml_test_data) # loop the every word from xml file for word in test_dict['corpus']['lexelt']: # loop every instance in the current lexeme for instance in word['instance']: word_to_be_disambiguated = instance['context']['head'] context = instance['context']['#text'] correct_sense = instance['answer']['@senseid'] # TODO remove tags and quotes from context # can be produce errors by giving weigth to senses that happen to have the keyword # format context to a list context = tokenizer.tokenize(context) # remove the word to be disambiguated: it doesn't provide any information for the context
def make_request(self,
                 uri=None,
                 proto='HTTP',
                 request_type='GET',
                 headers=None,
                 output_format='raw',
                 return_type=False,
                 no_token=False,
                 timeout=None):
    """Make an HTTP(S) request to the server and return the formatted body.

    Returns the body (converted per *output_format*: dict/json/xml/raw), or
    ``(output, content_type)`` when *return_type* is true, or ``None`` on any
    failure.

    Fixed: a trailing bare ``except:`` after ``except Exception`` was
    unreachable for ordinary errors and only swallowed
    KeyboardInterrupt/SystemExit, so it has been removed. Also corrected the
    'enpoint' typo in the no-URI debug message.
    """
    if timeout is None:
        timeout = plexpy.CONFIG.PMS_TIMEOUT

    valid_request_types = ['GET', 'POST', 'PUT', 'DELETE']
    if request_type.upper() not in valid_request_types:
        logger.debug(
            u"HTTP request made but unsupported request type given.")
        return None

    if not uri:
        logger.debug(u"HTTP request made but no endpoint given.")
        return None

    if proto.upper() == 'HTTPS':
        if not self.ssl_verify and hasattr(ssl, '_create_unverified_context'):
            # Deliberately skip certificate verification when configured off.
            context = ssl._create_unverified_context()
            handler = HTTPSConnection(host=self.host,
                                      port=self.port,
                                      timeout=timeout,
                                      context=context)
            logger.warn(
                u"PlexPy HTTP Handler :: Unverified HTTPS request made. This connection is not secure."
            )
        else:
            handler = HTTPSConnection(host=self.host,
                                      port=self.port,
                                      timeout=timeout)
    else:
        handler = HTTPConnection(host=self.host,
                                 port=self.port,
                                 timeout=timeout)

    if not no_token:
        if headers:
            headers.update({'X-Plex-Token': self.token})
        else:
            headers = {'X-Plex-Token': self.token}

    try:
        if headers:
            handler.request(request_type, uri, headers=headers)
        else:
            handler.request(request_type, uri)
        response = handler.getresponse()
        request_status = response.status
        request_content = response.read()
        content_type = response.getheader('content-type')
    except IOError as e:
        logger.warn(u"Failed to access uri endpoint %s with error %s" %
                    (uri, e))
        return None
    except Exception as e:
        logger.warn(
            u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s"
            % (uri, e))
        return None

    if request_status not in (200, 201):
        logger.warn(u"Failed to access uri endpoint %s. Status code %r" %
                    (uri, request_status))
        return None

    try:
        if output_format == 'dict':
            output = helpers.convert_xml_to_dict(request_content)
        elif output_format == 'json':
            output = helpers.convert_xml_to_json(request_content)
        elif output_format == 'xml':
            output = helpers.parse_xml(request_content)
        else:
            output = request_content

        if return_type:
            return output, content_type
        return output
    except Exception as e:
        logger.warn(u"Failed format response from uri %s to %s error %s" %
                    (uri, output_format, e))
        return None