def get_issues(self, issues, limit=300):
    result = []
    # build a JQL query restricted to the configured project and the requested issue keys
    keys = ','.join(issues)
    request = 'project=%s AND key in (%s)' % (self.settings.project, keys)
    LOGGER.debug(request)
    response = self.proxy.getIssuesFromJqlSearch(self.get_token(), request, Types.intType(limit))
    for item in response:
        issue = JiraIssue()
        issue.parse_raw(item)
        result.append(issue)
    return result
def updateGrid(index, src, dst, proto, data, date, notes, sess_str=SESS_K, sheetId=SHEETID):
    wsdl.insertRow(sess_str, sheetId, 6, 1)
    return wsdl.setCellValues(sess_str, sheetId, (
        Types.structType({'text': '%s' % index, 'col': 0, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % src, 'col': 1, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % dst, 'col': 2, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % data, 'col': 3, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % proto, 'col': 4, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % date, 'col': 5, 'sheetId': SHEETID, 'row': 6}),
        Types.structType({'text': '%s' % notes, 'col': 6, 'sheetId': SHEETID, 'row': 6}),
    ))
def store_outdated_issues():
    env = jira_rpc_init(LOGGER)
    client = env['client']
    auth = env['auth']
    project_name = env['project_name']
    request = '''Sprint in openSprints() AND project = {project} AND type in (Story, Bug, Improvement) AND status in (Defined,Undefined,"In Progress") AND "Estimation Date" <= endOfDay()'''.format(project=project_name)
    LOGGER.info('Querying JIRA for outdated issues...')
    response = client.getIssuesFromJqlSearch(auth, request, Types.intType(20))
    LOGGER.info('Response contains {count} issues'.format(count=len(response)))
    LOGGER.info('Removing all outdated issues from database...')
    OutdatedJiraIssue.objects.all().delete()
    LOGGER.info('Starting to import issues to database...')
    for issue in JiraUtil.__raw_data_to_issues_list__(response):
        message = '\tIssue {key} is being saved in database'.format(key=issue.key)
        LOGGER.info(message)
        outdated_jira_issue = JiraUtil.get_filled_in_outdated_jira_issue_obj(issue)
        outdated_jira_issue.save()
from SOAPpy import SOAPProxy
from SOAPpy import Types
import urllib2
import socket

# CONSTANTS
_url = 'http://api.google.com/search/beta2'
_namespace = 'urn:GoogleSearch'

# need to marshall into SOAP types
SOAP_FALSE = Types.booleanType(0)
SOAP_TRUE = Types.booleanType(1)

# Google search options
_license_key = 'Cu7YX75QFHLS3WD/7/4CO+GsI/jC69eb'
_query = ""
_start = 0
_maxResults = 10
_filter = SOAP_FALSE
_restrict = ''
_safeSearch = SOAP_FALSE
_lang_restrict = ''


def googleSearch(title):
    '''
    Application function that sends a search request to the Google web service
    and returns the first URL of the response.
    @param title: the title of the article to search for
    @return: the first url
    '''
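    # The original snippet is truncated here. A minimal sketch of how the body
    # might continue, assuming the legacy Google SOAP Search API served at _url
    # above (doGoogleSearch and the resultElements/URL fields belong to that
    # retired API; this body is not taken from the original source):
    proxy = SOAPProxy(_url, namespace=_namespace)
    try:
        results = proxy.doGoogleSearch(_license_key, title, _start, _maxResults,
                                       _filter, _restrict, _safeSearch,
                                       _lang_restrict, 'utf-8', 'utf-8')
    except Exception:
        # network or SOAP fault: give up and return nothing
        return None
    if results.resultElements:
        return results.resultElements[0].URL
    return None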
        # get the new namespace
        if namespace is None:
            new_ns = None
        else:
            new_ns = self.getNS(namespace, data)

        # return response payload
        return data, new_ns


username = "******"
password = "******"
url = "http://%s:%[email protected]:8080/FisboxWs/services/Fiskal" % (username, password)
namespace = "http://server.fiskal.llarik.sk/"

proxy = SOAPProxy(url, namespace=namespace,
                  transport=AuthenticatedTransport(username, password))
proxy.config.strictNamespaces = 1
proxy.config.debug = 1

input = Types.stringType(name=(namespace, "message"), data="Python")
print(proxy.connect(input))
print(proxy.getServerVersion())
print(proxy.getModuleVersion())
print(proxy.getModuleSerialNumber())
print(proxy.getModuleDate())
print(proxy.getModuleInfo())

input = Types.decimalType(name=(namespace, "fmId"), data=100)
print(proxy.getParagonCopy(input))
def main2():
    #go database
    dbs = MySQLdb.connect(host='mysql.ebi.ac.uk',user='******',passwd='amigo',db='go_latest',port=4085)
    cur = dbs.cursor()
    #reactome
    rwsdl = "http://www.reactome.org:8080/caBIOWebApp/services/caBIOService?wsdl"
    rserv = WSDL.Proxy(rwsdl)
    #kegg
    kegg_url = "http://rest.kegg.jp"
    conn = Connection(kegg_url)
    #uniprot taxonomy
    url = 'http://www.uniprot.org/taxonomy/'
    print '---'
    annotationArray = []
    with open('annotations.dump','rb') as f:
        ar = pickle.load(f)
    for idx,element in enumerate(ar):
        print idx
        modelAnnotations = Counter()
        for index in element:
            for annotation in element[index]:
                try:
                    bioArray = []
                    tAnnotation = annotation.replace('%3A',':')
                    tAnnotation = re.search(':([^:]+:[^:]+$)',tAnnotation).group(1)
                    if 'GO' in annotation:
                        cur.execute("SELECT * FROM term WHERE acc='{0}'".format(tAnnotation))
                        for row in cur.fetchall():
                            bioArray.append([row[1],row[3]])
                            modelAnnotations.update([row[1]])
                    elif 'reactome' in annotation:
                        tAnnotation2 = re.search('_([^_]+$)',tAnnotation).group(1)
                        try:
                            query = rserv.queryById(Types.longType(long(tAnnotation)))
                        except:
                            continue
                        bioArray.append([query['name'],tAnnotation])
                        modelAnnotations.update([query['name']])
                    elif 'kegg' in annotation:
                        if 'pathway' in tAnnotation:
                            tAnnotation2 = 'map' + re.search('[^0-9]+([0-9]+$)',tAnnotation).group(1)
                            reply = conn.request_get('find/pathway/{0}'.format(tAnnotation2), headers={'Accept':'text/json'})
                            if reply['body'] != '\n':
                                bioArray.append([reply['body'].split('\t')[1].strip(),tAnnotation])
                                modelAnnotations.update([reply['body'].split('\t')[1].strip()])
                        else:
                            print annotation
                    elif 'uniprot' in annotation:
                        identifier = annotation.split(':')[-1]
                        url = 'http://www.uniprot.org/uniprot/{0}.tab'.format(identifier)
                        params = {}
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        proteinName = page.split('\n')[1].split('\t')[3]
                        modelAnnotations.update([proteinName])
                    elif 'interpro' in annotation:
                        identifier = annotation.split(':')[-1]
                        url = 'http://www.ebi.ac.uk/interpro/entry/{0}'.format(identifier)
                        params = {}
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        pointer = page.index('h2 class="strapline"')
                        extract = page[pointer:pointer+100]
                        extract = extract[extract.index('>')+1:extract.index('<')]
                        modelAnnotations.update([extract])
                    #elif 'taxonomy' in annotation:
                        #uniprot stuff for taxonomy
                    #    pass
                        '''
                        url = 'http://www.uniprot.org/taxonomy/'
                        params = {
                            'from':'ACC',
                            'to':'P_REFSEQ_AC',
                            'format':'tab',
                            'query':'P13368 P20806 Q9UM73 P97793 Q17192'
                        }
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        contact = "" # Please set your email address here to help us debug in case of problems.
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        '''
                    else:
                        print '--',annotation,'GO' in tAnnotation
                except:
                    continue
        print modelAnnotations
        annotationArray.append(modelAnnotations)
    with open('parsedAnnotations.dump','wb') as f:
        pickle.dump(annotationArray,f)
except ImportError:
    #Edit Tom De Smedt, 2004
    #For now, just use the legacy SOAP.py, no warning.
    #warn( "SOAPpy not imported. Trying legacy SOAP.py.",
    #      DeprecationWarning )
    try:
        import SOAP
    except ImportError:
        raise RuntimeError( "Unable to find SOAPpy or SOAP. Can't continue.\n" )

#
# Constants that differ between the modules
#
if SOAPpy:
    false = Types.booleanType(0)
    true = Types.booleanType(1)
    structType = Types.structType
    faultType = Types.faultType
else:
    false = SOAP.booleanType(0)
    true = SOAP.booleanType(1)
    structType = SOAP.structType
    faultType = SOAP.faultType

#
# Get a SOAP Proxy object in the correct way for the module we're using
#
def getProxy( url, namespace, http_proxy ):
    if SOAPpy:
        return SOAPProxy( url,
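                          # The snippet is cut off above. A hedged sketch of how this
                          # helper likely continues: the keyword names simply mirror the
                          # getProxy() parameters and are not taken from the original text.
                          namespace = namespace,
                          http_proxy = http_proxy )
    else:
        return SOAP.SOAPProxy( url,
                               namespace = namespace,
                               http_proxy = http_proxy )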
# using sipXconfig SOAP from Python - example
# submitted by Joey Korkames <*****@*****.**>

sipx_host = 'sipxserver.example.com'

from SOAPpy import WSDL
from SOAPpy import URLopener
from SOAPpy import Types

# need to marshall into SOAP types
SOAP_FALSE = Types.booleanType(0)
SOAP_TRUE = Types.booleanType(1)

# I've manually disabled HTTP auth in my sipxconfig installation (to debug other issues),
# so the auth here may not actually work in a stock sipx environment....
sipx_auth = URLopener.URLopener(username='******', passwd='scrubscrub')

sipx_namespace = 'urn:ConfigService'

sipx_user_wsdl = 'http://' + sipx_host + '/sipxconfig/services/UserService?wsdl'
sipx_user = WSDL.Proxy(sipx_auth.open(sipx_user_wsdl), namespace=sipx_namespace)

sipx_phone_wsdl = 'http://' + sipx_host + '/sipxconfig/services/PhoneService?wsdl'
sipx_phone = WSDL.Proxy(sipx_auth.open(sipx_phone_wsdl), namespace=sipx_namespace)

# prints SOAP traffic to STDOUT
#sipx_user.soapproxy.config.dumpSOAPOut = 1
#sipx_user.soapproxy.config.dumpSOAPIn = 1
#sipx_phone.soapproxy.config.dumpSOAPOut = 1
#sipx_phone.soapproxy.config.dumpSOAPIn = 1
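# Not part of the original example: WSDL.Proxy keeps the operations parsed from the
# WSDL in its `methods` dictionary, so one quick way to see what the sipXconfig
# services expose (assuming the WSDL downloads above succeeded) is to list them.
print sorted(sipx_user.methods.keys())
print sorted(sipx_phone.methods.keys())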
client = WSDL.Proxy(wsdlfile)

# describe a job with one nested task, then create it and list existing jobs via the WSDL client
job = dict()
job['id'] = 1
job['name'] = 'job name'
job['task'] = []

task = dict()
task['name'] = 'python_task_name'
task['progress'] = dict()
task['progress']['status'] = 'Stopped'
task['progress']['data'] = dict()
task['progress']['data']['start'] = 'python start data'
task['progress']['data']['end'] = 'python end data'
job['task'].append(task)

print "\n"
try:
    ret = client.create_job(username, job)
    if ret == 1:
        print "Successfully created job \n"
    else:
        print ret + "\n"
    ret_list = client.get_jobs(username)
    print Types.simplify(ret_list)
except Exception as e:
    print e
def __call__(self, *args, **keywords):
    # marshal positional arguments into SOAP types, call the wrapped method,
    # and unwrap the SOAPpy response into plain Python values
    args = map(_type_conversion, self.__type_mask, args)
    results = getattr(self.__target, self.__name)(*args, **keywords)
    return Types.simplify(results)
def get_backlog_defects_count(self):
    request = BACKLOG_DEFECTS_QUERY
    response = self.proxy.getIssuesFromJqlSearch(self.get_token(), request, Types.intType(1000))
    return len(response)
# #############################################################################
from SOAPpy import SOAPProxy
from SOAPpy import Types

url = 'http://localhost:8180/mondrian/xmla'
n = 'urn:schemas-microsoft-com:xml-analysis'
server = SOAPProxy(url, n)

# if you want to see the SOAP messages exchanged,
# uncomment the two following lines
server.config.dumpSOAPOut = 1
#server.config.dumpSOAPIn = 1

server.namespace = 'ns1'
server.Discover(RequestType=Types.untypedType('MDSCHEMA_CUBES'))

###
### Generates the following request:
# <?xml version="1.0" encoding="UTF-8"?>
# <SOAP-ENV:Envelope SOAP-ENV:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/" xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">
#   <SOAP-ENV:Body>
#     <ns1:Discover xmlns:ns1="ns1" SOAP-ENC:root="1">
#       <RequestType>MDSCHEMA_CUBES</RequestType>
#     </ns1:Discover>
#   </SOAP-ENV:Body>
# </SOAP-ENV:Envelope>
def getResponsePrimitive(self):
    return Types.simplify(self.getResponse())
#############################################################################
from SOAPpy import SOAPProxy
from SOAPpy import Types

url = 'http://localhost:8180/mondrian/xmla'
n = 'urn:schemas-microsoft-com:xml-analysis'
server = SOAPProxy(url, n)

# if you want to see the SOAP messages exchanged,
# uncomment the two following lines
server.config.dumpSOAPOut = 1
#server.config.dumpSOAPIn = 1

server.namespace = 'ns1'
server.Discover({'ns1:RequestType': Types.untypedType('MDSCHEMA_CUBES')})

###
### Generates the following request:
# <?xml version="1.0" encoding="UTF-8"?>
# <SOAP-ENV:Envelope SOAP-ENV:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/" xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">
#   <SOAP-ENV:Body>
#     <ns1:Discover xmlns:ns1="ns1" SOAP-ENC:root="1">
#       <v1>
#         <ns1:RequestType>MDSCHEMA_CUBES</ns1:RequestType>
#       </v1>
#     </ns1:Discover>
#   </SOAP-ENV:Body>
# </SOAP-ENV:Envelope>
except ImportError:
    #Edit Tom De Smedt, 2004
    #For now, just use the legacy SOAP.py, no warning.
    #warn( "SOAPpy not imported. Trying legacy SOAP.py.",
    #      DeprecationWarning )
    try:
        import SOAP
    except ImportError:
        raise RuntimeError("Unable to find SOAPpy or SOAP. Can't continue.\n")

#
# Constants that differ between the modules
#
if SOAPpy:
    false = Types.booleanType(0)
    true = Types.booleanType(1)
    structType = Types.structType
    faultType = Types.faultType
else:
    false = SOAP.booleanType(0)
    true = SOAP.booleanType(1)
    structType = SOAP.structType
    faultType = SOAP.faultType

#
# Get a SOAP Proxy object in the correct way for the module we're using
#
def getProxy(url, namespace, http_proxy):
    if SOAPpy:
def request(self):
    client = self.__env['client']
    auth = self.__env['auth']
    LOGGER.info('JIRA Advanced Search request: {request}'.format(request=self.__request))
    self.__response = client.getIssuesFromJqlSearch(auth, self.__request, Types.intType(self.__limit))
def main2():
    #go database
    dbs = MySQLdb.connect(host='mysql.ebi.ac.uk', user='******', passwd='amigo',
                          db='go_latest', port=4085)
    cur = dbs.cursor()
    #reactome
    rwsdl = "http://www.reactome.org:8080/caBIOWebApp/services/caBIOService?wsdl"
    rserv = WSDL.Proxy(rwsdl)
    #kegg
    kegg_url = "http://rest.kegg.jp"
    #conn = Connection(kegg_url)
    #uniprot taxonomy
    url = 'http://www.uniprot.org/taxonomy/'
    print '---'
    annotationArray = []
    with open('annotations.dump', 'rb') as f:
        ar = pickle.load(f)
    for idx, element in enumerate(ar):
        print idx
        modelAnnotations = Counter()
        for index in element:
            for annotation in element[index]:
                try:
                    bioArray = []
                    tAnnotation = annotation.replace('%3A', ':')
                    tAnnotation = re.search(':([^:]+:[^:]+$)', tAnnotation).group(1)
                    if 'GO' in annotation:
                        cur.execute("SELECT * FROM term WHERE acc='{0}'".format(tAnnotation))
                        for row in cur.fetchall():
                            bioArray.append([row[1], row[3]])
                            modelAnnotations.update([row[1]])
                    elif 'reactome' in annotation:
                        tAnnotation2 = re.search('_([^_]+$)', tAnnotation).group(1)
                        try:
                            query = rserv.queryById(Types.longType(long(tAnnotation)))
                        except:
                            continue
                        bioArray.append([query['name'], tAnnotation])
                        modelAnnotations.update([query['name']])
                        '''
                        elif 'kegg' in annotation:
                            if 'pathway' in tAnnotation:
                                tAnnotation2 = 'map' + re.search('[^0-9]+([0-9]+$)',tAnnotation).group(1)
                                reply = conn.request_get('find/pathway/{0}'.format(tAnnotation2), headers={'Accept':'text/json'})
                                if reply['body'] != '\n':
                                    bioArray.append([reply['body'].split('\t')[1].strip(),tAnnotation])
                                    modelAnnotations.update([reply['body'].split('\t')[1].strip()])
                            else:
                                print annotation
                        '''
                    elif 'uniprot' in annotation:
                        identifier = annotation.split(':')[-1]
                        url = 'http://www.uniprot.org/uniprot/{0}.tab'.format(identifier)
                        params = {}
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        proteinName = page.split('\n')[1].split('\t')[3]
                        modelAnnotations.update([proteinName])
                    elif 'interpro' in annotation:
                        identifier = annotation.split(':')[-1]
                        url = 'http://www.ebi.ac.uk/interpro/entry/{0}'.format(identifier)
                        params = {}
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        pointer = page.index('h2 class="strapline"')
                        extract = page[pointer:pointer + 100]
                        extract = extract[extract.index('>') + 1:extract.index('<')]
                        modelAnnotations.update([extract])
                    #elif 'taxonomy' in annotation:
                        #uniprot stuff for taxonomy
                    #    pass
                        '''
                        url = 'http://www.uniprot.org/taxonomy/'
                        params = {
                            'from':'ACC',
                            'to':'P_REFSEQ_AC',
                            'format':'tab',
                            'query':'P13368 P20806 Q9UM73 P97793 Q17192'
                        }
                        data = urllib.urlencode(params)
                        request = urllib2.Request(url, data)
                        contact = "" # Please set your email address here to help us debug in case of problems.
                        request.add_header('User-Agent', 'Python contact')
                        response = urllib2.urlopen(request)
                        page = response.read(200000)
                        '''
                    else:
                        print '--', annotation, 'GO' in tAnnotation
                except:
                    continue
        print modelAnnotations
        annotationArray.append(modelAnnotations)
    with open('parsedAnnotations.dump', 'wb') as f:
        pickle.dump(annotationArray, f)
def __call__(self, *args, **keywords):
    args = map(_type_conversion, self.__type_mask, args)
    results = getattr(self.__target, self.__name)(*args, **keywords)
    return Types.simplify(results)
from SOAPpy import SOAPProxy
from SOAPpy import Types

namespace = "http://server.fiskal.llarik.sk/"
url = "http://10.17.0.6:8080/FisboxWs/services/Fiskal"

input = Types.stringType(name=(namespace, "message"), data="Python")

proxy = SOAPProxy(url, namespace=namespace)
proxy.config.debug = 1

proxy.connect(input)
from SOAPpy import SOAPProxy
from SOAPpy import Types

url = 'http://localhost:8180/mondrian/xmla'
n = 'urn:schemas-microsoft-com:xml-analysis'
server = SOAPProxy(url, n)

# if you want to see the SOAP messages exchanged,
# uncomment the two following lines
server.config.dumpSOAPOut = 1
#server.config.dumpSOAPIn = 1

cmd = {'Statement': Types.untypedType('SELECT [Measures].MEMBERS ON COLUMNS FROM [Sales]')}
prts = {
    'PropertyList': {
        'DataSourceInfo': Types.untypedType('Provider=Mondrian;DataSource=MondrianFoodMart;'),
        'Catalog': Types.untypedType('FoodMart'),
        'Format': Types.untypedType('Multidimensional'),
        'AxisFormat': Types.untypedType('ClusterFormat')
    }
}

#server.Execute(Command=cmd, Properties=prts)
#server._callWithBody({'Execute': {'Command': cmd, 'Properties': prts}})

restr = {
    'ns1:RestrictionList': {
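        # The original snippet breaks off here. A hedged sketch of a possible
        # completion: the CATALOG_NAME restriction and its value are illustrative
        # assumptions, and the Discover(RequestType/Restrictions/Properties) shape
        # follows the general XMLA convention rather than the original text.
        'CATALOG_NAME': Types.untypedType('FoodMart')
    }
}
server.namespace = 'ns1'  # mirrors the Discover example shown earlier
server.Discover(RequestType=Types.untypedType('MDSCHEMA_CUBES'),
                Restrictions=restr,
                Properties=prts)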