def __init__(self, host=None):
    """Optionally open an HTTPS connection at construction time.

    When *host* is None, no connection is made and ``self.conn`` is
    left unset until the caller establishes one.
    """
    if host is None:
        return
    self.conn = httplib.HTTPSConnection(host)
def do_compile(self, params, target_filename, filenames, remove):
    """Compile JavaScript through the Google Closure Compiler service.

    POSTs *params* to closure-compiler.appspot.com, reports server errors,
    fatal errors and warnings (mapping the service's "Input_N" names back
    to *filenames*), strips every statement listed in *remove* plus
    Google's embedded Apache licence blocks, and writes HEADER + the
    compiled code to *target_filename*.

    Exits the process (status 1) on fatal errors or a missing result.
    """
    # Send the request to Google.
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    conn = httplib.HTTPSConnection("closure-compiler.appspot.com")
    conn.request("POST", "/compile", urllib.urlencode(params), headers)
    response = conn.getresponse()
    json_str = response.read()
    conn.close()

    # Parse the JSON response.
    json_data = json.loads(json_str)

    def file_lookup(name):
        # The service names uploaded sources "Input_1", "Input_2", ...
        if not name.startswith("Input_"):
            return "???"
        n = int(name[6:]) - 1
        return filenames[n]

    # NOTE: dict.has_key() is Python-2-only; the "in" operator is the
    # equivalent form that also works on Python 3.
    if "serverErrors" in json_data:
        errors = json_data["serverErrors"]
        for error in errors:
            print("SERVER ERROR: %s" % target_filename)
            print(error["error"])
    elif "errors" in json_data:
        errors = json_data["errors"]
        for error in errors:
            print("FATAL ERROR")
            print(error["error"])
            if error["file"]:
                print("%s at line %d:" % (
                    file_lookup(error["file"]), error["lineno"]))
                print(error["line"])
                print((" " * error["charno"]) + "^")
            sys.exit(1)
    else:
        if "warnings" in json_data:
            warnings = json_data["warnings"]
            for warning in warnings:
                print("WARNING")
                print(warning["warning"])
                if warning["file"]:
                    print("%s at line %d:" % (
                        file_lookup(warning["file"]), warning["lineno"]))
                    print(warning["line"])
                    print((" " * warning["charno"]) + "^")
            print()

        if "compiledCode" not in json_data:
            print("FATAL ERROR: Compiler did not return compiledCode.")
            sys.exit(1)

        code = HEADER + "\n" + json_data["compiledCode"]
        for code_statement in remove:
            code = code.replace(code_statement, "")

        # Trim down Google's Apache licences.
        # The Closure Compiler used to preserve these until August 2015.
        # Delete this in a few months if the licences don't return.
        LICENSE = re.compile("""/\\* [\w ]+ (Copyright \\d+ Google Inc.) https://developers.google.com/blockly/ Licensed under the Apache License, Version 2.0 \(the "License"\); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. \\*/""")
        code = re.sub(LICENSE, r"\n// \1 Apache License 2.0", code)

        stats = json_data["statistics"]
        original_b = stats["originalSize"]
        compressed_b = stats["compressedSize"]
        if original_b > 0 and compressed_b > 0:
            # "with" guarantees the output file is closed even if write() fails
            # (the old code leaked the handle on error).
            with open(target_filename, "w") as f:
                f.write(code)
            original_kb = int(original_b / 1024 + 0.5)
            compressed_kb = int(compressed_b / 1024 + 0.5)
            ratio = int(float(compressed_b) / float(original_b) * 100 + 0.5)
            print("SUCCESS: " + target_filename)
            print("Size changed from %d KB to %d KB (%d%%)." % (
                original_kb, compressed_kb, ratio))
        else:
            print("UNKNOWN ERROR")
def __init__(self, uri, basepath=None):
    """Resolve *uri* (data URI, file://, http(s)://, or local path) into
    an open file-like object plus mimetype/location metadata.

    On success at least one of ``self.file``/``self.data`` is populated;
    on an unrecoverable HTTP error the object is left mostly empty.
    """
    self.basepath = basepath
    self.mimetype = None   # "type/subtype" string, content-type parameters stripped
    self.file = None       # open file-like object, when one could be obtained
    self.data = None       # raw bytes (data: URIs only)
    self.uri = None        # resolved/final URI or local path
    self.local = None      # local filesystem path, when the source is a file
    self.tmp_file = None
    uri = uri or str()
    # Normalize byte strings to text before parsing.
    if type(uri) != str:
        uri = uri.decode("utf-8")
    log.debug("FileObject %r, Basepath: %r", uri, basepath)

    # Data URI
    if uri.startswith("data:"):
        m = _rx_datauri.match(uri)
        self.mimetype = m.group("mime")
        self.data = base64.b64decode(m.group("data").encode("utf-8"))
    else:
        # Check if we have an external scheme; a scheme-less uri inherits
        # the basepath's scheme for dispatching below.
        if basepath and not urlparse.urlparse(uri).scheme:
            urlParts = urlparse.urlparse(basepath)
        else:
            urlParts = urlparse.urlparse(uri)
        log.debug("URLParts: {}".format((urlParts, urlParts.scheme)))
        if urlParts.scheme == 'file':
            # Absolute paths under a file:// basepath are re-rooted there.
            if basepath and uri.startswith('/'):
                uri = urlparse.urljoin(basepath, uri[1:])
            urlResponse = urllib2.urlopen(uri)
            self.mimetype = urlResponse.info().get(
                "Content-Type", '').split(";")[0]
            self.uri = urlResponse.geturl()
            self.file = urlResponse
        # Drive letters have len==1 but we are looking
        # for things like http:
        elif urlParts.scheme in ('http', 'https'):
            log.debug("Sending request for {} with httplib".format(uri))
            # External data
            if basepath:
                uri = urlparse.urljoin(basepath, uri)
            log.debug("Uri parsed: {}".format(uri))
            #path = urlparse.urlsplit(url)[2]
            #mimetype = getMimeType(path)
            # Using HTTPLIB
            server, path = urllib2.splithost(uri[uri.find("//"):])
            if uri.startswith("https://"):
                conn = httplib.HTTPSConnection(server, **httpConfig)
            else:
                conn = httplib.HTTPConnection(server)
            conn.request("GET", path)
            r1 = conn.getresponse()
            # log.debug("HTTP %r %r %r %r", server, path, uri, r1)
            if (r1.status, r1.reason) == (200, "OK"):
                self.mimetype = r1.getheader(
                    "Content-Type", '').split(";")[0]
                self.uri = uri
                log.debug("here")
                # Transparently decompress gzip-encoded bodies.
                if r1.getheader("content-encoding") == "gzip":
                    import gzip
                    self.file = gzip.GzipFile(
                        mode="rb", fileobj=six.StringIO(r1.read()))
                else:
                    self.file = r1
            else:
                log.debug(
                    "Received non-200 status: {}".format((r1.status, r1.reason)))
                # Fall back to urllib2, which follows redirects; give up
                # quietly on a hard HTTP error.
                try:
                    urlResponse = urllib2.urlopen(uri)
                except urllib2.HTTPError as e:
                    log.error("Could not process uri: {}".format(e))
                    return
                self.mimetype = urlResponse.info().get(
                    "Content-Type", '').split(";")[0]
                self.uri = urlResponse.geturl()
                self.file = urlResponse
        else:
            log.debug("Unrecognized scheme, assuming local file path")
            # Local data
            if basepath:
                uri = os.path.normpath(os.path.join(basepath, uri))
            if os.path.isfile(uri):
                self.uri = uri
                self.local = uri
                self.setMimeTypeByName(uri)
                # Text files are opened in text mode, everything else binary.
                if self.mimetype and self.mimetype.startswith('text'):
                    self.file = open(uri, "r") #removed bytes... lets hope it goes ok :/
                else:
                    # removed bytes... lets hope it goes ok :/
                    self.file = open(uri, "rb")
def __call__(self, hostport):
    """Build an HTTPS connection to *hostport* authenticated with this
    factory's client key/certificate pair."""
    key = self._key_file
    cert = self._cert_file
    return httplib.HTTPSConnection(hostport, key_file=key, cert_file=cert)
def __init__(self):
    """Create the Weibo API client and a reusable HTTPS connection."""
    self.client = APIClient(
        app_key=APP_KEY,
        app_secret=APP_SECRET,
        redirect_uri=CALLBACK_URL,
    )
    self.conn = httplib.HTTPSConnection('api.weibo.com')
from bin.main.rally.BuildBatch import BuildBatch from main.resources.rally.FetchReference import FetchReference logger = logging.getLogger(__name__) logger.debug("In Update Object") baseURL = '/slm/webservice/v2.0/' isFeature = 'F' in formattedID userAndPass = b64encode(b"%s:%s")%(configuration.userName, configuration.password) values = [] query = FetchReference(userAndPass, configuration.url) conn = httplib.HTTPSConnection(configuration.url,"443",context=ssl._create_unverified_context()) headers = {'Authorization' : 'Basic %s' %userAndPass} curURL = baseURL + '/portfolioitem/feature?fetch=FormattedID&query=(FormattedID%20%3D%20' + formattedID + ')' if isFeature else baseURL + 'hierarchicalrequirement?fetch=FormattedID&query=(FromattedID%20%3D%20' + formattedID + ')' conn.request('GET', curURL, "", headers) request = conn.getresponse() reqJson = json.loads(request.read()) objectRef = reqJson.get('QueryResult').get('Results')[0].get('_ref') for i in fields: check = i.upper() searchMe = fields[i].replace(" ", "%20")
# AppleScript snippets run against VLC via asrun(): current playback
# position and the name of the playing item.
commandTime = """ tell application "VLC" current time end tell """
commandName = """ tell application "VLC" name of current item end tell """
# Poll VLC twice a second and mirror its state into Firebase.
while True:
    currentTime = int(asrun(commandTime))
    connection = httplib.HTTPSConnection("fiery-fire-3139.firebaseio.com")
    connection.request("PUT", "/current_time.json", str(currentTime))
    result = connection.getresponse()
    print result.read()
    connection.close()
    currentName = asrun(commandName).strip()
    # The name is wrapped in quotes so Firebase stores it as a JSON string.
    connection = httplib.HTTPSConnection("fiery-fire-3139.firebaseio.com")
    connection.request("PUT", "/current_name.json", "\"" + currentName + "\"")
    result = connection.getresponse()
    print result.read()
    connection.close()
    time.sleep(0.5)
def get_login(action, value=None, hold_time=1):
    """Log in to mytotalconnectcomfort.com and query or change the thermostat.

    action: one of "settemp", "mode", "fan", "temp" (read-only queries that
    print a value and return), or "cool"/"heat"/"cancel"/"fan" (which POST a
    control payload).  value: setpoint/fan value for control actions.
    hold_time: hours to hold a cool/heat setpoint.

    Prints errors and returns early when any HTTP step fails.
    """
    cookiejar = None
    #print
    #print
    #print "Run at ",datetime.datetime.now()
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Encoding": "sdch",
        "Host": "mytotalconnectcomfort.com",
        "DNT": "1",
        "Origin": "https://mytotalconnectcomfort.com/portal",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36"
    }
    # First GET just collects the session cookies the login POST needs.
    conn = httplib.HTTPSConnection("mytotalconnectcomfort.com")
    conn.request("GET", "/portal/", None, headers)
    r0 = conn.getresponse()
    #print r0.status, r0.reason
    for x in r0.getheaders():
        (n, v) = x
        #print "R0 HEADER",n,v
        if (n.lower() == "set-cookie"):
            cookiejar = client_cookies(v, cookiejar)
    #cookiejar = r0.getheader("Set-Cookie")
    location = r0.getheader("Location")
    retries = 5
    params = urllib.urlencode({
        "timeOffset": "240",
        "UserName": USERNAME,
        "Password": PASSWORD,
        "RememberMe": "false"
    })
    #print params
    newcookie = export_cookiejar(cookiejar)
    #print "Cookiejar now",newcookie
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Encoding": "sdch",
        "Host": "mytotalconnectcomfort.com",
        "DNT": "1",
        "Origin": "https://mytotalconnectcomfort.com/portal/",
        "Cookie": newcookie,
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36"
    }
    # Login POST; on success the portal answers with a 302 redirect.
    conn = httplib.HTTPSConnection("mytotalconnectcomfort.com")
    conn.request("POST", "/portal/", params, headers)
    r1 = conn.getresponse()
    #print r1.status, r1.reason
    for x in r1.getheaders():
        (n, v) = x
        #print "GOT2 HEADER",n,v
        if (n.lower() == "set-cookie"):
            cookiejar = client_cookies(v, cookiejar)
    cookie = export_cookiejar(cookiejar)
    #print "Cookiejar now",cookie
    location = r1.getheader("Location")
    if ((location == None) or (r1.status != 302)):
        # raise BaseException("Login fail" )
        print( "ErrorNever got redirect on initial login status={0} {1}".format(
            r1.status, r1.reason))
        return

    # Skip second query - just go directly to our device_id, rather than letting it
    # redirect us to it.
    code = str(DEVICE_ID)
    t = datetime.datetime.now()
    # Millisecond timestamp used by the portal as a cache-buster ("_=").
    utc_seconds = (time.mktime(t.timetuple()))
    utc_seconds = int(utc_seconds * 1000)
    #print "Code ",code
    location = "/portal/Device/CheckDataSession/" + code + "?_=" + str(
        utc_seconds)
    #print "THIRD"
    headers = {
        "Accept": "*/*",
        "DNT": "1",
        #"Accept-Encoding":"gzip,deflate,sdch",
        "Accept-Encoding": "plain",
        "Cache-Control": "max-age=0",
        "Accept-Language": "en-US,en,q=0.8",
        "Connection": "keep-alive",
        "Host": "mytotalconnectcomfort.com",
        "Referer": "https://mytotalconnectcomfort.com/portal/",
        "X-Requested-With": "XMLHttpRequest",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36",
        "Cookie": cookie
    }
    conn = httplib.HTTPSConnection("mytotalconnectcomfort.com")
    #conn.set_debuglevel(999);
    #print "LOCATION R3 is",location
    conn.request("GET", location, None, headers)
    r3 = conn.getresponse()
    if (r3.status != 200):
        print("Error Didn't get 200 status on R3 status={0} {1}".format(
            r3.status, r3.reason))
        return
    # Print thermostat information returned
    if (action == "settemp"):
        rawdata = r3.read()
        j = json.loads(rawdata)
        stats = str(j['latestData']['uiData']["CoolSetpoint"])
        print stats
        return
    if (action == "mode"):
        rawdata = r3.read()
        j = json.loads(rawdata)
        stats = str(j['latestData']['uiData']["SystemSwitchPosition"])
        print stats
        return
    if (action == "fan"):
        rawdata = r3.read()
        j = json.loads(rawdata)
        stats = str(j['latestData']['fanData']["fanIsRunning"])
        print stats
        return
    if (action == "temp"):
        #print r3.status, r3.reason
        rawdata = r3.read()
        j = json.loads(rawdata)
        #print "R3 Dump"
        # print json.dumps(j,indent=2)
        # print json.dumps(j,sort_keys=True,indent=4, separators=(',', ': '))
        # print "Success:",j['success']
        # print "Live",j['deviceLive']
        #stats = '''{"SystemSwitchPosition":"%s","CurrentSetpointStatus":"%s","Indoor Temperature":"%s","Indoor Humidity":"%s","Cool Setpoint":"%s","Heat Setpoint":"%s","Hold Until":"%s","Status Cool":"%s","Status Heat":"%s","Status Fan":"%s"}''' % (str(j['latestData']['uiData']["SystemSwitchPosition"]), str(j['latestData']['uiData']["CurrentSetpointStatus"]), str(j['latestData']['uiData']["DispTemperature"]), str(j['latestData']['uiData']["IndoorHumidity"]), str(j['latestData']['uiData']["CoolSetpoint"]), str(j['latestData']['uiData']["HeatSetpoint"]), str(j['latestData']['uiData']["TemporaryHoldUntilTime"]), str(j['latestData']['uiData']["StatusCool"]), str(j['latestData']['uiData']["StatusHeat"]), str(j['latestData']['fanData']["fanMode"]))
        stats = str(j['latestData']['uiData']["DispTemperature"])
        print stats
        return
    # NOTE(review): this dict lists "Referer" twice; the second (relative)
    # value wins — confirm that is intended.
    headers = {
        "Accept": 'application/json; q=0.01',
        "DNT": "1",
        "Accept-Encoding": "gzip,deflate,sdch",
        'Content-Type': 'application/json; charset=UTF-8',
        "Cache-Control": "max-age=0",
        "Accept-Language": "en-US,en,q=0.8",
        "Connection": "keep-alive",
        "Host": "mytotalconnectcomfort.com",
        "Referer": "https://mytotalconnectcomfort.com/portal/",
        "X-Requested-With": "XMLHttpRequest",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36",
        'Referer': "/TotalConnectComfort/Device/CheckDataSession/" + code,
        "Cookie": cookie
    }
    # Data structure with data we will send back
    payload = {
        "CoolNextPeriod": None,
        "CoolSetpoint": None,
        "DeviceID": DEVICE_ID,
        "FanMode": None,
        "HeatNextPeriod": None,
        "HeatSetpoint": None,
        "StatusCool": 0,
        "StatusHeat": 0,
        "SystemSwitch": None
    }
    # Calculate the hold time for cooling/heating
    t = datetime.datetime.now()
    stop_time = ((t.hour + hold_time) % 24) * 60 + t.minute
    # Portal expects the hold end in 15-minute periods (Python 2 integer
    # division here).
    stop_time = stop_time / 15
    # Modify payload based on user input
    if (action == "cool"):
        payload["CoolSetpoint"] = value
        payload["StatusCool"] = 1
        payload["StatusHeat"] = 1
        payload["CoolNextPeriod"] = stop_time
    if (action == "heat"):
        payload["HeatSetpoint"] = value
        payload["StatusCool"] = 1
        payload["StatusHeat"] = 1
        payload["HeatNextPeriod"] = stop_time
    if (action == "cancel"):
        payload["StatusCool"] = 0
        payload["StatusHeat"] = 0
    if (action == "fan"):
        payload["FanMode"] = value
    # Prep and send payload
    location = "/portal/Device/SubmitControlScreenChanges"
    rawj = json.dumps(payload)
    conn = httplib.HTTPSConnection("mytotalconnectcomfort.com")
    #conn.set_debuglevel(999);
    #print "R4 will send"
    #print rawj
    conn.request("POST", location, rawj, headers)
    r4 = conn.getresponse()
    if (r4.status != 200):
        print("Error Didn't get 200 status on R4 status={0} {1}".format(
            r4.status, r4.reason))
        return
    else:
        print "Success in configuring thermostat!"
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. See the License for the specific language governing # permissions and limitations under the License. import httplib import base64 import json import urllib key = 'CHANGEME: YOUR_API_KEY' secret = 'CHANGEME: YOUR_API_SECRET' practiceid = 000000 version = 'preview1' # Start a connection connection = httplib.HTTPSConnection('api.athenahealth.com') # Authenticate (basic access authentication) auth_prefixes = { 'v1': '/oauth', 'preview1': '/oauthpreview', 'openpreview1': '/oauthopenpreview', } auth_path = auth_prefixes[version] + '/token' keypair = base64.b64encode('{0}:{1}'.format(key, secret)) auth_parameters = urllib.urlencode({'grant_type': 'client_credentials'}) auth_headers = { 'Content-type': 'application/x-www-form-urlencoded', 'Authorization': 'Basic {0}'.format(keypair), }
cPfdDC = str(re.findall(r'sPfdDC=\"\S*\"', fobj, re.I)[0]).replace('sPfdDC="', '').replace('"', '') return (canary, sSid, sCki, cPfdDC) def getMailAddFromFile(fobj): regex = re.compile(r"\b[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}\b", re.IGNORECASE) mails = re.findall(regex, fobj) return set(mails) try: #首次访问 获取Session ID conn = httplib.HTTPSConnection(args.domain) conn.request(method='GET', url='/owa/') res = dict(conn.getresponse().getheaders()) session = res['set-cookie'].split(';')[0] headers['Cookie'] = '%s; PBack=0' % session conn.close() #登录获取用户cookie sessionid cadata conn = httplib.HTTPSConnection(args.domain) conn.request(method='POST', url='/owa/auth.owa', body=urllib.urlencode(login_data), headers=headers) res = dict(conn.getresponse().getheaders()) session = res['set-cookie'].split(';') #headers['Cookie'] += '; %s; %s' % (session[0],session[1].replace(' path=/, ',''))
def return_text(url, headers):
    """GET *url* from ``args.domain`` over HTTPS and return the raw body."""
    connection = httplib.HTTPSConnection(args.domain)
    connection.request(method='GET', url=url, headers=headers)
    body = connection.getresponse().read()
    connection.close()
    return body
def SendPostCommand(command, client_secret, header_additions=None, body=None):
    """POST *command* to the API endpoint and return the raw HTTP response.

    header_additions: optional dict merged over the populated default
    headers (additions win).  body: optional request body.

    Fixes: the old signature used a mutable default argument ({}), and
    merged headers with the Python-2-only ``dict(a.items() + b.items())``
    idiom; the copy-then-update below is equivalent and version-safe.
    """
    headers = dict(GetPopulatedHeader(client_secret))
    if header_additions:
        headers.update(header_additions)
    conn = httplib.HTTPSConnection(API_ENDPOINT_DOMAIN)
    conn.request('POST', command, body, headers)
    return conn.getresponse()
def SendGetCommand(command, client_secret):
    """Issue an authenticated GET for *command* and return the response."""
    request_headers = GetPopulatedHeader(client_secret)
    connection = httplib.HTTPSConnection(API_ENDPOINT_DOMAIN)
    connection.request('GET', command, '', request_headers)
    return connection.getresponse()
def __submitQBXMLReq(self, xmldoc, recursing=False):
    """Send the specified XML document to the Quickbooks QPI for processing
    via a HTTPS POST.

    Signs in first (recursively, once) when no session ticket is held yet,
    then POSTs the serialized document with the client certificate/key.
    Returns the parsed XML response; raises QBOEHTTPError on HTTP or SSL
    failures.
    """
    if not self.__session_ticket and not recursing:
        # We're not logged in (no ticket) and this is the first time we're through here.
        req = self.__makeSignInReq()
        signin_response = self.__submitQBXMLReq(xmldoc=req, recursing=True)
        assert self.__parseLoginResponse(
            signin_response) == True, "Unable to parese login response."
        # Logged in ok, so we now update the session ticket in the original request
        ticket_el = xmldoc.xpath(
            "/QBXML/SignonMsgsRq/SignonTicketRq/SessionTicket")[0]
        ticket_el.text = self.__session_ticket
    headers = {"Content-type": "application/x-qbxml"}
    # api_url is "host/path/..."; split it into the two pieces httplib needs.
    host = self.api_url.split("/")[0]
    path = "/" + "/".join(self.api_url.split("/")[1:])
    h = httplib.HTTPSConnection(host=host,
                                port=443,
                                key_file=self.key_file,
                                cert_file=self.cert_file,
                                timeout=self.https_timeout)
    if self.debug:
        h.debuglevel = 1
    data = etree.tostring(xmldoc,
                          pretty_print=False,
                          encoding="utf-8",
                          xml_declaration=True)
    try:
        h.request('POST', path, data, headers)
        resp = h.getresponse()
        data = resp.read()
        if not resp.status == 200:
            raise QBOEHTTPError(
                None, "Invalid response received from QBOE. Response: %d %s"
                % (resp.status, resp.reason))
    except httplib.ssl.SSLError as ex:
        self.__checkCerts()
        # OpenSSL error strings embed a hex error code ("error:XXXX:...");
        # hex-encode its digits to fingerprint specific certificate/key
        # problems below.
        try:
            httplib_err_num = int(
                re.search(r'error:([0-9A-F]+):', ex.args[1],
                          re.I | re.M).group(1).encode("hex"))
        except (AttributeError, ValueError):
            httplib_err_num = None
        if httplib_err_num == 3134304230303039:
            raise QBOEHTTPError(ex, "There appears to be a problem with the specified private key file: '%s'."
                " (Hint: Is the first line of this file '-----BEGIN RSA PRIVATE KEY-----'?)\n\n" \
                % self.key_file)
        elif httplib_err_num == 3134304443303039:
            raise QBOEHTTPError(ex, "There appears to be a problem with the specified certificate file: '%s'."
                " (Hint: Is the first line of this file '-----BEGIN CERTIFICATE-----'?)\n\n" \
                % self.cert_file)
        elif httplib_err_num == 3134303934343142:
            raise QBOEHTTPError(ex, "The specified certificate ('%s') and key ('%s') don't match or are corrupted.\n\n" \
                % (self.cert_file, self.key_file))
        else:
            raise
    finally:
        h.close()
    return etree.XML(data)
headers = {
    # Request headers. Replace the placeholder key below with your subscription key.
    'Content-Type': 'application/json',
    'Ocp-Apim-Subscription-Key': '0fb41fe10d3141f68910857a7a54304b',
}

params = urllib.urlencode({})

# Replace the example URL below with the URL of the image you want to analyze.
body = "{\'url\' : \'https://bloximages.chicago2.vip.townnews.com/thefacts.com/content/tncms/assets/v3/editorial/a/bd/abd8cd9a-7b74-504d-ad29-055ad9599521/553c408f1c69a.image.jpg?resize=500%2C751\'}"

try:
    # NOTE: You must use the same region in your REST call as you used to obtain your subscription keys.
    # For example, if you obtained your subscription keys from westcentralus, replace "westus" in the
    # URL below with "westcentralus".
    conn = httplib.HTTPSConnection('api.projectoxford.ai')
    conn.request("POST", "/emotion/v1.0/recognize?%s" % params, body, headers)
    response = conn.getresponse()
    data = response.read()
    # 'data' contains the JSON data. The following formats the JSON data for display.
    parsed = json.loads(data)
    print("Response:")
    print(json.dumps(parsed, sort_keys=True, indent=2))
    conn.close()
except Exception as e:
    # NOTE(review): only OSError-like exceptions carry errno/strerror; other
    # exception types would raise AttributeError here — confirm intended.
    print("[Errno {0}] {1}".format(e.errno, e.strerror))

# import httplib, urllib, base64
#
# # Image to analyse (body of the request)
#
def get_connection(organization):
    """Open an HTTPS connection to the given Slack workspace's host."""
    slack_host = '%s.slack.com' % organization
    return httplib.HTTPSConnection(slack_host)
import httplib, os

# One-off Heroku dyno launch: start the "scraper" process on the
# steamleaderboards app for at most 1800 s, authenticated with the
# API_TOKEN environment variable.
conn = httplib.HTTPSConnection("api.heroku.com")
conn.request(
    "POST",
    "/apps/steamleaderboards/dynos",
    '{"type": "run", "time_to_live": 1800, "command": "scraper", "size": "free"}',
    {'Content-Type': 'application/json',
     'Authorization': 'Bearer ' + os.environ['API_TOKEN'],
     # Heroku Platform API requires this versioned Accept header.
     'Accept': 'application/vnd.heroku+json; version=3'})
response = conn.getresponse()
print response.status, response.reason
def post(self):
    """Log in to the eGela (Moodle) portal, harvest the session cookie,
    fetch the user's course page, and render the course titles with the
    index.html template.

    Side effects: sets the module-level ``egela_cookie`` and ``html``
    globals and stores the cookie in the session.
    """
    servidor = 'egela1617.ehu.eus'
    conn = httplib.HTTPSConnection(servidor)
    conn.connect()
    self.session['clase'] = 'Egela'
    metodo = 'POST'
    usuario = self.request.get('user')
    password = self.request.get('pass')
    print usuario
    print password
    try:
        # Login form fields (credentials redacted here).
        params = {'username': '******', 'password': '******'}
        params_encoded = urllib.urlencode(params)
        recurso = '/login/index.php'
        cabeceras_peticion = {
            'Host': servidor,
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': str(len(params_encoded))
        }
    except:
        print 'problem'
    conn.request(metodo, recurso, headers=cabeceras_peticion, body=params_encoded)
    respuesta = conn.getresponse()
    logging.debug(respuesta.status)
    #self.response.out.write(respuesta.getheaders())
    # Keep only the first "name=value" pair of the Set-Cookie header.
    cookie = respuesta.getheader('set-cookie')
    cookie2 = cookie.split(';')
    print cookie2
    global egela_cookie
    egela_cookie = cookie2[0]
    self.session['cookie'] = egela_cookie
    print egela_cookie
    # Second request: fetch the dashboard using the captured cookie.
    servidor = 'egela1617.ehu.eus'
    conn = httplib.HTTPSConnection(servidor)
    conn.connect()
    metodo = 'GET'
    recurso = '/my/'
    cabeceras_peticion = {'Host': servidor, 'cookie': egela_cookie}
    cuerpo_peticion = ''
    conn.request(metodo, recurso, headers=cabeceras_peticion, body=cuerpo_peticion)
    response = conn.getresponse()
    print response.status
    if (response.status != 200):
        time.sleep(2)
        self.redirect('/buscarCookie')
    global html
    html = BeautifulSoup(response.read(), "html.parser")
    lista = ['lista']
    n = 1
    print html.find_all('h2')
    # Course names appear as links inside <h2> headings.
    for enlace in html.find_all('h2'):
        try:
            #lista1.insert ( str(n) + ') ' + enlace.a.text+'\n')
            time.sleep(1)
            lista.insert(n, enlace.a.text)
            n = n + 1
        except:
            print 'problem'
    #numeroCurso = raw_input("Introduce el numero del curso: ")
    template_values = {'nombre': lista}
    template = JINJA_ENVIRONMENT.get_template('index.html')
    self.response.write(template.render(template_values))
    time.sleep(2)
    #self.session['lista']=lista
    conn.close()
def getWorkflowInfo(workflow, nodbs=0):
    """Collect metadata for a CMS request manager *workflow*.

    Scrapes the ReqMgr showWorkload HTML page for schema fields, then
    queries two ReqMgr JSON endpoints (request details and output
    datasets).  When *nodbs* is truthy, DBS dataset lookups are skipped
    and zero/empty placeholders are used instead.

    Returns a dict of everything gathered (type, status, priority, sites,
    expected events/jobs, datasets, ...).  Requires a grid proxy in
    $X509_USER_PROXY for client-certificate auth.
    """
    conn = httplib.HTTPSConnection('cmsweb.cern.ch',
                                   cert_file=os.getenv('X509_USER_PROXY'),
                                   key_file=os.getenv('X509_USER_PROXY'))
    r1 = conn.request('GET',
                      '/reqmgr/view/showWorkload?requestName=%s' % workflow)
    r2 = conn.getresponse()
    data = r2.read()
    conn.close()
    # NOTE(review): shadows the builtin "list".
    list = data.split('\n')
    # Defaults for every field we try to scrape below.
    primaryds = ''
    priority = -1
    timeev = -1
    prepid = ''
    globaltag = ''
    sites = []
    events_per_job = None
    lumis_per_job = None
    acquisitionEra = None
    processingVersion = None
    outputtier = None
    reqevts = 0
    prepmemory = 0
    requestdays = 0
    campaign = ''
    lheinputfiles = 0
    # Values appear either quoted ('value') or as " = value<br"; each
    # branch tries the quoted form first where both occur.
    for raw in list:
        if 'acquisitionEra' in raw:
            a = raw.find("'")
            if a >= 0:
                b = raw.find("'", a + 1)
                acquisitionEra = raw[a + 1:b]
            else:
                a = raw.find(" =")
                b = raw.find('<br')
                acquisitionEra = raw[a + 3:b]
        elif 'primaryDataset' in raw:
            primaryds = raw[raw.find("'") + 1:]
            primaryds = primaryds[0:primaryds.find("'")]
        elif 'output.dataTier' in raw:
            outputtier = raw[raw.find("'") + 1:]
            outputtier = outputtier[0:outputtier.find("'")]
        elif 'schema.LheInputFiles' in raw:
            lheinputfiles = raw[raw.find("'") + 1:]
            lheinputfiles = lheinputfiles[0:lheinputfiles.find("'")]
            # Normalize the scraped string to a 0/1 flag.
            if lheinputfiles == 'True':
                lheinputfiles = 1
            else:
                lheinputfiles = 0
        elif 'cmsswVersion' in raw:
            cmssw = raw[raw.find("'") + 1:]
            cmssw = cmssw[0:cmssw.find("'")]
        elif 'PrepID' in raw:
            prepid = raw[raw.find("'") + 1:]
            prepid = prepid[0:prepid.find("'")]
        elif 'lumis_per_job' in raw:
            a = raw.find(" =")
            b = raw.find('<br')
            lumis_per_job = int(raw[a + 3:b])
        elif '.schema.Campaign' in raw:
            campaign = raw[raw.find("'") + 1:]
            campaign = campaign[0:campaign.find("'")]
        elif 'splitting.events_per_job' in raw:
            a = raw.find(" =")
            b = raw.find('<br')
            events_per_job = int(raw[a + 3:b])
        elif 'request.schema.Memory' in raw:
            a = raw.find(" =")
            b = raw.find('<br')
            prepmemory = int(raw[a + 3:b])
        elif 'TimePerEvent' in raw:
            a = raw.find("'")
            if a >= 0:
                b = raw.find("'", a + 1)
                timeev = int(raw[a + 1:b])
            else:
                a = raw.find(" =")
                b = raw.find('<br')
                timeev = int(float(raw[a + 3:b]))
        elif 'request.priority' in raw:
            a = raw.find("'")
            if a >= 0:
                b = raw.find("'", a + 1)
                priority = int(raw[a + 1:b])
            else:
                a = raw.find(" =")
                b = raw.find('<br')
                #print "*%s*" % raw[a+3:b]
                priority = int(raw[a + 3:b])
        elif 'RequestDate' in raw:
            # NOTE(review): eval of page content — trusted server assumed.
            reqdate = raw[raw.find("[") + 1:raw.find("]")]
            reqdate = reqdate.replace("'", "")
            reqdate = "datetime.datetime(" + reqdate + ")"
            reqdate = eval(reqdate)
            requestdays = (datetime.datetime.now() - reqdate).days
        elif 'white' in raw and not '[]' in raw:
            # Site whitelist rendered as a Python list literal.
            sites = '[' + raw[raw.find("[") + 1:raw.find("]")] + ']'
            sites = eval(sites)
        elif 'processingVersion' in raw:
            processingVersion = raw[raw.find("'") + 1:]
            processingVersion = processingVersion[0:processingVersion.find("'"
                                                                           )]
            a = raw.find("'")
            if a >= 0:
                b = raw.find("'", a + 1)
                processingVersion = raw[a + 1:b]
            else:
                a = raw.find(" =")
                b = raw.find('<br')
                processingVersion = raw[a + 3:b]
        elif 'request.schema.GlobalTag' in raw:
            globaltag = raw[raw.find("'") + 1:]
            globaltag = globaltag[0:globaltag.find(":")]
    # TODO to be fixed
    custodialt1 = '?'
    # First whitelisted Tier-1 site acts as the custodial site.
    for i in sites:
        if 'T1_' in i:
            custodialt1 = i
            break
    # Second query: structured request details as JSON.
    conn = httplib.HTTPSConnection('cmsweb.cern.ch',
                                   cert_file=os.getenv('X509_USER_PROXY'),
                                   key_file=os.getenv('X509_USER_PROXY'))
    r1 = conn.request('GET',
                      '/reqmgr/reqMgr/request?requestName=%s' % workflow)
    r2 = conn.getresponse()
    data = r2.read()
    s = json.loads(data)
    conn.close()
    # Each field is optional in the JSON; fall back to neutral defaults.
    try:
        filtereff = float(s['FilterEfficiency'])
    except:
        filtereff = -1
    try:
        team = s['Assignments']
        if len(team) > 0:
            team = team[0]
        else:
            team = ''
    except:
        team = ''
    try:
        typ = s['RequestType']
    except:
        typ = ''
    try:
        status = s['RequestStatus']
    except:
        status = ''
    try:
        reqevts = s['RequestSizeEvents']
    except:
        try:
            reqevts = s['RequestNumEvents']
        except:
            reqevts = 0
    inputdataset = {}
    try:
        inputdataset['name'] = s['InputDatasets'][0]
    except:
        pass
    # Expected-output estimates depend on the request type.
    if typ in ['MonteCarlo', 'LHEStepZero']:
        expectedevents = int(reqevts)
        expectedjobs = int(expectedevents / (events_per_job * filtereff))
    elif typ in ['MonteCarloFromGEN']:
        if nodbs:
            [inputdataset['events'], inputdataset['status']] = [0, '']
        else:
            [inputdataset['events'],
             inputdataset['status']] = getdsdetail(inputdataset['name'])
        if nodbs:
            inputdataset['lumicount'] = 0
        else:
            inputdataset['lumicount'] = dbs_get_lumicount(inputdataset['name'])
        try:
            expectedjobs = inputdataset['lumicount'] / lumis_per_job
        except:
            expectedjobs = 0
        expectedevents = int(filtereff * inputdataset['events'])
    else:
        expectedevents = -1
        expectedjobs = -1
    # Third query: output datasets produced by the request.
    conn = httplib.HTTPSConnection('cmsweb.cern.ch',
                                   cert_file=os.getenv('X509_USER_PROXY'),
                                   key_file=os.getenv('X509_USER_PROXY'))
    r1 = conn.request(
        'GET',
        '/reqmgr/reqMgr/outputDatasetsByRequestName?requestName=' + workflow)
    r2 = conn.getresponse()
    data = r2.read()
    s = json.loads(data)
    conn.close()
    ods = s
    if len(ods) == 0:
        print "No Outpudatasets for this workflow: " + workflow
    outputdataset = []
    eventsdone = 0
    for o in ods:
        oel = {}
        oel['name'] = o
        if nodbs:
            [oe, ost] = [0, '']
        else:
            [oe, ost] = getdsdetail(o)
        oel['events'] = oe
        oel['status'] = ost
        outputdataset.append(oel)
    # Bundle everything scraped/queried above into one result dict.
    ret = {
        'requestname': workflow,
        'type': typ,
        'status': status,
        'campaign': campaign,
        'expectedevents': expectedevents,
        'inputdataset': inputdataset,
        'primaryds': primaryds,
        'prepid': prepid,
        'globaltag': globaltag,
        'timeev': timeev,
        'priority': priority,
        'sites': sites,
        'custodialt1': custodialt1,
        'outputdataset': outputdataset,
        'team': team,
        'acquisitionEra': acquisitionEra,
        'requestdays': requestdays,
        'processingVersion': processingVersion,
        'events_per_job': events_per_job,
        'lumis_per_job': lumis_per_job,
        'expectedjobs': expectedjobs,
        'cmssw': cmssw,
        'outputtier': outputtier,
        'prepmemory': prepmemory,
        'lheinputfiles': lheinputfiles,
        'filtereff': filtereff
    }
    #print ret
    return ret
def getConnection(self, host):
    """Open an HTTPS connection to *host* using the stored client
    certificate and key."""
    client_key = self.key
    client_cert = self.cert
    return httplib.HTTPSConnection(host, key_file=client_key,
                                   cert_file=client_cert)
def upload_file(self, command, pyversion, filename):
    """Upload *filename* (produced by distutils *command*, for Python
    *pyversion*) to ``self.repository`` as a multipart/form-data POST,
    optionally GPG-signing it first.

    Uses httplib directly because the Basic auth header must accompany
    the very first request.
    """
    # Sign if requested
    if self.sign:
        gpg_args = ["gpg", "--detach-sign", "-a", filename]
        if self.identity:
            gpg_args[2:2] = ["--local-user", self.identity]
        spawn(gpg_args, dry_run=self.dry_run)
    # Fill in the data
    content = open(filename, 'rb').read()
    basename = os.path.basename(filename)
    comment = ''
    if command == 'bdist_egg' and self.distribution.has_ext_modules():
        comment = "built on %s" % platform.platform(terse=1)
    data = {
        # NOTE: 'protcol_version' is the historical (misspelled) field name
        # the server expects — do not "fix" it.
        ':action': 'file_upload',
        'protcol_version': '1',
        'name': self.distribution.get_name(),
        'version': self.distribution.get_version(),
        'content': (basename, content),
        'filetype': command,
        'pyversion': pyversion,
        'md5_digest': md5(content).hexdigest(),
    }
    if command == 'bdist_rpm':
        dist, version, id = platform.dist()
        if dist:
            comment = 'built for %s %s' % (dist, version)
    elif command == 'bdist_dumb':
        comment = 'built for %s' % platform.platform(terse=1)
    data['comment'] = comment
    if self.sign:
        data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                 open(filename + ".asc").read())
    # set up the authentication
    auth = "Basic " + base64.encodestring(self.username + ":" +
                                          self.password).strip()
    # Build up the MIME payload for the POST data
    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = '\n--' + boundary
    end_boundary = sep_boundary + '--'
    body = StringIO.StringIO()
    for key, value in data.items():
        # handle multiple entries for the same name
        if type(value) != type([]):
            value = [value]
        for value in value:
            # Tuples carry an upload filename alongside the content.
            if type(value) is tuple:
                fn = ';filename="%s"' % value[0]
                value = value[1]
            else:
                fn = ""
            value = str(value)
            body.write(sep_boundary)
            body.write('\nContent-Disposition: form-data; name="%s"' % key)
            body.write(fn)
            body.write("\n\n")
            body.write(value)
            if value and value[-1] == '\r':
                body.write('\n')  # write an extra newline (lurve Macs)
    body.write(end_boundary)
    body.write("\n")
    body = body.getvalue()
    self.announce("Submitting %s to %s" % (filename, self.repository),
                  log.INFO)
    # build the Request
    # We can't use urllib2 since we need to send the Basic
    # auth right with the first request
    schema, netloc, url, params, query, fragments = \
        urlparse.urlparse(self.repository)
    assert not params and not query and not fragments
    if schema == 'http':
        http = httplib.HTTPConnection(netloc)
    elif schema == 'https':
        http = httplib.HTTPSConnection(netloc)
    else:
        raise AssertionError, "unsupported schema " + schema
    data = ''
    loglevel = log.INFO
    try:
        http.connect()
        http.putrequest("POST", url)
        http.putheader('Content-type',
                       'multipart/form-data; boundary=%s' % boundary)
        http.putheader('Content-length', str(len(body)))
        http.putheader('Authorization', auth)
        http.endheaders()
        http.send(body)
    except socket.error, e:
        self.announce(str(e), log.ERROR)
        return
# send API request: build the Pushover message from config + CLI args and
# POST it; exit non-zero if the API rejects it.
options = {
    'token': config.get('pushover', 'token'),
    'user': args.to,
    'title': args.subject,
    'message': args.message,
    'priority': config.get('pushover', 'priority')
}

# Emergency priority (2) additionally requires retry/expire.
# BUG FIX: ConfigParser.get() returns a string, so the previous
# `config.get(...) == 2` comparison was always False and retry/expire
# were never attached.
if config.getint('pushover', 'priority') == 2:
    options['retry'] = config.get('pushover', 'retry')
    options['expire'] = config.get('pushover', 'expire')

# BUG FIX: the sound option lives in the [pushover] section; the old code
# probed a literal section named "section" and therefore never found it.
if config.has_option('pushover', 'sound') and len(config.get('pushover', 'sound')) > 0:
    options['sound'] = config.get('pushover', 'sound')

conn = httplib.HTTPSConnection('api.pushover.net:443')
conn.request(
    'POST',
    '/1/messages.json',
    urllib.urlencode(options),
    {'Content-type': 'application/x-www-form-urlencoded'}
)
res = conn.getresponse()
if res.status != 200:
    print('Pushover API returned error: ' + res.read(), end='\n', file=stderr)
    exit(1)
clf = pickle.load(fopen) with open('xgb2.p', 'rb') as fopen: clf2 = pickle.load(fopen) with open('xgb3.p', 'rb') as fopen: clf3 = pickle.load(fopen) labels = ['Unknown', 'Husein'] labels_emotion = [ 'neutral', 'sadness', 'disgust', 'anger', 'surprise', 'fear', 'happiness' ] counts = [0, 0] ori_image = np.zeros([10, 10, 3]) headers = { "Content-type": "application/json", "X-Access-Token": "tcrrl0saWD3rmQ46KsCtxuc5EXy8JRaB60sN" } conn = httplib.HTTPSConnection("dev.sighthoundapi.com", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)) def f(f_stop): global counts global ori_image global emotion_batch if counts[1] > 10: _, img_encoded = cv2.imencode('.jpg', ori_image) params = json.dumps( {"image": base64.b64encode(img_encoded.tostring())}) conn.request("POST", "/v1/detections?type=face&faceOption=emotion", params, headers) response = conn.getresponse() result = json.loads(response.read()) if result['objects']:
def main(): logging.basicConfig(filename='ltget_log.txt', format='%(asctime)s %(message)s', level=logging.DEBUG) logging.info("=================== Start ===================") #fixing the Insecure platform issue on Facepy import urllib3.contrib.pyopenssl #reading check file for status. It points to the last line of accesstokens.txt. checkfile = open('check.txt', 'r') check = int(checkfile.read()) checkfile.close() #need to open accesstokens.txt to read the line denoted by actok = open('accesstokens.txt', 'r') lines = actok.readlines() actok.close() #Gets the number of lines from index(from check.txt) to last line in accesstokes.txt print len(lines) remaining = lines[check:] print len(remaining) #Storing app id and secret appid = "1582658458614337" appsecret = "c938c071248be2751bbde872cdc56262" #Connecting to MongoDB client = MongoClient('localhost', 27017) #Creating a MongoDB database db = client['fbapp-DB'] #Creating a collection within the database collection = db['fb-users'] #If no. of lines in remaining is 0, do nothing. 
If there are accesstokens to be converted do the following if len(remaining) != 0: #let us traverse the list till the end to get all new access tokens for line in remaining: check = check + 1 if line != ",\n": try: #The try block helps ignore any missed cases, eg, cases in which people logged out and their access tokens are not valid anymore start_time = time.clock( ) # record processing time/authorized user print line lsplit = line.split(',') stat = lsplit[0] uid = lsplit[1] #build httpconnection object conn = httplib.HTTPSConnection('graph.facebook.com') #setting up GET request conn.request( "GET", "/oauth/access_token?grant_type=fb_exchange_token&client_id=" + appid + "&client_secret=" + appsecret + "&fb_exchange_token=" + stat + "") #getting and storing the full response ltat = conn.getresponse() data = ltat.read() print data #first split the response string along '=' fsplit = data.split('=') filelt = open('longtermaccesstoken.txt', 'a') #second split string along & (all this because accesstoken size could be variable) ssplit = fsplit[1].split('&') #finally store the result of second split into long term access token acltat = ssplit[0] #we need to store this into a file of long term accesstokens thisrow = [1, 2] thisrow[0] = acltat thisrow[1] = uid writer = csv.writer(filelt, delimiter=',') filelt.write(acltat + ',' + uid + '\n') filelt.close() # #Creating a document to store in a mongoDb collection userInfo = {} # #Storing valuable information from the facebook graph: userInfo['access_token'] = acltat graph = facepy.GraphAPI(acltat) profile = graph.get('me') user_name = profile['name'] user_id = profile['id'] existing = collection.find_one({"user id": user_id}) if existing == None: userInfo['first_name'] = profile['first_name'] userInfo['last_name'] = profile['last_name'] userInfo['user id'] = user_id userInfo['gender'] = profile['gender'] userInfo['email'] = profile['email'] userInfo['birthday'] = profile['birthday'] userInfo['name'] = user_name.title() 
userInfo['vizDone'] = 0 print "YOLO, this user does not exit in the database" # print "new profile",userInfo # print "old profle",existing collection.insert_one(userInfo) else: # print "current profile",existing # print "new profile",userInfo print "this user exists in the database" collection.update({'user id': user_id}, {"$set": { 'access_token': acltat }}) #need to increment the value of check by 1 # check = check + 1 #open check.txt and store the updated value of check in it. checkfileagain = open('check.txt', 'w') checkfileagain.write(str(check)) checkfileagain.close() except: print "user loged out" #need to increment the value of check by 1 # check = check + 1 #open check.txt and store the updated value of check in it. checkfileagain = open('check.txt', 'w') checkfileagain.write(str(check)) checkfileagain.close()
def getDevices(totalNumOfRequests): logging.debug("BEGIN - getDevices") if (VERBOSE_LOGGING == "verbose"): print "BEGIN - getDevices" headers = { "Authorization": getAuthHeader(cpc_username, cpc_password), "Accept": "application/json" } currentRequestCount = 0 deactivateCount = 0 deactivateList = [] while (currentRequestCount <= totalNumOfRequests): logging.debug( "BEGIN - getDevices - Building devices list request count: " + str(currentRequestCount)) if (VERBOSE_LOGGING == "verbose"): print "BEGIN - getDevices - Building devices list request count: " + str( currentRequestCount) try: currentRequestCount = currentRequestCount + 1 conn = httplib.HTTPSConnection(cpc_host, cpc_port) conn.request( "GET", "/api/Computer?pgNum=" + str(currentRequestCount) + "&pgSize=250&incCounts=true&active=true", None, headers) data = conn.getresponse().read() conn.close() except httplib.HTTPException as inst: print "Exception: %s" % inst return None except ValueError as inst: print "Exception decoding JSON: %s" % inst return None devices = json.loads(data)['data'] for d in devices['computers']: # Get fields to compasre computerId = d['computerId'] lastConnected = d['lastConnected'] deviceName = d['name'] # If last connected date is greater than month threshold than add device to deactivate list dtLastConnected = datetime.strptime( str(lastConnected)[:10], "%Y-%m-%d") comparedate = datetime(dtLastConnected.year, dtLastConnected.month, dtLastConnected.day) three_months = NOW + relativedelta(months=-3) if three_months > comparedate: if (VERBOSE_LOGGING == "verbose"): try: logging.debug("DEACTIVATE - device id: " + str(computerId) + " device name: " + str(deviceName) + " with last connected date of: " + str(lastConnected)) print "DEACTIVATE - device id: " + str( computerId) + " device name: " + str( deviceName ) + " with last connected date of: " + str( lastConnected) except: #ignore name errors pass deactivateCount = deactivateCount + 1 deactivateList.append(d) else: if (VERBOSE_LOGGING 
== "verbose"): logging.debug("IGNORE - device id: " + str(computerId) + " with last connected date of: " + str(lastConnected)) print "IGNORE - device id: " + str( computerId) + " with last connected date of: " + str( lastConnected) if (VERBOSE_LOGGING == "verbose"): logging.debug( "END - getDevices - Building devices list request count: " + str(currentRequestCount)) print "END - getDevices - Building devices list request count: " + str( currentRequestCount) else: logging.debug("Building devices list... request count: " + str(currentRequestCount)) print "Building devices list... request count: " + str( currentRequestCount) if (VERBOSE_LOGGING == "verbose"): logging.debug("TOTAL Devices that are scheduled to be deactivated: " + str(deactivateCount)) logging.debug("END - getDevices") print "TOTAL Devices that are scheduled to be deactivated: " + str( deactivateCount) print "END - getDevices" return deactivateList
import eventlet
eventlet.monkey_patch(all=False, socket=True)
import sys
import httplib
import os
import time
from cf_auth import username, apikey

# Target container comes from the command line.
container_name = sys.argv[1]
# Any non-empty value in the environment switches to Rackspace ServiceNet.
use_service_net = os.environ.get('USECFSERVICENET', False)

# Authenticate once; the response headers carry the storage token and the
# storage URL used for all subsequent requests.
conn = httplib.HTTPSConnection('auth.api.rackspacecloud.com')
conn.request('GET', '/auth',
             headers={'x-auth-user': username, 'x-auth-key': apikey})
resp = conn.getresponse()
AUTH_TOKEN = resp.getheader('x-auth-token')
URL = resp.getheader('x-storage-url')
conn.close()

# Storage host is the netloc of the storage URL, optionally prefixed for
# the internal service network.
endpoint_host = URL.split('/')[2]
CONNECTION_ENDPOINT = ('snet-' + endpoint_host) if use_service_net else endpoint_host

SEND_HEADERS = {'X-Auth-Token': AUTH_TOKEN, 'Content-Type': 'text/plain'}
try: from pymd5 import md5, padding except: sys.exit("Please traverse to the directory having pymd5") try: url = sys.argv[1] except: sys.exit("Please provide input URL") #Extracting the original hash from the url token token = url[url.index("token=")+len("token="):url.index("&user")] #Using length extension to find has of longer string newtoken = md5(state = token.decode("hex"), count=512) appendstring = "&command3=DeleteAllFiles" newtoken.update(appendstring) newtoken = newtoken.hexdigest() #Computing padding padding = urllib.quote(padding((8+len(url[url.index("user"):]))*8)) #Updated URL url = url[:url.index("token=")+len("token=")]+newtoken + url[url.index("&user"):]+ padding + appendstring parsedUrl = urlparse.urlparse(url) conn = httplib.HTTPSConnection(parsedUrl.hostname) conn.request("GET", parsedUrl.path + "?" + parsedUrl.query) print conn.getresponse().read()
def enrich(image_bytes, save_json):
    """Send an image to the Sighthound vehicle/licenseplate recognition API
    and distill the response into a short description plus plate records.

    image_bytes -- raw encoded image bytes to analyze.
    save_json   -- path where the full API response is written as JSON.
    Returns {"message": str, "plates": [plate records], "count": int}.

    NOTE(review): indentation was reconstructed from a collapsed source
    line; confirm the nesting of the licenseplate block and the
    plates.append() call against the original file.
    """
    image_data = base64.b64encode(image_bytes).decode()
    headers = {
        "Content-type": "application/json",
        "X-Access-Token": "KhKhaWgY7Oku4p8TwYjW4bytJtzNCvyNfMPd",
    }
    params = json.dumps({"image": image_data})

    def _post():
        # Fresh connection per attempt; the endpoint requires TLS 1.2.
        conn = httplib.HTTPSConnection(
            "dev.sighthoundapi.com", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        )
        conn.request(
            "POST", "/v1/recognition?objectType=vehicle,licenseplate", params, headers
        )
        return conn.getresponse()

    # Fix: the original bare `except:` also caught KeyboardInterrupt and
    # SystemExit; narrow to Exception, keeping the single retry.
    try:
        response = _post()
    except Exception:
        print("retrying sightound")
        response = _post()

    result = json.loads(response.read())
    print("Detection Results = " + json.dumps(result, indent=4))
    with open(save_json, "w") as f:
        f.write(json.dumps(result, indent=4))

    message = ""
    plates = []
    for o in result["objects"]:
        annotations = o["vehicleAnnotation"]
        if annotations["recognitionConfidence"] > 0.0:
            if "attributes" in annotations:
                system = annotations["attributes"]["system"]
                if "color" in system:
                    # Keep only the primary color ("black/silver" -> "black").
                    color = system["color"]["name"]
                    color = color.split("/")[0]
                    message += color + " "
                if (
                    "make" in system
                    and "model" in system
                    and system["make"]["confidence"] > 0.6
                ):
                    message += (
                        system["make"]["name"] + " " + system["model"]["name"] + " "
                    )
                elif "vehicleType" in system:
                    message += system["vehicleType"] + " "
            if "licenseplate" in annotations:
                plate = annotations["licenseplate"]["attributes"]["system"]["string"]
                if (
                    annotations["licenseplate"]["attributes"]["system"]["region"][
                        "confidence"
                    ]
                    > 0.55
                ):
                    region = annotations["licenseplate"]["attributes"]["system"][
                        "region"
                    ]["name"]
                    plate["region"] = region
                    if region in us_state_abbrev:
                        # if it's not one of the 50 states it's probably an error
                        plate["state"] = us_state_abbrev[region]
                plates.append(plate)

    if message == "":
        message = "Unidentified vehicle"
    return {"message": message, "plates": plates, "count": len(result["objects"])}
########### Python 2.7 #############
# Query the Microsoft LUIS endpoint and print the raw JSON response.
import httplib, urllib, base64

headers = {
    # Request headers
    'Ocp-Apim-Subscription-Key': 'YOUR-SUBSCRIPTION-KEY',
}

params = urllib.urlencode({
    # Query parameter
    'q': 'turn on the left light',
    # Optional request parameters, set to default values
    'timezoneOffset': '0',
    'verbose': 'false',
    'spellCheck': 'false',
    'staging': 'false',
})

try:
    conn = httplib.HTTPSConnection('westus.api.cognitive.microsoft.com')
    # NOTE(review): the "{body}" placeholder from the MS sample is sent
    # verbatim as the GET request body; harmless, but None would be cleaner.
    conn.request("GET", "/luis/v2.0/apps/df67dcdb-c37d-46af-88e1-8b97951ca1c2?%s" % params, "{body}", headers)
    response = conn.getresponse()
    data = response.read()
    print(data)
    conn.close()
except Exception as e:
    # Fix: e.errno / e.strerror only exist on EnvironmentError subclasses,
    # so the old handler itself raised AttributeError for HTTP or parsing
    # errors; report the exception generically instead.
    print("Error: {0}".format(e))
####################################
def __init__(self):
    # Upper bound on attempts before giving up.
    self.max_times = 15
    # Pre-serialized "no results" payload, built once.
    self.null_data = json.dumps({"entities": []})
    # Persistent HTTPS connection to the CFA host (CFA.website is a
    # class-level attribute of the enclosing class -- TODO confirm).
    self.conn = httplib.HTTPSConnection(CFA.website)