Beispiel #1
1
def auth_flickr(request):
    """Django view implementing the Flickr web authentication flow.

    Without a ``frob`` GET parameter: a POST redirects the browser to
    Flickr's auth page (signed request for 'read' perms); a GET just renders
    the page.  With a ``frob`` (Flickr's callback): exchanges it for an auth
    token and renders the template with token and user details.

    NOTE(review): returns ``False`` when the token exchange fails — Django
    expects an HttpResponse from a view, so that path likely errors out;
    consider returning an error response instead.
    """
    from agro.sources import utils
    # Application key/secret; ``url`` is assigned but never used.
    api, secret, url = 'e22dd4a81125531e047036ed1ab2a9e7', '72a484d250375bdf', ''
    token = ''
    user_name, user_id = '', ''

    frob = request.GET.get('frob', '')

    if frob:
        # Sign the getToken call: md5 over secret + alphabetised key/value pairs.
        api_sig = md5.new('%sapi_key%sfrob%smethodflickr.auth.getToken' % (secret, api, frob)).hexdigest()
        params = urllib.urlencode({'api_key':api, 'frob':frob, 'method':'flickr.auth.getToken', 'api_sig':api_sig})
        res = utils.get_remote_data("http://api.flickr.com/services/rest/?" + params)

        # ``res`` appears to be an ElementTree element (get()/getchildren()/find()).
        if res.get("stat", "") == "fail":
            log.error("flickr retrieve failed.")
            log.error("%s" % res.get("stat"))
            return False

        auth_res = res.getchildren()[0]
        token = auth_res.find('token').text
        user = auth_res.find('user')
        user_name = user.get('username')
        user_id = user.get('nsid')

    else:
        if request.method == 'POST':
            # Send the user to Flickr's auth page with a signed 'read' request.
            perms = 'read'
            api_sig = md5.new('%sapi_key%sperms%s' % (secret, api, perms)).hexdigest()
            params = urllib.urlencode({'api_key':api, 'perms':perms, 'api_sig':api_sig})
            return HttpResponseRedirect('http://flickr.com/services/auth/?%s' % params)
        else:
            pass

    return render_to_response('flickr_auth.html', {'api':api, 'secret':secret, 'user_name':user_name, 'user_id':user_id, 'token':token,}, context_instance=RequestContext(request))
Beispiel #2
1
 def get_login_url(self, next=None):
     """Build the URL to send the user to for login.

     If a 'code' query argument is already present (Facebook's OAuth
     callback), returns our own login handler URL carrying the code and the
     ``next`` destination.  Otherwise returns the Facebook OAuth dialog URL
     with our login handler as the redirect_uri.

     ``next`` defaults to the current request URL and is resolved to an
     absolute URL when given as a relative path.
     """
     if not next:
         next = self.request.full_url()
     if not next.startswith("http://") and not next.startswith("https://"):
         next = urlparse.urljoin(self.request.full_url(), next)
     if self.get_argument("code", None):
         return (
             "http://"
             + self.request.host
             + self.reverse_url("login")
             + "?"
             + urllib.urlencode({"next": next, "code": self.get_argument("code")})
         )
     redirect_uri = (
         "http://" + self.request.host + self.reverse_url("login") + "?" + urllib.urlencode({"next": next})
     )
     # BUG FIX: removed dead code that re-tested get_argument("code") here and
     # wrote into an undefined name ``args`` — the branch above already
     # returns whenever a code is present, so it could only ever NameError.
     return "https://www.facebook.com/dialog/oauth?" + urllib.urlencode(
         {
             "client_id": options.facebook_app_id,
             "redirect_uri": redirect_uri,
             "scope": "offline_access,publish_actions",
         }
     )
Beispiel #3
1
    def request(self, operation, parameters=None):
        """
        Sends the request to the Turk server and returns a response object.

        :param operation: MTurk API operation name.
        :param parameters: optional dict of extra request parameters.
        :raises RuntimeError: if the signature or access key is missing.
        """
        # BUG FIX: the mutable default argument ``parameters={}`` was shared
        # across calls; use None and create a fresh dict per call.
        if parameters is None:
            parameters = {}

        if not self.signature or not self.accesskey:
            raise RuntimeError("Signature or access key missing")

        # Sign service name + operation + timestamp with HMAC-SHA1.
        # NOTE(review): the guard above reads self.signature/self.accesskey
        # but the values actually used below come from the ``config`` module —
        # confirm these refer to the same credentials.
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        hmacstr = hmac.new(config.signature, "AWSMechanicalTurkRequester" + operation + timestamp, hashlib.sha1)
        hmacstr = base64.encodestring(hmacstr.digest()).strip()

        logger.info("Request to MTurk: {0}".format(operation))
        for paramk, paramv in parameters.items():
            logger.debug("  {0}: {1}".format(paramk, paramv))

        # Mandatory service parameters first, caller parameters appended.
        baseurl = "/?" + urllib.urlencode(
            {
                "Service": "AWSMechanicalTurkRequester",
                "AWSAccessKeyId": config.accesskey,
                "Version": "2008-08-02",
                "Operation": operation,
                "Signature": hmacstr,
                "Timestamp": timestamp,
            }
        )
        url = baseurl + "&" + urllib.urlencode(parameters)
        url = "https://" + self.server + url

        req = urllib2.Request(url=url)
        data = urllib2.urlopen(req)

        response = Response(operation, data)
        return response
Beispiel #4
0
 def test_urlencode(self):
     """urlencode must serialize dicts into query strings, quoting values."""
     cases = [
         ({"a": 34, "bbb": "ccc"}, "a=34&bbb=ccc"),
         ({"a": 34}, "a=34"),
         ({}, ""),
         ({"a": 34, "bbb": "$%&s"}, "a=34&bbb=%24%25%26s"),
     ]
     for given, expected in cases:
         self.assertEqual(urllib.urlencode(given), expected)
Beispiel #5
0
    def run_upload_request(self):
        try:
            self.fitbit.init_tracker_for_transfer()

            url = self.FITBIT_HOST + self.START_PATH

            # Start the request Chain
            self.form_base_info()
            while url is not None:
                req = urllib2.Request(url, urllib.urlencode(self.info_dict))
                req.add_header("User-Agent", "FitBit Client")
                res = urllib2.urlopen(req).read()
                print res
                r = FitBitResponse(res)
                self.remote_info = r.response
                self.form_base_info()
                op_index = 0
                for o in r.opcodes:
                    self.info_dict["opResponse[%d]" % op_index] = base64.b64encode(''.join([chr(x) for x in self.fitbit.run_opcode(o["opcode"], o["payload"])]))
                    self.info_dict["opStatus[%d]" % op_index] = "success"
                    op_index += 1
                urllib.urlencode(self.info_dict)
                print self.info_dict
                if r.host:
                    url = "http://%s%s" % (r.host, r.path)
                    print url
                else:
                    print "No URL returned. Quitting."
                    break
        except:
            self.fitbit.base.close()
            raise
        self.fitbit.command_sleep()
        self.fitbit.base.close()
Beispiel #6
0
 def search(self, **searchparams):
   """Search iwiw.hu users and return a list of result dicts.

   Results are memcached keyed on the url-encoded search parameters.  Each
   dict carries name, nick, profile_url, pic_popup_url, pic_thumbnail and
   city (empty strings when a field is missing in the markup).
   """
   # NOTE(review): caching arguably does not belong at this layer.
   results = memcache.get(urlencode(searchparams))
   # BUG FIX: the cache check had been disabled with a debug sentinel string
   # compared via ``is``; restore the intended 'cache hit' test.
   if results is not None:
     return results
   else:
     searchurl = "http://iwiw.hu/search/pages/user/ajaxsearch.jsp?do=AdvancedSearch&page=0&"
     iwiwsearch = urlfetch.fetch(searchurl + urlencode(searchparams), headers={'Cookie': self.logincookie}).content
     soup = BeautifulSoup(iwiwsearch)
     found = []
     for card in soup.findAll("div", "cardContainer"):
       # Second anchor holds "Name [nick]"; split the two apart.
       raw_name = card.findAll("a")[1].string.strip()
       name = raw_name.split("[")[0]
       try:
         nick = re.search("\[(?P<nick>.*)\]", raw_name).group(1)
       except AttributeError:
         nick = ""
       profile_url = card.findAll("a")[1]["href"]
       try:
         pic_popup_url = card.find("a", "user_image")["onclick"].split("'")[1]
       except KeyError:
         pic_popup_url = ""
       try:
         pic_thumbnail = card.find("a", "user_image").img["src"]
       except KeyError:
         pic_thumbnail = ""
       try:
         city = card.find("div", "city").string.strip()
       except AttributeError:
         city = ""
       found.append({"name": name, "nick": nick, "profile_url": profile_url, "pic_popup_url": pic_popup_url, "pic_thumbnail": pic_thumbnail, "city": city})
     memcache.add(urlencode(searchparams), found)
     return found
    def setUp(self):
        """Register a test user and PUT four activity-state documents,
        recording the params, payloads and responses on ``self``."""
        self.username = "******"
        self.email = "mailto:[email protected]"
        self.password = "******"
        self.auth = "Basic %s" % base64.b64encode("%s:%s" % (self.username, self.password))
        registration = {'username': self.username, 'email': self.email,
                        'password': self.password, 'password2': self.password}
        self.client.post(reverse(views.register), registration,
                         X_Experience_API_Version="1.0.0")

        # (index, stateId, activityId, agent, state document)
        seeds = [
            (1, self.stateId, self.activityId, self.testagent,
             {"test": "put activity state 1", "obj": {"agent": "test"}}),
            (2, self.stateId2, self.activityId, self.testagent,
             {"test": "put activity state 2", "obj": {"agent": "test"}}),
            (3, self.stateId3, self.activityId2, self.testagent,
             {"test": "put activity state 3", "obj": {"agent": "test"}}),
            (4, self.stateId4, self.activityId2, self.otheragent,
             {"test": "put activity state 4", "obj": {"agent": "other"}}),
        ]
        for n, state_id, activity_id, agent, document in seeds:
            params = {"stateId": state_id, "activityId": activity_id, "agent": agent}
            setattr(self, 'testparams%d' % n, params)
            setattr(self, 'teststate%d' % n, document)
            path = '%s?%s' % (self.url, urllib.urlencode(params))
            setattr(self, 'put%d' % n,
                    self.client.put(path, json.dumps(document),
                                    content_type=self.content_type,
                                    Authorization=self.auth,
                                    X_Experience_API_Version="1.0.0"))
    def test_ie_cors_put_delete(self):
        """Simulate IE-style CORS: PUT and DELETE are tunnelled through POST,
        with the real method in the 'method' query parameter and headers plus
        body packed into a form-encoded POST body."""
        username = "******"
        email = "mailto:[email protected]"
        password = "******"
        auth = "Basic %s" % base64.b64encode("%s:%s" % (username, password))
        form = {'username':username,'email': email,'password':password,'password2':password}
        response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")

        testagent = '{"name":"another test","mbox":"mailto:[email protected]"}'
        sid = "test_ie_cors_put_delete_set_1"
        sparam1 = {"stateId": sid, "activityId": self.activityId, "agent": testagent}
        path = '%s?%s' % (self.url, urllib.urlencode({"method":"PUT"}))

        content = {"test":"test_ie_cors_put_delete","obj":{"actor":"another test"}}
        # Tunnelled PUT: all real headers and the state document travel in the
        # form-encoded body; a 204 with empty content signals success.
        param = "stateId=%s&activityId=%s&agent=%s&content=%s&Content-Type=application/x-www-form-urlencoded&Authorization=%s&X-Experience-API-Version=1.0.0" % (sid, self.activityId, testagent, content, auth)
        put1 = self.client.post(path, param, content_type='application/x-www-form-urlencoded')

        self.assertEqual(put1.status_code, 204)
        self.assertEqual(put1.content, '')

        # The stored state must round-trip through a plain GET, with an etag
        # that is the sha1 of the stored representation.
        r = self.client.get(self.url, {"stateId": sid, "activityId": self.activityId, "agent": testagent}, X_Experience_API_Version="1.0.0", Authorization=auth)
        self.assertEqual(r.status_code, 200)
        import ast
        c = ast.literal_eval(r.content)

        self.assertEqual(c['test'], content['test'])
        self.assertEqual(r['etag'], '"%s"' % hashlib.sha1('%s' % content).hexdigest())

        # Tunnelled DELETE via the same POST mechanism.
        dparam = "agent=%s&activityId=%s&Authorization=%s&Content-Type=application/x-www-form-urlencoded&X-Experience-API-Version=1.0.0" % (testagent,self.activityId,auth)
        path = '%s?%s' % (self.url, urllib.urlencode({"method":"DELETE"}))
        f_r = self.client.post(path, dparam, content_type='application/x-www-form-urlencoded')
        self.assertEqual(f_r.status_code, 204)
Beispiel #9
0
    def request(self, url, method="GET", parameters=None):
        """Call the API endpoint at ``url`` and return the decoded JSON.

        ``method`` 'GET' sends the parameters as a query string; any other
        method POSTs them (multipart-encoded when an 'attachment' parameter
        is present and the poster library is available).  The account token
        is always added to the parameters.

        :raises TorApiException: when the server answers with HTTP >= 400.
        """
        request_url = self.server_url + "/v" + self.api_version + url
        req_file = None
        # BUG FIX: ``parameters`` defaulted to None but was indexed
        # unconditionally, so calls without parameters raised TypeError.
        if parameters is None:
            parameters = {}
        parameters["token"] = self.api_token  # NOTE: mutates a caller-supplied dict
        if method == "GET":
            opener = urllib2.build_opener()
            req_file = opener.open(request_url + "?" + urllib.urlencode(parameters))
        else:
            # check if there is an attachment
            if "attachment" in parameters and poster_loaded:
                opener = poster.streaminghttp.register_openers()
                datagen, headers = poster.encode.multipart_encode(parameters)
                req = urllib2.Request(request_url, datagen, headers)
            else:
                req = urllib2.Request(request_url, urllib.urlencode(parameters))
            req_file = urllib2.urlopen(req)

        # parse response; the handle is closed even when decoding fails.
        try:
            json_response = str(req_file.read())
            response = json.loads(json_response)
            if req_file.code >= 400:
                raise TorApiException(response["error"])
        finally:
            req_file.close()

        return response
Beispiel #10
0
def _insert_links(data_dict, limit, offset):
    '''Adds link to the next/prev part (same limit, offset=offset+limit)
    and the resource page.

    The links reuse the current request URL, rewriting only the 'offset'
    argument of its query string: 'start' drops it, 'next'/'prev' shift it
    by +/- limit; 'prev' is only added when it would be positive.
    '''
    data_dict['_links'] = {}

    # Current request URL, split into its six urlparse components.
    current_url = toolkit.request.environ['CKAN_CURRENT_URL']
    parts = list(urlparse.urlparse(current_url))
    query_args = dict(urlparse.parse_qsl(urllib2.unquote(parts[4])))

    limit = int(limit)
    offset = int(offset)

    def _with_offset(value):
        # Copy of the query args with 'offset' replaced (or dropped for None).
        args = dict(query_args)
        if value is None:
            args.pop('offset', None)
        else:
            args['offset'] = value
        return args

    def _rebuild(args):
        # Re-assemble the URL with the given query arguments.
        rebuilt = parts[:]
        rebuilt[4] = urllib.urlencode(args)
        return urlparse.urlunparse(rebuilt)

    data_dict['_links']['start'] = _rebuild(_with_offset(None))
    data_dict['_links']['next'] = _rebuild(_with_offset(offset + limit))
    if offset - limit > 0:
        data_dict['_links']['prev'] = _rebuild(_with_offset(offset - limit))
Beispiel #11
0
    def get(self,
            url,
            username=None,
            password=None,
            **kwargs):
        """Makes a GET request to the URL specified.

        An optional ``headers`` dict may be passed in kwargs; all remaining
        kwargs are appended to the URL as query parameters.  ``username`` /
        ``password`` are registered as HTTP credentials.  Returns the
        (response, content) pair from httplib2.Http.request.
        """
        headers = {}
        if kwargs:
            if 'headers' in kwargs:
                headers = kwargs['headers']
                del kwargs['headers']
                # BUG FIX: ``'...' + headers`` concatenated str + dict and
                # raised TypeError; format the dict into the message instead.
                self.logger.debug('Headers passed in: %s' % headers)
            # Append remaining kwargs, honoring an existing query string.
            if url.find('?') >= 0:
                url = url + '&' + urlencode(kwargs)
            else:
                url = url + '?' + urlencode(kwargs)

        self.logger.debug('About to do a GET on:' + url)

        h = httplib2.Http()
        h.add_credentials(username, password)
        headers['User-Agent'] = self.user_agent

        return h.request(url, method='GET', headers=headers)
Beispiel #12
0
    def put(self,
            url,
            payload,
            contentType,
            username=None,
            password=None,
            **kwargs):

        """
        Makes a PUT request to the URL specified and includes the payload
        that gets passed in. The content type header gets set to the
        specified content type.

        An optional ``headers`` dict may be passed in kwargs; all remaining
        kwargs are appended to the URL as query parameters.  Returns the
        (response, content) pair from httplib2.Http.request.
        """

        headers = {}
        if kwargs:
            if 'headers' in kwargs:
                headers = kwargs['headers']
                del kwargs['headers']
                # BUG FIX: ``'...' + headers`` concatenated str + dict and
                # raised TypeError; format the dict into the message instead.
                self.logger.debug('Headers passed in: %s' % headers)
            # Append remaining kwargs, honoring an existing query string.
            if url.find('?') >= 0:
                url = url + '&' + urlencode(kwargs)
            else:
                url = url + '?' + urlencode(kwargs)

        self.logger.debug('About to do a PUT on:' + url)

        h = httplib2.Http()
        h.add_credentials(username, password)
        headers['User-Agent'] = self.user_agent
        if contentType is not None:
            headers['Content-Type'] = contentType
        return h.request(url, body=payload, method='PUT', headers=headers)
def wigle_print(username, password, netid):
    browser = mechanize.Browser()

    browser.open('http://wigle.net')
    reqData = urllib.urlencode({'credential_0': username,
                                'credential_1': password})

    browser.open('https://wigle.net//gps/gps/main/login', reqData)

    params = {}
    params['netid'] = netid
    reqParams = urllib.urlencode(params)
    respURL = 'http://wigle.net/gps/gps/main/confirmquery/'
    resp = browser.open(respURL, reqParams).read()

    mapLat = 'N/A'
    mapLon = 'N/A'
    rLat = re.findall(r'maplat=.*\&', resp)

    if rLat:
        mapLat = rLat[0].split('&')[0].split('=')[1]
        rLon = re.findall(r'maplon=.*\&', resp)

    if rLon:
        mapLon = rLon[0].split

    print '[-] Lat: ' + mapLat + ', Lon: ' + mapLon
Beispiel #14
0
def make_request(method, url,
    params=None, data=None, headers=None, cookies=None, files=None,
    auth=None, timeout=None, allow_redirects=False, proxies=None):
    """Sends an HTTP request Returns :class:`Response <models.Response>`

    See the requests documentation for explanation of all these parameters

    Currently proxies, files, and cookies are all ignored
    """
    http = httplib2.Http(timeout=timeout)
    http.follow_redirects = allow_redirects

    if auth is not None:
        # auth is a (user, password) pair, registered for all realms.
        http.add_credentials(auth[0], auth[1])

    if data is not None:
        # Coerce every key and value to UTF-8 byte strings before
        # form-encoding; the except branch handles values that are already
        # UTF-8-encoded byte strings (unicode() on them would raise).
        udata = {}
        for k, v in data.iteritems():
            try:
                udata[k.encode('utf-8')] = unicode(v).encode('utf-8')
            except UnicodeDecodeError:
                udata[k.encode('utf-8')] = unicode(v, 'utf-8').encode('utf-8')
        data = urlencode(udata)

    if params is not None:
        # Append query parameters, honoring any query string already in url.
        enc_params = urlencode(params, doseq=True)
        if urlparse(url).query:
            url = '%s&%s' % (url, enc_params)
        else:
            url = '%s?%s' % (url, enc_params)

    resp, content = http.request(url, method, headers=headers, body=data)

    # Format httplib2 request as requests object
    return Response(resp, content, url)
Beispiel #15
0
    def request(self, path, args=None, post_args=None):
        """Fetches the given path in the Graph API.

        We translate args to a valid query string. If post_args is given,
        we send a POST request to the given path with the given arguments.
        """
        args = args or {}
        if self.access_token:
            # Attach the token to whichever argument set will be sent.
            target = post_args if post_args is not None else args
            target["access_token"] = self.access_token
        post_data = urllib.urlencode(post_args) if post_args is not None else None

        handle = urllib2.urlopen("https://graph.facebook.com/" + path + "?" +
                                 urllib.urlencode(args), post_data)
        try:
            response = json.loads(handle.read())
        finally:
            handle.close()

        if response.get("error"):
            raise GraphAPIError(response["error"]["type"],
                                response["error"]["message"])
        return response
Beispiel #16
0
    def netflix_request(self, path, callback, access_token=None,
                           post_args=None, **args):
        """Issue a request to the Netflix REST API, OAuth-signing it when an
        access token is available.

        An 'override=POST' kwarg moves the remaining kwargs into the POST
        body.  NOTE(review): ``callback`` is never used and the function
        ends without returning ``response`` — this block looks truncated or
        unfinished; confirm against the original source before relying on it.
        """
        args['output'] = 'json'
        overridepost = False
        if(args.get('override') == 'POST'):
            args.pop('override')
            overridepost = True
            post_args = args
            args = {}
        # Add the OAuth resource request signature if we have credentials
        url = 'http://api.netflix.com' + path
        if access_token:
            # Sign over the union of query and POST arguments.
            all_args = {}
            all_args.update(args)
            all_args.update(post_args or {})
            method = 'POST' if post_args is not None else 'GET'
            oauth = self._oauth_request_parameters(
                url, access_token, all_args, method=method)
            args.update(oauth)

        if args:
            url += '?' + urllib.urlencode(args)
        try:
            if post_args is not None:
                response = urlfetch.fetch(url, method='POST',
                    payload=urllib.urlencode(post_args), deadline=10)
            else:
                response = urlfetch.fetch(url, deadline=10)
        except urlfetch.DownloadError, e:
            # Network failure is logged and swallowed; response stays None.
            logging.exception(e)
            response = None
    def test_enketo_remote_server_responses(self):
        """Exercise the enketo launchSurvey endpoint for a fresh form id.

        The first call should succeed and return a url; a second identical
        call should be rejected with reason 'existing'; a call with an empty
        server_url should be rejected with reason 'empty'."""
        #just in case if we want to shift the testing back to the main server
        testing_enketo_url = settings.ENKETO_URL
        #testing_enketo_url = 'http://enketo-dev.formhub.org'
        # Unique form id per run; dots from the timestamp are replaced.
        form_id = "test_%s" % re.sub(re.compile("\."), "_", str(time()))
        server_url = "%s/%s" % (self.base_url, self.user.username)
        enketo_url = '%slaunch/launchSurvey' % testing_enketo_url

        values = {
            'format': 'json',
            'form_id': form_id,
            'server_url': server_url
        }
        data = urllib.urlencode(values)
        req = urllib2.Request(enketo_url, data)
        try:
            response = urllib2.urlopen(req)
            response = json.loads(response.read())
            success = response['success']
            if not success and 'reason' in response:
                # Public enketo rejects non-formhub users; skip, don't fail.
                fail_msg = "This enketo installation is for use by "\
                    "formhub.org users only."
                if response['reason'].startswith(fail_msg):
                    raise SkipTest
            return_url = response['url']
            success = response['success']
            self.assertTrue(success)
            # The returned url must live on the configured enketo host.
            enketo_base_url = urlparse(settings.ENKETO_URL).netloc
            return_base_url = urlparse(return_url).netloc
            self.assertIn(enketo_base_url, return_base_url)
        except urllib2.URLError:
            self.assertTrue(False)

        #second time: the same form must be reported as already existing
        req2 = urllib2.Request(enketo_url, data)
        try:
            response2 = urllib2.urlopen(req2)
            response2 = json.loads(response2.read())
            return_url_2 = response2['url']
            success2 = response2['success']
            reason2 = response2['reason']
            self.assertEqual(return_url, return_url_2)
            self.assertFalse(success2)
            self.assertEqual(reason2, "existing")
        except urllib2.URLError:
            self.assertTrue(False)

        #error message: an empty server_url must be rejected as 'empty'
        values['server_url'] = ""
        data = urllib.urlencode(values)
        req3 = urllib2.Request(enketo_url, data)
        try:
            response3 = urllib2.urlopen(req3)
            response3 = json.loads(response3.read())
            success3 = response3['success']
            reason3 = response3['reason']
            self.assertFalse(success3)
            self.assertEqual(reason3, "empty")
        except urllib2.URLError:
            self.assertTrue(False)
Beispiel #18
0
    def post(self):
        """Convert an uploaded font to WOFF and stream it back as an
        attachment, redirecting to the form with an error message when the
        upload is missing or unparseable."""
        def bail(message):
            # Send the user back to the conversion form with an error message.
            self.redirect('/font_conversion.html?' +  urllib.urlencode(
                {'err_msg': message}))

        fontdata = self.request.POST.get('font', None)

        # Need to use isinstance as cgi.FieldStorage always evaluates to False.
        # See http://bugs.python.org/issue19097
        if not isinstance(fontdata, cgi.FieldStorage):
            bail('Please select a font')
            return

        #TODO(bstell) make this work correctly.
        font_type = 'woff'
        name = os.path.splitext(os.path.basename(fontdata.filename))[0]

        try:
            font = TTFont(fontdata.file)
        except:
            bail('failed to parse font')
            return

        self.response.headers['Content-Type'] = 'application/font-woff'
        self.response.headers['Content-Disposition'] = \
            'attachment; filename={0}.{1}'.format(name, font_type)
        font.flavor = font_type
        buf = StringIO.StringIO()
        font.save(buf)
        self.response.out.write(buf.getvalue())
Beispiel #19
0
	def prepair_query(self, media, *args, **kwards):
		"""Run the site's advanced search and return the relative URI of the
		first matching result, or False when nothing matches.

		For media == 'tvshow': args are (title, season, episode, year) and
		the result URL is rewritten to point at the requested episode.
		Otherwise: args are (title, year) and the URL is used as-is.
		"""
		if media == 'tvshow':
			year, section = args[3], 2
		else:
			year, section = args[1], 1
		uri = '/search/advanced_search.php?'
		uri += urllib.urlencode({"q": args[0], "from_year": year, "to_year": year, "section": section})
		html = self.request(uri)
		r = re.search('Search Results For: "(.*?)</table>', html, re.DOTALL)
		if not r:
			return False
		fragment = r.group(1)
		pattern = r'<a\s+href="([^"]+)"\s+title="([^"]+)'
		for match in re.finditer(pattern, fragment):
			url, title_year = match.groups('')
			if media == 'tvshow':
				# Point the show page at the requested season/episode.
				url = url.replace('-tvshow-online-free-putlocker.html', '-tvshow-season-%s-episode-%s-online-free-putlocker.html' % (args[1], args[2]))
			return url.replace(self.base_url, '')
		return False
Beispiel #20
0
    def index(self):
        """Render the map preview widget, first deduplicating repeated 'url'
        query parameters via a one-time redirect (marked with deduped=true)."""
        # Avoid duplicate URLs for the same WMS service
        # (Only if it has not been checked before)
        if not request.params.get('deduped', False):
            urls = request.params.getall('url')
            unique_urls = set(urls)

            if len(unique_urls) < len(urls):
                # Redirect to the same location, but with the deduplicated
                # URLs and the marker so we only do this once.
                offset = url_for(controller='ckanext.os.controllers.widgets:PreviewWidget',action='index')

                pieces = [urlencode([('url', u) for u in unique_urls])]
                pieces.extend(urlencode([(key, value)])
                              for key, value in request.params.iteritems()
                              if key != 'url')
                pieces.append('deduped=true')

                redirect(offset + '?' + '&'.join(pieces))

        # Render the page
        c.libraries_base_url = 'http://%s/libraries' % LIBRARIES_HOST
        c.tiles_url_ckan = TILES_URL_CKAN
        c.wms_url_ckan = WMS_URL_CKAN
        c.wfs_url_ckan = WFS_URL_CKAN

        return render('os/map_preview.html')
def movreel(url):
    """Resolve a movreel file page to a direct download link.

    Logs in with the credentials stored in the XBMC addon settings, submits
    the hidden 'F1' download form, and retries the final request up to three
    times.  Returns the download URL, or None on any failure (the bare
    excepts are deliberate best-effort handling).
    """
    try:
        user = xbmcaddon.Addon().getSetting("movreel_user")
        password = xbmcaddon.Addon().getSetting("movreel_password")

        login = '******'
        post = {'op': 'login', 'login': user, 'password': password, 'redirect': url}
        post = urllib.urlencode(post)
        result = getUrl(url, close=False).result
        result += getUrl(login, post=post, close=False).result

        # Collect the hidden inputs of the last 'F1' form, plus the
        # free/premium markers the site expects.
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[-1]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': '', 'method_premium': ''})
        post = urllib.urlencode(post)

        import time
        request = urllib2.Request(url, post)

        # The download link may take a moment to appear; retry up to 3 times.
        for i in range(0, 3):
            try:
                response = urllib2.urlopen(request, timeout=10)
                result = response.read()
                response.close()
                url = re.compile('(<a .+?</a>)').findall(result)
                url = [i for i in url if 'Download Link' in i][-1]
                url = common.parseDOM(url, "a", ret="href")[0]
                return url
            except:
                time.sleep(1)
    except:
        return
def post(user,passwd):
    fp = open("Score.txt", "w")
    login_url="http://www.dean.gxnu.edu.cn/jwxt/index.php/api/user/login"

    data={}
    data['phone']="+8613512345678"
    data['username']=user
    data['password']=passwd
    post_data=urllib.urlencode(data)
    req=urllib2.urlopen(login_url,post_data)
    content=req.read()
    sid=content[56:82]
    data2={}
    data2['session_id']=sid
    url2="http://www.dean.gxnu.edu.cn/jwxt/index.php/api/chengji/getyxcj"
    sessionid="PHPSESSID="+sid
    post_data2=urllib.urlencode(data2)
    req2=urllib2.Request(url2,post_data2)
    req2.add_header('Cookie',sessionid)
    resp=urllib2.urlopen(req2)
    content2=json.loads(resp.read().encode('utf-8'))
    print u"课程名称\t\t成绩\t\t年度/学期\t\tbk\t\tcx\t\t绩点"
    fp.writelines("课程名称\t\t成绩\t\t年度/学期\t\tbk\t\tcx\t\t绩点\n")
    for subject  in content2['msg']:
        print subject['kcmc'] + "\t\t" + subject['cj'] + "\t\t" + subject['ndxq'][:-1] + "/" + subject['ndxq'][-1] + "\t\t" + subject['bk'] + "\t\t" + subject['cx'] + "\t\t" + subject['jd']
#        print "%-40s\t%-10s" % (subject['kcmc'], subject['cj'])
        fp.write(subject['kcmc'] + "\t\t" + subject['cj'] + "\t\t" + subject['ndxq'][:-1] + "/" + subject['ndxq'][-1] + "\t\t" + subject['bk'] + "\t\t" + subject['cx'] + "\t\t" + subject['jd'] + "\n")
    fp.close()
def uploadrocket(url):
    """Resolve an uploadrocket file page to a direct download link.

    Submits the 'freeorpremium' form (choosing the free route), then the
    'F1' form including a solved captcha, and extracts the final link from
    the anchor with a DL onclick handler.  Returns None on any failure
    (the bare excepts are deliberate best-effort handling).
    """
    try:
        result = getUrl(url).result

        # First form: pick the free download route.
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "freeorpremium" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': 'Free Download'})
        post = urllib.urlencode(post)

        result = getUrl(url, post=post).result

        # Second form: hidden fields plus the captcha answer.
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update(captcha(result))
        post = urllib.urlencode(post)

        result = getUrl(url, post=post).result

        url = common.parseDOM(result, "a", ret="href", attrs = { "onclick": "DL.+?" })[0]
        return url
    except:
        return
 def decorated_function(*args, **kwargs):
     """Run the wrapped view and render its result.

     Success: 303-redirect to request.form['redirect'] with the results
     url-encoded, or return them as JSON.  AssertionError is treated as a
     client error (redirect_error target or JSON with status 400).  Any
     other exception is redirected when 'redirect_error' is supplied,
     otherwise re-raised.
     """
     try:
         results = f(*args, **kwargs)
         if 'redirect' in request.form:
             params = urllib.urlencode(results)
             return redirect(
                 '%s?%s' % (request.form['redirect'], params),
                 code=303)
         else:
             return jsonify(results=results)
     except AssertionError as ae:
         if 'redirect_error' in request.form:
             params = urllib.urlencode({'error': ae.args[0]})
             return redirect(
                 '%s?%s' % (request.form['redirect_error'], params),
                 code=303)
         else:
             return jsonify(error=ae.args[0]), 400
     except Exception as e:
         if 'redirect_error' in request.form:
             params = urllib.urlencode({'error': str(e)})
             return redirect(
                 '%s?%s' % (request.form['redirect_error'], params),
                 code=303)
         # BUG FIX: ``raise e`` re-raised with a fresh traceback; a bare
         # ``raise`` preserves the original one.
         raise
Beispiel #25
0
def urlread(url, get=None, post=None, headers=None, timeout=None):
    """Fetch ``url`` and return the response body as a string.

    ``get`` is url-encoded into the Request data and ``post`` into the
    urlopen data argument; ``headers`` are passed through to the Request.
    ``timeout`` is used when the running urllib2 supports it, otherwise the
    call is retried without one.

    BUG FIX: the dicts used to default to shared mutable ``{}`` objects;
    they now default to None with the same effective behavior.
    """
    get = get if get is not None else {}
    post = post if post is not None else {}
    headers = headers if headers is not None else {}
    req = urllib2.Request(url, urllib.urlencode(get), headers=headers)
    try:
        response = urllib2.urlopen(req, urllib.urlencode(post), timeout).read()
    except:
        # Fallback for urlopen signatures without a timeout parameter.
        # NOTE(review): this bare except also retries on network errors —
        # consider narrowing it to TypeError.
        response = urllib2.urlopen(req, urllib.urlencode(post)).read()
    return response
Beispiel #26
0
def chemical_analyses():
    """Render the paginated chemical-analyses listing.

    Reads the API credentials from the session, forwards the request's
    query parameters (plus the pagination offset) to the MetPet API, and
    renders the result page together with pagination URLs.
    """
    email = session.get('email', None)
    api_key = session.get('api_key', None)
    api = MetpetAPI(email, api_key).api

    # request.args is an immutable multidict; round-trip through JSON to
    # obtain a plain mutable dict of filters.
    filters = ast.literal_eval(json.dumps(request.args))
    offset = request.args.get('offset', 0)
    filters['offset'] = offset

    data = api.chemical_analysis.get(params=filters)
    next, previous, last, total_count = paginate_model('chemical_analyses',
                                                        data, filters)
    chemical_analyses = data.data['objects']

    # Bug fix: copy before dropping 'offset'.  The old code aliased the
    # dict, so deleting the key emptied `filters` itself as a side effect.
    first_page_filters = dict(filters)
    del first_page_filters['offset']

    first_page_url = url_for('chemical_analyses')
    if first_page_filters:
        # Bug fix: the old no-filter branch also omitted the '?' separator.
        first_page_url += '?' + urlencode(first_page_filters)

    return render_template('chemical_analyses.html',
                            chemical_analyses=chemical_analyses,
                            next_url=next,
                            prev_url=previous,
                            total=total_count,
                            first_page=first_page_url,
                            last_page=last)
Beispiel #27
0
 def post(self, params):
     print 'create user', params['email']
     try:
         conn = httplib.HTTPSConnection("www.virustotal.com")
         conn.request(method='POST', url='/en/account/signup/',
                      body=urllib.urlencode(params), headers=self.headers)
         response = conn.getresponse()
         HTML=response.read()
         error=re.search('<ul\s+?class="errorlist"><li>(?P<errorlist>.+?)</li></ul>',HTML)
         if error:
             error1=error.group("errorlist")
             print error1
         else:
             print "creat Scucess:",params['email']
             self.db_sql(params)
         conn.close()
     except:
         time.sleep(5)
         conn = httplib.HTTPSConnection("www.virustotal.com")
         conn.request(method='POST', url='/en/account/signup/',
                      body=urllib.urlencode(params), headers=self.headers)
         response = conn.getresponse()
         HTML=response.read()
         error=re.search('<ul\s+?class="errorlist"><li>(?P<errorlist>.+?)</li></ul>',HTML)
         if error:
             error1=error.group("errorlist")
             print error1
         else:
             print "creat Scucess:",params['email']
             self.db_sql(params)
         conn.close()
Beispiel #28
0
 def api_call(self, p):
     """Build, remember, and return the full service URL for request *p*.

     ``p['service']`` selects the endpoint (unknown services fall back to
     member search).  List-valued parameters are collapsed to their first
     element, and the proxy ticket plus client identifiers are appended
     before the query string is encoded.
     """
     # Collapse any list-valued parameters down to their first element.
     for key, value in p.iteritems():
         if isinstance(value, list):
             p[key] = str(value[0])

     # Endpoint dispatch table; anything unrecognised -> member search.
     endpoints = {
         "search": "/memberSearch/",
         "report": "/esReport/",
         "create": "/cohort/create/",
         "update": "/cohort/update/",
         "delete": "/cohort/delete/",
         "config": "/config/",
     }
     url = self.API_URL + endpoints.get(p['service'], "/memberSearch/")

     del p['service']

     # Authentication / client identification parameters.
     p['ticket']     = self.get_proxy_ticket()
     p['clientName'] = self.CLIENT_NAME
     p['clientId']   = self.CLIENT_ID

     self.call = url + "?" + urllib.urlencode(p)
     return self.call
Beispiel #29
0
 def _queryapi(self, method_url, get, post):
     """Perform one Yandex PDD API call via pycurl.

     :param method_url: base URL of the API method
     :param get:        dict of query-string parameters (may be empty)
     :param post:       dict of POST fields; None-valued fields are dropped
     :return:           tuple (http_status_code, decoded_json_body)

     Calls ``self.module.fail_json`` on any non-200 response.
     """
     c = pycurl.Curl()
     if get:
         query_url = method_url + '?' + urlencode(get)
     else:
         query_url = method_url
     c.setopt(c.URL, query_url)
     if post:
         # Drop fields whose value is None before encoding the body.
         post_cleared = {}
         for key in post:
             if post[key] is not None:
                 post_cleared[key] = post[key]
         c.setopt(c.POSTFIELDS, urlencode(post_cleared))
     buffer = StringIO()
     c.setopt(c.WRITEFUNCTION, buffer.write)
     c.setopt(c.HTTPHEADER, ['PddToken: ' + self.token])
     c.perform()
     # Read everything we need from the handle *before* closing it.
     http_response_code = c.getinfo(c.RESPONSE_CODE)
     http_response_data = json.loads(buffer.getvalue())
     c.close()
     if 200 != http_response_code:
         # Bug fix: the old error path concatenated an int status into a
         # str (TypeError), queried the already-closed curl handle, and
         # used attribute access (.error) on a plain dict from json.loads.
         self.module.fail_json(
             msg='Error querying yandex pdd api, HTTP status=%s error=%s'
                 % (http_response_code, http_response_data.get('error')))
     return (http_response_code, http_response_data)
def kingfiles(url):
    """Resolve a kingfiles.net page to its direct download URL.

    Submits the free-download form twice (the second time with the captcha
    answer) and scrapes the resulting player page.  Returns the direct URL,
    or None on any failure (best-effort, as in the original).
    """
    def _form_fields(page):
        # Collect the hidden inputs of the page's empty-action form; the
        # old code duplicated this scrape verbatim for both submissions.
        form = common.parseDOM(page, "Form", attrs = { "action": "" })[0]
        names = common.parseDOM(form, "input", ret="name", attrs = { "type": "hidden" })
        fields = {}
        for name in names:
            fields[name] = common.parseDOM(form, "input", ret="value", attrs = { "name": name })[0]
        fields['method_free'] = ' '
        return fields

    try:
        result = getUrl(url).result

        # First submission: request the free download.
        post = urllib.urlencode(_form_fields(result))
        result = getUrl(url, post=post).result

        # Second submission: same form again, plus the captcha answer.
        fields = _form_fields(result)
        fields.update(captcha(result))
        post = urllib.urlencode(fields)
        result = getUrl(url, post=post).result

        url = re.compile("var\s+download_url *= *'(.+?)'").findall(result)[0]
        return url
    except Exception:
        # Any scrape failure means the link could not be resolved.
        return
Beispiel #31
0
 def delete(self, path, **kw):
     """Issue a DELETE request against *path* and decode the JSON reply.

     Keyword arguments are merged into the standard request parameters
     via ``self.get_params`` and sent URL-encoded in the body.
     """
     body = urllib.urlencode(self.get_params(**kw))
     req = urllib2.Request(self.BASE_URL + path, data=body)
     # urllib2 has no native DELETE support; override the method hook.
     req.get_method = lambda: 'DELETE'
     return json.loads(urllib2.urlopen(req).read())
Beispiel #32
0
 def to_postdata(self):
     """Serialize as post data for a POST request."""
     # doseq=True maps sequence values onto repeated keys, so
     # self["k"] = ["v1", "v2"] becomes 'k=v1&k=v2' rather than a quoted
     # repr of the list.  Spaces are then escaped as '%20' instead of the
     # urlencode default '+'.
     encoded = urllib.urlencode(self, doseq=True)
     return encoded.replace('+', '%20')
Beispiel #33
0
        #print "dictx:\n",dictx
        #print '##########################'
        #print 'payload_dict:\n',payload_dict,len(payload_dict)
        #for i in payload_dict:
        #   print i

        paramslist = params.split('&')
        for i in paramslist:
            b = i.split('=')
            post_data[b[0]] = b[1]
            if b[1] == 'FUZZ':
                fuzzparam = b[0]
        for i in payload_dict:
            post_data[fuzzparam] = i
            #print post_data
            geturlcontent(url, urllib.urlencode(post_data), sys.argv[5])

        #print post_data
#print fuzzparam
#print '###################################################################'

#exit()
        for j in def_payload_dict:
            post_data[fuzzparam] = j
            geturlcontent(url, urllib.urlencode(post_data), sys.argv[5])

        #print post_data
#print '###################################################################'
    else:
        print 'input error parameter.\nexit'
        exit()
Beispiel #34
0
# -*- coding -*-
# Logs into a campus captive-portal gateway by POSTing the login form
# and prints the gateway's (GB2312-encoded) response.

import urllib2
import urllib

# Captive-portal login endpoint on the local gateway.
url = 'http://192.168.168.168/0.htm'

# Browser-like headers so the portal treats us as a normal Chrome client.
# NOTE(review): 'Upgrade-Insecure-Requests' is normally '1', not
# 'keep-alive' -- confirm the portal ignores this value.
headers = {
    'Connection': 'keep-alive',
    'Upgrade-Insecure-Requests':'keep-alive',
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36' ,
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'Referer': 'http://192.168.168.168/0.htm',
    'Accept-Encoding':'gzip, deflate, sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8'
}
# Login form fields: 'DDDDD' carries the account number and 'upass' the
# (redacted) password; presumably '0MKKey' is the submit button value --
# verify against the portal's HTML form.
data = {
    'DDDDD':'101001972000800',
    'upass':'******',
    '0MKKey':'(unable to decode value)',
    'v6ip':''
}
# Submit the form and decode the gateway's GB2312 response for display.
post_data = urllib.urlencode(data)
request = urllib2.Request(url,post_data,headers=headers)
response = urllib2.urlopen(request)
result = response.read().decode('GB2312')
print result
Beispiel #35
0
 def post(self):
     """Validate a user's infi-sync subscription and write a JSON status.

     Reply statuses: 1 = subscription valid (device reg_ids included),
     0 = invalid, 2 = the verification service could not decide.

     A cached `infisync` record that is still within its validity window
     short-circuits the remote check; otherwise the purchase-check
     service is consulted and the record is updated or created.
     """
     self.response.headers['Content-Type'] = 'text/plain'
     uid = self.request.get('user_id')
     token = self.request.get('token')
     sign = self.request.get('sign')
     orderid = self.request.get('orderId')
     obj_list = infisync.query(infisync.user_id == uid).fetch()
     ret_status = {}
     # Fetch the user's registered device ids from the companion service.
     url = "http://acjs-cdc-andro.appspot.com/retrieve_wo_json"
     values = {}
     values['user_id'] = uid
     data = urllib.urlencode(values)
     req = urllib2.Request(url, data)
     response = urllib2.urlopen(req)
     regids_str = response.read()
     if(len(obj_list) != 0):
         obj = obj_list[0]
         now = int(round(time.time()*1000))
         if now < obj.validuntil_ts_msec:
             # Cached record still valid; no remote check needed.
             ret_status["status"] = 1
             ret_status["reg_ids"] = regids_str
         else:
             # Cached record expired: re-verify with the *stored* token.
             json_obj = self._purchase_check(obj.token, regids_str, uid)
             if(json_obj['status'] == 1):
                 self._store_subscription(obj, uid, json_obj, orderid, sign, token)
                 ret_status["status"] = 1
                 ret_status['reg_ids'] = regids_str
             if(json_obj['status'] == 0):
                 obj.not_valid = 1
                 obj.put()
                 ret_status["status"] = 0
             if(json_obj['status'] == 2):
                 ret_status["status"] = 2
     else:
         # No cached record: verify with the token from this request.
         json_obj = self._purchase_check(token, regids_str, uid)
         if(json_obj['status'] == 1):
             obj = infisync()
             self._store_subscription(obj, uid, json_obj, orderid, sign, token)
             ret_status["status"] = 1
             ret_status['reg_ids'] = regids_str
         # Bug fix: these two checks were indented inside the status==1
         # branch and could never run.  There is no stored record to
         # invalidate on this path, so only the reply status is set.
         if(json_obj['status'] == 0):
             ret_status["status"] = 0
         if(json_obj['status'] == 2):
             ret_status["status"] = 2
     self.response.write(json.dumps(ret_status))

 def _purchase_check(self, token, regids_str, uid):
     """POST the subscription-check service and return its decoded JSON.

     The old code duplicated this request verbatim in both branches.
     """
     url = "http://acjs.azurewebsites.net/acjs/CDCSubscriptionPurchaseChk_vGAE.php"
     values = {}
     values['token'] = token
     values['product_id'] = 'sub_infi_sync'
     values['reg_ids'] = regids_str
     values['user_id'] = uid
     data = urllib.urlencode(values)
     req = urllib2.Request(url, data)
     response = urllib2.urlopen(req)
     return json.loads(response.read())

 def _store_subscription(self, obj, uid, json_obj, orderid, sign, token):
     """Copy the verified subscription details onto *obj* and persist it."""
     obj.user_id = uid
     obj.autorenewing = json_obj["auto_ren"]
     obj.initiation_ts_msec = json_obj["init_ts"]
     obj.not_valid = 0
     obj.order_id = orderid
     obj.sign = sign
     obj.token = token
     obj.validuntil_ts_msec = json_obj["valid_ts"]
     obj.put()
Beispiel #36
0
# Debug trace: record the config request URL on the openWB ramdisk.
f.write(str(reg))
f.close()
response= urllib2.urlopen(reg)
responsetext  = response.read()
f = open('/var/www/html/openWB/ramdisk/zoereply1lp1', 'w')
f.write(str(responsetext))
f.close()
# The reply is the Renault Android app configuration; pull out the Gigya
# (authentication) and Kamereon (vehicle API) endpoints and API keys.
android_config = json.loads(responsetext)
gigyarooturl = android_config['servers']['gigyaProd']['target'] 
gigyaapikey = android_config['servers']['gigyaProd']['apikey'] 
kamereonrooturl = android_config['servers']['wiredProd']['target']
kamereonapikey = android_config['servers']['wiredProd']['apikey']
#print(time_string, 'gigyarooturl',gigyarooturl,gigyaapikey,kamereonrooturl,kamereonapikey)
#
# Log into Gigya with the account credentials to obtain a session cookie.
# NOTE(review): encoding the already URL-encoded (ASCII) payload as
# 'Big5' is effectively a no-op -- confirm the intent.
payload = {'loginID': loginID, 'password': password, 'apiKey': gigyaapikey} 
data = urllib.urlencode(payload) 
data = data.encode('Big5') 
reg= gigyarooturl + '/accounts.login?' + data
#print ('c2',reg)
# Debug trace of the login request, then perform it.
f = open('/var/www/html/openWB/ramdisk/zoereq2lp1', 'w')
f.write(str(reg))
f.close()
response= urllib2.urlopen(reg)
responsetext  = response.read()
f = open('/var/www/html/openWB/ramdisk/zoereply2lp1', 'w')
f.write(str(responsetext))
f.close()
# Extract the Gigya session cookie used by subsequent API calls.
gigya_session = json.loads(responsetext)
gigyacookievalue = gigya_session['sessionInfo']['cookieValue']
#print(time_string,'gigyacookievalue',gigyacookievalue)
#
def fichas(item):
    """Scrape a listing page of the altadefinizione.click channel.

    Returns a list of movie Items (title, quality, IMDB score, thumbnail)
    plus a trailing "next page" Item when the page links one.
    """
    logger.info("[altadefinizioneclick.py] fichas")

    itemlist = []

    # Download the page (behind Cloudflare).
    data = scrapertools.anti_cloudflare(item.url, headers)
    # fix - quality: inject an SD marker where the markup omits it, so the
    # single scrape pattern below always finds a quality value.
    data = re.sub(
        r'<div class="wrapperImage"[^<]+<a',
        '<div class="wrapperImage"><fix>SD</fix><a',
        data
    )
    # fix - IMDB: inject a 0.0 placeholder where the score is missing.
    data = re.sub(
        r'<h5> </div>',
        '<fix>IMDB: 0.0</fix>',
        data
    )
    # ------------------------------------------------
    # Rebuild the site cookies from the stored cookie jar so thumbnails
    # can be fetched with the same session.
    cookies = ""
    matches = re.compile('(.altadefinizione.site.*?)\n', re.DOTALL).findall(config.get_cookie_data())
    for cookie in matches:
        name = cookie.split('\t')[5]
        value = cookie.split('\t')[6]
        cookies += name + "=" + value + ";"
    # NOTE(review): this appends to the module-level `headers` list on
    # every call, so the Cookie header accumulates -- confirm intended.
    headers.append(['Cookie', cookies[:-1]])
    import urllib
    _headers = urllib.urlencode(dict(headers))
    # ------------------------------------------------

    # Search result pages and listing pages use different markup orders.
    if "/?s=" in item.url:
        patron = '<div class="col-lg-3 col-md-3 col-xs-3">.*?'
        patron += 'href="([^"]+)".*?'
        patron += '<div class="wrapperImage"[^<]+'
        patron += '<[^>]+>([^<]+)<.*?'
        patron += 'src="([^"]+)".*?'
        patron += 'class="titleFilm">([^<]+)<.*?'
        patron += 'IMDB: ([^<]+)<'
    else:
        patron = '<div class="wrapperImage"[^<]+'
        patron += '<[^>]+>([^<]+)<.*?'
        patron += 'href="([^"]+)".*?'
        patron += 'src="([^"]+)".*?'
        patron += 'href[^>]+>([^<]+)</a>.*?'
        patron += 'IMDB: ([^<]+)<'

    matches = re.compile(patron, re.DOTALL).findall(data)

    for scraped_1, scraped_2, scrapedthumbnail, scrapedtitle, scrapedpuntuacion in matches:

        # On search pages the first capture is the URL and the second the
        # quality; on listing pages it is the other way around.
        scrapedurl = scraped_2
        scrapedcalidad = scraped_1
        if "/?s=" in item.url:
            scrapedurl = scraped_1
            scrapedcalidad = scraped_2

        title = scrapertools.decodeHtmlentities(scrapedtitle)
        title += " (" + scrapedcalidad + ") (" + scrapedpuntuacion + ")"

        # ------------------------------------------------
        # Attach the session headers so the thumbnail loads behind Cloudflare.
        scrapedthumbnail += "|" + _headers
        # ------------------------------------------------

        itemlist.append(infoSod(
            Item(channel=__channel__,
                 action="findvideos",
                 title="[COLOR azure]" + title + "[/COLOR]",
                 url=scrapedurl,
                 thumbnail=scrapedthumbnail,
                 fulltitle=title,
                 show=title), tipo='movie'))

    # Pagination: add a "next" entry when the page links a follow-up page.
    next_page = scrapertools.find_single_match(data, '<a class="next page-numbers" href="([^"]+)">')
    if next_page != "":
        itemlist.append(
            Item(channel=__channel__,
                 action="fichas",
                 title="[COLOR orange]Successivo >>[/COLOR]",
                 url=next_page,
                 thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png"))

    return itemlist
#!/usr/bin/env python
"""
Post seafile email/password to obtain auth token.
"""

import urllib
import urllib2
import simplejson as json


# Seafile API endpoint that exchanges credentials for an auth token.
url = 'https://seacloud.cc/api2/auth-token/'
# Credentials are redacted here; fill in real values before running.
values = {'username': '******',
          'password': '******'}
# POST the form-encoded credentials.
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
response = urllib2.urlopen(req)
the_page = response.read()
# The reply is JSON of the form {"token": "..."}.
token = json.loads(the_page)['token']

print token

Beispiel #39
0
 def post(self, path, **kw):
     """POST *path* with the standard parameters and decode the JSON reply.

     Keyword arguments are merged into the request parameters via
     ``self.get_params`` and sent URL-encoded in the request body.
     """
     body = urllib.urlencode(self.get_params(**kw))
     raw = urllib2.urlopen(self.BASE_URL + path, body).read()
     return json.loads(raw)
Beispiel #40
0
    def start(self):
        """Crawl the student publicity list page by page.

        Iterates the paginated listing (module-level `page` counter up to
        7895), follows every 15th student-detail link, and collects the
        decoded detail text into the module-level `mySet`.
        """
        global page
        print 'Begin to crow, please wait' + '\n'
        cnt = 0
        while (page < 7895):
            print 'The ' + str(page) + ' page'
            # Form fields of the listing page's search form; most values
            # are fixed, only currentPage varies.
            data = urllib.urlencode({
                'conditionSelector': 'departmentSpan',
                'departmentId': '',
                'specialtyId': '',
                'grade': '2015',
                'naturalClassId': '0',
                'hasComplaint': '-1',
                'search': '%e6%9f%a5%e8%af%a2',  # URL-encoded UTF-8 for the Chinese word "query"
                'listTableClass': 'HiddenList',
                'itemPerPage': '30',
                'currentPage': page,
                'sort.sortColumn': 'participant.studentId',
                'sort.ascending': 'true'
            })
            header = {
                "User-Agent":
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 UBrowser/6.1.2107.204 Safari/537.36",
                "Referer":
                'http://sztz.gdufe.edu.cn/sztz/publicity/studentList.jsp'
            }
            postUrl = "http://sztz.gdufe.edu.cn/sztz/publicity/studentList.jsp"
            # POST through the session opener (keeps cookies across pages).
            request = urllib2.Request(postUrl, data, header)
            response = self.opener.open(request)
            html_doc = response.read()
            page = page + 1
            soup = BeautifulSoup(html_doc,
                                 "html.parser",
                                 from_encoding='utf-8')

            # Links to individual student detail pages.
            links = soup.find_all(
                name='a', attrs={'href': re.compile(r'itemParticipantId=')})

            for link in links:
                cnt = cnt + 1
                # Sample only every 15th detail link to limit requests.
                if (cnt % 15 == 0):
                    newUrl = mainUrl + link['href']
                    response1 = urllib2.urlopen(newUrl)
                    html_doc = response1.read()
                    soup = BeautifulSoup(html_doc,
                                         "html.parser",
                                         from_encoding='utf-8')
                    # Detail cells spanning 3 columns hold the text we want.
                    linkss = soup.find_all(name='td',
                                           attrs={
                                               'colspan': "3",
                                               'class': "tittle3"
                                           })

                    for linkk in linkss:
                        if not linkk.a:
                            if len(linkk) == 1:
                                # Normalize the cell text: GBK-encode,
                                # collapse whitespace, strip the list repr,
                                # then undo the escaping (py2-only
                                # 'string_escape' codec).
                                s = linkk.get_text().encode('gbk', 'ignore')
                                s = str(s)
                                s = s.split()
                                s = str(s)
                                s = s.replace("', '", ' ')
                                s = s[2:-2]
                                s = s.encode('gbk').decode('string_escape')
                                mySet.add(s)
Beispiel #41
0
    def sources(self, url, hostDict, hostprDict):
        """Resolve playable video sources for the given title URL.

        Loads the site's player, asks its AJAX endpoint for the stream
        URL, and returns a list of source dicts (openload hoster links or
        direct gvideo streams).  Any failure returns whatever sources
        were collected so far.
        """
        try:
            sources = []

            if url == None: return sources
            url = urlparse.urljoin(self.base_link, url)
            # Normalize to the page root, then target its watching page.
            url = path = re.sub('/watching.html$', '', url.strip('/'))
            url = referer = url + '/watching.html'
            p = client.request(url)
            # The player id is the argument of the page's load_player call.
            p = re.findall('load_player\(.+?(\d+)', p)
            p = urllib.urlencode({'id': p[0]})
            headers = {
                'Accept-Formating': 'application/json, text/javascript',
                'Server': 'cloudflare-nginx',
                'Referer': referer
            }
            # Ask the AJAX endpoint for the actual stream location.
            r = urlparse.urljoin(self.base_link, '/ajax/movie/load_player_v3')
            r = client.request(r, post=p, headers=headers, XHR=True)
            url = json.loads(r)['value']
            url = client.request(url,
                                 headers=headers,
                                 XHR=True,
                                 output='geturl')

            if 'openload.io' in url or 'openload.co' in url or 'oload.tv' in url:
                sources.append({
                    'source': 'openload.co',
                    'quality': 'HD',
                    'language': 'en',
                    'url': url,
                    'direct': False,
                    'debridonly': False
                })
                # Deliberate control flow: jump to the outer except so the
                # gvideo parsing below is skipped and `sources` is returned.
                raise Exception()

            # Otherwise the URL serves a JSON playlist of direct streams.
            r = client.request(url, headers=headers, XHR=True)
            try:
                src = json.loads(r)['playlist'][0]['sources']
                links = [i['file'] for i in src if 'file' in i]
                for i in links:
                    try:
                        sources.append({
                            'source':
                            'gvideo',
                            'quality':
                            directstream.googletag(i)[0]['quality'],
                            'language':
                            'en',
                            'url':
                            i,
                            'direct':
                            True,
                            'debridonly':
                            False
                        })
                    except:
                        pass
            except:
                pass

            return sources
        except:
            # Best-effort: return whatever was collected before the failure.
            return sources
def makeFC_FromExtents(server,
                       port,
                       adminUser,
                       adminPass,
                       mapService,
                       outputFC,
                       token=None):
    ''' Build a polygon feature class from a map service's logged export extents.

    Queries the ArcGIS Server admin logs (level FINE) for "Extent:" messages
    emitted by *mapService*, parses each message's extent / size / scale,
    and inserts one polygon per event into *outputFC*.

    Requires Admin user/password, as well as server and port (necessary to
    construct a token if one does not exist).  If a token exists, you can
    pass one in for use.
    '''

    if token is None:
        token = gentoken(server, port, adminUser, adminPass)

    #mapService = mapService.replace( ".", "/")
    mapService = urllib.quote(mapService.encode('utf8'))

    # Full extent of the service, used to discard out-of-range log extents
    # and to pick the spatial reference of the output feature class.
    extentURL = "http://{}:{}/arcgis/rest/services/{}".format(
        server, port, mapService.replace(".", "/"))
    fullExtent = getFullExtent(extentURL)

    # Query the admin log endpoint, filtered to this service only.
    logQueryURL = "http://{}:{}/arcgis/admin/logs/query".format(server, port)
    logFilter = "{'services': ['" + mapService + "']}"
    params = urllib.urlencode({
        'level': 'FINE',
        'filter': logFilter,
        'token': token,
        'f': 'json'
    })
    extentData = urllib2.urlopen(logQueryURL, params).read()

    print logQueryURL

    if 'success' not in extentData:
        arcpy.AddError("Error while querying logs: " + str(extentData))
        return

    else:
        # Got good data, proceed....
        dataObj = json.loads(extentData)

        # Create a featureclass, and open a cursor
        fc = createFC(outputFC, fullExtent["spatialReference"]["wkid"])
        cursorFC = arcpy.da.InsertCursor(
            fc,
            ["SHAPE@", "EventDate", "Scale", "InvScale", "Width", "Height"])

        # Need this variable to track number of events found for ExportMapImage call
        logEvents = 0

        # Need Array to hold Shape
        shapeArray = arcpy.Array()

        # Iterate over messages
        for item in dataObj["logMessages"]:
            # Log timestamps are epoch milliseconds.
            eventDateTime = datetime.datetime.fromtimestamp(
                float(item["time"]) / 1000)

            if item["message"].startswith("Extent:"):
                eventScale = None  # Scale
                eventInvScale = None  # Inverse-Scale
                eventWidth = None  # Width
                eventHeight = None  # Height

                # Cycle through message details; each is "Key:Value" pairs
                # separated by semicolons.
                for pair in item["message"].replace(" ", "").split(";"):
                    if pair.count(":") == 1:
                        key, val = pair.split(":")

                        # Pick out Extent
                        if key == "Extent" and val.count(",") == 3:
                            # Split into ordinate values
                            MinX, MinY, MaxX, MaxY = val.split(",")
                            MinX = float(MinX)
                            MinY = float(MinY)
                            MaxX = float(MaxX)
                            MaxY = float(MaxY)

                            # Make sure extent is within range
                            if MinX > fullExtent["xmin"] and MaxX < fullExtent[
                                    "xmax"] and MinY > fullExtent[
                                        "ymin"] and MaxY < fullExtent["ymax"]:
                                # Closed ring: repeat the first corner.
                                shapeArray.add(arcpy.Point(MinX, MinY))
                                shapeArray.add(arcpy.Point(MinX, MaxY))
                                shapeArray.add(arcpy.Point(MaxX, MaxY))
                                shapeArray.add(arcpy.Point(MaxX, MinY))
                                shapeArray.add(arcpy.Point(MinX, MinY))
                                polygonGeo = arcpy.Polygon(shapeArray)

                        # Pick out Size
                        if key == "Size" and val.count(",") == 1:
                            eventWidth, eventHeight = val.split(",")
                            eventWidth = float(eventWidth)
                            eventHeight = float(eventHeight)

                        # Pick out Scale
                        if key == "Scale":
                            eventScale = float(val)
                            eventInvScale = 1 / eventScale

                # Save if Shape created
                # NOTE(review): `polygonGeo` is only (re)built when the
                # extent passes the range check; if shapeArray were ever
                # non-empty without that, a stale polygon would be
                # inserted -- confirm the intended guard.
                if shapeArray.count > 0:

                    # Add Shape and Event Date
                    cursorFC.insertRow([
                        polygonGeo, eventDateTime, eventScale, eventInvScale,
                        eventWidth, eventHeight
                    ])

                    # Clear out Array points
                    shapeArray.removeAll()

                    logEvents += 1
        if cursorFC:
            del cursorFC

        arcpy.AddMessage(
            "Total number of events found in logs: {0}".format(logEvents))

        return
Beispiel #43
0
def get_sys_logs(self, cr, uid):
    """
    Utility method to send a publisher warranty get logs messages.

    Gathers anonymous database statistics (uuid, user counts, version,
    company contact details), POSTs them to the configured publisher
    warranty URL, and returns the evaluated server reply.
    """
    pool = pooler.get_pool(cr.dbname)

    dbuuid = pool.get('ir.config_parameter').get_param(cr, uid,
                                                       'database.uuid')
    db_create_date = pool.get('ir.config_parameter').get_param(
        cr, uid, 'database.create_date')
    # "Active" means logged in within the last 15 days.
    limit_date = datetime.datetime.now()
    limit_date = limit_date - datetime.timedelta(15)
    limit_date_str = limit_date.strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT)
    nbr_users = pool.get("res.users").search(cr, uid, [], count=True)
    nbr_active_users = pool.get("res.users").search(
        cr, uid, [("login_date", ">=", limit_date_str)], count=True)
    nbr_share_users = False
    nbr_active_share_users = False
    # Share users only exist when the 'share' column is installed.
    if "share" in pool.get("res.users")._all_columns:
        nbr_share_users = pool.get("res.users").search(cr,
                                                       uid,
                                                       [("share", "=", True)],
                                                       count=True)
        nbr_active_share_users = pool.get("res.users").search(
            cr,
            uid, [("share", "=", True), ("login_date", ">=", limit_date_str)],
            count=True)
    user = pool.get("res.users").browse(cr, uid, uid)

    web_base_url = self.pool.get('ir.config_parameter').get_param(
        cr, uid, 'web.base.url', 'False')
    msg = {
        "dbuuid": dbuuid,
        "nbr_users": nbr_users,
        "nbr_active_users": nbr_active_users,
        "nbr_share_users": nbr_share_users,
        "nbr_active_share_users": nbr_active_share_users,
        "dbname": cr.dbname,
        "db_create_date": db_create_date,
        "version": release.version,
        "language": user.lang,
        "web_base_url": web_base_url,
    }
    # Company id 1 is assumed to be the main company.
    msg.update(
        pool.get("res.company").read(cr, uid, [1],
                                     ["name", "email", "phone"])[0])

    # urlopen only grew a timeout parameter in Python 2.6.
    add_arg = {"timeout": 30} if sys.version_info >= (2, 6) else {}
    arguments = {
        'arg0': msg,
        "action": "update",
    }
    arguments_raw = urllib.urlencode(arguments)

    url = config.get("publisher_warranty_url")

    uo = urllib2.urlopen(url, arguments_raw, **add_arg)
    result = {}
    try:
        submit_result = uo.read()
        # NOTE(review): evaluating the remote reply with safe_eval trusts
        # the warranty server; confirm the endpoint is controlled.
        result = safe_eval(submit_result)
    finally:
        uo.close()
    return result
Beispiel #44
0
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url == None: return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            aliases = eval(data['aliases'])
            headers = {}

            if 'tvshowtitle' in data:
                year = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(
                    data['premiered'])[0][0]
                episode = '%01d' % int(data['episode'])
                url = '%s/tv-series/%s-season-%01d/watch/' % (
                    self.base_link, cleantitle.geturl(
                        data['tvshowtitle']), int(data['season']))
                url = client.request(url,
                                     headers=headers,
                                     timeout='10',
                                     output='geturl')

                if url == None:
                    url = self.searchShow(data['tvshowtitle'], data['season'],
                                          aliases, headers)

            else:
                episode = None
                year = data['year']
                url = self.searchMovie(data['title'], data['year'], aliases,
                                       headers)

            referer = url
            r = client.request(url)
            if episode == None:
                y = re.findall('Released\s*:\s*.+?\s*(\d{4})', r)[0]
                if not year == y: raise Exception()

            r = client.parseDOM(r, 'div', attrs={'class': 'sli-name'})
            r = zip(client.parseDOM(r, 'a', ret='href'),
                    client.parseDOM(r, 'a'))

            if not episode == None:
                r = [
                    i[0] for i in r
                    if i[1].lower().startswith('episode %02d:' %
                                               int(data['episode']))
                ]
            else:
                r = [i[0] for i in r]

            for u in r:
                try:
                    p = client.request(u, referer=referer, timeout='10')
                    t = re.findall('player_type\s*:\s*"(.+?)"', p)[0]
                    if t == 'embed': raise Exception()
                    headers = {'Origin': self.base_link}
                    eid = client.parseDOM(p,
                                          'input',
                                          ret='value',
                                          attrs={'name': 'episodeID'
                                                 })[0].encode('utf-8')
                    r = client.request(self.token_link,
                                       post=urllib.urlencode({'id': eid}),
                                       headers=headers,
                                       referer=referer,
                                       timeout='10',
                                       XHR=True)
                    isV2 = False

                    try:
                        js = json.loads(r)
                        hash = js['hash']
                        token = js['token']
                        _ = js['_']
                        url = self.grabber_link % (eid, hash, token, _)
                        u = client.request(url,
                                           headers=headers,
                                           referer=referer,
                                           timeout='10',
                                           XHR=True)
                        js = json.loads(u)
                    except:
                        isV2 = True
                        pass

                    if isV2:
                        mid = re.compile('.?id:\s+"(\d+)"').findall(
                            p)[0].encode('utf-8')
                        timestamp = str(int(time.time() * 1000))
                        url = self.token_v2_link % (eid, mid, timestamp)
                        script = client.request(url,
                                                headers=headers,
                                                referer=referer,
                                                timeout='10',
                                                XHR=True)
                        script = self.aadecode(script)
                        if 'hash' in script and 'token' in script:
                            hash = re.search('''hash\s+=\s+['"]([^"']+)''',
                                             script).group(1).encode('utf-8')
                            token = re.search('''token\s+=\s+['"]([^"']+)''',
                                              script).group(1).encode('utf-8')
                            _ = re.search('''_\s+=\s+['"]([^"']+)''',
                                          script).group(1).encode('utf-8')
                            url = self.grabber_link % (eid, hash, token, _)
                            u = client.request(url,
                                               headers=headers,
                                               referer=referer,
                                               timeout='10',
                                               XHR=True)
                            js = json.loads(u)

                    try:
                        u = js['playlist'][0]['sources']
                        u = [i['file'] for i in u if 'file' in i]

                        for i in u:
                            try:
                                sources.append({
                                    'source':
                                    'gvideo',
                                    'quality':
                                    directstream.googletag(i)[0]['quality'],
                                    'language':
                                    'en',
                                    'url':
                                    i,
                                    'direct':
                                    True,
                                    'debridonly':
                                    False
                                })
                            except:
                                pass
                    except:
                        pass

                    try:
                        u = js['backup']
                        u = urlparse.parse_qs(urlparse.urlsplit(u).query)
                        u = dict([(i, u[i][0]) if u[i] else (i, '')
                                  for i in u])
                        eid = u['eid']
                        mid = u['mid']

                        if isV2:
                            p = client.request(self.backup_token_link_v2 %
                                               (eid, mid, _),
                                               XHR=True,
                                               referer=referer,
                                               timeout='10')
                            x = re.search('''_x=['"]([^"']+)''', p).group(1)
                            y = re.search('''_y=['"]([^"']+)''', p).group(1)
                            u = client.request(self.backup_link_v2 %
                                               (eid, x, y),
                                               referer=referer,
                                               XHR=True,
                                               timeout='10')
                            js = json.loads(u)
                        else:
                            p = client.request(self.backup_token_link %
                                               (eid, mid, _),
                                               XHR=True,
                                               referer=referer,
                                               timeout='10')
                            x = re.search('''_x=['"]([^"']+)''', p).group(1)
                            y = re.search('''_y=['"]([^"']+)''', p).group(1)
                            u = client.request(self.backup_link % (eid, x, y),
                                               referer=referer,
                                               XHR=True,
                                               timeout='10')
                            js = json.loads(u)

                        try:
                            u = js['playlist'][0]['sources']
                            u = [i['file'] for i in u if 'file' in i]

                            for i in u:
                                try:
                                    sources.append({
                                        'source':
                                        'gvideo',
                                        'quality':
                                        directstream.googletag(i)[0]
                                        ['quality'],
                                        'language':
                                        'en',
                                        'url':
                                        i,
                                        'direct':
                                        True,
                                        'debridonly':
                                        False
                                    })
                                except:
                                    pass
                        except:
                            pass
                    except:
                        pass
                except:
                    pass

            return sources
        except:
            return sources
class EventHelper(object):
    """
    Helper class for Events.
    """
    @classmethod
    def groupByWeek(self, events):
        """
        Group events into an OrderedDict keyed by week label.

        Events should already be ordered by start_date. Official championship
        events, official regional/district events (bucketed into numbered
        weeks anchored on the Thursday of the first event), preseason,
        offseason, and date-less events each get their own label.
        """
        grouped = collections.OrderedDict()  # week_label -> list of events

        week_num = 1
        week_start = None
        weekless = []
        offseason = []
        preseason = []

        for event in events:
            etype = event.event_type_enum
            if event.official and etype in {EventType.CMP_DIVISION, EventType.CMP_FINALS}:
                grouped.setdefault(CHAMPIONSHIP_EVENTS_LABEL, []).append(event)
            elif event.official and etype in {EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP}:
                start = event.start_date
                if start is None or (start.month == 12 and start.day == 31):
                    # No usable date -- can't place it into a week.
                    weekless.append(event)
                else:
                    if week_start is None:
                        # Anchor week 1 on the Thursday on/before the first dated event.
                        days_past_thursday = (start.weekday() - 3) % 7  # weekday() == 3 is Thursday
                        week_start = start - datetime.timedelta(days=days_past_thursday)

                    if start >= week_start + datetime.timedelta(days=7):
                        week_num += 1
                        week_start += datetime.timedelta(days=7)

                    grouped.setdefault(REGIONAL_EVENTS_LABEL.format(week_num), []).append(event)
            elif etype == EventType.PRESEASON:
                preseason.append(event)
            else:
                # Everything else is treated as an offseason event.
                offseason.append(event)

        # Append the date-less and non-official buckets last, in a fixed order.
        for label, bucket in ((WEEKLESS_EVENTS_LABEL, weekless),
                              (OFFSEASON_EVENTS_LABEL, offseason),
                              (PRESEASON_EVENTS_LABEL, preseason)):
            if bucket:
                grouped[label] = bucket

        return grouped

    @classmethod
    def distantFutureIfNoStartDate(self, event):
        if not event.start_date:
            return datetime.datetime(2177, 1, 1, 1, 1, 1)
        else:
            return event.start_date

    @classmethod
    def calculateTeamWLTFromMatches(self, team_key, matches):
        """
        Given a team_key and some matches, find the Win Loss Tie.
        """
        wlt = {"win": 0, "loss": 0, "tie": 0}

        for match in matches:
            if match.has_been_played and match.winning_alliance is not None:
                if match.winning_alliance == "":
                    wlt["tie"] += 1
                elif team_key in match.alliances[match.winning_alliance]["teams"]:
                    wlt["win"] += 1
                else:
                    wlt["loss"] += 1
        return wlt

    @classmethod
    def getTeamWLT(self, team_key, event):
        """
        Compute the Win/Loss/Tie record of team_key at the given event.
        """
        # Fetch keys only, then resolve them in one batch get.
        match_keys = Match.query(
            Match.event == event.key,
            Match.team_key_names == team_key).fetch(500, keys_only=True)
        matches = ndb.get_multi(match_keys)
        return self.calculateTeamWLTFromMatches(team_key, matches)

    @classmethod
    def getWeekEvents(self):
        """
        Get events this week.
        In general, if an event is currently going on, it shows up in this query.
        An event shows up in this query iff:
        a) The event is within_a_day
        OR
        b) The event.start_date is on or within 4 days after the closest Thursday
        """
        today = datetime.datetime.today()
        one_week = datetime.timedelta(days=7)

        # Kick off the candidate query asynchronously; only events within
        # +/- 7 days of today can possibly qualify.
        candidate_keys_future = Event.query().filter(
          Event.start_date >= (today - one_week)).filter(
          Event.start_date <= (today + one_week)).order(
          Event.start_date).fetch_async(50, keys_only=True)

        # weekday() == 3 is Thursday; the offset ranges 3..-3 (Monday..Sunday).
        closest_thursday = today + datetime.timedelta(days=3 - today.weekday())

        week_events = []
        for event_future in ndb.get_multi_async(candidate_keys_future.get_result()):
            event = event_future.get_result()
            if event.within_a_day:
                week_events.append(event)
                continue
            delta = event.start_date.date() - closest_thursday.date()
            # On the closest Thursday, or within the following 4 days.
            if datetime.timedelta(0) <= delta < datetime.timedelta(4):
                week_events.append(event)

        return week_events

    @classmethod
    def getEventsWithinADay(self):
        """Return the subset of this week's events that are within a day of now."""
        return [event for event in self.getWeekEvents() if event.within_a_day]

    @classmethod
    def getShortName(self, name_str):
        match = re.match(r'(MAR |PNW )?(FIRST Robotics|FRC)?(.*)(FIRST Robotics|FRC)?(District|Regional|Region|State|Tournament|FRC|Field)( Competition| Event| Championship)?', name_str)
        if match:
            short = match.group(3)
            match = re.match(r'(.*)(FIRST Robotics|FRC)', short)
            if match:
                return match.group(1).strip()
            else:
                return short.strip()

        return name_str.strip()

    @classmethod
    def get_timezone_id(cls, event_dict):
        """
        Look up a timezone for an event via the Google Maps web APIs:
        first geocode the event's free-form 'location' string to lat/lng,
        then query the Time Zone API at those coordinates.

        Returns None on any failure: missing location, urlfetch error,
        non-200 response, or an empty geocode result.

        NOTE(review): this chunk appears truncated -- the timezone response
        is fetched but never parsed or returned here; confirm the remainder
        against the full file.
        """
        # Without a location string there is nothing to geocode.
        if event_dict.get('location', None) is None:
            logging.warning('Could not get timezone for event {}{} with no location!'.format(event_dict['year'], event_dict['event_short']))
            return None

        # geocode request: resolve the location string to lat/lng
        geocode_params = urllib.urlencode({
            'address': event_dict['location'],
            'sensor': 'false',
        })
        geocode_url = 'https://maps.googleapis.com/maps/api/geocode/json?%s' % geocode_params
        try:
            geocode_result = urlfetch.fetch(geocode_url)
        except Exception, e:
            logging.warning('urlfetch for geocode request failed: {}'.format(geocode_url))
            logging.info(e)
            return None
        if geocode_result.status_code != 200:
            logging.warning('Geocoding for event {}{} failed with url {}'.format(event_dict['year'], event_dict['event_short'], geocode_url))
            return None
        geocode_dict = json.loads(geocode_result.content)
        if not geocode_dict['results']:
            logging.warning('No geocode results for event location: {}'.format(event_dict['location']))
            return None
        # Use the first (best) geocode candidate.
        lat = geocode_dict['results'][0]['geometry']['location']['lat']
        lng = geocode_dict['results'][0]['geometry']['location']['lng']

        # timezone request: look up the timezone id at those coordinates
        tz_params = urllib.urlencode({
            'location': '%s,%s' % (lat, lng),
            'timestamp': 0,  # we only care about timeZoneId, which doesn't depend on timestamp
            'sensor': 'false',
        })
        tz_url = 'https://maps.googleapis.com/maps/api/timezone/json?%s' % tz_params
        try:
            tz_result = urlfetch.fetch(tz_url)
        except Exception, e:
            logging.warning('urlfetch for timezone request failed: {}'.format(tz_url))
            logging.info(e)
            return None
Beispiel #46
0
def main():
    """
    Ansible module entry point: ensure a Dimension Data CaaS firewall rule
    is present or absent.

    Reads module params (credentials, rule name, action, protocol, ...),
    resolves networkDomainName to an id when needed, then creates, deletes,
    or toggles the 'enabled' flag of the matching rule. Exits via
    module.exit_json with 'changed' and the resulting rule list.
    """
    module = AnsibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            caas_credentials=dict(type='dict', required=True, no_log=True),
            state=dict(default='present', choices=['present', 'absent']),
            wait=dict(type='bool', default=True, choices=[True, False]),
            name=dict(required=True),
            action=dict(default='ACCEPT_DECISIVELY', choices=['ACCEPT_DECISIVELY', 'DROP']),
            ipVersion=dict(default='IPV4', choices=['IPV4', 'IPV6']),
            protocol=dict(default='TCP', choices=['IP', 'ICMP', 'TCP', 'UDP']),
            networkDomainId=dict(default=None),
            networkDomainName=dict(default=None),
            source=dict(type='dict'),
            destination=dict(type='dict'),
            enabled=dict(type='bool', default=True, choices=[True, False]),
            placement=dict(type='dict'),
        )
    )
    if not IMPORT_STATUS:
        module.fail_json(msg='missing dependencies for this module')
    has_changed = False

    # Check authentication and resolve the organization id
    caas_credentials = module.params['caas_credentials']
    module.params['datacenterId'] = caas_credentials['datacenter']

    wait = module.params['wait']
    state = module.params['state']

    orgId = _getOrgId(module, caas_credentials)

    # Resolve networkDomainName -> networkDomainId when only the name was given
    if module.params['networkDomainId'] is None and module.params['networkDomainName'] is not None:
        f = {'name': module.params['networkDomainName'], 'datacenterId': module.params['datacenterId']}
        uri = '/caas/2.3/' + orgId + '/network/networkDomain?' + urllib.urlencode(f)
        result = caasAPI(module, caas_credentials, uri, '')
        if result['totalCount'] == 1:
            module.params['networkDomainId'] = result['networkDomain'][0]['id']

    firewallList = _listFirewallRule(module, caas_credentials, orgId, True)

    # ABSENT: delete the rule when exactly one match exists
    if state == 'absent' and firewallList['totalCount'] == 1:
        uri = '/caas/2.3/' + orgId + '/network/deleteFirewallRule'
        data = json.dumps({'id': firewallList['firewallRule'][0]['id']})
        if module.check_mode:
            has_changed = True
        else:
            caasAPI(module, caas_credentials, uri, data)
            has_changed = True

    # PRESENT: create the rule when missing, or toggle 'enabled' when it drifted
    if state == "present":
        if firewallList['totalCount'] < 1:
            uri = '/caas/2.3/' + orgId + '/network/createFirewallRule'
            # The create payload is the module params verbatim for these keys.
            _data = {}
            for key in ('name', 'action', 'ipVersion', 'protocol', 'networkDomainId',
                        'source', 'destination', 'enabled', 'placement'):
                _data[key] = module.params[key]
            data = json.dumps(_data)
            if module.check_mode:
                has_changed = True
            else:
                caasAPI(module, caas_credentials, uri, data)
                has_changed = True
        elif (firewallList['totalCount'] == 1 and
              firewallList['firewallRule'][0]['enabled'] != module.params['enabled']):
            uri = '/caas/2.3/' + orgId + '/network/editFirewallRule'
            data = json.dumps({'id': firewallList['firewallRule'][0]['id'],
                               'enabled': module.params['enabled']})
            if module.check_mode:
                has_changed = True
            else:
                caasAPI(module, caas_credentials, uri, data)
                has_changed = True

    # Re-list so the caller sees the post-change state (honors 'wait').
    firewallRuleList = _listFirewallRule(module, caas_credentials, orgId, wait)
    module.exit_json(changed=has_changed, firewallRules=firewallRuleList)
  def test_received_counts(self):
    """EHR_RECEIVED counts fully-consented participants with EHR data, and
    the per-organization totals always sum to the top-line metric."""

    def assert_received(expected_count, **query):
      # Fetch the metrics (optionally org-filtered) and check both the
      # top-line count and that it equals the sum of the org breakdown.
      if query:
        response = self.send_get(
          'MetricsEHR', query_string=urllib.urlencode(query))
      else:
        response = self.send_get('MetricsEHR')
      self.assertEqual(response['metrics']['EHR_RECEIVED'], expected_count)
      self.assertEqual(
        response['metrics']['EHR_RECEIVED'],
        sum([o['total_ehr_data_received']
             for o in response['organization_metrics'].values()])
      )

    # Calendar rows covering the test window
    for day in iter_dates(
      datetime.date(2017, 12, 30),
      datetime.date(2018, 2, 1)
    ):
      CalendarDao().insert(Calendar(day=day))

    # noinspection PyArgumentList
    participant_1 = Participant(participantId=1, biobankId=4)
    summary_1 = self._make_participant(
      participant_1, 'Alice', 'Aardvark', self.hpo_foo, self.org_foo_a,
      time_int=datetime.datetime(2018, 1, 2),
      time_mem=datetime.datetime(2018, 1, 3),
      time_study=datetime.datetime(2018, 1, 3),
      time_fp=datetime.datetime(2018, 1, 4)
    )
    self._update_ehr(summary_1, update_time=datetime.datetime(2018, 1, 5))
    assert_received(1)

    # noinspection PyArgumentList
    participant_2 = Participant(participantId=2, biobankId=5)
    summary_2 = self._make_participant(
      participant_2, 'Bo', 'Badger', self.hpo_bar, self.org_bar_a,
      time_int=datetime.datetime(2018, 1, 3),
      time_mem=datetime.datetime(2018, 1, 4),
      time_study=datetime.datetime(2018, 1, 4),
      time_fp=datetime.datetime(2018, 1, 5)
    )
    self._update_ehr(summary_2, update_time=datetime.datetime(2018, 1, 6))
    assert_received(2)

    # A participant with EHR data but no full consent must not be counted.
    participant_3 = Participant(participantId=3, biobankId=6)
    summary_3 = self._make_participant(
      participant_3, 'Bo', 'Badger', self.hpo_bar, self.org_bar_a,
      time_int=datetime.datetime(2018, 1, 3),
      time_mem=datetime.datetime(2018, 1, 4)
    )
    self._update_ehr(summary_3, update_time=datetime.datetime(2018, 1, 6))
    assert_received(2)

    # Filtering by organization narrows the count accordingly.
    assert_received(1, organization='FOO_A')
Beispiel #48
0
    def main(self, env):

        if (env == 'urlresolver'):
            addon('script.module.urlresolver').openSettings()
            return

        elif (env == 'metahandler'):
            addon('script.module.metahandler').openSettings()
            return

        elif (env == 'changelog_old'):
            try:
                sUrl = 'https://raw.githubusercontent.com/zombiB/zombi-addons/master/plugin.video.matrix/changelog.txt'
                oRequest =  urllib2.Request(sUrl)
                oResponse = urllib2.urlopen(oRequest)
                sContent = oResponse.read()
                self.TextBoxes('matrix Changelog', sContent)
            except:
                self.DIALOG.VSerror("%s, %s" % (self.ADDON.VSlang(30205), sUrl))
            return

        elif (env == 'changelog'):

            class XMLDialog(xbmcgui.WindowXMLDialog):

                def __init__(self, *args, **kwargs):
                    xbmcgui.WindowXMLDialog.__init__(self)
                    pass

                def onInit(self):

                    self.container = self.getControl(6)
                    self.button = self.getControl(5)
                    self.getControl(3).setVisible(False)
                    self.getControl(1).setLabel('ChangeLog')
                    self.button.setLabel('OK')

                    sUrl = 'https://api.github.com/repos/zombiB/zombi-addons/commits'
                    oRequest =  urllib2.Request(sUrl)
                    oResponse = urllib2.urlopen(oRequest)
                    sContent = oResponse.read()
                    result = json.loads(sContent)
                    listitems = []

                    for item in result:
                        #autor
                        icon = item['author']['avatar_url']
                        login = item['author']['login']
                        #message
                        try:
                            desc = item['commit']['message'].encode("utf-8")
                        except:
                            desc = 'None'

                        listitem = xbmcgui.ListItem(label = login, label2 = desc)
                        listitem.setArt({'icon': icon, 'thumb': icon})

                        listitems.append(listitem)
                    self.container.addItems(listitems)


                    self.setFocus(self.container)

                def onClick(self, controlId):
                    self.close()
                    return

                def onFocus(self, controlId):
                    self.controlId = controlId

                def _close_dialog( self ):
                    self.close()

            #path = cConfig().getAddonPath()
            path = "special://home/addons/plugin.video.matrix"
            wd = XMLDialog('DialogSelect.xml', path, "Default")
            wd.doModal()
            del wd
            return

        elif (env == 'soutient'):
            try:
                sUrl = 'https://raw.githubusercontent.com/zombiB/zombi-addons/master/plugin.video.matrix/soutient.txt'
                oRequest =  urllib2.Request(sUrl)
                oResponse = urllib2.urlopen(oRequest)
                sContent = oResponse.read()
                self.TextBoxes('matrix Soutient', sContent)
            except:
                self.DIALOG.VSerror("%s, %s" % (self.ADDON.VSlang(30205), sUrl))
            return

        elif (env == 'addon'):
            if self.DIALOG.VSyesno(self.ADDON.VSlang(30456)):
                #cached_Cache = cConfig().getFileCache()
                #cached_Cache = xbmc.translatePath(cached_Cache).decode("utf-8")
                cached_Cache = "special://home/userdata/addon_data/plugin.video.matrix/video_cache.db"
                #self.ClearDir2(cached_Cache, True)
                try:
                    xbmcvfs.delete(cached_Cache)
                    self.DIALOG.VSinfo('Clear Addon Cache, Successful[CR](Important relancer matrix)')
                except:
                    self.DIALOG.VSerror('Clear Addon Cache, Error')

            return

        elif (env == 'clean'):
            liste = ['Historiques', 'Lecture en cours', 'Marqués vues', 'Marque-Pages', 'Téléchargements']
            ret = self.DIALOG.select('BDD à supprimer', liste)
            #cached_DB = cConfig().getFileDB()
            cached_DB = "special://home/userdata/addon_data/plugin.video.matrix/matrix.db"
            #important seul xbmcvfs peux lire le special
            cached_DB = xbmc.translatePath(cached_DB).decode("utf-8")

            sql_drop = ""

            if ret > -1:

                if ret == 0:
                    sql_drop = "DROP TABLE history"
                elif ret == 1:
                    sql_drop = "DROP TABLE resume"
                elif ret == 2:
                    sql_drop = "DROP TABLE watched"
                elif ret == 3:
                    sql_drop = "DROP TABLE favorite"
                elif ret == 4:
                    sql_drop = "DROP TABLE download"

                try:
                    db = sqlite.connect(cached_DB)
                    dbcur = db.cursor()
                    dbcur.execute(sql_drop)
                    db.commit()
                    dbcur.close()
                    db.close()
                    self.DIALOG.VSok("Suppression BDD, Successful[CR](Important relancer matrix)")
                except:
                    self.DIALOG.VSerror("Suppression BDD, Error")

            return

        elif (env == 'xbmc'):
            if self.DIALOG.VSyesno(self.ADDON.VSlang(30456)):
                #temp = xbmc.translatePath('special://temp/').decode("utf-8")
                path = "special://temp/"
                #self.ClearDir(temp,True)
                try:
                    xbmcvfs.rmdir(path, True)
                    self.DIALOG.VSok('Clear Temp Cache, Successful[CR](Important relancer Kodi)')
                except:
                    self.DIALOG.VSerror('Clear Temp Cache, Error')
            return

        elif (env == 'fi'):
            if self.DIALOG.VSyesno(self.ADDON.VSlang(30456)):
                #path = xbmc.translatePath('special://temp/').decode("utf-8")
                path = "special://temp/archive_cache/"
                try:
                    xbmcvfs.rmdir(path, True)
                    self.DIALOG.VSok('Clear Archive_cache Cache, Successful[CR](Important relancer Kodi)')
                except:
                    self.DIALOG.VSerror('Clear Archive_cache Cache, Error')
                # filenames = next(os.walk(path))[2]
                # for i in filenames:
                #     if ".fi" in i:
                #         os.remove(os.path.join(path, i))
            return

        elif (env == 'uplog'):
            if self.DIALOG.VSyesno(self.ADDON.VSlang(30456)):
                #path = xbmc.translatePath('special://logpath/').decode("utf-8")
                path = "special://logpath/kodi.log"
                UA = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0'
                headers = {'User-Agent': UA}
                #filenames = next(os.walk(path))[2]
                #for i in filenames:
                if xbmcvfs.exists(path):
                    post_data = {}
                    cUrl = 'http://slexy.org/index.php/submit'
                    #logop = open(path + i, 'rb')
                    logop = xbmcvfs.File(path, 'rb')
                    result = logop.read()
                    logop.close()
                    post_data['raw_paste'] = result
                    post_data['author'] = 'kodi.log'
                    post_data['language'] = 'text'
                    post_data['permissions'] = 1 #private
                    post_data['expire'] = 259200 #3j
                    post_data['submit'] = 'Submit+Paste'
                    request = urllib2.Request(cUrl, urllib.urlencode(post_data), headers)
                    reponse = urllib2.urlopen(request)
                    code = reponse.geturl().replace('http://slexy.org/view/', '')
                    reponse.close()
                    self.ADDON.setSetting('service_log', code)
                    self.DIALOG.VSok('Ce code doit être transmis lorsque vous ouvrez une issue veuillez le noter:' + '  ' + code)
            return

        elif (env == 'search'):

            from resources.lib.handler.pluginHandler import cPluginHandler
            valid = '[COLOR green][x][/COLOR]'

            class XMLDialog(xbmcgui.WindowXMLDialog):

                ADDON = addon()

                def __init__(self, *args, **kwargs):
                    xbmcgui.WindowXMLDialog.__init__( self )
                    pass

                def onInit(self):

                    self.container = self.getControl(6)
                    self.button = self.getControl(5)
                    self.getControl(3).setVisible(False)
                    self.getControl(1).setLabel(self.ADDON.VSlang(30094))
                    self.button.setLabel('OK')
                    listitems = []
                    oPluginHandler = cPluginHandler()
                    aPlugins = oPluginHandler.getAllPlugins()

                    for aPlugin in aPlugins:
                        #teste si deja dans le dsip
                        sPluginSettingsName = 'plugin_' + aPlugin[1]
                        bPlugin = self.ADDON.getSetting(sPluginSettingsName)

                        #icon = os.path.join(unicode(cConfig().getRootArt(), 'utf-8'), 'sites', aPlugin[1] + '.png')
                        icon = "special://home/addons/plugin.video.matrix/resources/art/sites/%s.png" % aPlugin[1]
                        stitle = aPlugin[0].replace('[COLOR violet]', '').replace('[COLOR orange]', '').replace('[/COLOR]', '').replace('[COLOR dodgerblue]', '').replace('[COLOR coral]', '')
                        if (bPlugin == 'true'):
                            stitle = ('%s %s') % (stitle, valid)
                        listitem = xbmcgui.ListItem(label = stitle, label2 = aPlugin[2])
                        listitem.setArt({'icon': icon, 'thumb': icon})
                        listitem.setProperty('Addon.Summary', aPlugin[2])
                        listitem.setProperty('sitename', aPlugin[1])
                        if (bPlugin == 'true'):
                            listitem.select(True)

                        listitems.append(listitem)
                    self.container.addItems(listitems)


                    self.setFocus(self.container)

                def onClick(self, controlId):
                    """Dispatch clicks on the dialog's controls."""
                    # Control 5 is the plain OK/close button.
                    if controlId == 5:
                        self.close()
                        return
                    # Controls 99 and 7 behave identically: grab (and drop) the
                    # current window reference, then dismiss the dialog.
                    if controlId in (99, 7):
                        window = xbmcgui.Window(xbmcgui.getCurrentWindowId())
                        del window
                        self.close()
                        return
                    # Control 6 is the plugin list: toggle the selection state of
                    # the highlighted entry and persist it to the addon settings.
                    if controlId == 6:
                        item = self.container.getSelectedItem()
                        setting_name = 'plugin_%s' % item.getProperty('sitename')
                        if item.isSelected():
                            item.setLabel(item.getLabel().replace(valid, ''))
                            item.select(False)
                            self.ADDON.setSetting(setting_name, 'false')
                        else:
                            item.setLabel('%s %s' % (item.getLabel(), valid))
                            item.select(True)
                            self.ADDON.setSetting(setting_name, 'true')
                        return

                def onFocus(self, controlId):
                    # Remember which control currently has focus so other
                    # handlers can inspect it.
                    self.controlId = controlId

                def _close_dialog( self ):
                    # Small helper used as a callback target to dismiss the dialog.
                    self.close()

                # def onAction( self, action ):
                    # if action.getId() in ( 9, 10, 92, 216, 247, 257, 275, 61467, 61448, ):
                        # self.close()

            #path = cConfig().getAddonPath()
            path = "special://home/addons/plugin.video.matrix"
            wd = XMLDialog('DialogSelect.xml', path, "Default")
            wd.doModal()
            del wd
            return

        elif (env == 'thumb'):

            if self.DIALOG.VSyesno('Êtes-vous sûr? Ceci effacera toutes les thumbnails'):

                text = False
                #path = xbmc.translatePath('special://userdata/Thumbnails/').decode("utf-8")
                path = "special://userdata/Thumbnails/"
                path_DB = "special://userdata/Database"
                try:
                    xbmcvfs.rmdir(path, True)
                    text = 'Clear Thumbnail Folder, Successful[CR]'
                except:
                    text = 'Clear Thumbnail Folder, Error[CR]'
                #for i in os.listdir(path):
                    # folders = os.path.join(path, i).encode('utf-8')
                    # if os.path.isdir(folders):
                    #     p = next(os.walk(folders))[2]
                    #     for x in p:
                    #         os.remove(os.path.join(folders, x).encode('utf-8'))

                #filenames = next(os.walk(path2))[2]
                folder, items = xbmcvfs.listdir(path_DB)
                items.sort()
                for sItemName in items:
                    if "extures" in sItemName:
                            cached_Cache = "/".join([path_DB, sItemName])
                            try:
                                xbmcvfs.delete(cached_Cache)
                                text += 'Clear Thumbnail DB, Successful[CR]'
                            except:
                                text += 'Clear Thumbnail DB, Error[CR]'

                if text:
                    text = "%s (Important relancer Kodi)" % text
                    self.DIALOG.VSok(text)
                # for x in filenames:
                #     if "exture" in x:
                #         con = sqlite.connect(os.path.join(path2, x).encode('utf-8'))
                #         cursor = con.cursor()
                #         cursor.execute("DELETE FROM texture")
                #         con.commit()
                #         cursor.close()
                #         con.close()
            return

        elif (env == 'sauv'):
            #dialog.select('Choose a playlist', ['Playlist #1', 'Playlist #2, 'Playlist #3'])
            select = self.DIALOG.VSselect(['Import', 'Export'])
            DB = "special://home/userdata/addon_data/plugin.video.matrix/matrix.db"
            if select >= 0:
                #new = self.DIALOG.browse(3, 'matrix', 'files')
                new = self.DIALOG.browse(3, 'matrix', "files")
                if new:
                    try:
                        if select == 0:
                            xbmcvfs.delete(DB)
                            #copy(source, destination)--copy file to destination, returns true/false.
                            xbmcvfs.copy(new + 'matrix.db', DB)
                        elif select == 1:
                            #copy(source, destination)--copy file to destination, returns true/false.
                            xbmcvfs.copy(DB, new + 'matrix.db')
                        self.DIALOG.VSinfo('Import/Export DB, Successful')
                    except:
                        self.DIALOG.VSerror('Import/Export DB, Error')

                return

        else:
            return

        return
Beispiel #49
0
 def get_success_url(self):
     """Return the org signup URL with the submitted email carried as a query arg."""
     query = urlencode({'email': self.form.cleaned_data['email']})
     return '%s?%s' % (reverse('orgs.org_signup'), query)
  def test_cutoff_date_filtering(self):
    """Verify MetricsEHR/Organizations counters honor the `end_date` cutoff.

    Participants reach each milestone (interest, study consent, EHR consent,
    full participation) one day apart, so stepping `end_date` from 2018-01-01
    to 2018-01-06 should increment the corresponding totals one at a time.
    """
    # Participant 1 (org foo_a): milestones on Jan 1-4.
    # noinspection PyArgumentList
    participant_1 = Participant(participantId=1, biobankId=4)
    summary_1 = self._make_participant(
      participant_1, 'A', 'Aardvark', self.hpo_foo, self.org_foo_a,
      time_int=datetime.datetime(2018, 1, 1),
      time_study=datetime.datetime(2018, 1, 2),
      time_mem=datetime.datetime(2018, 1, 3),
      time_fp=datetime.datetime(2018, 1, 4)
    )

    # Participant 2 (org foo_a): milestones on Jan 2-5, one day behind participant 1.
    # noinspection PyArgumentList
    participant_2 = Participant(participantId=2, biobankId=5)
    summary_2 = self._make_participant(
      participant_2, 'B', 'Badger', self.hpo_foo, self.org_foo_a,
      time_int=datetime.datetime(2018, 1, 2),
      time_study=datetime.datetime(2018, 1, 3),
      time_mem=datetime.datetime(2018, 1, 4),
      time_fp=datetime.datetime(2018, 1, 5)
    )

    # Participant 3 belongs to a different org (bar_a) and should not affect
    # foo_a's counts.
    # noinspection PyArgumentList
    participant_3 = Participant(participantId=3, biobankId=6)
    summary_3 = self._make_participant(
      participant_3, 'C', 'Chicken', self.hpo_bar, self.org_bar_a,
      time_int=datetime.datetime(2018, 1, 2),
      time_study=datetime.datetime(2018, 1, 3),
      time_mem=datetime.datetime(2018, 1, 4),
      time_fp=datetime.datetime(2018, 1, 5)
    )

    # EHR receipt: participant 1 on Jan 5; participants 2 and 3 on Jan 6.
    for summary in [summary_1]:
      self._update_ehr(summary, update_time=datetime.datetime(2018, 1, 5))

    for summary in [summary_2, summary_3]:
      self._update_ehr(summary, update_time=datetime.datetime(2018, 1, 6))

    # Begin testing
    # end_date 2018-01-01: only participant 1's initial interest has occurred.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-01',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 1,
        u'total_primary_consented': 0,
        u'total_ehr_consented': 0,
        u'total_core_participants': 0,
        u'total_ehr_data_received': 0,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )
    self.assertEqual(
      response[str(self.org_bar_a.externalId)],
      {
        u'organization_id': self.org_bar_a.externalId,
        u'organization_name': unicode(self.org_bar_a.displayName),
        u'total_participants': 0,
        u'total_primary_consented': 0,
        u'total_ehr_consented': 0,
        u'total_core_participants': 0,
        u'total_ehr_data_received': 0,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )

    # end_date 2018-01-02: both foo_a participants interested; participant 1
    # has primary consent.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-02',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 2,
        u'total_primary_consented': 1,
        u'total_ehr_consented': 0,
        u'total_core_participants': 0,
        u'total_ehr_data_received': 0,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )

    # end_date 2018-01-03: both primary-consented; participant 1 EHR-consented.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-03',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 2,
        u'total_primary_consented': 2,
        u'total_ehr_consented': 1,
        u'total_core_participants': 0,
        u'total_ehr_data_received': 0,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )

    # end_date 2018-01-04: both EHR-consented; participant 1 is core.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-04',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 2,
        u'total_primary_consented': 2,
        u'total_ehr_consented': 2,
        u'total_core_participants': 1,
        u'total_ehr_data_received': 0,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )

    # end_date 2018-01-05: both core; participant 1's EHR data received.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-05',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 2,
        u'total_primary_consented': 2,
        u'total_ehr_consented': 2,
        u'total_core_participants': 2,
        u'total_ehr_data_received': 1,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )

    # end_date 2018-01-06: everything for foo_a is counted.
    response = self.send_get('MetricsEHR/Organizations', query_string=urllib.urlencode({
      'end_date': '2018-01-06',
      'interval': 'day'
    }))
    self.assertEqual(
      response[str(self.org_foo_a.externalId)],
      {
        u'organization_id': self.org_foo_a.externalId,
        u'organization_name': unicode(self.org_foo_a.displayName),
        u'total_participants': 2,
        u'total_primary_consented': 2,
        u'total_ehr_consented': 2,
        u'total_core_participants': 2,
        u'total_ehr_data_received': 2,
        u'last_ehr_submission_date': u'2018-01-06',
      }
    )
Beispiel #51
0
def external_login_confirm_email_get(auth, uid, token):
    """
    View for email confirmation links when user first login through external identity provider.
    HTTP Method: GET

    When users click the confirm link, they are expected not to be logged in. If not, they will be logged out first and
    redirected back to this view. After OSF verifies the link and performs all actions, they will be automatically
    logged in through CAS and redirected back to this view again being authenticated.

    :param auth: the auth context
    :param uid: the user's primary key
    :param token: the verification token
    """

    user = User.load(uid)
    if not user:
        raise HTTPError(http.BAD_REQUEST)

    # 'destination' is required; it drives the post-login redirect below.
    destination = request.args.get('destination')
    if not destination:
        raise HTTPError(http.BAD_REQUEST)

    # if user is already logged in
    if auth and auth.user:
        # if it is a wrong user
        if auth.user._id != user._id:
            return auth_logout(redirect_url=request.url)
        # if it is the expected user
        new = request.args.get('new', None)
        if destination in campaigns.get_campaigns():
            return redirect(campaigns.campaign_url_for(destination))
        if new:
            status.push_status_message(language.WELCOME_MESSAGE,
                                       kind='default',
                                       jumbotron=True,
                                       trust=True)
        return redirect(web_url_for('dashboard'))

    # token is invalid
    if token not in user.email_verifications:
        raise HTTPError(http.BAD_REQUEST)
    verification = user.email_verifications[token]
    email = verification['email']
    # NOTE: .keys()[0] relies on Python 2 dict views; each verification record
    # carries exactly one provider with exactly one provider id.
    provider = verification['external_identity'].keys()[0]
    provider_id = verification['external_identity'][provider].keys()[0]
    # wrong provider
    if provider not in user.external_identity:
        raise HTTPError(http.BAD_REQUEST)
    external_status = user.external_identity[provider][provider_id]

    try:
        ensure_external_identity_uniqueness(provider, provider_id, user)
    except ValidationError as e:
        raise HTTPError(http.FORBIDDEN, e.message)

    if not user.is_registered:
        user.register(email)

    if email.lower() not in user.emails:
        user.emails.append(email.lower())

    # Mark the identity verified, consume the one-time token, and mint a fresh
    # verification key for the CAS login redirect below. Order matters: the
    # token must be deleted before save so it cannot be replayed.
    user.date_last_logged_in = timezone.now()
    user.external_identity[provider][provider_id] = 'VERIFIED'
    user.social[provider.lower()] = provider_id
    del user.email_verifications[token]
    user.verification_key = generate_verification_key()
    user.save()

    # request.url already contains a query string (the required 'destination'
    # arg), so appending with '&' below is safe.
    service_url = request.url

    if external_status == 'CREATE':
        # Brand-new account: send the welcome email and tag the redirect with
        # new=true so the dashboard shows the welcome message on return.
        mails.send_mail(to_addr=user.username,
                        mail=mails.WELCOME,
                        mimetype='html',
                        user=user)
        service_url += '&{}'.format(urllib.urlencode({'new': 'true'}))
    elif external_status == 'LINK':
        mails.send_mail(
            user=user,
            to_addr=user.username,
            mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS,
            external_id_provider=provider,
        )

    # redirect to CAS and authenticate the user with the verification key
    return redirect(
        cas.get_login_url(service_url,
                          username=user.username,
                          verification_key=user.verification_key))
                    header = {'Content-type': 'Content-Type: application/json'}
                    r = requests.put(final_url, verify=x509main.CERT_FILE, cert=(x509main.CLIENT_CERT_PEM, x509main.CLIENT_CERT_KEY), data=data, headers=header)
                elif verb == 'DELETE':
                    header = {'Content-type': 'Content-Type: application/json'}
                    r = requests.delete(final_url, verify=x509main.CERT_FILE, cert=(x509main.CLIENT_CERT_PEM, x509main.CLIENT_CERT_KEY), headers=header)
                return r.status_code, r.text
            except Exception, ex:
                log.info ("into exception form validate_ssl_login with client cert")
                log.info (" Exception is {0}".format(ex))
                return 'error','error'
        else:
            try:
                r = requests.get("https://" + str(self.host.ip) + ":18091", verify=x509main.CERT_FILE)
                if r.status_code == 200:
                    header = {'Content-type': 'application/x-www-form-urlencoded'}
                    params = urllib.urlencode({'user':'******'.format(username), 'password':'******'.format(password)})               
                    r = requests.post("https://" + str(self.host.ip) + ":18091/uilogin", data=params, headers=header, verify=x509main.CERT_FILE)
                    return r.status_code
            except Exception, ex:
                log.info ("into exception form validate_ssl_login")
                log.info (" Exception is {0}".format(ex))
                return 'error','error'

    '''
    Call in curl requests to execute rest api's
    1. check for the verb that is GET or post or delete and decide which header to use
    2. check if the request is a simple curl to execute a rest api, no cert and no client auth
    3. Check if client cert is going to be used, in that case pass in the root cert, client key and cert
    4. Check the request is not for client cert, then pass in just the root cert
    5. Form the request, adding the url and headers
    6. Add any data if there is any
def testRewordList(srwid):
    theMax = 0
    page = 1
    thecount = 1
    rewordId = 0
    pageIndex = 1
    while True:
        params = urllib.urlencode({
            'token': 'u~p36u0UjaIvJwxYJpp1wkGdbdeX7LbL',
            'filter': 'all',
            'version': '6.6.0.1',
            'srwid': srwid,
            'pageIndex': pageIndex,
            'pageSize': 10
        })
        dat = urllib.urlopen(
            "http://api.miaopai.com/m/getRewardVideos.json?%s" % params)
        jdat = json.loads(dat.read())
        videos = len(jdat['result']['list'])
        if (videos == 0):
            break
        else:
            #print "continue"
            it = jdat['result']['listSize']
            for i in range(0, videos):
                theTime = jdat['result']['list'][i]['channel']['ext']['length']
                #print theTime
                if theTime > theMax:
                    theMax = theTime
                if theTime >= 300:
                    print "第 : " + str(pageIndex) + "页"
                    print "时长 : " + str(theTime) + " 秒"
                    print "url : " + jdat['result']['list'][i]['channel'][
                        'stream']['base'] + ".mp4"
                    theTime = jdat['result']['list'][i]['channel']['ext'][
                        'finishTime']
                    theDate = convertToDate(theTime)
                    print "time : " + theDate
                    print "userName : "******"t : " + jdat['result']['list'][i]['channel'][
                            'ext']['t']
                    except:
                        print "t字段有特殊字符"
                    try:
                        print "ft : " + jdat['result']['list'][i]['channel'][
                            'ext']['ft']
                    except:
                        print "ft字段有特殊字符"
                    print "pic : " + jdat['result']['list'][i]['channel'][
                        'pic']['base'] + ".jpg"
                    print "***********************************************"
                    print ""
                else:
                    print str(thecount
                              ) + "  --  this video is less than 300 seconds !"
                thecount = thecount + 1
            pageIndex = pageIndex + 1
        sleep(2)
    print ""
    print ""
    print ""
    print "the max is : "
    print theMax
#Enter location: South Federal University 
#Retrieving http://...
#Retrieved 2101 characters
#Place id ChIJJ8oO7_B_bIcR2AlhC8nKlok 

#I do not guarantee this solution will work for the actual assignment. The assignment varies from time to time.

import urllib
import json
serviceurl = "http://maps.googleapis.com/maps/api/geocode/json?"

while True:
	address = raw_input("Enter location:")
	if len(address) <1: break
	
	url = serviceurl+ urllib.urlencode ({"sensor": "false,", "address":address})
	print "retrieving", url
	uh = urllib.urlopen(url)
	data = uh.read()
	print "retrieved", len(data), "characters"
	try: js=json.loads(str(data))
	except: js= None
	if "status" not in js or js['status'] != "OK":
		print "=== failure to retrieve ==="
		print data
		continue
		
	print json.dumps(js, indent = 4)
	
	lat = js["results"] [0] ["geometry"] ["location"]["lat"]
	lng = js["results"] [0] ["geometry"] ["location"]["lng"]
Beispiel #55
0
  def _request(self, request, auth_required=True):
    """
    Make an HTTP(S) request to an API endpoint based on what's specified in the 
    request object passed

    ## Input

    Required request keys:
      api
        Either REST or SOAP

      call
        Name of the SOAP method or relative path of the REST URL 

    Optional keys:
      query
        Contents of the query string passed as a dict

      data
        Data to post. For SOAP API calls this will be the SOAP envelope. For
        REST API calls this will be a dict converted to JSON automatically 
        by this method

      use_cookie_auth
        Whether or not to use an HTTP Cookie in lieu of a querystring for authorization

    ## Output

    Returns a dict:
      status
        Number HTTP status code returned by the response, if any

      raw
        The raw contents of the response, if any

      data
        A python dict representing the data contained in the response, if any
    """
    # Validate that the minimally required keys were supplied.
    for required_key in [
      'api',
      'call'
      ]:
      # NOTE(review): this condition looks inverted -- it is only True when the
      # key is MISSING, and then request[required_key] raises KeyError. The
      # likely intent is: not (request.has_key(required_key) and request[required_key]).
      if not request.has_key(required_key) and request[required_key]:
        self.log("All requests are required to have a key [{}] with a value".format(required_key), level='critical')
        return None

    # REST calls append the relative path to the REST endpoint; SOAP calls all
    # go to the single SOAP endpoint.
    url = None
    if request['api'] == self.API_TYPE_REST:
      url = "{}/{}".format(self._rest_api_endpoint, request['call'].lstrip('/'))
    else:
      url = self._soap_api_endpoint

    self.log("Making a request to {}".format(url), level='debug')

    # add the authentication parameters
    if auth_required:
      if request['api'] == self.API_TYPE_REST:
        # NOTE(review): assumes 'use_cookie_auth' is always present in the
        # request dict; a missing key raises KeyError here.
        if not request['use_cookie_auth']: # sID is a query string
          if not request['query']: request['query'] = {}
          request['query']['sID'] = self._sessions[self.API_TYPE_REST]
      elif request['api'] == self.API_TYPE_SOAP:
        # sID is part of the data
        if not request['data']: request['data'] = {}
        request['data']['sID'] = self._sessions[self.API_TYPE_SOAP]

    # remove any blank request keys
    for k, v in request.items():
      if not v: request[k] = None

    # prep the query string
    if request.has_key('query') and request['query']:
      # get with query string
      qs = {}
      for k, v in request['query'].items(): # strip out null entries
        if v: qs[k] = v

      url += '?%s' % urllib.urlencode(qs)
      self.log("Added query string. Full URL is now {}".format(url), level='debug')

    self.log("URL to request is: {}".format(url))

    # Prep the SSL context
    ssl_context = ssl.create_default_context()
    if self.ignore_ssl_validation:
      ssl_context.check_hostname = False
      ssl_context.verify_mode = ssl.CERT_NONE
      self.log("SSL certificate validation has been disabled for this call", level='warning')

    # Prep the URL opener
    url_opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ssl_context))

    # Prep the request
    request_type = 'GET'
    headers = {
      'Accept': 'application/json,text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*',
      'Content-Type': 'application/json',
      }

    # authentication calls don't accept the Accept header
    if request['call'].startswith('authentication'): del(headers['Accept'])

    # some rest calls use a cookie to pass the sID
    if request['api'] == self.API_TYPE_REST and request['use_cookie_auth']:
      headers['Cookie'] = 'sID="{}"'.format(self._sessions[self.API_TYPE_REST])

    # plain-text endpoints (version/ping) use different content negotiation
    if request['api'] == self.API_TYPE_REST and request['call'] in [
      'apiVersion',
      'status/manager/ping'
      ]:
      headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*',
        'Content-Type': 'text/plain',
        }

    if request['api'] == self.API_TYPE_SOAP:
      # always a POST
      headers = {
        'SOAPAction': '',
        'content-type': 'application/soap+xml'
        }
      data = self._prep_data_for_soap(request['call'], request['data'])
      url_request = urllib2.Request(url, data=data, headers=headers)
      request_type = 'POST'
      self.log("Making a SOAP request with headers {}".format(headers), level='debug')
      self.log("   and data {}".format(data), level='debug')
    elif request['call'] == 'authentication/logout':
      url_request = urllib2.Request(url, headers=headers)
      setattr(url_request, 'get_method', lambda: 'DELETE') # make this request use the DELETE HTTP verb
      request_type = 'DELETE'
      self.log("Making a REST DELETE request with headers {}".format(headers), level='debug')
    elif request.has_key('data') and request['data']:
      # POST
      url_request = urllib2.Request(url, data=json.dumps(request['data']), headers=headers)
      request_type = 'POST'
      self.log("Making a REST POST request with headers {}".format(headers), level='debug')
      self.log("    and data {}".format(request['data']), level='debug')
    else:
      # GET
      url_request = urllib2.Request(url, headers=headers)
      self.log("Making a REST GET request with headers {}".format(headers), level='debug')

    # Make the request
    response = None
    try:
      response = url_opener.open(url_request)
    except Exception, url_err:
      self.log("Failed to make {} {} call [{}]".format(request['api'].upper(), request_type, request['call'].lstrip('/')), err=url_err)
def testMyPage(suid):
    theMax = 0
    timeflag = 0
    page = 1
    thecount = 1
    while (int(timeflag) != -1):
        #print timeflag
        #params = urllib.urlencode({'token': 'u~p36u0UjaIvJwxYJpp1wkGdbdeX7LbL','suid':'Z1fV~4uV6WRqfs3xndogdA__','version': '6.6.0.1','timeflag':timeflag,'per':20})
        params = urllib.urlencode({
            'token': 'u~p36u0UjaIvJwxYJpp1wkGdbdeX7LbL',
            'suid': suid,
            'version': '6.6.0.1',
            'timeflag': timeflag,
            'per': 20
        })
        dat = urllib.urlopen(
            "http://api.miaopai.com/m/channel_forward_reward.json?%s" % params)
        jdat = json.loads(dat.read())
        timeflagOld = timeflag
        timeflag = jdat['result']['timeflag']
        it = jdat['result']['stream']['cnt']

        it = int(jdat['result']['stream']['cnt'])
        for i in range(0, it):
            judge = jdat['result']['stream']['list'][i].has_key('channel')
            #print judge
            if not judge:
                pass
            else:
                theTime = jdat['result']['stream']['list'][i]['channel'][
                    'ext']['length']
                #print theTime
                if theTime > theMax:
                    theMax = theTime
                if theTime >= 300:
                    print getUserName(suid) + "第 " + str(page) + "页"
                    print 'timeFlag : ' + str(timeflagOld)
                    #print 'timeFlag : ' + str(jdat['result']['timeflag'])
                    print "时长 : " + str(theTime) + " 秒"
                    print "url : " + jdat['result']['stream']['list'][i][
                        'channel']['stream']['base'] + ".mp4"
                    print "time : " + jdat['result']['stream']['list'][i][
                        'channel']['ext']['finishTimeNice']
                    print "t : " + jdat['result']['stream']['list'][i][
                        'channel']['ext']['t']
                    print "ft : " + jdat['result']['stream']['list'][i][
                        'channel']['ext']['ft']
                    print "pic : " + jdat['result']['stream']['list'][i][
                        'channel']['pic']['base'] + ".jpg"
                    print "***********************************************"
                    print ""
                else:
                    print str(thecount
                              ) + "  --  this video is less than 300 seconds !"
                thecount = thecount + 1
        page = page + 1
        sleep(2)
    print ""
    print ""
    print ""
    print "the max is : "
    print theMax
Beispiel #57
0
#return submitdata str
#([(1,2),(2,4),(3,1)]) => '1$2}2$4}3$1'
def gen_post_string(answer):
    """Serialize answer pairs into sojump's submitdata wire format.

    Each (question, choice) pair becomes "question$choice", and pairs are
    joined with "}":  [(1, 2), (2, 4), (3, 1)]  ->  '1$2}2$4}3$1'
    """
    return '}'.join('%s$%s' % (pair[0], pair[1]) for pair in answer)

# Survey page URL and the submission handler endpoint.
jq_base = "http://www.sojump.com/jq/{}.aspx"
uri_base = "http://www.sojump.com/handler/processjq.ashx?{}"

# answer is a list of (question, choice) pairs; choices are generated randomly.
#answer_list = [1,2,3,4,1,2,3,4,1,2]
#answer = zip(range(1,11),answer_list)
answer = zip(range(1,11),[random.randint(1,4) for x in range(11)])

# submitdata travels in the POST body; the remaining survey parameters go in
# the GET query string appended to uri_base (see the two base URLs above).
post_data = urllib.urlencode({'submitdata':gen_post_string(answer)})
get_data = urllib.urlencode(gen_uri_param())

request_url = uri_base.format(get_data)
request = urllib2.Request(request_url,post_data)
result = urllib2.urlopen(request)
print result.read()

# <codecell>
Beispiel #58
0
#!/usr/bin/env python
import urllib
import json as m_json

query = raw_input ( 'Query: ' )
query = urllib.urlencode ( { 'q' : query } )
# rsz=large returns up to 8 results per page; page through with the 'start'
# offset. API: https://developers.google.com/image-search/v1/jsondevguide?csw=1#basic_query

# BUG FIX: the original loop header was C-style ("for i = 1; i < 100; i+8:")
# and not valid Python; use range() stepping by the page size, and feed the
# offset into the 'start' parameter (previously hard-coded to 0).
for start in range(0, 100, 8):
	response = urllib.urlopen ( 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&rsz=large&start=%d&' % start + query).read()
	json = m_json.loads ( response )
	results = json [ 'responseData' ] [ 'results' ]

	for result in results:
	    title = result['title']
	#     # url = result['url']   # was URL in the original and that threw a name error exception
	#     # print ( title + '; ' + url )
	    print ( title )
Beispiel #59
0
#  -   8 = +/-0.512V
#  -  16 = +/-0.256V
# See table 3 in the ADS1015/ADS1115 datasheet for more info on gain.
GAIN = 1

# Main loop.

    # Read all the ADC channel values in a list.
values = [0]*5
for i in range(4):
    # Read the specified ADC channel using the previously set gain value.
    values[i] = adc.read_adc(i, gain=GAIN)
    values[4]= ((values[2]/34.7)/950)*100
        # Note you can also pass in an optional data_rate parameter that controls
        # the ADC conversion time (in samples/second). Each chip has a different
        # set of allowed data rate values, see datasheet Table 9 config register
        # DR bit values.
        #values[i] = adc.read_adc(i, gain=GAIN, data_rate=128)
        # Each value will be a 12 or 16 bit signed integer value depending on the
        # ADC (ADS1015 = 12-bit, ADS1115 = 16-bit).
    # Print the ADC values.
params = urllib.urlencode({'field1': values[4], 'key':'8FPLTV0BKUPES6M1'})
headers = {"Content-type": "application/x-www-form-urlencoded","Accept":"text/plain"}
conn = httplib.HTTPConnection("api.thingspeak.com:80")
conn.request("POST", "/update", params, headers)
response = conn.getresponse()
print response.status, response.reason
data = response.read()
conn.close()
    
print values[4]
Beispiel #60
0
 def get_action_url(self, action, key=None):
     """Build the canonical /announcements URL for `action`, optionally keyed."""
     query = {'action': action}
     if key:
         query['key'] = key
     return self.canonicalize_url(
         '/announcements?' + urllib.urlencode(query))