Example #1
  def get(self,slug):
	global keystring,pagenum,pagesize
	startime = time.time()
	pageurl = 'http://cdn.top100.cn/flashremote/SongBO/GetSearch.as?Keyword='
	p=urllib.unquote(slug).split('/',2)
	k = urllib.unquote(p[0])
	if len(p) == 2:
		page = p[1]
	else:
		page = 1
	url = pageurl + _en(k)+'&Pagesize='+_en(str(pagesize))+'&Pagenum='+_en(str(page))
	param1 = url.split('?',2)
	param2 = (param1[1]+keystring).split('&') 
	param = urllib.quote_plus(param1[1]+keystring)
	url += '&songname='+_en(hashlib.md5(param.upper()).hexdigest());
	content = url_get_contents(url)
	obj = json.read(_de(content))
	content = json.read(obj['content'])
	template_values = {
		'content': content,
		'page': page,
		'executionTime': time.time()-startime,
		'keyword': k,
		}
	render_template(self, 'mp3search.html', template_values)
Example #2
 def GET(self):
     input = web.input()
     web.write("Content-type: text/plain\n\n")
     stacks=json.read(input["stacks"])
     payouts=json.read(input["payouts"])
     icm = ICM(stacks,payouts)
     print json.write(icm.equities)
Example #3
 def __init__(self, url, unique_key='_id', **kwargs):
     """Establish a connection to Algolia using target url
         'APPLICATION_ID:API_KEY:INDEX_NAME'
     """
     application_id, api_key, index = url.split(':')
     self.algolia = algoliasearch.Client(application_id, api_key)
     self.index = self.algolia.initIndex(index)
     self.unique_key = unique_key
     self.last_object_id = None
     self.batch = []
     self.mutex = RLock()
     self.auto_commit = kwargs.pop('auto_commit', True)
     self.run_auto_commit()
     try:
         json = open("algolia_fields_" + index + ".json", 'r')
         self.attributes_filter = decoder.decode(json.read())
         logging.info("Algolia Connector: Start with filter.")
     except IOError:  # No "fields" filter file
         self.attributes_filter = None
         logging.info("Algolia Connector: Start without filter.")
     try:
         json = open("algolia_remap_" + index + ".json", 'r')
         self.attributes_remap = decoder.decode(json.read())
         logging.info("Algolia Connector: Start with remapper.")
     except IOError:  # No "remap" filter file
         self.attributes_remap = None
         logging.info("Algolia Connector: Start without remapper.")
     try:
         f = open("algolia_postproc_" + index + ".py", 'r')
         self.postproc = f.read()
         logging.info("Algolia Connector: Start with post processing.")
     except IOError:  # No "postproc" filter file
         self.postproc = None
         logging.info("Algolia Connector: Start without post processing.")
Example #4
def backup_month(start_tick,stop_tick,yyyymm):
    """备份一个月的短信"""
    conn=sqlite3.connect(DB_FILENAME)
    curr=conn.cursor()
    sql=SQL_GETSMS%{
        'start_tick':start_tick,
        'stop_tick':stop_tick,}
    #print sql
    curr.execute(sql)
    dataset=curr.fetchall()
    savedset=set()
    if os.path.exists(INBOX_FILENAME%{'yyyymm':yyyymm}):
        fr_inbox=open(INBOX_FILENAME%{'yyyymm':yyyymm},'r')
        fr_sent=open(SENT_FILENAME%{'yyyymm':yyyymm},'r')
        for line in fr_inbox.xreadlines():
            msgdict=json.read(line)
            savedset.add(msgdict['msgid'])
        for line in fr_sent.xreadlines():
            msgdict=json.read(line)
            savedset.add(msgdict['msgid'])
        fr_inbox.close()
        fr_sent.close()
    msglist=[]
    fw_inbox=open(INBOX_FILENAME%{'yyyymm':yyyymm},'a+')
    fw_sent=open(SENT_FILENAME%{'yyyymm':yyyymm},'a+')
    #print 'len(dataset)=',len(dataset)
    for (starttime,outgoing,freetext,remoteuid) in dataset:
        #print repr(outgoing),repr(freetext),repr(starttime)
        msgdict={
                'msg':freetext.encode('utf-8'),
                'msgid':'%d-%s-%d'%(starttime,
                    remoteuid.encode('utf-8'),len(freetext)),
                'ts':starttime,
                }
        if msgdict['msgid'] in savedset:
            continue
        if outgoing==1:
            msgdict['tfrom']=LOCAL_PHONENUMBER
            msgdict['tto']=remoteuid.encode('utf-8')
            fw_sent.write(json.write(msgdict)+'\n')
        elif outgoing==0:
            msgdict['tto']=LOCAL_PHONENUMBER
            msgdict['tfrom']=remoteuid.encode('utf-8')
            fw_inbox.write(json.write(msgdict)+'\n')
        else:
            raise ValueError('Unknown outgoing=%d'%outgoing)
        #print msgdict
    # do it
    fw_inbox.close()
    fw_sent.close()
    curr.close()
    conn.close()
    return
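Each saved message becomes one JSON object per line in the inbox or sent file; with made-up values (the phone numbers and timestamp here are invented), a stored line looks roughly like:

{"msg": "hello", "msgid": "1311901200-+15550001111-5", "ts": 1311901200, "tfrom": "+15550001111", "tto": "+15552223333"}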
Example #5
 def run(self, data):
   try:
     obj, i = json.read(data)
     mailbox.send(obj)
   except:
     mailbox.send(data)
   return ''
Example #6
def json_load(text):
    ''' Load JSON data using what ever resources are available. '''
    if OLD_SUGAR_SYSTEM is True:
        listdata = json.read(text)
    else:
        # Remove MAGIC NUMBER, if present, and leading whitespace
        if text[0:2] == MAGICNUMBER:
            clean_text = text[2:].lstrip()
        else:
            clean_text = text.lstrip()
        # Strip out trailing whitespace, nulls, and newlines
        clean_text = clean_text.replace('\12', '')
        clean_text = clean_text.replace('\00', '')
        clean_text = clean_text.rstrip()
        # Look for missing ']'s
        left_count = clean_text.count('[')
        right_count = clean_text.count(']')
        while left_count > right_count:
            clean_text += ']'
            right_count = clean_text.count(']')
        io = StringIO(clean_text)
        try:
            listdata = jload(io)
        except ValueError:
            # Assume that text is ascii list
            listdata = text.split()
            for i, value in enumerate(listdata):
                listdata[i] = convert(value, float)
    # json converts tuples to lists, so we need to convert back,
    return _tuplify(listdata)
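A quick illustration of the bracket-repair branch, assuming OLD_SUGAR_SYSTEM is False and the input has no magic-number prefix (MAGICNUMBER and _tuplify are defined elsewhere in the module):

# '[1, [2, 3]' contains two '[' but only one ']', so one ']' is appended
# before jload() runs; _tuplify then converts the nested lists back to tuples.
print json_load('[1, [2, 3]')   # expected: (1, (2, 3))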
Example #7
    def fromAjax(self, text, languageFrom, languageTo):
        """
        Returns a simple string translating the text from "languageFrom" to
        "LanguageTo" using Google Translate AJAX Service.
        """
        LANG={
            "arabic":"ar", "bulgarian":"bg", "chinese":"zh-CN",
            "croatian":"hr", "czech":"cs", "danish":"da", "dutch":"nl",
            "english":"en", "finnish":"fi", "french":"fr", "german":"de",
            "greek":"el", "hindi":"hi", "italian":"it", "japanese":"ja",
            "korean":"ko", "norwegian":"no", "polish":"pl", "portugese":"pt",
            "romanian":"ro", "russian":"ru", "spanish":"es", "swedish":"sv" }

        base_url='http://ajax.googleapis.com/ajax/services/language/translate?'
        langpair='%s|%s'%(LANG.get(languageFrom.lower(),languageFrom),
                          LANG.get(languageTo.lower(),languageTo))
        params=urllib.urlencode( (('v',1.0),
                           ('q',text.encode('utf-8')),
                           ('langpair',langpair),) )
        url=base_url+params
        content=urllib2.urlopen(url).read()
        try: trans_dict=json.loads(content)
        except AttributeError:
            try: trans_dict=json.load(content)
            except AttributeError: trans_dict=json.read(content)
        return trans_dict['responseData']['translatedText']
Example #8
  def run(self):
    while not self.__stopevent.isSet():
      _data = self.socket.socket.recv(4096)
      if len(_data) == 4096:
        self.socket.socket.setblocking(0)
        while(1):
          try:
            buf = self.socket.socket.recv(4096)
            _data += buf
            if len(buf) == 4096:
              continue
          except socket.error:
            self.socket.socket.setblocking(1)
            break
      print "got new json data: " + _data

      if len(_data) == 0:
        break
      
      js = json.read(_data)
      if "id" in js:
        action = self.socket.getActionForId(js['id'])
      else:
        action = "ANNOUNCEMENTS"

      socketData = BSSocketData()
      socketData.action = action
      socketData.data = js
      if ("id" in js):
        if not self.socket.notifyId(int(js['id']), socketData):
          self.q.put(socketData)
      else:
        self.q.put(socketData)
Example #9
def testOutputDataset(datasetName, requestType):
    url = 'https://cmsweb.cern.ch/phedex/datasvc/json/prod/Subscriptions?dataset=' + datasetName
    result = json.read(urllib2.urlopen(url).read())
    datasets = result['phedex']['dataset']
    if len(datasets) > 0:
        dicts = datasets[0]
        subscriptions = dicts['subscription']
        for subscription in subscriptions:
            if subscription['level'] == 'DATASET' and subscription['custodial'] == 'y':
                if 'MonteCarlo' in requestType:
                    if subscription['custodial'] == 'y':
                        print "This dataset is subscribed : " + datasetName
                        print "Custodial: " + subscription['custodial']
                        request = subscription['request']
                        print "Request page: https://cmsweb.cern.ch/phedex/prod/Request::View?request=" + str(request)
                        return
                else:
                    print "This dataset is subscribed : " + datasetName
                    print "Custodial: " + subscription['custodial']
                    request = subscription['request']
                    print "Request page: https://cmsweb.cern.ch/phedex/prod/Request::View?request=" + str(request)
                    return
            else:
                print "The subscription exists but is not custodial"
                request = subscription['request']
                print "Request page: https://cmsweb.cern.ch/phedex/prod/Request::View?request=" + str(request)
    else:
        print "This dataset wasn't subscribed: " + datasetName
Example #10
    def jsonCmd(self, method, params = {}, retry = 0):
        if self.__nextdownload > datetime.datetime.now():
            diff = self.__nextdownload - datetime.datetime.now()
            secs = diff.microseconds/1000000.0 + diff.seconds + diff.days*24*3600
            time.sleep(secs)

        # We could use system.listMethods to check that cmd["method"]
        # is still recognised by the server?
        cmd = urllib.quote(json.JsonWriter().write({"id":self.__jsonid, "method":method, "params":params}))
        try:
            ret = json.read(urllib2.urlopen(self.__jsonurl, cmd).read())
            self.__nextdownload = datetime.datetime.now() + datetime.timedelta(seconds=self.config.getfloat("DEFAULT", "waitseconds"))
        except urllib2.URLError, e:
            # Retry on the following errors:
            # 104 - Connection reset
            # 110 - Connection timed out
            if e.reason[0] == 104 or e.reason[0] == 110:
                if retry < 5:
                    self.__output("%s - will retry in 5 seconds" % e.reason[1])
                    time.sleep(5)
                    return self.jsonCmd(method, params, retry + 1)
                else:
                    self.__output("%s - giving up" % e.reason[1])
            print >> sys.stderr, ("URL Error %d: %s (%s)" % (e.reason[0], e.reason[1], self.__jsonurl))
            sys.exit(3)
Example #11
def image_list(parent, title, user):
    """Get the image list for a specific user"""
    logger.log( 9, 'image_list(parent=%r, title=%r, user=%r)', parent, title, user)
    items = []
    web = 'http://www.flickr.com/services/rest/?method=flickr.people.getPublicPhotos&user_id=' + user + \
        '&format=json&api_key=' + config.FLICKR_KEY + '&per_page=' + str(config.FLICKR_LIMIT) + \
        '&page=1&extras=original_format'
    url=urllib.urlopen(web)
    flickr=url.read()
    flickr=flickr.replace("jsonFlickrApi(","");
    data = json.read(flickr[:-1])
    for foto in data['photos']['photo']:
        #items.append(ImageItem(y[1],parent,foto["title"]))

        mi = menu.MenuItem(foto["title"], parent.showimage, 0)
        mi.arg = (foto["title"],"http://farm3.static.flickr.com/" + foto["server"] + "/" + \
            foto["id"] + "_" + foto["originalsecret"] +  "_o.jpg",foto["id"])
        imagen = 'http://farm3.static.flickr.com/' + foto['server'] + '/' + foto['id'] + '_' + foto['secret'] + '_m.jpg'
        file = config.FLICKR_DIR + '/' + foto['id'].replace('-','_') + '_t.jpg'
        if not os.path.exists(file):
            box = PopupBox(_('Downloading thumbnail for picture "') + foto['title'] + '"', width=800)
            box.show()
            urllib.urlretrieve(imagen,file)
            box.destroy()
        mi.image = file
        items.append(mi)
    return items
Example #12
def getGlobalTag(url, workflow):
        conn  =  httplib.HTTPSConnection(url, cert_file = os.getenv('X509_USER_PROXY'), key_file = os.getenv('X509_USER_PROXY'))
        r1=conn.request("GET",'/reqmgr/reqMgr/request?requestName='+workflow)
        r2=conn.getresponse()
        request = json.read(r2.read())
        globalTag=request['GlobalTag']
        return globalTag
Example #13
def getCacheID(url, workflow):
        conn  =  httplib.HTTPSConnection(url, cert_file = os.getenv('X509_USER_PROXY'), key_file = os.getenv('X509_USER_PROXY'))
        r1=conn.request("GET",'/reqmgr/reqMgr/request?requestName='+workflow)
        r2=conn.getresponse()
        request = json.read(r2.read())
        cacheID=request['StepOneConfigCacheID']
        return cacheID
Example #14
 def checkArgvs(self):
     """check argvs
     """
     try:
         #{
         #"src":"http://xxxx",
         #"dest_path":"/home/tudou/0/a/b/c.flv",
         #"md5":"21345",
         #"proxy":"1.2.3.4:7080",
         #"threads":5
         #}
         self.job = json.read(self.stdin.readline().strip())
         self.stdout.write("NEED###%s%s"%(self.job, os.linesep))
         self.stdout.flush()
         self.src      =self.job["src"]
         self.storage_root=self.set_storage_root()
         self.dest_path=self.set_dest_path()
         self.www_root =self.set_www_root()
         self.ip       =self.set_ip()
         self.dst      =self.set_dst()
         self.tmp      =self.job["tmp"]
         self.md5      =self.job["md5"]
         if self.job.has_key("proxy"):
             self.proxy=self.job["proxy"]
         if self.job.has_key("thread"):
             self.thread=self.job["thread"] or self.thread
         if self.job.has_key("check_url"):
             self.check_url=self.job["check_url"]
         self.timeout=self.job["timeout"]
         self.outcome=os.path.join(self.tmp, "speeds.txt")
     except Exception, e:
         self.msg="check argument error. %s"%e
         self.stdout.write("RESULT###%s%s"%(json.write({"result":400, "msg":self.msg, "aux":self.get_aux_by_null(), "v":VERSION}), os.linesep))
         self.stdout.flush()
         raise Exception(self.msg) 
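The worker talks a line protocol over stdin/stdout: it reads one JSON job per line, echoes it back prefixed with NEED###, and on failure emits a RESULT### line built with json.write. With hypothetical values (note the code also expects tmp, md5 and timeout keys beyond those shown in the comment above), the exchange looks roughly like:

stdin:  {"src": "http://xxxx", "dest_path": "/home/tudou/0/a/b/c.flv", "md5": "21345", "tmp": "/tmp/job1", "timeout": 600}
stdout: NEED###{...the parsed job echoed back...}
stdout: RESULT###{"result": 400, "msg": "check argument error. ...", "aux": ..., "v": ...}   (error case only)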
Example #15
    def getListings(self, start, end):
        """
        Get all listings from start to end - both must be datetime objects
        and at least start must be a time where the hours are divisible by
        zero, and minutes, seconds and milliseconds set to 0.
        """
        # Set up empty listings
        self._listings = {}
        for id in self._selectedChannels:
            self._listings[id] = []

        # Loop from start to end, adding 4 hours each time
        while start <= end:
            timestamp = start.strftime('%s')
            url = self._listingsurl % timestamp

            data = self._getURL(url)
            object = json.read(data)

            for channel in object['channels']:
                # Only look after selected channels
                channelid = str(channel['id'])
                if self._listings.has_key(channelid):
                    for program in channel['program']:
                        # Only get details for specified channels
                        if self._getDetails.has_key(channelid):
                            program.update(self.getDetails(program['program_id']))
                            pass
                        self._listings[channelid].append(program)

            #start += datetime.timedelta(800)

            start += datetime.timedelta(0, 0, 0, 0, 0, 4)  # 4 hours (positional args: days, secs, us, ms, mins, hours)
Example #16
    def process_response(self, result):
        docs = []
        rootDocs = []
        docsDict = {}
        # Build a dictionary of the annotations
        for doc in result:
            # Hack: replace "[]" with null because json.py does not parse empty lists properly
            jsonStr = unicode(doc.get("jsonString").replace("[]", "null")).encode("utf-8")
            doc = json.read(jsonStr)
            doc["replies"] = []
            docs.append(doc)
            docsDict[doc["uri"]] = doc
            if doc["annotates"]["uri"] == doc["annotates"]["rootUri"]:
                rootDocs.append(doc)

        # Now process the dictionary
        for doc in docs:
            # If we are NOT a top level annotation
            if doc["annotates"]["uri"] != doc["annotates"]["rootUri"]:
                # Find what we are annotating
                try:
                    d = docsDict[doc["annotates"]["uri"]]
                    d["replies"].append(doc)  # Add ourselves to its reply list
                except:
                    # TODO KeyError
                    pass
        return json.write(rootDocs)
Example #17
def fromAjax(text, languageFrom, languageTo):
        """
        Returns a simple string translating the text from "languageFrom" to
        "LanguageTo" using Google Translate AJAX Service.
        """
        LANG = langCode
 
        base_url = 'http://ajax.googleapis.com/ajax/services/language/translate?'
        langpair = '%s|%s'%(LANG.get(languageFrom.lower(),languageFrom),
                          LANG.get(languageTo.lower(),languageTo))
        try:
                params=urllib.urlencode( (('v',1.0),
                                   ('q',text.encode('utf-8')),
                                   ('langpair',langpair),) )
                # print params
        except UnicodeDecodeError:
                pass
        url = base_url+params
        content = urllib2.urlopen(url).read()
        try: trans_dict=json.loads(content)
        except AttributeError:
            try: trans_dict=json.load(content)
            except AttributeError: trans_dict=json.read(content)
        try:
            return trans_dict['responseData']['translatedText']
        except TypeError: pass
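Typical usage is a single call with either full language names or ISO codes; the AJAX translate endpoint has since been retired, so this is illustrative only:

print fromAjax("Good morning", "en", "es")   # e.g. u'Buenos días'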
Example #18
 def testReadWriteCopies(self):
     orig_obj = {'a':' " '}
     json_str = json.write(orig_obj)
     copy_obj = json.read(json_str)
     self.assertEqual(orig_obj, copy_obj)
     self.assertEqual(True, orig_obj == copy_obj)
     self.assertEqual(False, orig_obj is copy_obj)
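For comparison, the same round trip written against the standard-library json module, using the loads/dumps names that the compatibility shims in later examples map read/write onto:

import json

orig_obj = {'a': ' " '}
copy_obj = json.loads(json.dumps(orig_obj))
assert orig_obj == copy_obj and orig_obj is not copy_obj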
Example #19
def parasOperator(game_id,identity,game_name) :
    t_ParasOperatorUrl = 'http://fcd.5173.com/commondata/Category.aspx?type=operator&id='+identity
    t_Content = urllib.urlopen(t_ParasOperatorUrl).read()

    t_Content = DealJsonString(t_Content)

    try :
        t_Data = json.read(t_Content)

        if len(t_Data) == 0 :
            return False;
        else :
            UpdateGameOperator(identity)

        t_OperatorIdBegin = getMaxOperatorId(game_id)

        for t_Operator in t_Data :
            t_OperatorIdBegin += 1
            t_OperatorInfo = CGameOperator()

            t_OperatorInfo.game_id = game_id
            t_OperatorInfo.game_identity = identity
            t_OperatorInfo.game_name = game_name
            t_OperatorInfo.operator_id = t_OperatorIdBegin
            t_OperatorInfo.operator_identity = t_Operator["id"]
            t_OperatorInfo.operator_name = t_Operator["name"].decode('gbk').encode('utf-8')
            InsertGameOperatorInfo(t_OperatorInfo)
    except :
        logger.error("json decode error,url:"+t_ParasOperatorUrl)
        logger.error( sys.exc_info())
Example #20
 def test_write_all_labels(self):
     """dumping json into a file"""
     filename = "test_labels.json"
     # remove the current file
     if os.path.exists(filename):
         os.system("rm " + filename)
     # make up some labels
     labels = []
     # fake label 1
     label = {"line_number": 5, "bank": 0, "label": "SomeLabel", "address": 0x10}
     labels.append(label)
     # fake label 2
     label = {"line_number": 15, "bank": 2, "label": "SomeOtherLabel", "address": 0x9F0A}
     labels.append(label)
     # dump to file
     write_all_labels(labels, filename=filename)
     # open the file and read the contents
     file_handler = open(filename, "r")
     contents = file_handler.read()
     file_handler.close()
     # parse into json
     obj = json.read(contents)
     # begin testing
     self.assertEqual(len(obj), len(labels))
     self.assertEqual(len(obj), 2)
     self.assertEqual(obj, labels)
Example #21
def getCampaign(url, workflow):
    conn = httplib.HTTPSConnection(url, cert_file=os.getenv("X509_USER_PROXY"), key_file=os.getenv("X509_USER_PROXY"))
    r1 = conn.request("GET", "/reqmgr/reqMgr/request?requestName=" + workflow)
    r2 = conn.getresponse()
    request = json.read(r2.read())
    campaign = request["Campaign"]
    return campaign
Example #22
def parseGroup(game_id,game_identity,game_name,operator_id,operator_identity,operator_name,area_id,area_identity,area_name) :
    t_ParasGroupUrl = 'http://fcd.5173.com/commondata/Category.aspx?type=server&id='+area_identity
    t_Content = urllib.urlopen(t_ParasGroupUrl).read()

    t_Content = DealJsonString(t_Content)

    try :
        t_Data = json.read(t_Content)

        t_GroupIdBegin = getMaxGroupId(game_id,area_id)
        for t_Operator in t_Data :
            t_GroupIdBegin += 1
            t_GroupInfo = CGameGroupArea()

            t_GroupInfo.game_id = game_id
            t_GroupInfo.game_identity = game_identity
            t_GroupInfo.game_name = game_name
            t_GroupInfo.operator_id = operator_id
            t_GroupInfo.operator_identity = operator_identity
            t_GroupInfo.operator_name = operator_name
            t_GroupInfo.area_id = area_id
            t_GroupInfo.area_identity = area_identity
            t_GroupInfo.area_name = area_name
            t_GroupInfo.group_id = t_GroupIdBegin
            t_GroupInfo.group_identity = t_Operator["id"]
            t_GroupInfo.group_name = t_Operator["name"].decode('gbk').encode('utf-8')
            InsertGameAreaGroupInfo(t_GroupInfo)
    except :
        logger.error("json decode error,url:"+t_ParasGroupUrl)
        logger.error( sys.exc_info())
Example #23
    def _init_desc(self):
        """
        definition is [mapping, filter, sub, pub ]
        mapping is list of  tuples or dictionaries, [("src-attr","dest-attr")|("src-class.attr","dst-class.attr")]
        filter is list of tuple of tuples, or tuple of dictionaries [("P|S|D|N", "src","dst"]
        sub/pub lists of  policy sets: event, matching, cmd, output/input
        policy sets are lists of polices
        policy is list of actions
        action is tuple of "condition", "action", "action if condition failed"
        condition is one of (("if", "op","arg1","arg2"), (filter, op, arg1), (reduce, )
        action is list of verbs
        verb is one of ("stop","append(noun)","set (noun, noun)")
        
        """
        self.parsed_fmdef=json.read(self.fmdef)
        
        self._validate_map()
        
        self._validate_filter()

        for l in self.sub.keys():
            if self.parsed_fmdef[2].has_key(l):
                self.sub[l]=self.parsed_fmdef[2][l]
        for l in self.pub.keys():
            if self.parsed_fmdef[3].has_key(l):
                self.pub[l]=self.parsed_fmdef[3][l]
        pass
Example #24
 def handleJsonRpc(self):
     # Get parameter values from the "get" query string or "post" args
     fields = cgi.FieldStorage()
     request = fields.getfirst("request")
     try:
         if request == None:
             raise InvalidJsonRpcRequest
         req = json.read(request)
         id = req["id"]
         params = req["params"]
         methodname = req["method"]
     except:
         self.sendResponse(None, None, InvalidJsonRpcRequest("Empty or malformed JSON-RPC request.").__dict__)
         return ()
     try:  # to get a callable obj
         method = self.getMethodByName(methodname)
     except:
         method = None
         self.sendResponse(id, None, MethodNotFound(req["method"]).__dict__)
         return ()
     if method:
         try:
             result = method(*params)
             if id is not None:
                 self.sendResponse(id, result, None)
                 return ()
         except SystemExit:
             pass
         except:  # error inside the callable object
             s = getTracebackStr()
             self.sendResponse(id, None, ApplicationError(s).__dict__)
             return ()
Example #25
    def _q_lookup(self, request, component):
        request = quixote.get_request()
        response = quixote.get_response()

        if component not in self.get_fields:
            raise TraversalError('No meta key "%s"' % component)

        http.json_response()
        
        if request.get_method() in ('POST', 'PUT'):
            if '_json' in request.form:
                data = json.read(request.form['_json'])
                
                # Return a callable thing
                def set_a_meta(request):
                    ret = self.set_meta(component, data)
                    ref = request.get_environ('HTTP_REFERER')
                    if ref is not None:
                        p = self.image.pic()
                        response.redirect(ref + '#pic%d' % p.id)

                    http.json_response()
                    return json.write(ret)

                return set_a_meta
            
            response.set_status(204)     # no content
            return ''
        
        p = self.image.pic()
        return json.write(self.get_fields[component](p))
Example #26
def parseArea(game_id,identity,game_name) :
    t_ParasAreaUrl = 'http://fcd.5173.com/commondata/Category.aspx?type=area&id='+identity

    t_Content = urllib.urlopen(t_ParasAreaUrl).read()

    t_Content = DealJsonString(t_Content)

    try :
        t_Data = json.read(t_Content)

        t_AreaIdBegin = getMaxAreaId(game_id,-1)
        for t_Operator in t_Data :
            t_AreaIdBegin += 1
            t_AreaInfo = CGameArea()

            t_AreaInfo.game_id = game_id
            t_AreaInfo.game_identity = identity
            t_AreaInfo.game_name = game_name
            t_AreaInfo.operator_id = -1
            t_AreaInfo.operator_identity = ''
            t_AreaInfo.operator_name = ''
            t_AreaInfo.area_id = t_AreaIdBegin
            t_AreaInfo.area_identity = t_Operator["id"]
            t_AreaInfo.area_name = t_Operator["name"].decode('gbk').encode('utf-8')
            InsertGameAreaInfo(t_AreaInfo)
    except :
        logger.error("json decode error,url:"+t_ParasAreaUrl)
        logger.error( sys.exc_info())
Example #27
	def __init__(self,tiapp,project_dir,java,classes_dir,root_dir):
		self.tiapp = tiapp
		self.java = java
		self.appname = tiapp.properties['name']
		self.classes_dir = classes_dir
		self.template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
		self.appid = tiapp.properties['id']
		self.root_dir = root_dir
		self.project_dir = os.path.abspath(os.path.expanduser(project_dir))
		self.modules = set()
		self.jar_libraries = []
		
		json_contents = open(os.path.join(self.template_dir,'dependency.json')).read()
		self.depends_map = json.read(json_contents)
		
		# go ahead and slurp in any required modules
		for required in self.depends_map['required']:
			self.add_required_module(required)
		
		if (tiapp.has_app_property('ti.android.include_all_modules')):
			if tiapp.to_bool(tiapp.get_app_property('ti.android.include_all_modules')):
				print '[INFO] Force including all modules...'
				sys.stdout.flush()
				for module in self.depends_map['dependencies'].keys():
					self.add_required_module(module)

		self.module_methods = set()
		self.js_files = {}
		self.html_scripts = []
		self.compiled_files = []
Example #28
    def _q_index(self, request):
        request = quixote.get_request()
        response = quixote.get_response()

        if request.get_method() in ('POST', 'PUT'):
            if '_json' in request.form:
                ret = {}
                try:
                    changes = json.read(request.form['_json'])

                    for n,v in changes.items():
                        ret[n] = self.set_meta(n, v)

                    ref = request.get_environ('HTTP_REFERER')
                    if ref is not None:
                        p = self.image.pic()
                        response.redirect(ref + '#pic%d' % p.id)

                    http.json_response()
                    return json.write(ret)
                except json.ReadException:
                    raise QueryError('badly formatted JSON')

            response.set_status(204) # no content
            return ''
            
        meta = self.get_meta()

        http.json_response()
        return json.write(meta)
Example #29
	def _marshaled_dispatch(self, data, dispatch_method = None):
		"""Dispatches a JSON-RPC method from marshalled (JSON) data.
	
		JSON-RPC methods are dispatched from the marshalled (JSON) data
		using the _dispatch method and the result is returned as
		marshalled data. For backwards compatibility, a dispatch
		function can be provided as an argument (see comment in
		SimpleJSONRPCRequestHandler.do_POST) but overriding the
		existing method through subclassing is the preferred means
		of changing method dispatch behavior.
		"""
		print data
		rawreq = json.read(data)
	
		#params, method = xmlrpclib.loads(data)
		id = rawreq.get('id', 0)
		method = rawreq['method']
		params = rawreq.get('params', [])
		
		responseDict = {'id':id}
	
		# generate response
		try:
			if dispatch_method is not None:
				response = dispatch_method(method, params)
			else:
				response = self._dispatch(method, params)
			## wrap response in a singleton tuple
			#response = (response,)
			#response = xmlrpclib.dumps(response, methodresponse=1)
			responseDict['result'] = response
		except Fault, fault:
			#response = xmlrpclib.dumps(fault)
			responseDict['error'] = repr(fault)
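The marshalled data is a plain JSON-RPC envelope; a hypothetical round trip for a method named echo:

# request string passed in as `data`
'{"id": 1, "method": "echo", "params": ["hello"]}'
# on success responseDict becomes {'id': 1, 'result': 'hello'};
# if the dispatched method raises Fault, it gets an 'error' entry instead.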
Example #30
def parasGame() :
    t_ParseGameUrl = 'http://fcd.5173.com/commondata/Category.aspx?type=game'
    #t_ParseGameUrl = 'http://fcd.5173.com/commondata/Category.aspx?type=operator&id=5b4dca7c7fdd4fd280562b843ad36cbe'

    t_Content = urllib.urlopen(t_ParseGameUrl).read()

    # strip the leading and trailing whitespace
    #t_Content = t_Content[1:t_Len-1]

    t_Content = DealJsonString(t_Content)

    try :
        t_Data = json.read(t_Content)

        t_GameIdBegin = getMaxGameId()
        for k,v in t_Data.items() :
            if k != "Types" and k != "HOT" :
                for t_Game in v :
                    t_GameIdBegin += 1
                    t_GameInfo = CGameInfo();
                    t_GameInfo.game_id =t_GameIdBegin
                    t_GameInfo.game_identity = t_Game["id"]
                    t_GameInfo.game_name = t_Game["name"].decode('gbk').encode('utf-8')
                    t_GameInfo.is_hot = t_Game["hot"]
                    t_GameInfo.type = k
                    t_GameInfo.category = t_Game["type"]
                    InsertGameInfo(t_GameInfo)
    except :
        logger.error( sys.exc_info())
Example #31
 def loads(self, res):
     if hasattr(json, "loads"):
         res = json.loads(res)
     else:
         res = json.read(res)
     return res
Example #32
from django.conf import settings
from django.contrib.auth.models import User
from graphite.account.models import Profile
from graphite.logger import log

# There are a couple different json modules floating around out there with
# different APIs. Hide the ugliness here.
try:
    import json
except ImportError:
    import simplejson as json

if hasattr(json, 'read') and not hasattr(json, 'loads'):
    json.loads = json.read
    json.dumps = json.write
    json.load = lambda file: json.read(file.read())
    json.dump = lambda obj, file: file.write(json.write(obj))


def getProfile(request, allowDefault=True):
    if request.user.is_authenticated():
        return Profile.objects.get_or_create(user=request.user)[0]
    elif allowDefault:
        return default_profile()


def getProfileByUsername(username):
    try:
        return Profile.objects.get(user__username=username)
    except Profile.DoesNotExist:
        return None
Example #33
 def get_channel_statistics(self):
     url = f'https://www.googleapis.com/youtube/v3/channels?part=statistics&id={self.channel_id}&key={self.api_key}'
     print(url)
     json_url = requests.get(url)
     data = json.loads(json_url.text)  # stdlib json has no read(); loads parses the response text
     print(data)
Example #34
 def fromJson(self, text):
     return self.fromPython(json.read(text))
Example #35
def comments():
    attempts = 0
    accessible = 0
    index = 1
    with io.open("Google - My Activity.html", "r",
                 encoding="utf-8") as raw_html:
        html = raw_html.read().replace("\n", "").replace("'", "`")
        comments = str(
            re.findall(
                '<div class="QTGV3c" jsname="r4nke">(.*?)</div><div class="SiEggd">',
                html))
        uuids = str(re.findall('data-token="(.*?)" data-date', html))
        links = str(
            re.findall(
                '<div class="iXL6O"><a href="(.*?)" jslog="65086; track:click"',
                html))
        for i in range(int(links.count("'") / 2)):
            link = links.split("'")[index]
            comment = comments.split("'")[index]
            uuid = uuids.split("'")[index]
            instances = 0
            index += 2
            print('\n"' + comment.replace("`", "'") + '"')
            print(link + "\n")
            for i in range(0, 3, 1):
                rotate_connection()
                fetch_comments(
                    link.replace("https://www.youtube.com/watch?v=", ""))
                if private == bool(True):
                    break
                with open("temp_comments.json", "r") as json:
                    j = json.read()
                    if j.find(uuid) >= 0:
                        print("[ ✓ ]", end="")
                        instances += 1
                    else:
                        print("[ X ]", end="")
                        if instances > 0:
                            instances -= 1
                    try:
                        r = get_tor_session().get("https://ip.seeip.org/geoip")
                        r_dict = r.json()
                        print(" " + r_dict["country"] + " (" + r_dict["ip"] +
                              ")")
                    except IOError:
                        print(" Unknown location.")
            if private == bool(False):
                if instances > 0:
                    accessible += 1
                    print("\nAccessible.")
                elif instances == 0:
                    print("\nNon-accessible.")
            attempts += 1

        if attempts == accessible and accessible > 0:
            print(
                "No abnormal behavior detected. All comments are publicly available."
            )
        elif attempts > accessible:
            print("Questionable behavior in " + str(attempts - accessible) +
                  " comment(s) of " + str(attempts) + " attempted.")
        else:
            print(
                str(accessible) + " of " + str(attempts) +
                " comments publicly available.")
Example #36
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
from sklearn.feature_selection import SelectKBest, chi2
import pickle
import os
import pandas as pd  # needed below for pd.DataFrame
# from . import views
import json

json_file_path = "loan_no_loan.json"
with open(os.path.abspath(json_file_path), 'r') as j:
    contents = json.loads(j.read())

data = pd.DataFrame(contents)

# stemmer = SnowballStemmer('english')
# words = stopwords.words("english")

X_train, X_test, y_train, y_test = train_test_split(data['post'],
                                                    data.intent,
                                                    test_size=0.2)

pipeline = Pipeline([('vect',
                      TfidfVectorizer(ngram_range=(1, 2),
                                      stop_words="english",
                                      sublinear_tf=True)),
                     ('chi', SelectKBest(chi2, k='all')),
Example #37
import functools  # used by the @functools.wraps decorator below
import urllib2
import urlparse
import uuid

# Find a JSON parser
try:
    import simplejson
    _parse_json = lambda s: simplejson.loads(s.decode("utf-8"))
except ImportError:
    try:
        import cjson
        _parse_json = lambda s: cjson.decode(s.decode("utf-8"), True)
    except ImportError:
        try:
            import json
            _parse_json = lambda s: _unicodify(json.read(s))
        except ImportError:
            # For Google AppEngine
            from django.utils import simplejson
            _parse_json = lambda s: simplejson.loads(s.decode("utf-8"))

_FRIENDFEED_API_BASE = "http://friendfeed-api.com/v2"
_FRIENDFEED_OAUTH_BASE = "https://friendfeed.com/account/oauth"


def _authenticated(method):
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if not self.consumer_token or not self.access_token:
            raise Exception("OAuth required for this method")
        return method(self, *args, **kwargs)
Example #38
    )  # Number of rows that will be imported randomly

    for column in columns:
        column_as_df = json_normalize(df[column])
        column_as_df.columns = [
            f"{column}.{subcolumn}" for subcolumn in column_as_df.columns
        ]
        df = df.drop(column, axis=1).merge(
            column_as_df, right_index=True,
            left_index=True)  #drop original columns after extracting values

    return df


#import the data using the name and extension
df_train = json.read("train.csv")

#first 5 rows of our dataset
df_train.head()


def missing_values(data):
    total = data.isnull().sum().sort_values(ascending=False)
    percent = (data.isnull().sum() / data.isnull().count() *
               100).sort_values(ascending=False)
    df = pd.concat([total, percent], axis=1, keys=['Total', 'Percent'])
    print("Total columns at least one Values: ")
    print(df[~(df['Total'] == 0)])  # Returning values of nulls different of 0

    print(
        "\n Total of Sales % of Total: ",
Example #39
# log from ipython as I played with the pricing api
#index.json from https://pricing.us-east-1.amazonaws.com/offers/v1.0/aws/AmazonEC2/current/index.json
# Per http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/price-changes.html#download-the-offer-index
# But this may no longer be supported as it (EC2) is not part of the parent index of all services any longer

import json
with open('index.json', 'r') as foo:
    data = json.read(foo)   # AttributeError: the stdlib json module has no read()

with open('index.json', 'r') as foo:
    data = json.load(foo)

data.keys()
for key in data:
    print(data[key])

data.keys()
for key in data:
    if key != 'products':
        print(data[key])

data.keys()
for key in data:
    print(key)

for key in data:
    if key != u'products':
        print(data[key])

for key in data:
    if key != u'products':
Example #40
    def colorize(self, msg):
        state, foo = msg.split(' ', 1)
        if state in ('OK', 'UP'):
            msg = '9' + msg
        elif state in ('WARNING',):  # note the comma: ('WARNING') is just a string
            msg = '8' + msg
        elif state in ('CRITICAL', 'DOWN', 'UNREACHABLE'):
            msg = '4' + msg
        elif state in ('UNKNOWN',):
            msg = '7' + msg

        return msg


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print "statusbot.py config.json"
        exit(1)
    fh = open(sys.argv[1], 'r')
    config = json.read(fh.read())
    fh.close()

    c = StatusBot(nickname=config['nick'],
                  server=config['server'],
                  ssl=config['ssl'],
                  default_chan=config['channel'],
                  fifo=config['fifo'])
    c.connect()
    while True:
        c.one_loop()
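The config file is a small JSON object with exactly the keys pulled out above; a hypothetical config.json (all values invented):

{
    "nick": "statusbot",
    "server": "irc.example.org",
    "ssl": false,
    "channel": "#monitoring",
    "fifo": "/var/run/statusbot.fifo"
}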
Example #41
import json

try:
    json.dumps
    json.loads
except AttributeError:
    try:  # monkey patching for python-json package
        json.dumps = lambda obj, *args, **kwargs: json.write(obj)
        json.loads = lambda str, *args, **kwargs: json.read(str)
    except AttributeError:
        raise ImportError('Could not load an appropriate JSON library '
                          'currently supported are simplejson, '
                          'python3.7 json and python-json')

loads = json.loads
dumps = json.dumps
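Once the aliases are in place, callers can use loads and dumps uniformly regardless of which backend was imported, for example:

assert loads(dumps({'a': 1})) == {'a': 1}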
Example #42
 def ReadObjectFromJsonString(string):
     return json.read(string)
Example #43
 def fromFd(cls, fd):
     data = json.read(fd)
     return cls(data)
Example #44
parser.add_argument("asm_file", help="Assembly directives to assemble")
parser.add_argument("-o",
                    "--outfile",
                    help="Output MIF format file for FPGA program",
                    default="asm-out.mif")
parser.add_argument("--opcodes",
                    help="JSON Definition file to map mnemonics to opcodes")
parser.add_argument("-v",
                    "--verbose",
                    action="store_true",
                    help="Generate verbose output")
args = parser.parse_args()

if args.opcodes:
    with open(args.opcodes, 'r') as infile:  # args, not parser, carries the parsed path
        opcodes = json.read(infile.read())   # parse the file contents
else:
    opcodes = {
        "LD": int("0b0000", 0),
        "ST": int("0b0001", 0),
        "CPY": int("0b0010", 0),
        "SWAP": int("0b0011", 0),
        "ADD": int("0b0100", 0),
        "SUB": int("0b0101", 0),
        "ADDC": int("0b0110", 0),
        "SUBC": int("0b0111", 0),
        "AND": int("0b1000", 0),
        "OR": int("0b1001", 0),
        "NOT": int("0b1010", 0),
        "SHRA": int("0b1011", 0),
        "ROTR": int("0b1100", 0),
Example #45
    def upload(self):
        self.getDuration()
        # Take screenshots at even increments between 20% and 80% of the duration
        stops = range(20, 81, 60 / (self.shots - 1))
        imgs = []
        try:
            for stop in stops:
                imgfilename = TMPDIR + "screen%d.png" % len(imgs)
                ffmpegproc = subprocess.Popen([
                    r"/usr/bin/ffmpeg", "-ss",
                    str((self.duration * stop) / 100), "-i", self.path,
                    "-vframes", "1", "-y", "-qscale", "0", "-f", "image2",
                    imgfilename
                ],
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT)
                stdoutdata, stderrdata = ffmpegproc.communicate()
                if ffmpegproc.returncode != 0:
                    raise Exception("Ffmpeg call failed, error text:\n" +
                                    str(stdoutdata))
                imgs.append(imgfilename)
        except OSError:
            sys.stderr.write(
                "Error: Ffmpeg not installed, refer to http://www.ffmpeg.org/download.html for installation"
            )
            exit(1)
        except Exception as e:
            print(str(e))
            exit(1)

        opener = urllib2.build_opener(
            MultipartPostHandler.MultipartPostHandler)

        try:
            for img in imgs:
                params = ({'ImageUp': open(img, "rb")})
                socket = opener.open("https://images.baconbits.org/upload.php",
                                     params)
                json_str = socket.read()
                if hasattr(json, 'loads'):
                    read = json.loads(json_str)
                elif hasattr(json, 'read'):
                    read = json.read(json_str)
                else:
                    err_msg = "I cannot decipher your `json`;\n" + \
                     "please report the following output to the bB forum:\n" + \
                     ("%s" % dir(json))
                    raise Exception(err_msg)
                self.imageurl.append("https://images.baconbits.org/images/" +
                                     read['ImgName'])
            return True
        except urllib2.URLError, s:
            if self.tries < 3:
                self.tries += 1
                sys.stderr.write(
                    'Connection timed out, retrying to upload screenshots to imgur. This is try: '
                )
                sys.stderr.write(str(self.tries))
                sys.stderr.write('\n')
                sys.stderr.write(str(s))
                self.upload()
            return True
Example #46
# -*- coding: utf-8 -*-
'''
Created on Jul 29, 2011

@author: wangyouhua
'''

import json

from t4py.tblog.tblog import TBlog
from t4py.tblog.constants import CONSUMER_KEY
from t4py.tblog.constants import CONSUMER_SECRET
from t4py.http.oauth import OAuthToken
from t4py.utils.token_util import TokenUtil

util = TokenUtil()
str = util.get_token_str('user_test')
t = TBlog(CONSUMER_KEY, CONSUMER_SECRET)
t._request_handler.access_token = OAuthToken.from_string(str)

friends = t.statuses_friends()
result = json.read(friends)  #get friends
for friend in result["users"]:  #Ĭ默认只返回30个关注列表
    print friend["name"].decode("utf-8")
Example #47
#!/usr/bin/python

import sys, urllib2
import json, re
from SpyTools import *

lang = sys.argv[1]

opener = urllib2.build_opener()
opener.addheaders = [('User-agent', 'BrightByte/WikiWord/WikipediaDisambiguationSpy <*****@*****.**>')]

u = "http://%s.wikipedia.org/w/api.php?action=query&prop=links&plnamespace=10&pllimit=50&titles=MediaWiki:Disambiguationspage&format=json" % lang
js = opener.open(u).read();

data = json.read(js)

disambig = []

pid = data["query"]["pages"].keys()[0]
page = data["query"]["pages"][pid]

if not "links" in page:
  sys.exit()

links = page["links"]

for link in links:
  n = link["title"]
  n = re.sub("^[^:]*:", "", n)
  disambig.append(n)
Example #48
 def toJSON(self, indata):
     return json.read(indata)
Example #49
import argparse
import json
import os

from viz.plot_classification_model_evaluation import *
from data.generate_shards import genereate_shards

parser = argparse.ArgumentParser()

parser.add_argument('-r', '--results_dir', type=str, required=True)
parser.add_argument('-n', '--model-num', type=int, default=0)

args = parser.parse_args()

results_dir = args.results_dir
model_num = args.model_num

with open(os.path.join(results_dir, 'config.json'), 'r') as f:
    config = json.load(f)  # stdlib json: load from the open file handle

model_config = config['model_config']['data_model']
data_config = config['data_config']

# Load the correct model
# TODO: abstract the general model loading process?
if model_config['type'] == 'bnn':
    if config['model_config']['type'] == 'pvi':
        from src.methods.pvi.model_bnn import BNNModel
        model = BNNModel.load_model(
            os.path.join(results_dir, f'best_model_{model_num}'))

# Load the relevant data
# In sample sets, out of sample sets.
if data_config['dataset'] == 'MNIST':
Example #50
from random import getrandbits

from google.appengine.api import urlfetch

# We require a JSON parsing library. These seem to be the most popular.
try:
    import cjson
    decode_json = lambda s: cjson.decode(s.decode("utf-8"), True)
except ImportError:
    try:
        # Django includes simplejson
        from django.utils import simplejson
        decode_json = lambda s: simplejson.loads(s.decode("utf-8"))
    except ImportError:
        import json
        decode_json = lambda s: _unicodify(json.read(s))


# ------------------------------------------------------------------------------
# oauth client
# ------------------------------------------------------------------------------

class OAuthToken(object):
    '''OAuthToken is a data type that represents an End User via either an access or request token.'''

    key = None
    secret = None

    '''
    key = the token
    secret = the token secret
Example #51
def json_load(text):
    """ Load JSON data. """
    listdata = json.read(text)
    return listdata
Example #52
def getDeviceByName(devicename):
    if os.path.isfile('Devices/' + devicename + '.json'):
        with open('Devices/' + devicename + '.json') as json:
            return json.read()
    else:
        return "NOPE", 404
Example #53
def getWorkflowByName(workflowname):
    if os.path.isfile('Workflows/' + workflowname + '.json'):
        with open('Workflows/' + workflowname + '.json') as json:
            return json.read()
    else:
        return "NOPE", 404
Example #54
if params.has_key('code'):
    code = params['code'].value

    #construct POST object for access token fetch request
    postvals = {
        'grant_type': 'authorization_code',
        'client_id': key,
        'client_secret': secret,
        'code': code,
        'redirect_uri': callback_url
    }

    #make request to capture access token
    params = urllib.urlencode(postvals)
    f = urllib.urlopen(access_token_endpoint, params)
    token = json.read(f.read())

    print "<h1>token</h1>"
    print token
    print "<br /><br />"

    #build headers for protected data request
    headers = {'Accept': 'application/json'}

    #make OAuth signed request for protected user profile
    profile_url = "https://api.gowalla.com/users/me?oauth_token=" + token[
        'access_token']
    request = urllib2.Request(profile_url, None, headers)
    response = urllib2.urlopen(request)
    profile = response.read()
Example #55
def control(env, start_response):
    # Save the time the results were received.
    thetime = time_module.time()

    def cc_start_response(status, headers):
        c = get_counter()
        start_response(status, headers + [counter_cookie_header(c)])
        set_counter(c + 1)

    ip = None
    if env.has_key('HTTP_X_FORWARDED_FOR'):
        ip = env['HTTP_X_FORWARDED_FOR']
    else:
        ip = env['REMOTE_ADDR']

    user_agent = "Unknown user agent"
    if env.has_key('USER_AGENT'):
        user_agent = env['USER_AGENT']
    elif env.has_key('HTTP_USER_AGENT'):
        user_agent = env['HTTP_USER_AGENT']

    base = env.has_key('REQUEST_URI') and env['REQUEST_URI'] or env['PATH_INFO']

    last = filter(lambda x: x != '', base.split('/'))[-1]  # drop empty path segments

    if last in STATIC_FILES:
        contents = None
        f = None
        try:
            f = open(os.path.join(PWD, last))
            contents = f.read()
        except IOError:
            start_response('500 Internal Server Error', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><h1>500 Internal Server Error</h1></body></html>"]
        finally:
            if f: f.close()

        rr = last == 'main.js' and cc_start_response or start_response
        rr('200 OK', [('Content-Type', (last == 'spr.html' and 'text/html' or 'text/javascript') +'; charset=utf-8')])
        return [contents]
    elif last == PY_SCRIPT_NAME:
        qs = env.has_key('QUERY_STRING') and env['QUERY_STRING'].lstrip('?') or ''
        qs_hash = cgi.parse_qs(qs)

        # Is it a request for a JS/CSS include file?
        if qs_hash.has_key('include'): 
            if qs_hash['include'][0] == 'js':
                m = create_monster_string(JS_INCLUDES_DIR, '.js', JS_INCLUDES_LIST)
                start_response('200 OK', [('Content-Type', 'text/javascript; charset=utf-8'), ('Pragma', 'no-cache')])
                return [m]
            elif qs_hash['include'][0] == 'css':
                m = create_monster_string(CSS_INCLUDES_DIR, '.css', CSS_INCLUDES_LIST)
                start_response('200 OK', [('Content-Type', 'text/css; charset=utf-8'), ('Pragma', 'no-cache')])
                return [m]

        # ...if not, it's some results.

        if not (env['REQUEST_METHOD'] == 'POST') and (env.has_key('CONTENT_LENGTH')):
            start_response('400 Bad Request', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><h1>400 Bad Request</h1></body></html>"]

        content_length = None
        try:
            content_length = int(env['CONTENT_LENGTH'])
        except ValueError:
            start_response('500 Internal Server Error', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><h1>500 Internal Server Error</h1></body></html>"]

        post_data = env['wsgi.input'].read(content_length)

        # This will be called in the normal course of events, and if
        # there is an error parsing the JSON.
        def backup_raw_post_data(header=None):
            bf = None
            try:
                bf = lock_and_open(os.path.join(PWD, RESULT_FILES_DIR, RAW_RESULT_FILE_NAME), "a")
                if header:
                    bf.write("\n")
                    bf.write(header)
                bf.write(post_data)
            except:
                pass
            finally:
                if bf: unlock_and_close(bf)

        rf = None
        try:
            parsed_json = json.read(post_data)
            random_counter, counter, main_results = rearrange(parsed_json, thetime, ip)
            header = '#\n# Results on %s.\n# USER AGENT: %s\n# %s\n#\n' % \
                (time_module.strftime("%A %B %d %Y %H:%M:%S UTC",
                                      time_module.gmtime(thetime)),
                 user_agent,
                 "Design number was " + ((random_counter and "random = " or "non-random = ") + str(counter)))
            rf = lock_and_open(os.path.join(PWD, RESULT_FILES_DIR, RESULT_FILE_NAME), "a")
            backup_raw_post_data(header)
            csv_results = to_csv(main_results)
            rf.write(header)
            rf.write(csv_results)

            start_response('200 OK', [('Content-Type', 'text/plain; charset=ascii')])
            return ["OK"]
        except json.ReadException:
            backup_raw_post_data(header="# BAD REQUEST FROM %s\n" % user_agent)
            start_response('400 Bad Request', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><1>400 Bad Request</h1></body></html>"]
        except HighLevelParseError:
            backup_raw_post_data(header="# BAD REQUEST FROM %s\n" % user_agent)
            start_response('400 Bad Request', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><1>400 Bad Request</h1></body></html>"]
        except IOError:
            start_response('500 Internal Server Error', [('Content-Type', 'text/html; charset=utf-8')])
            return ["<html><body><h1>500 Internal Server Error</h1></body></html>"]
        finally:
            if rf: unlock_and_close(rf)
    else:
        start_response('404 Not Found', [('Content-Type', 'text/html; charset=utf-8')])
        return ["<html><body><h1>404 Not Found</h1></body></html>"]
Example #56
 def load(self, filename):
     with open(filename, 'r') as f:
         o = json.read(f.read())  # parse the file contents, not the filename string
         self._taxonomy = o[TAXONOMY]
         self._calibrating_multisets = o[CALIBRATING_SET]
         self._taxonomy_calibrate = o[TAXONOMY_CALIBRATE]
Example #57
import urllib, json, appuifw, time

URL = "http://developer.yahooapis.com/TimeService/V1/" +\
      "getTime?appid=MobilePython&output=json"

output = json.read(urllib.urlopen(URL).read())
print "Yahoo response: ", output
tstamp = int(output["Result"]["Timestamp"])

appuifw.note(u"Yahoo says that time is %s" % time.ctime(tstamp))
Example #58
    def wlcg_pledges(self, month=datetime.datetime.now().month, year=datetime.datetime.now().year):
        thisyear = str(year)
        debug = 0
        sites_per_accountname = {}
        fednames_and_accounts = {}

        cms_fed = {}
        atlas_fed = {}
        alice_fed = {}

        cms_pledge = {}
        atlas_pledge = {}
        alice_pledge = {}

        url = 'http://gstat-wlcg.cern.ch/apps/topology/2/json'
        response = urllib2.urlopen(url)
        s = response.read()
        x = json.read(s)
        #resourcesnestedlist=x['aaData']
        for obj in x:
            if ('USA' == obj['Country']):
                try:
                    sites_per_accountname[obj['FederationAccountingName']].append(obj['Site'])
                except KeyError:
                    sites_per_accountname[obj['FederationAccountingName']] = [obj['Site']]

                fednames_and_accounts[obj['Federation']] = obj['FederationAccountingName']  # Name is key
            if ('USA' == obj['Country'] and obj['Federation'].find('CMS') >= 0):
                cms_fed[obj['FederationAccountingName']] = 1
            if ('USA' == obj['Country'] and obj['Federation'].find('ATLAS') >= 0):
                atlas_fed[obj['FederationAccountingName']] = 1
            if ('USA' == obj['Country'] and obj['Federation'].find('ALICE') >= 0):
                alice_fed[obj['FederationAccountingName']] = 1
        if (debug > 0):
            for key in sites_per_accountname:
                print "\n FederationAccountingName: ", key
                for key2 in sites_per_accountname[key]:
                    print "\n\t Site: %s" % (key2)
            for key in cms_fed:
                print "\n CMS FederationAccountingName- %s " % (key)
            for key in atlas_fed:
                print "\n ATLAS FederationAccountingName- %s " % (key)
            for key in alice_fed:
                print "\n ALICE FederationAccountingName- %s " % (key)

        url = 'http://gstat-wlcg.cern.ch/apps/pledges/resources/' + thisyear + '/2/json'
        response = urllib2.urlopen(url)
        s = response.read()
        x = json.read(s)
        #resourcesnestedlist=x['aaData']
        for obj in x:
            if ('USA' == obj['Country'] and 'HEP-SPEC06' == obj['PledgeUnit']):
                try:
                    int(obj['ATLAS'])  # atlas number exists
                    atlas_pledge[fednames_and_accounts[obj['Federation']]] = {'pledge': obj['ATLAS'], 'site_names': sites_per_accountname[fednames_and_accounts[obj['Federation']]]}
                except:
                    None

                try:
                    int(obj['CMS'])  # cms number exists
                    cms_pledge[fednames_and_accounts[obj['Federation']]] = {'pledge': obj['CMS'], 'site_names': sites_per_accountname[fednames_and_accounts[obj['Federation']]]}
                except:
                    None

                try:
                    int(obj['ALICE'])  # alice number exists
                    alice_pledge[fednames_and_accounts[obj['Federation']]] = {'pledge': obj['ALICE'], 'site_names': sites_per_accountname[fednames_and_accounts[obj['Federation']]]}
                except:
                    None
        if (debug > 0):
            for key in cms_pledge:
                print "\n %s %s" % (key, cms_pledge[key]['pledge'])
                for key2 in cms_pledge[key]['site_names']:
                    print "\n\t %s" % (key2)
        if (debug > 0):
            for key in cms_fed:
                print "\n cms_fed: ", key
            for key in atlas_fed:
                print "\n atlas_fed: ", key
        return atlas_pledge, cms_pledge, atlas_fed, cms_fed, alice_pledge, alice_fed
Example #59
def read_prog(prog):
    return json.read(prog)