Example #1
 def json(self):
     metadata = self.get()
     if self.downloader.verbose:
         print metadata
     if json:
         return json.loads(metadata)
     else:
         return minjson.safeRead(metadata)
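These first snippets test "if json:" before calling json.loads, which presumably relies on the surrounding module having tried to import the standard-library json at load time and having left the name falsy when that import failed. A minimal sketch of such an import guard (module-level names assumed, not taken from the examples) could look like:

try:
    # Python 2.6+ ships json in the standard library
    import json
except ImportError:
    # Older interpreters fall back to the bundled minjson module
    json = None
    import minjson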
Example #2
 def json(self):
     metadata = self.get()
     if self.downloader.verbose:
         print metadata
     if json:
         return json.loads(metadata)
     else:
         return minjson.safeRead(metadata)
Example #3
 def __send_removal(self):
     url = '%suninstall/' % BASE_URL
     data = {'installId': self.key, 'agentKey': self.config.agent_key}
     request = urllib2.urlopen(url, urllib.urlencode(data))
     response = request.read()
     if json:
         return json.loads(response)
     else:
         return minjson.safeRead(response)
Example #4
 def __get_installs(self):
     url = '%supdate/' % BASE_URL
     data = {
         'agentKey': self.config.agent_key
     }
     request = urllib2.urlopen(url, urllib.urlencode(data))
     response = request.read()
     if json:
         return json.loads(response)
     else:
         return minjson.safeRead(response)
Example #5
 def __send_removal(self):
     url = '%suninstall/' % BASE_URL
     data = {
         'installId': self.key,
         'agentKey': self.config.agent_key
     }
     request = urllib2.urlopen(url, urllib.urlencode(data))
     response = request.read()
     if json:
         return json.loads(response)
     else:
         return minjson.safeRead(response)
Example #6
 def run(self):
   status, output = commands.getstatusoutput("facter -j")
   if status != 0:
     return False
   else:
     try:
       try:
         import json
         return json.loads(output)
       except ImportError, e:
         self.checks_logger.debug('Could not load modern json module. Falling back to minjson.')
         import minjson
         return minjson.safeRead(output)
     except Exception, e:
         import traceback
         self.checks_logger.error('Unable to load facter JSON - Exception = ' + traceback.format_exc())
         return False
Example #7
def extractFacebookLikes(inFile3, facebookLikes):
    columnMark = '|::|'
    rowMark = '|;;|\n'
    count = 0
    fin = open(inFile3)
    for current in fin:
        if not (current[0:4] == columnMark and current[-5:] == rowMark):
            continue
        count += 1  # allCount
        if (count % 1000 == 0):
            print 'facebookLikes:' + str(count)
        data2 = current[4:-5]
        if (len(data2) <= 5):  # if |::|400|;;|, continue
            continue
        data = data2.replace("\\/", "/")
        data = data.replace("\\n", " ")
        dataJsonList = minjson.safeRead(data)
        for (oneUser, dataJson) in dataJsonList.items():
            userId = dataJson['id']
            if (dataJson.has_key('username')):
                userName = dataJson['username']
            else:
                userName = oneUser
            if (dataJson.has_key('likes')):
                likes = dataJson['likes']
            else:
                likes = 0
            website = '0'
            if (dataJson.has_key('website')):
                website = dataJson['website']
                #website = website.replace("\n",";")
                #website = website.replace("\t",";")
                #website = website.replace("  "," ")
                #website = website.replace("  "," ")
                #print 'before::' + website
                website = re.sub('\s+', ';', website)
                website = website.replace("\\/", "/")
                #website = website.replace("http://","")
                #print 'after::' + website

            if (not facebookLikes.has_key(userId)):
                facebookLikes[userId] = [userId, likes, userName, website]
            if (not facebookLikes.has_key(userName)):
                facebookLikes[userName] = [userId, likes, userName, website]
Example #8
 def run(self):
     status, output = commands.getstatusoutput("ohai")
     if status != 0:
         return False
     else:
         try:
             try:
                 import json
                 return json.loads(output)
             except ImportError, e:
                 self.checks_logger.debug(
                     'Could not load modern json module. Falling back to minjson.'
                 )
                 import minjson
                 return minjson.safeRead(output)
         except Exception, e:
             import traceback
             self.checks_logger.error(
                 'Unable to load ohai JSON - Exception = ' +
                 traceback.format_exc())
             return False
Example #9
 def loads(data):
     return minjson.safeRead(data)
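A quick usage sketch for this thin wrapper; the payload string below is made up for illustration, and minjson.safeRead is assumed to return the parsed Python object much like json.loads would:

data = loads('{"status": "ok", "count": 3}')
print data['count']  # -> 3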
Example #10
# Decode the JSON
if int(pythonVersion[1]) >= 6:  # Don't bother checking major version since we only support v2 anyway
    import json

    try:
        updateInfo = json.loads(responseAgent)
    except Exception, e:
        print "Unable to get latest version info. Try again later."
        sys.exit(2)

else:
    import minjson

    try:
        updateInfo = minjson.safeRead(responseAgent)
    except Exception, e:
        print "Unable to get latest version info. Try again later."
        sys.exit(2)

# Loop through the new files and call the download function
for agentFile in updateInfo["files"]:
    agentFile["tempFile"] = downloadFile(agentFile)

# If we got this far then everything worked out fine. However, all the files are
# still in temporary locations so we need to move them.
#
# Make sure the target directory doesn't exist already.
# Usage of shutil prevents [Errno 18] Invalid cross-device link (case 26878) -
# http://mail.python.org/pipermail/python-list/2005-February/308026.html
if os.path.exists("sd-agent/"):
Example #11
				
				mainLogger.debug('Update: decoding JSON (json)')
				
				try:
					updateInfo = json.loads(response)
				except Exception, e:
					print 'Unable to get latest version info. Try again later.'
					sys.exit(1)
				
			else:
				import minjson
				
				mainLogger.debug('Update: decoding JSON (minjson)')
				
				try:
					updateInfo = minjson.safeRead(response)
				except Exception, e:
					print 'Unable to get latest version info. Try again later.'
					sys.exit(1)
			
			# Do the version check	
			if updateInfo['version'] != agentConfig['version']:			
				import md5 # I know this is deprecated, but we still support Python 2.4 and hashlib is only in 2.5. Case 26918
				import urllib
				
				print 'A new version is available.'
				
				def downloadFile(agentFile, recursed = False):
					mainLogger.debug('Update: downloading ' + agentFile['name'])					
					print 'Downloading ' + agentFile['name']
					
Example #12
# Decode the JSON
if int(pythonVersion[1]) >= 6: # Don't bother checking major version since we only support v2 anyway
	import json
	
	try:
		updateInfo = json.loads(responseAgent)
	except Exception, e:
		print 'Unable to get latest version info. Try again later.'
		sys.exit(2)
	
else:
	import minjson
	
	try:
		updateInfo = minjson.safeRead(responseAgent)
	except Exception, e:
		print 'Unable to get latest version info. Try again later.'
		sys.exit(2)

# Loop through the new files and call the download function
for agentFile in updateInfo['files']:
	agentFile['tempFile'] = downloadFile(agentFile)			

# If we got this far then everything worked out fine. However, all the files are still in temporary locations so we need to move them
import os
import shutil # Prevents [Errno 18] Invalid cross-device link (case 26878) - http://mail.python.org/pipermail/python-list/2005-February/308026.html

# Make sure the target directory doesn't exist already
if os.path.exists('sd-agent/'):
		shutil.rmtree('sd-agent/')
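The comment above notes that shutil is used because a plain os.rename fails with [Errno 18] Invalid cross-device link when the temporary files and the install directory live on different filesystems. A minimal sketch of that relocation step, with hypothetical paths and helper name, might be:

import os
import shutil

def relocate(temp_path, final_path):
    # Clear out any previous install (assumed to be a directory) first.
    if os.path.exists(final_path):
        shutil.rmtree(final_path)
    # shutil.move copies and then deletes when a simple rename would cross
    # filesystem boundaries, avoiding the cross-device link error.
    shutil.move(temp_path, final_path)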
Example #13
 def loads(data):
     return minjson.safeRead(data)
Example #14
                mainLogger.debug('Update: decoding JSON (json)')

                try:
                    updateInfo = json.loads(response)
                except Exception, e:
                    print 'Unable to get latest version info. Try again later.'
                    sys.exit(1)

            else:
                import minjson

                mainLogger.debug('Update: decoding JSON (minjson)')

                try:
                    updateInfo = minjson.safeRead(response)
                except Exception, e:
                    print 'Unable to get latest version info. Try again later.'
                    sys.exit(1)

            # Do the version check
            if updateInfo['version'] != agentConfig['version']:
                import md5  # I know this is deprecated, but we still support Python 2.4 and hashlib is only in 2.5. Case 26918
                import urllib

                print 'A new version is available.'

                def downloadFile(agentFile, recursed=False):
                    mainLogger.debug('Update: downloading ' +
                                     agentFile['name'])
                    print 'Downloading ' + agentFile['name']
Example #15
			return False

		except Exception, e:
			import traceback
			self.checksLogger.error('Unable to get CouchDB statistics - Exception = ' + traceback.format_exc())
			return False

		try:

			if int(pythonVersion[1]) >= 6:
				self.checksLogger.debug('getCouchDBStatus: json read')
				stats = json.loads(response)

			else:
				self.checksLogger.debug('getCouchDBStatus: minjson read')
				stats = minjson.safeRead(response)

		except Exception, e:
			import traceback
			self.checksLogger.error('Unable to load CouchDB database JSON - Exception = ' + traceback.format_exc())
			return False

		couchdb['stats'] = stats

		# Next, get all database names.
		endpoint = '/_all_dbs/'

		try:
			url = '%s%s' % (self.agentConfig['CouchDBServer'], endpoint)
			self.checksLogger.debug('getCouchDBStatus: attempting urlopen')
			req = urllib2.Request(url, None, headers)
Example #16
def facebookProfile(fileName, fout, userList):
    # extract Twitter and Link from Facebook Profiles
    # output:
    # facebookUserID likes TwitterName link
    fin = open(fileName)
    columnMark = '|::|'
    rowMark = '|;;|\n'
    count = 0
    for current in fin:
        if not (current[0:4] == columnMark and current[-5:] == rowMark):
            continue
        count += 1  # allCount
        if (count % 100000 == 0):
            print count
        data2 = current[4:-5]
        data = data2.replace("\\/", "/")
        data = data.replace("\\n", " ")
        dataJson = minjson.safeRead(data)
        userID = dataJson['id']

        #if(userID == '108756882487616'):
        #    print data

        if (dataJson.has_key('likes')):
            likes = dataJson['likes']
        else:
            continue
            #print current
            #likes = '0'
        if (dataJson.has_key('website')):
            website = dataJson['website']
            website = website.replace("\n", " ")
            website = website.replace(";", " ")
            website = website.replace("\t", " ")
            website = website.replace("  ", " ")
            website = website.replace("\\/", "/")
            website = website.replace("http:\\", "")
            websiteList = website.split(' ')
            website = ''
            for urlLink in websiteList:
                if (len(urlLink) <= 1):
                    continue
                urlLink = urlLink.replace("http://", "")
                if (urlLink[len(urlLink) - 1:len(urlLink)] == "/"):
                    urlLink = urlLink[0:len(urlLink) - 1]
                website += urlLink + ';'
            if (website[len(website) - 1:len(website)] == ";"):
                website = website[0:len(website) - 1]
        else:
            website = '0'
        if (dataJson.has_key('description')):
            description = dataJson['description']
            #description = description.decode('unicode_escape','ignore')
        else:
            description = '0'
        if (dataJson.has_key('general_info')):
            general_info = dataJson['general_info']
        else:
            general_info = '0'
        if (dataJson.has_key('about')):
            about = dataJson['about']
        else:
            about = '0'
        if (not userList.has_key(userID)):
            userList[userID] = 1
        else:
            #print userID
            continue  # if repeated, skip
        twitterAccount = findTwitter(website + '|')
        if (twitterAccount == '0'):
            twitterAccount = findTwitter(description + '|')
        if (twitterAccount == '0'):
            twitterAccount = findTwitter(general_info + '|')
        if (twitterAccount == '0'):
            twitterAccount = findTwitter(about + '|')

        fout.write(userID + '\t' + str(likes) + '\t' + twitterAccount + '\t' +
                   website + '\n')