def handle_get(conversation):
    """Return every board id and every note as a JSON document.

    Query parameter 'fresh=true' requests a fresh (non-cached) session.
    Also sets conversation.modificationTimestamp to the newest board
    timestamp (in milliseconds) when at least one board exists.
    """
    fresh = conversation.query['fresh'] == 'true'
    max_timestamp = None
    board_list = []
    session = get_session(application, fresh)
    try:
        # NOTE: Query.all() returns a (possibly empty) list and never raises
        # NoResultFound -- only Query.one() does -- so the original
        # except-NoResultFound handlers around these loops were dead code
        # and have been removed.  Behavior is unchanged.
        for board in session.query(Board).all():
            board_list.append(board.id)
            if max_timestamp is None or board.timestamp > max_timestamp:
                max_timestamp = board.timestamp
        note_list = [note.to_dict() for note in session.query(Note).all()]
    finally:
        session.close()
    if max_timestamp is not None:
        conversation.modificationTimestamp = datetime_to_milliseconds(
            max_timestamp)
    return json.write({'boards': board_list, 'notes': note_list})
def handle_get(conversation):
    """Serve all board ids and all notes, JSON-encoded.

    Tracks the newest board timestamp and exposes it as the
    conversation's modification timestamp (milliseconds).
    """
    fresh = conversation.query['fresh'] == 'true'
    newest = None
    boards = []
    session = get_session(application, fresh)
    try:
        try:
            for board in session.query(Board).all():
                boards.append(board.id)
                stamp = board.timestamp
                if newest is None or stamp > newest:
                    newest = stamp
        except NoResultFound:
            return None
        notes = []
        try:
            for note in session.query(Note).all():
                notes.append(note.to_dict())
        except NoResultFound:
            pass
    finally:
        session.close()
    if newest is not None:
        conversation.modificationTimestamp = datetime_to_milliseconds(newest)
    return json.write({'boards': boards, 'notes': notes})
def _doCommonRequest(url, postdata, content_type, method): ''' Transmit a data to remote server using HTTP(GET,POST,PUT). and got a response which specified by JSON formate : "{"results":"content"}" from remote server. @poastdata String of json formate or the binary data of zip,png @return dictionary of response @Exception URLError. ''' logger = Logger.getLogger() #user define exception errorcode = { 'unspecified': 31, 'feedback_ok': 32, 'feedback_fail': 33, 'json_error_attr': 34, 'json_error_format': 35 } f = None try: logger.debug('init request!') request = urllib2.Request(url) if method == 'POST': postdata['token'] = '1122334455667788' jsonstr = minjson.write(postdata) logger.debug(jsonstr) request.add_data(jsonstr) else: request.add_header('token', '1122334455667788') request.add_data(postdata) #the content type of data application/json application/zip image/png logger.debug('add content_type!') request.add_header('Content-Type', content_type) #the content type of feedback from server logger.debug('add Accept!') request.add_header('Accept', 'application/json') #the length of data #logger.debug('add Content-Length!') #request.add_header("Content-Length", str(len(postdata))) #the http request type GET POST PUT logger.debug('add method!') request.get_method = lambda: method urllib2.socket.setdefaulttimeout(10) logger.debug('open the url!') f = urllib2.urlopen(request) json_feedback = f.read() dic = eval(json_feedback) #key = list(dic).pop() #value = str(dic[key]) return dic except Exception, e: #all exception.urllib2.URLError: <urlopen error timed out>... logger.debug('Do http request, got error!') return ''
def _doCommonRequest(url,postdata,content_type,method): ''' Transmit a data to remote server using HTTP(GET,POST,PUT). and got a response which specified by JSON formate : "{"results":"content"}" from remote server. @poastdata String of json formate or the binary data of zip,png @return dictionary of response @Exception URLError. ''' logger = Logger.getLogger() #user define exception errorcode = {'unspecified':31, 'feedback_ok':32, 'feedback_fail':33, 'json_error_attr':34, 'json_error_format':35 } f = None try: logger.debug('init request!') request = urllib2.Request(url) if method == 'POST': postdata['token'] = '1122334455667788' jsonstr = minjson.write(postdata) logger.debug(jsonstr) request.add_data(jsonstr) else: request.add_header('token','1122334455667788') request.add_data(postdata) #the content type of data application/json application/zip image/png logger.debug('add content_type!') request.add_header('Content-Type',content_type) #the content type of feedback from server logger.debug('add Accept!') request.add_header('Accept', 'application/json') #the length of data #logger.debug('add Content-Length!') #request.add_header("Content-Length", str(len(postdata))) #the http request type GET POST PUT logger.debug('add method!') request.get_method = lambda: method urllib2.socket.setdefaulttimeout(10) logger.debug('open the url!') f = urllib2.urlopen(request) json_feedback = f.read() dic = eval(json_feedback) #key = list(dic).pop() #value = str(dic[key]) return dic except Exception,e: #all exception.urllib2.URLError: <urlopen error timed out>... logger.debug('Do http request, got error!') return ''
def handle_get(conversation):
    """Serialize the shared state to JSON while holding the read lock."""
    state_lock = get_state_lock()
    state = get_state()
    state_lock.readLock().lock()
    try:
        encoded = json.write(state)
    finally:
        # always release, even if serialization raises
        state_lock.readLock().unlock()
    return encoded
def handle_get(conversation):
    """Return a single note (by id from the conversation) as JSON.

    Returns 404 when no note with that id exists.  Sets the
    conversation's modification timestamp from the note.
    """
    id = get_id(conversation)
    session = get_session(application)
    try:
        try:
            note = session.query(Note).filter_by(id=id).one()
        except NoResultFound:
            return 404
        # FIX: read everything we need while the session is still open --
        # after session.close() the instance is detached and attribute
        # access may fail (DetachedInstanceError).
        timestamp = note.timestamp
        note_dict = note.to_dict()
    finally:
        session.close()
    conversation.modificationTimestamp = datetime_to_milliseconds(timestamp)
    return json.write(note_dict)
def handle_get(conversation):
    """Fetch one note by id; respond 404 when it does not exist."""
    note_id = get_id(conversation)
    session = get_session(application)
    try:
        note = session.query(Note).filter_by(id=note_id).one()
    except NoResultFound:
        return 404
    finally:
        session.close()
    conversation.modificationTimestamp = datetime_to_milliseconds(
        note.timestamp)
    return json.write(note.to_dict())
def importEDL(self, content, filename):
    """Parse CMX-style EDL text and store it as a JavaScript file.

    Reads timecode-format lines (FCM: DROP/NON-DROP), edit event lines
    and '* FROM CLIP NAME:' lines from *content*, builds a list of edit
    dicts, serializes it as 'var edl = <json>' and saves it under a
    randomized filename derived from *filename*.
    """
    sequence_delim = ''
    source_delim = ''
    edits = []
    # DROP frame timecode uses ';' as separator, NON-DROP uses ':'
    tcformats = {
        'NON-DROP': ':',
        'DROP': ';',
    }
    # FIX: raw strings so the regex backslashes are not subject to
    # string-escape interpretation.
    tcformat_re = re.compile(r'^FCM: (DROP|NON-DROP) FRAME')
    edit_re = re.compile(r'^\d\d\d\s+(\w+).*(\d\d:\d\d:\d\d:\d\d)\s+(\d\d:\d\d:\d\d:\d\d)\s+(\d\d:\d\d:\d\d:\d\d)\s+(\d\d:\d\d:\d\d:\d\d)')
    clipname_re = re.compile(r'^\* FROM CLIP NAME:\s+(.+)')
    for line in content.split('\n'):
        line = line.strip()
        tcformat_match = tcformat_re.match(line)
        if tcformat_match:
            # first FCM line describes the sequence, the second the source
            if not sequence_delim:
                sequence_delim = tcformats[tcformat_match.group(1)]
            else:
                source_delim = tcformats[tcformat_match.group(1)]
        edit_match = edit_re.match(line)
        if edit_match:
            # default to NON-DROP when no FCM line was seen
            if not source_delim:
                source_delim = ':'
            if not sequence_delim:
                sequence_delim = ':'
            edits.append({
                'src': re.sub('_', ' ', edit_match.group(1)),
                'srcin': re.sub(':', source_delim, edit_match.group(2)),
                'srcout': re.sub(':', source_delim, edit_match.group(3)),
                'seqin': re.sub(':', sequence_delim, edit_match.group(4)),
                'seqout': re.sub(':', sequence_delim, edit_match.group(5)),
            })
        clipname_match = clipname_re.match(line)
        if clipname_match:
            # clip-name line annotates the most recent edit event
            edits[-1]['srcname'] = clipname_match.group(1)
    # FIX: renamed from 'json' so the serialized text no longer shadows
    # the json module name.
    edl_js = "var edl = " + minjson.write(edits)
    fn = Media.path('edl', '_'.join([os.path.basename(filename),
                                     random_string(5)]) + '.js')
    self.edl = fn
    # FIX: close the output file even if the write fails
    out = open(self.edl.path, 'w')
    try:
        out.write(edl_js)
    finally:
        out.close()
    self.save()
def run(self):
    """Tail self.filename and post each new line back to the server.

    Opens the log file, follows it via self.follow(), wraps every new
    line in the agent payload format, JSON-encodes it (stdlib json on
    Python >= 2.6, minjson otherwise) and hands it to doPostBack.
    Logs and returns on open or encoding failure.
    """
    try:
        # FIX: was bound to the misleading name 'filename' (it is a file
        # object, and self.filename is the path).
        log_file = open(self.filename, "r")
        loglines = self.follow(log_file)
    except Exception:
        import traceback
        self.mainLogger.error(
            'LogTailer (%s) - Unable to open log file = %s',
            self.filename, traceback.format_exc()
        )
        return
    for line in loglines:
        payload = {
            "itemId": self.agentConfig['agentKey'],
            "logs": [{
                "timestamp": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
                "message": line,
                "filename": self.filename
            }]
        }
        try:
            if int(pythonVersion[1]) >= 6:
                payloadJSON = json.dumps(payload)
            else:
                payloadJSON = minjson.write(payload)
        except Exception:
            import traceback
            self.mainLogger.error(
                'LogTailer (%s) - Failed encoding payload to json. Exception = %s',
                self.filename, traceback.format_exc()
            )
            return
        # FIX: str.replace instead of the long-deprecated string-module
        # functions (string.replace); identical result.
        sdUrl = self.agentConfig['sdUrl'].replace('https://', '')
        sdUrl = sdUrl.replace('.serverdensity.io', '')
        payload = {'payload': payloadJSON, 'sdUrl': sdUrl}
        self.doPostBack(payload)
def handle_post(conversation):
    """Update an existing note from the JSON request entity.

    Returns 404 when the note does not exist; otherwise updates it,
    bumps the owning board's timestamp, and returns the updated note
    as JSON.
    """
    id = get_id(conversation)
    # Note: You can only "consume" the entity once, so if we want it
    # as text, and want to refer to it more than once, we should keep
    # a reference to that text.
    text = conversation.entity.text
    note_dict = json.read(text)
    session = get_session(application)
    try:
        try:
            note = session.query(Note).filter_by(id=id).one()
        except NoResultFound:
            return 404
        note.update(note_dict)
        update_board_timestamp(session, note)
        session.flush()
        # FIX: capture values while the instance is still attached --
        # after session.close() attribute access on the detached note
        # may fail (DetachedInstanceError).
        timestamp = note.timestamp
        result = note.to_dict()
    finally:
        session.close()
    conversation.modificationTimestamp = datetime_to_milliseconds(timestamp)
    return json.write(result)
def handle_post(conversation):
    """Apply a JSON update to an existing note; 404 when absent."""
    note_id = get_id(conversation)
    # The request entity can be consumed only once, so grab its text
    # up front and reuse the reference.
    text = conversation.entity.text
    changes = json.read(text)
    session = get_session(application)
    try:
        note = session.query(Note).filter_by(id=note_id).one()
        note.update(changes)
        update_board_timestamp(session, note)
        session.flush()
    except NoResultFound:
        return 404
    finally:
        session.close()
    conversation.modificationTimestamp = datetime_to_milliseconds(
        note.timestamp)
    return json.write(note.to_dict())
def dumps(data):
    """Serialize *data* to a JSON string using minjson."""
    return minjson.write(data)
def doChecks(self, sc, firstRun, systemStats=False):
    """Run all agent checks, build the payload, post it back, reschedule.

    sc          -- sched.scheduler-style object used to re-enter this method
    firstRun    -- when True, systemStats is included in the payload
    systemStats -- system stats gathered once at startup (passed on first run)
    """
    # System stats are passed in on the initial run
    # We cache the line index from which to read from top
    if not self.topIndex:
        # Output from top is slightly modified on OS X 10.6 (case #28239)
        if systemStats and 'macV' in systemStats and systemStats['macV'][0].startswith('10.6.'):
            self.topIndex = 6
        else:
            self.topIndex = 5
    self.checksLogger = logging.getLogger('checks')
    self.checksLogger.debug('doChecks: start')
    # Do the checks
    apacheStatus = self.getApacheStatus()
    diskUsage = self.getDiskUsage()
    loadAvrgs = self.getLoadAvrgs()
    memory = self.getMemoryUsage()
    mysqlStatus = self.getMySQLStatus()
    networkTraffic = self.getNetworkTraffic()
    nginxStatus = self.getNginxStatus()
    processes = self.getProcesses()
    self.checksLogger.debug('doChecks: checks success, build payload')
    # Mandatory part of the payload; optional sections are added below.
    checksData = {'agentKey' : self.agentConfig['agentKey'],
                  'agentVersion' : self.agentConfig['version'],
                  'diskUsage' : diskUsage,
                  'loadAvrg' : loadAvrgs['1'],
                  'memPhysUsed' : memory['physUsed'],
                  'memPhysFree' : memory['physFree'],
                  'memSwapUsed' : memory['swapUsed'],
                  'memSwapFree' : memory['swapFree'],
                  'memCached' : memory['cached'],
                  'networkTraffic' : networkTraffic,
                  'processes' : processes}
    self.checksLogger.debug('doChecks: payload built, build optional payloads')
    # Apache Status (the check returns False when unavailable)
    if apacheStatus != False:
        checksData['apacheReqPerSec'] = apacheStatus['reqPerSec']
        checksData['apacheBusyWorkers'] = apacheStatus['busyWorkers']
        checksData['apacheIdleWorkers'] = apacheStatus['idleWorkers']
        self.checksLogger.debug('doChecks: built optional payload apacheStatus')
    # MySQL Status
    if mysqlStatus != False:
        checksData['mysqlConnections'] = mysqlStatus['connections']
        checksData['mysqlCreatedTmpDiskTables'] = mysqlStatus['createdTmpDiskTables']
        checksData['mysqlMaxUsedConnections'] = mysqlStatus['maxUsedConnections']
        checksData['mysqlOpenFiles'] = mysqlStatus['openFiles']
        checksData['mysqlSlowQueries'] = mysqlStatus['slowQueries']
        checksData['mysqlTableLocksWaited'] = mysqlStatus['tableLocksWaited']
        checksData['mysqlThreadsConnected'] = mysqlStatus['threadsConnected']
        # replication lag is only present on slaves
        if mysqlStatus['secondsBehindMaster'] != None:
            checksData['mysqlSecondsBehindMaster'] = mysqlStatus['secondsBehindMaster']
    # Nginx Status
    if nginxStatus != False:
        checksData['nginxConnections'] = nginxStatus['connections']
        checksData['nginxReqPerSec'] = nginxStatus['reqPerSec']
    # Include system stats on first postback
    if firstRun == True:
        checksData['systemStats'] = systemStats
        self.checksLogger.debug('doChecks: built optional payload systemStats')
    self.checksLogger.debug('doChecks: payloads built, convert to json')
    # Post back the data -- stdlib json exists from Python 2.6 on,
    # older interpreters fall back to the bundled minjson.
    if int(pythonVersion[1]) >= 6:
        self.checksLogger.debug('doChecks: json convert')
        payload = json.dumps(checksData)
    else:
        self.checksLogger.debug('doChecks: minjson convert')
        payload = minjson.write(checksData)
    self.checksLogger.debug('doChecks: json converted, hash')
    # MD5 of the payload lets the server verify integrity
    payloadHash = md5.new(payload).hexdigest()
    postBackData = urllib.urlencode({'payload' : payload, 'hash' : payloadHash})
    self.checksLogger.debug('doChecks: hashed, doPostBack')
    self.doPostBack(postBackData)
    self.checksLogger.debug('doChecks: posted back, reschedule')
    # schedule the next run; firstRun is False from now on
    sc.enter(self.agentConfig['checkFreq'], 1, self.doChecks, (sc, False))
#!/usr/bin/env python import sys, os import time import re sys.path.append("../server") filename = "../server/tmp/tests.json" import minjson as json import api tests = api.getTests(None, None, None) if (tests["stat"] == 'ok'): result = {'stat': 'ok', 'tests': tests["tests"], "from": "cache"} tempname = "%s.tmp.%i" % (filename, os.getpid()) cacheFile = open(tempname, "w") cacheFile.write(json.write(result)) cacheFile.close() os.rename(tempname, filename) else: print "Failed to get test data"
self.mainLogger.debug('Unable to get hostname: ' + str(e)) self.mainLogger.debug('doChecks: payload: %s' % checksData) self.mainLogger.debug('doChecks: payloads built, convert to json') # Post back the data if int(pythonVersion[1]) >= 6: self.mainLogger.debug('doChecks: json convert') payload = json.dumps(checksData) testobj = json.loads(payload) else: self.mainLogger.debug('doChecks: minjson convert') payload = minjson.write(checksData) testobj = minjson.read(payload) self.mainLogger.debug('doChecks: json converted, hash') payloadHash = md5(payload).hexdigest() postBackData = urllib.urlencode({'payload': payload, 'hash': payloadHash}) self.mainLogger.debug('doChecks: hashed, doPostBack') self.doPostBack(postBackData) self.mainLogger.debug('doChecks: posted back, reschedule') sc.enter(self.botConfig['checkFreq'], 1, self.doChecks, (sc, False))
#!/usr/bin/env python import sys, os import time import re sys.path.append("../server") filename = "../server/tmp/tests.json" import minjson as json import api tests = api.getTests(None, None, None) if(tests["stat"] == 'ok'): result = {'stat':'ok', 'tests':tests["tests"], "from":"cache"} tempname = "%s.tmp.%i" % (filename, os.getpid()) cacheFile = open(tempname, "w") cacheFile.write(json.write(result)) cacheFile.close() os.rename(tempname, filename) else: print "Failed to get test data"