def signature(fname, blockcount=-1):
    audio_format = DB.get('format')

    if not audio_format:
        audio_format, start = get_audio_format(fname)

        if audio_format:
            logging.info("Setting this stream's audio format as %s" % audio_format)
            DB.set('format', audio_format)
        else:
            logging.warn("Can't determine type of file for %s." % fname)
            return False

    block = None

    if audio_format == _FORMAT_AAC:
        sig, block = aac_signature(fname, blockcount)

    if audio_format == _FORMAT_MP3 or not block:
        sig, block = mp3_signature(fname, blockcount)

        if len(block) > 0 and audio_format == _FORMAT_AAC:
            DB.set('format', _FORMAT_MP3)
            DB.clear_cache()

    return sig, block
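# A minimal usage sketch for the signature() helper above. The file name and
# block count are hypothetical; the only assumption is the behaviour visible in
# the body: signature() returns False when the format cannot be determined and
# a (sig, block) tuple otherwise.
result = signature('/tmp/stream-dump.mp3', blockcount=4)
if result is False:
    logging.error("Could not fingerprint the capture; format detection failed.")
else:
    sig, block = result
    logging.info("Got a signature covering %d blocks" % len(block))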
def get_offset(force=False):
    # Contacts the goog, giving a longitude and latitude, and gets the time
    # offset with regard to UTC. There's a sqlite cache entry for the offset.
    # Returns an int offset.
    import misc

    # If we are testing this from an API level, then we don't
    # have a database
    if misc.IS_TEST:
        return 0

    offset = DB.get('offset', expiry=ONE_DAY_SECOND)

    if not offset or force:
        from urllib2 import urlopen

        when = int(unixtime())
        api_key = 'AIzaSyBkyEMoXrSYTtIi8bevEIrSxh1Iig5V_to'
        url = "https://maps.googleapis.com/maps/api/timezone/json?location=%s,%s&timestamp=%d&key=%s" % (
            misc.config['lat'], misc.config['long'], when, api_key)

        stream = urlopen(url)
        data = stream.read()
        opts = json.loads(data)

        if opts['status'] == 'OK':
            logging.info("Location: %s | offset: %s" % (opts['timeZoneId'], opts['rawOffset']))
            offset = (int(opts['rawOffset']) + int(opts['dstOffset'])) / 60
            DB.set('offset', offset)
        else:
            offset = 0

    return int(offset)
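# A hedged sketch of how the cached offset might be applied, assuming the value
# returned by get_offset() is a number of minutes to add to UTC (the code above
# divides Google's rawOffset + dstOffset, both in seconds, by 60). This snippet
# is illustration only and is not part of the original module.
import datetime

local_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=get_offset())
print(local_now.strftime('%Y-%m-%d %H:%M'))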
def _getXvmStatTokenData():
    global _tdataPrev
    playerId = getCurrentPlayerId()
    if playerId is None:
        return None
    tdataActive = _getXvmStatActiveTokenData()
    tdata = _checkToken(playerId, None if tdataActive is None else tdataActive['token'])
    if tdata is None:
        tdata = _tdataPrev

    type = SystemMessages.SM_TYPE.Warning
    msg = '<textformat tabstops="[150]"><a href="#XVM_SITE#"><font color="#E2D2A2">www.modxvm.com</font></a>\n\n'
    if tdata is None:
        msg += '{{l10n:token/network_error}}'
    elif tdata['status'] == 'badToken':
        msg += '{{l10n:token/bad_token}}'
    elif tdata['status'] == 'blocked':
        msg += '{{l10n:token/blocked}}'
    elif tdata['status'] == 'inactive':
        msg += '{{l10n:token/inactive}}'
    elif tdata['status'] == 'active':
        type = SystemMessages.SM_TYPE.Information
        msg += '{{l10n:token/active}}\n'
        s = time.time()
        e = tdata['expires_at'] / 1000
        days_left = int((e - s) / 86400)
        if days_left > 0:
            msg += '{{l10n:token/days_left:%d}}\n' % days_left
        else:
            hours_left = int((e - s) / 3600)
            msg += '{{l10n:token/hours_left:%d}}\n' % hours_left
        msg += '{{l10n:token/cnt:%d}}' % tdata['cnt']
    else:
        type = SystemMessages.SM_TYPE.Error
        msg += '{{l10n:token/unknown_status}}\n%s' % json.dumps(tdata)
    msg += '</textformat>'

    if _tdataPrev is None or _tdataPrev['status'] != 'active' or tdata is None or tdata['status'] != 'active':
        SystemMessages.pushMessage(msg, type)

    if tdata is not None:
        _tdataPrev = tdata
        if 'token' in tdata:
            db.set('tokens', playerId, tdata)
        elif tdataActive is not None:
            tdata['token'] = tdataActive['token']
        db.set('tokens', 'lastPlayerId', playerId)

    return tdata
def _getXvmStatTokenData():
    global _tdataPrev
    playerId = _getPlayerId()
    if playerId is None:
        return None
    tdataActive = _getXvmStatActiveTokenData()
    tdata = _checkToken(playerId, None if tdataActive is None else tdataActive['token'])
    if tdata is None:
        tdata = _tdataPrev

    type = SystemMessages.SM_TYPE.Warning
    msg = '<textformat tabstops="[120]"><a href="#XVM_SITE#">www.modxvm.com</a>\n\n'
    if tdata is None:
        msg += '{{l10n:token/network_error}}'
    elif tdata['status'] == 'badToken':
        msg += '{{l10n:token/bad_token}}'
    elif tdata['status'] == 'blocked':
        msg += '{{l10n:token/blocked}}'
    elif tdata['status'] == 'inactive':
        msg += '{{l10n:token/inactive}}'
    elif tdata['status'] == 'active':
        type = SystemMessages.SM_TYPE.Information
        msg += '{{l10n:token/active}}\n'
        s = time.time()
        e = tdata['expires_at'] / 1000
        days_left = int((e - s) / 86400)
        if days_left > 0:
            msg += '{{l10n:token/days_left:%d}}\n' % days_left
        else:
            hours_left = int((e - s) / 3600)
            msg += '{{l10n:token/hours_left:%d}}\n' % hours_left
        msg += '{{l10n:token/cnt:%d}}' % tdata['cnt']
    else:
        type = SystemMessages.SM_TYPE.Error
        msg += '{{l10n:token/unknown_status}}\n%s' % json.dumps(tdata)
    msg += '</textformat>'

    if _tdataPrev is None or _tdataPrev['status'] != 'active' or tdata is None or tdata['status'] != 'active':
        SystemMessages.pushMessage(msg, type)

    if tdata is not None:
        _tdataPrev = tdata
        if 'token' in tdata:
            db.set('tokens', playerId, tdata)
        elif tdataActive is not None:
            tdata['token'] = tdataActive['token']
        db.set('tokens', 'lastPlayerId', playerId)

    return tdata
def route_add(name, ip):
    if not validate.hostname(name):
        return respond_json({"status": "error", "error": "Invalid hostname, use only alphanumeric characters and dashes"})
    if not validate.ip(ip):
        return respond_json({"status": "error", "error": "Invalid IP address"})
    if db.has(name):
        return respond_json({"status": "error", "error": "Host is already taken"})

    creator = request.args.get('name', '')
    email = request.args.get('email', '')
    code = request.args.get('secret', '')
    nonce_recv = request.args.get('nonce', '')

    if creator == "" or email == "":
        return respond_json({"status": "error", "error": "Missing required fields"})

    if 'nonce' not in session or nonce_recv == "" or session['nonce'] != nonce_recv:
        return respond_json({"status": "error", "error": "Invalid nonce"})
    session.pop('nonce', None)

    if code != config.code:
        return respond_json({"status": "error", "error": "Invalid code"})

    db_entry = {"name": creator, "email": email, "host": name, "ip": ip}

    ret = ns.add(name, ip)
    if not ret:
        return respond_json({"status": "error", "error": "An unknown error occurred"})

    db.set(name, db_entry)
    return respond_json({"status": "ok"})
def token():
    new_api_token = request.headers.get('token')
    # verify the token with the appengine app
    verify_resp = requests.put(settings.CONTROL_APP + "/checktoken",
                               headers={'token': new_api_token})
    j_resp = verify_resp.json()
    if j_resp['valid']:
        db.set('token', new_api_token)
        resp = jsonify({'success': True})
        resp.status_code = 200
    else:
        resp = jsonify({'success': False})
        resp.status_code = 200
    return resp
def samp_guess(samp):
    if DB.get('samp'):
        return True

    global samp_distribution

    # first to this amount is our winner
    cutoff = 10

    if samp not in samp_distribution:
        samp_distribution[samp] = 0

    samp_distribution[samp] += 1

    if samp_distribution[samp] > cutoff:
        DB.set('samp', samp)
        globals()['_FRAME_LENGTH'] = (1152.0 / samp)
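# A hedged sketch of how samp_guess() above might be driven, assuming the
# module-level samp_distribution dict starts out empty. Each decoded frame
# reports its sample rate; once one rate has been seen more than `cutoff`
# times it is cached under 'samp' and _FRAME_LENGTH is derived from it.
# The frame loop here is hypothetical.
for frame_rate in [44100] * 12:   # twelve frames all reporting 44.1 kHz
    samp_guess(frame_rate)
# At this point DB.get('samp') should return 44100 and further calls
# short-circuit on the cached value.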
def coinbase(miner, reward=BLOCK_REWARD):
    coinbase_input = txIn('', '', amount=reward, signature='', owner=miner, index=0)
    coinbase_input.hash_in()

    got = get('outputindex', 'coofchainstatus')
    if got is None:
        got = 0
    output_count = int(got) + 1
    set('outputindex', str(output_count), 'coofchainstatus')

    out = txOut(hash='', index=output_count, owner=miner, amount=reward,
                allowed_infection=False, immune=False)
    out.hash_out()

    txn = Transaction(type=0, sender='coinbase', to=miner,
                      inputs=[coinbase_input], outputs=[out])
    txn.hash_tx()
    return txn
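# A minimal usage sketch for coinbase() above: build the reward transaction for
# a block being mined. The miner address string is hypothetical; the sketch
# relies only on the fields and methods used inside coinbase() itself.
reward_txn = coinbase('miner-public-key-hex')
# reward_txn can then be placed first in the block's transaction list before
# the block is hashed and saved.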
def application(env, start_response):
    """ The WSGI application """
    body = ''
    try:
        length = int(env.get('CONTENT_LENGTH', '0'))
    except ValueError:
        length = 0
    if length != 0:
        body = env['wsgi.input'].read(length)
    args = urlparse.parse_qs(body)

    # Add a new ticket and redirect to the standard location (reload
    # won't trigger any new entries or such)
    if env['PATH_INFO'] == '/new':
        db.add(args.get("ticket")[0])
        start_response('301 Redirect', [('Location', '/')])
        return []

    # List old tickets
    oldtickets = tickets_table(db.getall(ticket_type='active'))

    if env['PATH_INFO'] == '/edit':
        # Tiny edit (inlined)
        print("Args: " + str(args))
        target, index = args.get('id')[0].split('_')
        value = args.get('value')[0]
        if target == "done":
            db.set(target, done_snowflake[value], index)
        else:
            db.set(target, value, index)
        response_body = cgi.escape(value)
    else:
        # Redraw main interface
        response_body = html % {"tickets": str(oldtickets),
                                "finished": finished,
                                "unfinished": unfinished}

    status = '200 OK'
    response_headers = [('Content-Type', 'text/html'),
                        ('Content-Length', str(len(response_body)))]
    start_response(status, response_headers)
    return [response_body]
def poll(sensor):
    # poll the data
    data = None
    log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] polling sensor")
    try:
        # retrieve the raw data
        data = plugins[sensor['plugin']['plugin_name']].poll(sensor)
        # delete from the cache the previous value
        db.delete(sensor['db_cache'])
        # store it in the cache
        db.set(sensor["db_cache"], data, utils.now())
    except Exception, e:
        log.warning("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] unable to poll: " + utils.get_exception(e))
        return None
def set_watched_list(base_url, auth_user, con):
    posts = []
    # This was refactored and became a bit of a mess. s[0] is itemid, s[1] is true/false
    # for watched and s[2] is ticks. The return of the _function(url, data) is added to
    # (s[3], post_type) for logging purposes. s[3] is a debug value that was added in the
    # calculate_sync_list function to determine where/why an item was added.
    seen_items = []
    for post_type in ['sync_played', 'sync_unplayed', 'sync_ticks']:
        for s in auth_user[post_type]:
            if s[0] in seen_items:  # Don't allow dups
                break
            seen_items.append(s[0])
            if post_type == 'sync_played':
                posts.append(_watched_list_played(base_url, auth_user, s[0], s[1], s[2]) +
                             (auth_user['user_id'], s[3], post_type))
            elif post_type == 'sync_ticks':
                posts.append(_watched_list_ticks(base_url, auth_user, s[0], s[1], s[2]) +
                             (auth_user['user_id'], s[3], post_type))
            elif post_type == 'sync_unplayed':
                posts.append(_watched_list_unplayed(base_url, auth_user, s[0], s[1], s[2]) +
                             (auth_user['user_id'], s[3], post_type))
                posts.append(_watched_list_ticks(base_url, auth_user, s[0], s[1], s[2]) +
                             (auth_user['user_id'], s[3], post_type))
            db.set(con, auth_user['user_id'], s[0], s[1], s[2])
    db.save(con)
    return posts
def save(self):
    return set(self.hash, self.to_json(), 'utxos')
l.debug("Notifying photostreamer-server of full-resolution photo %s", fileId) payload = { "sender" : sender, "fileid": fileId, "full": saved.generate_url(expires_in=0, query_auth=False) } server.post('/photo/full', payload) l.debug("Starting background job.") sending = db.get('sending') # There is no semaphore, so make one if sending == None: l.info("No semaphore found in database. Creating one.") db.set('sending', False) sending = False # The script isn't running, so run it if sending == False: l.debug("Semaphore is False. Running background jobs.") # Set a semaphore using PickleDB db.set('sending', True) # Catch all exceptions here to make sure the semaphore doesn't get stuck # at True try: sql = db.connect() # First, send full quality versions of any files that have been # requested by photostreamer-server
def set(prop, val): sql = "update AuthorizedManHours set %s = %s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update Rates set %s = %s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update ManHourLogs set %s = %s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update Detachments set %s = %s where ClientID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update FieldEmployees set %s=%s where DisplayCode = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update IncentiveMOR set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def set_top_block(new):
    global TOP_BLOCK
    TOP_BLOCK = new
    set('height', str(new.height), 'coofchainstatus')
    return True
'''
(c) 2020 Coinfection Project
This code is licensed under the GNU General Public License v3.0
(see LICENSE.txt for details)
'''
from db import set, get
from block import Block, genesis

TOP_BLOCK = None

read_block_height = get('height', 'coofchainstatus')
if read_block_height is None:
    # save the genesis block
    genesis = genesis()
    print("No blocks found, saving genesis block to db")
    genesis.save()
    set('height', '0', 'coofchainstatus')
    TOP_BLOCK = genesis


def get_block_height():
    return TOP_BLOCK.height


def set_top_block(new):
    global TOP_BLOCK
    TOP_BLOCK = new
    set('height', str(new.height), 'coofchainstatus')
    return True


def top_block():
    return TOP_BLOCK
def persist(self):
    self.seen_now()
    db.set(self.db_key(), self.to_json())
    db.sync()
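# A hedged sketch of the minimal interface persist() above relies on: the host
# object must provide db_key(), to_json() and seen_now(). The Peer class and
# its fields are hypothetical, for illustration only.
import json
import time

class Peer(object):
    def __init__(self, addr):
        self.addr = addr
        self.last_seen = None

    def db_key(self):
        # key under which this object is stored
        return 'peer:%s' % self.addr

    def seen_now(self):
        # refresh the last-seen timestamp before persisting
        self.last_seen = int(time.time())

    def to_json(self):
        return json.dumps({'addr': self.addr, 'last_seen': self.last_seen})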
def set(prop, val): sql = "update Receivables set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update PayrollRecord set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
# copyright 2008 OsaMa -ITQPDA
import appuifw, audio, os, e32
import db, time
from graphics import *
import powlite_fm as fm


def ar(x):
    return x.decode('utf8')


settings = db.set(save=1)

# ========================
# app info
appuifw.app.screen = 'normal'
appuifw.app.body = b = appuifw.Text()
b.style = appuifw.STYLE_BOLD
appuifw.app.body.color = 0x0000ff
b.add(ar(' ITQPDA SoundREC. v1.0\n===========================\n Author: Osamadj.\n For\n http://www.i-tich.net\n English by:\n ~: ITQPDA Team :~\n For\n www.ipmart-forum.com \n'))

# ========================
osamadir = u'E:\\itqpda\\'
try:
    if not os.path.exists(osamadir):
        os.makedirs(osamadir)
    else:
        pass
except:
    appuifw.note(u"Couldn't create the directory!", "error")


def std():
    date = time.strftime('_%d%m%Y_%H_%M_%S')
def set(prop, val): sql = "update SSSContributions set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update PagibigSalaryLoans set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def persist(self):
    db.set(self.db_key(), self.to_json())
    db.sync()
def set(prop, val): sql = "update PersonalPayables set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def done(ircsock, channel, index):
    """ Mark a ticket as finished """
    db.set('done', True, index)
    sendmsg(ircsock, channel, "Finished ticket: " + index)
def set(prop, val): sql = "update OfficeEmployees set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def set(prop, val): sql = "update HolidayMOR set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def set(prop, val): sql = "update FieldEmployees set %s=%s where DisplayCode = %s" params =(prop, val, self.ID) return db.set(sql,params)
def upgrade_2_0():
    ######## START OF CONFIGURATION
    # remove all data from the target database
    empty_target_db = False
    # migrate history data
    migrate_history = True
    # history start timestamp to migrate, "-inf" for all
    history_start_timestamp = "-inf"
    # history end timestamp to migrate
    history_end_timestamp = utils.now()
    # migrate recent data
    migrate_recent = True
    # database number from which we are migrating
    db_from = 1
    # database number into which we are migrating
    db_to = 2
    # debug
    debug = False
    # keys to migrate history (from key -> to key)
    # destination key format: myHouse:<module_id>:<group_id>:<sensor_id>
    history = {
        'home:weather:outdoor:temperature:day:max': 'myHouse:outdoor:temperature:external:day:max',
        'home:weather:outdoor:temperature:day:min': 'myHouse:outdoor:temperature:external:day:min',
        'home:weather:outdoor:temperature:day': 'myHouse:outdoor:temperature:external:day:avg',
        'home:weather:indoor:temperature:day:max': 'myHouse:indoor:temperature:living_room:day:max',
        'home:weather:indoor:temperature:day:min': 'myHouse:indoor:temperature:living_room:day:min',
        'home:weather:indoor:temperature:day': 'myHouse:indoor:temperature:living_room:day:avg',
        'home:weather:almanac:record:min': 'myHouse:outdoor:temperature:record:day:min',
        'home:weather:almanac:record:max': 'myHouse:outdoor:temperature:record:day:max',
        'home:weather:almanac:normal:min': 'myHouse:outdoor:temperature:normal:day:min',
        'home:weather:almanac:normal:max': 'myHouse:outdoor:temperature:normal:day:max',
        'home:weather:outdoor:condition:day': 'myHouse:outdoor:temperature:condition:day:avg',
    }
    # keys to migrate recent data (from key -> to key)
    recent = {
        'home:weather:outdoor:temperature:measure': 'myHouse:outdoor:temperature:external',
        'home:weather:indoor:temperature:measure': 'myHouse:indoor:temperature:living_room',
        'home:weather:outdoor:condition:measure': 'myHouse:outdoor:temperature:condition',
    }
    ######## END OF CONFIGURATION

    conf = config.get_config(validate=False)
    print "[Migration from v1.x to v2.0]\n"
    input("WARNING: which data will be migrated is defined within this script, on top of the upgrade_2_0() function.\nIndividual sensors to migrate must be specified manually.\nPlease ensure you have reviewed all the settings first!\n\nPress Enter to continue...")
    backup("1.0")

    # empty the target database first
    if empty_target_db:
        print "Flushing target database..."
        change_db(db_to)
        db.flushdb()

    # for each history key to migrate
    print "Migrating historical data..."
    for key_from in history:
        if not migrate_history:
            break
        key_to = history[key_from]
        print "\tMigrating " + key_from + " -> " + key_to
        # retrieve all the data
        change_db(db_from)
        data = db.rangebyscore(key_from, history_start_timestamp, history_end_timestamp, withscores=True)
        change_db(db_to)
        count = 0
        # for each entry
        for entry in data:
            timestamp = utils.day_start(utils.timezone(entry[0]))
            value = utils.normalize(entry[1])
            # store it into the new database
            if debug:
                print "[HISTORY][" + key_to + "] (" + utils.timestamp2date(timestamp) + ") " + str(value)
            db.set(key_to, value, timestamp)
            count = count + 1
        print "\t\tdone, " + str(count) + " values"

    # for each recent key to migrate
    print "Migrating recent data..."
    for key_from in recent:
        if not migrate_recent:
            break
        key_to = recent[key_from]
        print "\tMigrating " + key_from + " -> " + key_to
        # retrieve the recent data
        change_db(db_from)
        data = db.rangebyscore(key_from, utils.now() - 2 * conf["constants"]["1_day"], utils.now(), withscores=True)
        change_db(db_to)
        count = 0
        # for each entry
        for entry in data:
            timestamp = utils.timezone(entry[0])
            value = utils.normalize(entry[1])
            if debug:
                print "[RECENT][" + key_to + "] (" + utils.timestamp2date(timestamp) + ") " + str(value)
            # skip it if the same value is already stored
            old = db.rangebyscore(key_to, timestamp, timestamp)
            if len(old) > 0:
                continue
            # store it into the new database
            db.set(key_to, value, timestamp)
            # create the sensor data structure
            key_split = key_to.split(":")
            group_id = key_split[-2]
            sensor_id = key_split[-1]
            module_id = key_split[-4]
            sensor = utils.get_sensor(module_id, group_id, sensor_id)
            sensor['module_id'] = module_id
            sensor['group_id'] = group_id
            sensor['db_group'] = conf["constants"]["db_schema"]["root"] + ":" + sensor["module_id"] + ":" + sensor["group_id"]
            sensor['db_sensor'] = sensor['db_group'] + ":" + sensor["sensor_id"]
            import sensors
            sensors.summarize(sensor, 'hour', utils.hour_start(timestamp), utils.hour_end(timestamp))
            count = count + 1
        print "\t\tdone, " + str(count) + " values"

    print "Upgrading database..."
    version_key = conf["constants"]["db_schema"]["version"]
    db.set_simple(version_key, "2.0")
def set(prop, val): sql = "update SSSContributions set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def set(prop, val): sql = "update Allowances set %s = %s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update PagibigSalaryLoans set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)
def set(prop, val): sql = "update IncentiveMOR set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def process_inbound(self, node_id, child_id, command, ack, type, payload):
    # ensure command and type are valid
    if command >= len(commands):
        log.error("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "] command not supported: " + str(command))
        return
    if type >= len(types[command]):
        log.error("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "] type not supported: " + str(type))
        return
    # map the corresponding command and type string
    command_string = commands[command]
    type_string = types[command][type]
    ack_string = acks[ack]
    log.debug("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "][" + command_string + "][" + type_string + "][" + ack_string + "] received: " + str(payload))
    # handle protocol messages
    if command_string == "PRESENTATION":
        # handle presentation messages
        log.info("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "] presented as " + type_string)
    elif command_string == "SET":
        # handle set messages (messages from sensors handled below)
        log.info("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "][" + command_string + "][" + type_string + "]: " + payload)
    elif command_string == "REQ":
        # handle req messages
        log.info("[" + self.gateway_id + "][" + str(node_id) + "][" + str(child_id) + "][" + command_string + "][" + type_string + "]: " + payload)
    elif command_string == "INTERNAL":
        # handle internal messages
        if type_string == "I_TIME":
            # return the time as requested by the sensor
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] requesting timestamp")
            self.tx(node_id, child_id, command_string, type_string, int(time.time()))
        elif type_string == "I_SKETCH_NAME":
            # log the sketch name
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] reported sketch name: " + str(payload))
        elif type_string == "I_SKETCH_VERSION":
            # log the sketch version
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] reported sketch version: " + str(payload))
        elif type_string == "I_ID_REQUEST":
            # return the next available id
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] requesting node_id")
            # get the available id
            id = self.get_available_id()
            # store it into the database
            db.set(self.assigned_ids_key, id, utils.now())
            # send it back
            self.tx(node_id, child_id, command_string, "I_ID_RESPONSE", str(id))
        elif type_string == "I_CONFIG":
            # return the controller's configuration
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] requesting configuration")
            metric = "I" if conf["general"]["units"]["imperial"] else "M"
            self.tx(node_id, child_id, command_string, type_string, metric)
        elif type_string == "I_BATTERY_LEVEL":
            # log the battery level
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] reporting battery level: " + str(payload) + "%")
        elif type_string == "I_LOG_MESSAGE":
            # log a custom message
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] logging: " + str(payload))
        elif type_string == "I_GATEWAY_READY":
            # report gateway ready
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] reporting gateway ready")
        elif type_string == "I_POST_SLEEP_NOTIFICATION":
            # report awake
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] reporting awake")
        elif type_string == "I_HEARTBEAT_RESPONSE" or type_string == "I_PRE_SLEEP_NOTIFICATION":
            # handle smart sleep
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] going to sleep")
            if node_id in self.queue and not self.queue[node_id].empty():
                # process the queue
                while not self.queue[node_id].empty():
                    node_id, child_id, command_string, type_string, payload = self.queue[node_id].get()
                    # send the message
                    self.tx(node_id, child_id, command_string, type_string, payload)
        else:
            log.info("[" + self.gateway_id + "][" + str(node_id) + "] received " + type_string)
    elif command_string == "STREAM":
        # handle stream messages
        return
    else:
        log.error("[" + self.gateway_id + "] Invalid command " + command_string)
    # handle messages for registered sensors
    if self.is_registered(node_id, child_id, command_string, type_string):
        # message for a registered sensor, retrieve the myHouse sensor
        sensor = self.nodes[node_id][child_id][command_string][type_string]
        # store the value for the sensor
        value = payload
        measures = []
        measure = {}
        measure["key"] = sensor["sensor_id"]
        measure["value"] = utils.normalize(value, conf["constants"]["formats"][sensor["format"]]["formatter"])
        measures.append(measure)
        sensors.store(sensor, measures)
def set(prop, val): sql = "update DetachmentContactPersons set %s = %s where DetachID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update OfficeEmployeeTypes set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update PersonalPayables set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def cmd():
    init()
    print(Fore.WHITE + '')
    c = input('Command>')
    kws = c.split() or [0]
    for i in kws:
        if i == '':
            kws.remove(i)
    if kws[0] == 'export':
        try:
            f = File(kws[2], texts[kws[1]])
            dt = f.set()
            dt.close()
            print(Fore.GREEN + 'Success! Text has been exported.')
        except IndexError:
            print(Fore.RED + 'Enter the arguments!')
        except KeyError:
            print(Fore.RED + 'No such key!')
    elif kws[0] == 'get':
        try:
            print(Fore.CYAN + texts[kws[1]])
        except KeyError:
            print(Fore.RED + 'No such key! Try again!')
        except IndexError:
            for key in texts.keys():
                print(Fore.CYAN + key)
    elif kws[0] == 'set':
        try:
            db.set(kws[1], kws[2])
            texts[kws[1]] = kws[2]
            print(Fore.GREEN + 'Success! Variable has been saved.')
        except IndexError:
            print(Fore.RED + 'Enter the arguments!')
    elif kws[0] == 'del':
        try:
            db.dlt(kws[1])
            texts.pop(kws[1])
            print(Fore.GREEN + 'Success! Variable has been removed.')
        except IndexError:
            print(Fore.RED + 'Enter the argument!')
        except KeyError:
            print(Fore.RED + 'No such key! Try again!')
    elif kws[0] == 'exit':
        sys.exit()
    elif kws[0] == 'import':
        try:
            f = File(kws[2])
            txt = f.get()
            db.dlt(kws[1])
            texts[kws[1]] = ''
            for line in txt:
                texts[kws[1]] += line + '\n'
            db.set(kws[1], texts[kws[1]])
            print(Fore.GREEN + 'Success! Text has been imported.')
        except IndexError:
            print(Fore.RED + 'Enter the arguments!')
        except FileNotFoundError:
            print(Fore.RED + 'No such file!')
    elif kws[0] == 'export-end':
        try:
            f = File(kws[2], texts[kws[1]])
            f.add()
            print(Fore.GREEN + 'Success! Text has been exported to end of file.')
        except IndexError:
            print(Fore.RED + 'Enter the arguments!')
    elif kws[0] == 'help':
        try:
            comm = kws[1]
        except IndexError:
            comm = 'all'
        try:
            print(help[comm])
        except KeyError:
            print(Fore.RED + 'No such command!')
    elif kws[0] == 'cls':
        os.system('cls')
    else:
        if kws[0] != 0:
            print(Fore.RED + 'Unknown command!')
    cmd()
def set(prop, val): sql = "update PayrollRecord set %s=%s where ID = %s" params = (prop, val, self.ID) return db.set(sql, params)
def set(prop, val): sql = "update UniformDeposits set %s=%s where ID = %s" params =(prop, val, self.ID) return db.set(sql,params)