def __init__(self, uid=None, client=None):
    """Bot session wrapper: wires up the styling helper, the persistent
    JSON stores and the imjustgood media API client.

    uid    -- account identifier of this bot (may be None until login)
    client -- transport client used to talk to the chat service
    """
    super(justgood, self).__init__()
    self.uid = uid
    self.client = client
    self.flex = style.autobots()
    # Live JSON stores: mutations are persisted to disk immediately.
    # Positional flags presumably map to pretty/sort_keys/indent --
    # TODO confirm against the livejson.File signature.
    self.db = livejson.File('data/data.json', True, False, 4)
    self.key = livejson.File('data/key.json', True, False, 4)
    self.api = livejson.File('data/api.json', True, False, 4)
    # Service endpoint and API key both come from data/api.json.
    self.host = f"https://{self.api['main']}"
    self.media = imjustgood(self.api["apikey"])
    self.master = self.db['master']
    self.join = False
    # Per-feature read state, keyed later by chat/user ids.
    self.read = {"cctv": {}, "imgurl": {}}
def test_with_existing_file(self):
    """A grouped write (``with`` block) must not wipe pre-existing data."""
    json_file = livejson.File(self.path)
    json_file["a"] = "b"
    with json_file:
        json_file["c"] = "d"
    # The key written before the grouped write must survive it.
    self.assertIn("a", json_file)
def resolveSettingsFromToken(self, token):
    """Return the livejson.File in the CWD whose 'token' entry equals *token*.

    Falls back to a plain empty dict when no settings file matches.
    """
    for name in os.listdir('.'):
        if not name.endswith('.json'):
            continue
        settings = livejson.File(name)
        if settings.data.get('token', None) == token:
            return settings
    return {}
async def rank(ctx):
    """ XP Ranking. """
    with livejson.File('xp.json') as xp_list:
        # Snapshot user -> xp so we can sort without re-reading the live file.
        _xp_list = {}
        for k in xp_list:
            _xp_list[k] = xp_list[k]['xp']
        # Walk users in descending XP order; rank is 1-based position.
        _u = []
        for u in sorted(_xp_list, key=_xp_list.get, reverse=True):
            _u.append(u)
            xp_list[u]['rank'] = len(_u)
        if len(ctx.message.mentions) != 0:
            # Report the first mentioned user's rank.
            u = xp_list[ctx.message.mentions[0].id]
            # Level curve: log base 1.3 of (xp/1000), floored, minimum 1.
            u['lvl'] = math.floor(math.log((u['xp'] / 1000), 1.3)) + 1
            if u['lvl'] < 1:
                u['lvl'] = 1
            await rasis.say('{} is #{} at level {} with {}XP.'.format(
                ctx.message.mentions[0].display_name,
                xp_list[ctx.message.mentions[0].id]['rank'],
                xp_list[ctx.message.mentions[0].id]['lvl'],
                xp_list[ctx.message.mentions[0].id]['xp']))
        else:
            # No mention: report the invoking author's own rank.
            u = xp_list[ctx.message.author.id]
            u['lvl'] = math.floor(math.log((u['xp'] / 1000), 1.3)) + 1
            if u['lvl'] < 1:
                u['lvl'] = 1
            await rasis.say('You are #{} at level {} with {}XP.'.format(
                xp_list[ctx.message.author.id]['rank'],
                xp_list[ctx.message.author.id]['lvl'],
                xp_list[ctx.message.author.id]['xp']))
def checkOnSavedCredential(self, token):
    """Return True when any *.json file in the CWD holds a truthy 'token'.

    NOTE(review): the *token* argument is never compared against the stored
    value (contrast resolveSettingsFromToken, which matches on equality) --
    confirm whether matching the specific token was intended here.
    """
    all_json_file = [fjs for fjs in os.listdir('.') if fjs.endswith('.json')]
    for c_file in all_json_file:
        data = livejson.File(c_file)
        # Truthiness check only: any non-empty token counts as "saved".
        if data.data.get('token', None):
            return True
    return False
def test_fun_syntax(self):
    """Grouped writes enable one-shot ``with File(...) as f: f[k] = v`` sugar."""
    with livejson.File(self.path) as db:
        db["cats"] = "dogs"
    # Exiting the block must have flushed the value to disk.
    with open(self.path, "r") as handle:
        on_disk = handle.read()
    self.assertEqual(on_disk, "{\"cats\": \"dogs\"}")
def test_basics(self):
    """Inside a grouped write nothing hits disk; flushing happens on exit."""
    f = livejson.File(self.path)
    with f:
        f["a"] = "b"
        # Make sure that the write doesn't happen until we exit
        self.assertEqual(f.file_contents, "{}")
    # After the block the cached write must be on disk.
    self.assertEqual(f.file_contents, "{\"a\": \"b\"}")
def test_switchclass(self):
    """ Test the switching of classes in the middle of a grouped write """
    f = livejson.File(self.path)
    with f:
        self.assertIsInstance(f, livejson.DictFile)
        # set_data([]) flips the instance to the list flavour in memory...
        f.set_data([])
        self.assertIsInstance(f, livejson.ListFile)
        # ...but the file on disk stays untouched until the block exits.
        self.assertEqual(f.file_contents, "{}")
    self.assertEqual(f.file_contents, "[]")
def test_errors(self):
    """ Test the errors that are thrown """
    f = livejson.File(self.path)
    f["data"] = {}
    # Test that storing non-string keys in a nested dict throws an error
    with self.assertRaises(TypeError):
        f["data"][True] = "test"
    # Test that storing numeric keys raises an additional error message.
    # assertRaisesRegex replaces assertRaisesRegexp, a deprecated alias
    # removed in Python 3.12.
    with self.assertRaisesRegex(TypeError, "Try using a"):
        f["data"][0] = "abc"
def test_multilevel_nesting(self):
    """Nested containers (dict -> list -> dict -> list) stay live-writable."""
    db = livejson.File(self.path)
    db["stored_data"] = []
    db["stored_data"].append({})
    db["stored_data"][0]["colors"] = ["green", "purple"]
    expected = {"stored_data": [{"colors": ["green", "purple"]}]}
    self.assertEqual(db.data, expected)
def test_rollback(self):
    """ Test that data can be restored in the case of an error to prevent corruption (see #3)"""
    class Test(object):
        # Deliberately not JSON-serializable.
        pass
    f = livejson.File(self.path)
    f["a"] = "b"
    # Storing a non-serializable object must fail...
    with self.assertRaises(TypeError):
        f["test"] = Test()
    # ...and must leave the previously stored data intact.
    self.assertEqual(f.data, {"a": "b"})
def test_misc_methods(self):
    """Exercise __getitem__, pop and __iter__ on nested live structures."""
    db = livejson.File(self.path)
    db["stored_data"] = [{"colors": ["green"]}]
    # Plain nested __getitem__ still works.
    self.assertEqual(db["stored_data"][0]["colors"][0], "green")
    # Deleting a value via pop() on a nested list.
    db["stored_data"][0]["colors"].pop(0)
    self.assertEqual(len(db["stored_data"][0]["colors"]), 0)
    # __iter__ on a nested dict walks its keys.
    db["stored_data"] = {"a": "b", "c": "d"}
    self.assertEqual(list(db["stored_data"]), list(db["stored_data"].keys()))
async def on_message(m):
    """Award XP per message; new users are seeded at 0 XP / level 1."""
    await rasis.process_commands(m)
    with livejson.File('xp.json') as xp_list:
        if m.author.id not in xp_list:
            xp_list[m.author.id] = {}
            xp_list[m.author.id]['xp'] = 0
            xp_list[m.author.id]['lvl'] = 1
        # Base award: message length plus a flat 7.
        xp_list[m.author.id]['xp'] += len(m.content) + 7
        if len(m.content) > 1200:
            # Over-long messages: claw back len+9, a net change of -2 XP.
            xp_list[m.author.id]['xp'] -= (len(m.content) + 9)
        # Cache the display name for leaderboard output.
        xp_list[m.author.id]['name'] = m.author.display_name
def test_misc(self):
    """ Test miscellaneous other things that seem like they might break with a grouped write """
    f = livejson.File(self.path)
    # Test is_caching, and test that data works with the cache
    self.assertEqual(f.is_caching, False)
    with f:
        self.assertEqual(f.is_caching, True)
        f["a"] = "b"
        # Test that data reflects the cache
        self.assertEqual(f.data, {"a": "b"})
    # Caching must be switched off again once the block exits.
    self.assertEqual(f.is_caching, False)
def test_empty_file(self):
    """An existing but zero-byte file can back either File flavour."""
    # Dict files
    with open(self.path, "w") as handle:
        handle.write("")
    dict_file = livejson.File(self.path)
    self.assertEqual(dict_file.data, {})
    # List files
    with open(self.path, "w") as handle:
        handle.write("")
    list_file = livejson.ListFile(self.path)
    self.assertEqual(list_file.data, [])
def test_switchclass(self): """ Test that it can automatically switch classes """ # Test switching under normal usage f = livejson.File(self.path) self.assertIsInstance(f, livejson.DictFile) f.set_data([]) self.assertIsInstance(f, livejson.ListFile) # Test switching when the file is manually changed with open(self.path, "w") as fi: fi.write("{}") # This shouldn't error, it should change types when you do this f["dogs"] = "cats" self.assertIsInstance(f, livejson.DictFile)
def test_special_stuff(self):
    """ Test all the not-strictly-necessary extra API that I added """
    f = livejson.File(self.path)
    f["a"] = "b"
    # Test 'data' (get a vanilla dict object)
    self.assertEqual(f.data, {"a": "b"})
    # Test file_contents (raw text currently on disk)
    self.assertEqual(f.file_contents, "{\"a\": \"b\"}")
    # Test __str__ and __repr__ delegate to the underlying dict
    self.assertEqual(str(f), str(f.data))
    self.assertEqual(repr(f), repr(f.data))
    # Test __iter__
    self.assertEqual(list(f), list(f.keys()))
    # Test remove() deletes the backing file
    f.remove()
    self.assertFalse(os.path.exists(self.path))
def test_errors(self):
    """ Test the errors that are set up """
    f = livejson.File(self.path)
    # Test error for trying to initialize in non-existant directories
    self.assertRaises(IOError, livejson.File, "a/b/c.py")
    # Test error when trying to store non-string keys
    with self.assertRaises(TypeError):
        f[True] = "test"
    # Test that storing numeric keys raises a more helpful error message.
    # assertRaisesRegex replaces assertRaisesRegexp, a deprecated alias
    # removed in Python 3.12.
    with self.assertRaisesRegex(TypeError, "Try using a"):
        f[0] = "abc"
    # When initializing using with_data, test that an error is thrown if
    # the file already exists
    with self.assertRaises(ValueError):
        livejson.File.with_data(self.path, {})
def test_ListFile(self): """ Test that Files in which the base object is an array work """ # Create the JSON file. f = livejson.ListFile(self.path) self.assertEqual(f.data, []) # Test append, extend, and insert f.append("dogs") f.extend(["cats", "penguins"]) f.insert(0, "turtles") self.assertIsInstance(f.data, list) self.assertEqual(f.data, ["turtles", "dogs", "cats", "penguins"]) # Test clear f.clear() self.assertEqual(len(f), 0) # Test creating a new ListFile automatically when file is an Array f2 = livejson.File(self.path) self.assertIsInstance(f2, livejson.ListFile)
def test_json_formatting(self):
    """ Test the extra JSON formatting options """
    # Test pretty formatting. NOTE: the two expected literals below were
    # identical in the damaged source even though the indent changes from
    # the default to 4 between them; the whitespace runs inside the string
    # literals are restored here (2 spaces, then 4 spaces).
    f = livejson.File(self.path, pretty=True)
    f["a"] = "b"
    self.assertEqual(f.file_contents, '{\n  "a": "b"\n}')
    f.indent = 4
    f.set_data(f.data)  # Force an update
    self.assertEqual(f.file_contents, '{\n    "a": "b"\n}')
    # Test sorting of keys: insertion order a, b, d, c must serialize sorted
    f["b"] = "c"
    f["d"] = "e"
    f["c"] = "d"
    self.assertTrue(
        f.file_contents.find("a") < f.file_contents.find("b") <
        f.file_contents.find("c") < f.file_contents.find("d"))
async def set(self, ctx, *, zone: str):
    """Set a user's own time zone.

    You can get a list of your timezones from ;;time zones <code>.
    Usage: ;;time set America/New_York
    >>> Timezone set as America/New_York."""
    try:
        # pytz.timezone() raises for unknown names, landing in the except
        # branch below. Zone names shorter than 6 chars are treated as
        # abbreviations (e.g. 'EST') and get an extra warning.
        if len(pytz.timezone(zone).zone) < 6:
            await self.rasis.say(
                'Timezone set as ' + pytz.timezone(zone).zone +
                '. But be warned — using abbreviated timezones (like \'EST\') doesn\'t account for Daylight Savings Time. See `;;time zones <code>` for a more accurate time.'
            )
        # NOTE(review): this confirmation also fires after the warning
        # above, producing a duplicate message for short zones — confirm
        # whether it was meant to be in an else branch.
        await self.rasis.say('Timezone set as ' + pytz.timezone(zone).zone +
                             '.')
        # Persist the user's choice.
        with livejson.File('times.json') as times:
            times[ctx.message.author.id] = zone
    except Exception as e:
        await self.rasis.say(
            'Ouch.\n{}: {}\nTry running `;;time zones <code>` first to get a list of acceptable timezone names.'
            .format(type(e).__name__, e))
def test_DictFile(self): """ Test that 'livejson.File's in which the base object is a dict work as expected. This also tests all the methods shared between both types. """ # Test that a blank JSON file can be properly created f = livejson.File(self.path) self.assertIsInstance(f, livejson.DictFile) # Test DictFile is default self.assertTrue(os.path.exists(self.path)) with open(self.path, "r") as fi: self.assertEqual(fi.read(), "{}") # Test writing to a file f["a"] = "b" # Test reading values from an existing file newInstance = livejson.DictFile(self.path).data # Tests explicit type self.assertEqual(newInstance["a"], "b") # Test deleting values f["c"] = "d" self.assertIn("c", f) # This also conviently tests __contains__ del f["c"] self.assertNotIn("c", f)
def __init__(self, fileName, client, app, uid):
    """Per-room command handler: loads (or seeds) its settings and stats
    from database/<fileName>.json.

    fileName -- basename of the backing JSON database
    client   -- chat client used to send messages
    app      -- application identity string
    uid      -- this bot's own user id
    """
    super(commands, self).__init__()
    self.fileName = fileName
    self.client = client
    self.app = app
    self.uid = uid
    # Live store; every mutation below is written straight to disk.
    self.db = livejson.File("database/%s.json" % fileName, True, True, 4)
    self.master = ["YOUR_MID"]
    self.invites = []
    # Default settings used only when the database has none yet.
    self.settings = {
        "protect": {},
        "namelock": {},
        "linkprotect": {},
        "denyinvite": {},
        "autopurge": False,
        "allowban": True,
        "sqmode": False,
        "rname": fileName,
        "sname": "default"
    }
    if not "settings" in self.db:
        # First run: persist defaults, then greet every master account.
        self.db['settings'] = self.settings
        self.settings = self.db["settings"]
        for oup in self.master:
            client.sendMessage(oup, "I'm just created.\nMy uid: %s" % uid)
    else:
        self.settings = self.db["settings"]
    # Default stats, likewise only used to seed a fresh database.
    self.stats = {
        "owners": [],
        "admins": [],
        "staffs": [],
        "bots": [],
        "antijs": [],
        "banned": []
    }
    if not "stats" in self.db:
        self.db['stats'] = self.stats
        self.stats = self.db["stats"]
    else:
        self.stats = self.db["stats"]
async def xp(ctx):
    """ Experience system description. TODO: Don't forget to write this. """
    with livejson.File('xp.json') as xp_list:
        if len(ctx.message.mentions) != 0:
            # Report the first mentioned user's XP.
            u = xp_list[ctx.message.mentions[0].id]
            # Level curve: log base 1.3 of (xp/1000), floored, minimum 1.
            u['lvl'] = math.floor(math.log((u['xp'] / 1000), 1.3)) + 1
            if u['lvl'] < 1:
                u['lvl'] = 1
            await rasis.say(
                '{} is level {} with {}XP. They have {}XP to go before the next level.'
                .format(ctx.message.mentions[0].display_name, u['lvl'],
                        u['xp'], _nextXP(u['xp'], u['lvl'])))
        else:
            # No mention: report the invoking author's own XP.
            u = xp_list[ctx.message.author.id]
            u['lvl'] = math.floor(math.log((u['xp'] / 1000), 1.3)) + 1
            if u['lvl'] < 1:
                u['lvl'] = 1
            await rasis.say(
                'You are level {} with {}XP. You have {}XP to go before the next level.'
                .format(u['lvl'], u['xp'], _nextXP(u['xp'], u['lvl'])))
async def top(ctx):
    """ XP Leaderboards. """
    with livejson.File('xp.json') as xp_list:
        # Snapshot user -> xp so we can sort without re-reading the file.
        _xp_list = {}
        for k in xp_list:
            _xp_list[k] = xp_list[k]['xp']
        # Descending XP order; rank is 1-based position in the sort.
        _u = []
        for u in sorted(_xp_list, key=_xp_list.get, reverse=True):
            _u.append(u)
            xp_list[u]['rank'] = len(_u)
        # Build the leaderboard message for the top ten.
        m = 'Top 10 users:'
        i = 1
        for u in _u[0:10]:
            # Level curve: log base 1.3 of (xp/1000), floored, minimum 1.
            xp_list[u]['lvl'] = math.floor(
                math.log((xp_list[u]['xp'] / 1000), 1.3)) + 1
            if xp_list[u]['lvl'] < 1:
                xp_list[u]['lvl'] = 1
            m += '\n#{}: {} at level {} with {}XP.'.format(
                i, xp_list[u]['name'], xp_list[u]['lvl'], xp_list[u]['xp'])
            i += 1
        await rasis.say(m)
import os
import livejson

# Resolve users.json relative to this module so the database is found
# regardless of the process's working directory.
localdir = os.path.dirname(os.path.abspath(__file__))
dbpath = os.path.join(localdir, "users.json")
# Live view of the student database: every mutation is written straight
# back to users.json, pretty-printed with sorted keys.
students = livejson.File(dbpath, pretty=True, sort_keys=True)
# -*- coding: utf-8 -*- from linepy import * from akad.ttypes import OpType, Message, TalkException from threading import Thread import os, livejson, traceback, time, sys from data import commands OT = OpType fileName = os.path.splitext(os.path.basename(__file__))[0] db = livejson.File("token/%s.json" % fileName) if ":" in db['token']: app = "ANDROIDLITE\t2.11.1\tAndroid OS\t5.1.1" else: app = "DESKTOPWIN\t5.21.3\tWindows\t10" try: client = LINE(idOrAuthToken=db["token"], appName=app) except: e = traceback.format_exc() if "code=20" in e: print("FREEZING") time.sleep(3600) python3 = sys.executable os.execl(python3, python3, *sys.argv) elif "code=8" in e or "code=7" in e: client = LINE(db["mail"], db["pass"], certificate='{}.crt'.format(db["mail"]), appName=app) db['token'] = client.authToken
def main():
    """ The main function for Murakami."""
    # Settings come from (in increasing precedence) the TOML config file,
    # MURAKAMI_SETTINGS_* environment variables, and the CLI flags below.
    parser = configargparse.ArgParser(
        auto_env_var_prefix="murakami_settings_",
        config_file_parser_class=TomlConfigFileParser,
        default_config_files=defaults.CONFIG_FILES,
        description="The Murakami network test runner.",
        ignore_unknown_config_file_keys=False,
    )
    parser.add(
        "-c",
        "--config",
        is_config_file=True,
        required=False,
        help="TOML configuration file path.",
    )
    parser.add(
        "-d",
        "--dynamic-state",
        default=defaults.DYNAMIC_FILE,
        dest="dynamic",
        help=
        "Path to dynamic configuration store, used to override settings via Webthings (default:"
        + defaults.DYNAMIC_FILE + ").",
    )
    parser.add(
        "-p",
        "--port",
        type=int,
        default=defaults.HTTP_PORT,
        help="The port to listen on for incoming connections (default: 80).",
    )
    parser.add("-n",
               "--hostname",
               help="The mDNS hostname for WebThings (default: automatic).")
    parser.add(
        "-s",
        "--ssl-options",
        nargs="?",
        dest="ssl_options",
        help="SSL options for the WebThings server (default: none).",
    )
    parser.add(
        "-r",
        "--additional-routes",
        nargs="?",
        dest="additional_routes",
        help="Additional routes for the WebThings server (default: none).",
    )
    parser.add(
        "-b",
        "--base-path",
        default="",
        dest="base_path",
        help="Base URL path to use, rather than '/' (default: '').",
    )
    parser.add(
        "-l",
        "--loglevel",
        dest="loglevel",
        default="DEBUG",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        help="Set the logging level",
    )
    parser.add(
        "-t",
        "--tests-per-day",
        dest="tests_per_day",
        type=int,
        default=defaults.TESTS_PER_DAY,
        help="Set the number of tests per day.",
    )
    parser.add(
        "-i",
        "--immediate",
        action="store_true",
        dest="immediate",
        default=False,
        help="Immediately run available tests on startup.",
    )
    parser.add(
        "-w",
        "--webthings",
        action="store_true",
        dest="webthings",
        default=False,
        help="Enable webthings support.",
    )
    parser.add(
        "--location",
        default=None,
        dest="location",
        help="Physical place Murakami node is located (default: '').",
    )
    parser.add(
        "--network-type",
        default=None,
        dest="network_type",
        help="Site associated with this Murakami node (default: '').",
    )
    parser.add(
        "--connection-type",
        default=None,
        dest="connection_type",
        help="Connection associated with this node (default: '').",
    )
    parser.add(
        "--device-id",
        default=default_device_id(),
        dest="device_id",
        help="Unique identifier for the current Murakami device (default: '').",
    )
    settings = parser.parse_args()
    print(settings)
    logging.basicConfig(
        level=settings.loglevel,
        format="%(asctime)s %(filename)s:%(lineno)s %(levelname)s %(message)s",
    )
    # Merge the content of the TOML config file with the environment variables.
    # If no configuration file has been parsed at this point, just use env.
    global config
    config_from_env = load_env()
    if config:
        config = {**config, **config_from_env}
    else:
        config = config_from_env
    # With WebThings enabled, live runtime state overrides static config;
    # ChainMap looks up the first mapping (the livejson state) first.
    if settings.webthings:
        state = livejson.File(settings.dynamic, pretty=True)
        config = ChainMap(state, config)
    server = MurakamiServer(
        port=settings.port,
        hostname=settings.hostname,
        ssl_options=settings.ssl_options,
        additional_routes=settings.additional_routes,
        base_path=settings.base_path,
        tests_per_day=settings.tests_per_day,
        immediate=settings.immediate,
        webthings=settings.webthings,
        location=settings.location,
        network_type=settings.network_type,
        connection_type=settings.connection_type,
        device_id=settings.device_id,
        config=config,
    )
    # reload server on HUP and TERM signal
    signal.signal(signal.SIGHUP, server.reload)
    signal.signal(signal.SIGTERM, server.reload)
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()
from akad.ttypes import * import json, requests, livejson, random db = livejson.File('data/data.json') def loggedIn(func): def checkLogin(*args, **kwargs): if args[0].isLogin: return func(*args, **kwargs) else: args[0].callback.other( 'You want to call the function, you must login to LINE') return checkLogin class Liff(object): isLogin = False def __init__(self): self.isLogin = True @loggedIn def allowFlex(self): data = {'on': ['P', 'CM'], 'off': []} headers = { 'X-Line-Access': self.authToken, 'X-Line-Application': self.server.APP_NAME, 'X-Line-ChannelId': self.server.CHANNEL_ID['THE_AUTOBOTS_CORP'], 'Content-Type': 'application/json'
# uncompyle6 version 3.4.1 # Python bytecode 3.7 (3394) # Decompiled from: Python 2.7.16 (default, Jul 28 2019, 22:06:57) # [GCC 4.2.1 Compatible Android (5220042 based on r346389c) Clang 8.0.7 (https:// # Embedded file name: loginme.py # Size of source mod 2**32: 8278 bytes from MIGHTAPI.MIGHTLY import LINE, OEPoll from Naked.toolshed.shell import execute_js import multiprocessing from akad.ttypes import TalkException as TalkE from multiprocessing import Process from akad.ttypes import TalkException import livejson, traceback, sys app = 'WIN10\t5.9.0\tSpamJS\t12' appJS = 'DESKTOPMAC\t5.11.1\tSpamJS\t12' settings = livejson.File('settings_f.json') proc = [] if 'token' not in settings: settings['token'] = '#' if 'contact' not in settings: settings['contact'] = [] if 'name' not in settings: settings[ 'name'] = u'@\u026a\u0274\u1d20\u026a\u1d1b\u1d07\u1d07:\u1786\u17b6\u0e38\u0c10\u09a3\u0beb\u09a3\u0c10' exg = LINE(appName=appJS) cPoll = OEPoll(exg) set = {'get': False, 'remove': False} def runadd(uid): return execute_js(f"spamadd.js uid={uid}")