class DummyRoom:
    """Minimal room model for tests: holds an id, a message dict, and an
    exits mapping (direction -> room id).  Python 2 (uses the `print`
    statement)."""

    def __init__(self, id, message, exits):
        self.id = id
        self.is_outside = False
        self.set_message(message)
        # exits is a dict
        # NOTE(review): the caller's dict is mutated in place and every room
        # gets a hard-coded east exit to "A" -- confirm this is intended.
        exits["east"] = "A"
        self.exits = exits
        # Encoder reused by print_message for pretty output.
        self.encoder = JSONEncoder(indent=4)

    def __repr__(self):
        return "Room %s Exits %s" % (self.id, str(self.exits))

    def set_message(self, message):
        # Assign message
        self.message = message
        # Mark as outside if that's the case
        if ("location" in message) and (message["location"] == "outside the castle"):
            self.is_outside = True

    def get_message(self):
        return self.message

    def print_message(self):
        # Pretty-print the message dict as indented JSON (py2 print statement).
        print self.encoder.encode(self.message)

    def get_exit_room_id(self, direction):
        # Raises KeyError for unknown directions.
        return self.exits[direction]
def __call__(self):
    """Return the states of the requested country as a JSON string.

    Reads 'country' and the string flag 'required' from the request;
    anything other than 'true' (case-insensitively) counts as not required.
    """
    selected_country = self.request.get('country')
    is_required = self.request.get('required', '').lower().strip() == 'true'
    states_util = component.getUtility(ICountriesStates)
    payload = states_util.states(country=selected_country,
                                 allow_no_values=not is_required)
    return JSONEncoder().encode(payload)
def test_command_parser(self):
    """Exercise SearchCommandParser over a streaming command (all non-builtin
    options required) and a reporting command (no options required).
    Python 2 era: uses dict.itervalues()."""
    parser = search_command_internals.SearchCommandParser()
    encoder = JSONEncoder()
    file_path = os.path.abspath(os.path.join(TestSearchCommandInternals._package_path, 'data', 'input', '_empty.csv'))
    options = [
        'boolean=true',
        'duration=00:00:10',
        'fieldname=word_count',
        'file=%s' % encoder.encode(file_path),
        'integer=10',
        'optionname=foo_bar',
        'regularexpression="\\\\w+"',
        'set=foo']
    fields = ['field_1', 'field_2', 'field_3']
    command = StubbedStreamingCommand()  # All options except for the builtin options are required
    parser.parse(options + fields, command)
    command_line = str(command)
    self.assertEqual(
        'stubbedstreaming boolean=true duration=10 fieldname="word_count" file=%s integer=10 optionname="foo_bar" regularexpression="\\\\w+" set="foo" field_1 field_2 field_3' % encoder.encode(file_path),
        command_line)
    # Removing any single required option must make parsing fail.
    for option in options:
        self.assertRaises(ValueError, parser.parse,
                          [x for x in options if x != option] + ['field_1', 'field_2', 'field_3'],
                          command)
    command = StubbedReportingCommand()  # No options are required
    parser.parse(options + fields, command)
    for option in options:
        try:
            parser.parse([x for x in options if x != option] + ['field_1', 'field_2', 'field_3'], command)
        except Exception as e:
            # NOTE(review): assertFalse on a non-empty string always fails --
            # it is used here as a fail(msg) idiom.
            self.assertFalse("Unexpected exception: %s" % e)
    try:
        parser.parse(options, command)
    except Exception as e:
        self.assertFalse("Unexpected exception: %s" % e)
    for option in command.options.itervalues():
        # Builtin options are exempt from the is_set check.
        if option.name in ['show_configuration', 'logging_configuration', 'logging_level']:
            continue
        self.assertTrue(option.is_set)
    self.assertEqual(len(command.fieldnames), 0)
    try:
        parser.parse(fields, command)
    except Exception as e:
        self.assertFalse("Unexpected exception: %s" % e)
    # Parsing only fields must reset all options.
    for option in command.options.itervalues():
        self.assertFalse(option.is_set)
    self.assertListEqual(fields, command.fieldnames)
    return
def generate_link_from_actionlist(self, actionlist):
    """
    Generates an ajaxlink from an actionlist.
    ajaxlinks look like:
    javascript:SkdAjax.load_link([{'w':<widget_id>,'p':{params}}, ... ])
    the corresponding js on clientside should generate something like:
    /ajax/{'w':<widget_id>,'p':{params}}

    Returns immediately with a plain URL if any action carries one, or a
    /web/<viewname> URL if any action targets a view; otherwise collects
    widget/space entries and embeds them into a javascript: link.
    """
    link = "javascript:window.SkdAJAX.execute_action(%s);"
    linkjson = []
    for action in actionlist.get_actions():
        if action.get_url() is not None:
            # A raw URL short-circuits everything else.
            return action.get_url()
        elif action.get_view_id() is not None:
            view = View.get_from_id(action.get_view_id())
            name = view.get_name()
            return "/web/"+quote(name)
        elif action.get_space() is not None and action.get_widget_id() is not None:
            # Resolve the space index to its name on the current page.
            page = Page.get_page(self.get_page())
            space_names = page.get_space_names()
            space_name = space_names[action.get_space()]
            linkjson.append({"w":action.get_widget_id(), "s":space_name, "p":{}})
    encoder = JSONEncoder()
    ajaxdata = encoder.encode(linkjson)
    # Swap double for single quotes so the JSON can live inside the
    # double-quoted HTML attribute that will hold this link.
    ajaxdata = ajaxdata.replace('"',"'")
    return link%ajaxdata
def __init__(self, skipkeys=False, ensure_ascii=True,
             check_circular=True, allow_nan=True, sort_keys=False,
             indent=None, separators=None, encoding='utf-8', default=None):
    """JSONEncoder subclass initializer that wraps the `default` hook so
    Decimal values are serialized as strings.

    NOTE(review): the `encoding` keyword is Python 2 only; it was removed
    from JSONEncoder in Python 3.
    """
    JSONEncoder.__init__(self, skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                         check_circular=check_circular, allow_nan=allow_nan,
                         sort_keys=sort_keys, indent=indent,
                         separators=separators, encoding=encoding,
                         default=default)
    # NOTE(review): when `default` is None, self.default is the bound
    # base-class method (never None), so this branch is ALWAYS taken and the
    # else branch below is effectively dead.  Behavior is still correct:
    # the saved base method raises TypeError for anything new_default does
    # not handle.  Do not "fix" the test to `default is not None` -- that
    # would disable Decimal handling when no hook is supplied.
    if self.default is not None:
        self.old_default = self.default
        def new_default(o):
            # Serialize Decimal as a string; delegate everything else.
            if isinstance(o, Decimal):
                return str(o)
            return self.old_default(o)
    else:
        def new_default(o):
            raise TypeError(repr(o) + " is not JSON serializable")
    self.default = new_default
def get_tree_json(cls, story, user):
    """Return the chapter tree of *story* as a JSON string.

    Each node carries pk, parent pk, truncated title, children, a read URL,
    and a color reflecting *user*'s relationship to the chapter
    (bookmarked > liked > read > none).
    """
    chapters = Chapter.objects.filter(story=story)
    if user.is_authenticated():
        # BUG FIX: QuerySets are immutable -- the original discarded the
        # return value of select_related().  The three relations are read
        # via .all() below (many-to-many access), which is what
        # prefetch_related optimizes.
        chapters = chapters.prefetch_related('readers', 'bookmarkers', 'likers')
    nodes = []
    # BUG FIX: `root` was previously unbound (NameError at the final encode)
    # whenever no chapter without a parent existed.
    root = []
    for c in chapters:
        parent_pk = c.parent.pk if c.parent else None
        url = reverse('read_story', kwargs={'pk': story.pk})
        url += '?chapter-pk={}'.format(c.pk)
        node = {'pk': c.pk, 'parent': parent_pk, 'title': c.headline[:20],
                'children': [], 'url': url}
        # Stitch this node into the partial tree built so far: adopt any
        # earlier nodes that name it as parent, and attach it to its parent
        # if that one was already seen.
        for n in nodes:
            if n['parent'] == c.pk:
                node['children'].append(n)
            elif parent_pk == n['pk']:
                n['children'].append(node)
        nodes.append(node)
        # TODO: pick color in template
        if user in c.bookmarkers.all():
            node['color'] = 'blue'
        elif user in c.likers.all():
            node['color'] = 'red'
        elif user in c.readers.all():
            node['color'] = 'green'
        else:
            node['color'] = 'grey'
        if not parent_pk:
            root = [node]
    return JSONEncoder().encode(root)
def _createPileupConfigFile(self, helper, fakeSites=None):
    """
    Stores pileup JSON configuration file in the working directory / sandbox.

    :param helper: step helper carrying the DBS url and pileup configuration
    :param fakeSites: optional list of sites to inject when the cached
        sandbox file is reused (TrustPUSitelists support)
    """
    if fakeSites is None:
        fakeSites = []
    if self._isCacheValid(helper):
        # we need to update the new sandbox json file in case TrustPUSitelists is on
        if fakeSites:
            self._updatePileupPNNs(helper, fakeSites)
        # if file already exist don't make a new dbs call and overwrite the file.
        # just return
        return
    encoder = JSONEncoder()
    # this should have been set in CMSSWStepHelper along with
    # the pileup configuration
    url = helper.data.dbsUrl
    dbsReader = DBSReader(url)
    configDict = self._queryDbsAndGetPileupConfig(helper, dbsReader, fakeSites)
    # create JSON and save into a file
    jsonPU = encoder.encode(configDict)
    self._saveFile(helper, jsonPU)
def output_geojson_bzipped(index, routes):
    '''Write *routes* as a bz2-compressed GeoJSON FeatureCollection
    ("routes-NNNNNN.json.bz2"), truncating float tokens to 6 decimals.
    Returns (index, route count, exception) on failure, None on success.
    Python 2 (uses `except Exception, e`).
    '''
    try:
        ids = [id for (id, t, g) in routes]
        # Merge each route's member geometries into a single shape.
        geometries = [cascaded_union(geoms) for (i, t, geoms) in routes]
        # NOTE(review): falsy geometries are dropped here, which can misalign
        # this list against ids/properties in the zip below -- confirm.
        geometries = [geom.__geo_interface__ for geom in geometries if geom]
        properties = [tags for (i, tags, g) in routes]
        features = [dict(type='Feature', id=id, properties=p, geometry=g)
                    for (id, p, g) in zip(ids, properties, geometries)]
        geojson = dict(type='FeatureCollection', features=features)
        encoder = JSONEncoder(separators=(',', ':'))
        encoded = encoder.iterencode(geojson)
        output = BZ2File('routes-%06d.json.bz2' % index, 'w')
        # Rewrite float tokens with fixed 6-digit precision to shrink output.
        for token in encoded:
            if charfloat_pat.match(token):
                # in python 2.7, we see a character followed by a float literal
                output.write(token[0] + '%.6f' % float(token[1:]))
            elif float_pat.match(token):
                # in python 2.6, we see a simple float literal
                output.write('%.6f' % float(token))
            else:
                output.write(token)
        output.close()
    except Exception, e:
        # Report failures to the caller instead of raising (worker-pool style).
        return index, len(routes), e
def generate_link_from_actionlist(self,actionlist):
    """
    returns a link that describes a call to a view that results of the actionlist

    Builds a target view-dict from the current page state, applies each
    action to it, then asks the view manager whether an equivalent named
    view already exists.  Python 2 (uses dict.has_key).
    """
    # Deep-copy the current mappings so actions can mutate the target freely.
    target = {}
    target['s'] = self.get_page()
    target['v'] = deepcopy(self.get_space_widget_mapping())
    target['b'] = deepcopy(self.get_box_mapping())
    target['c'] = deepcopy(self.get_widget_param_mapping())
    for action in actionlist.get_actions():
        if action.get_url() is not None:
            # A raw URL short-circuits everything else.
            return action.get_url()
        elif action.get_view_id() is not None:
            view = actionlist._core.get_view_manager().get_from_id(action.get_view_id())
            name = view.get_name()
            return "/web/"+quote(name)
        elif action.get_space() is not None and action.get_widget_id() is not None:
            target['v'][action.get_space()] = action.get_widget_id()
            #delete any parameters of this widget. otherwise link will only
            #load current state of that widget again
            if target['c'].has_key(action.get_widget_id()):
                del(target['c'][action.get_widget_id()])
    encoder = JSONEncoder()
    viewjsonstring = quote(encoder.encode(target))
    # Prefer the short named-view URL when an equivalent view already exists.
    view_manager = self._core.get_view_manager()
    checkview = view_manager.get_from_json(viewjsonstring)
    existing_name = checkview.check_has_name()
    if existing_name == False:
        return "/web/?"+viewjsonstring
    else:
        return "/web/"+existing_name
def generate_link_from_dict(self, dct):
    """
    Creates a link by analyzing a view-dictionary and merging it to this view
    The incoming dictionary can be thought of as a diff that is added to the
    existing view.  Does not affect the View itself.

    :param dct: partial view-dict with any of the keys 's', 'v', 'b', 'c', 'p'
    :returns: "/web/?<json>" for an anonymous view, or "/web/<name>" when an
        equivalent named view already exists
    """
    # Deep-copy the current mappings so the merge cannot mutate this view.
    target = {}
    target['s'] = self.get_page()
    target['v'] = deepcopy(self.get_space_widget_mapping())
    target['b'] = deepcopy(self.get_box_mapping())
    target['c'] = deepcopy(self.get_widget_param_mapping())
    # IDIOM FIX: dict.has_key() is Python 2 only; `in` works everywhere.
    if 's' in dct:
        target['s'] = dct['s']
    if 'v' in dct:
        target['v'].update(dct['v'])
    if 'b' in dct:
        target['b'].update(dct['b'])
    if 'c' in dct:
        # Equivalent to the original per-key copy loop.
        target['c'].update(dct['c'])
    if 'p' in dct:
        target['p'] = dct['p']
    encoder = JSONEncoder()
    viewjsonstring = quote(encoder.encode(target))
    # Prefer the short named-view URL when an equivalent view already exists.
    view_manager = self._core.get_view_manager()
    checkview = view_manager.get_from_json(viewjsonstring)
    existing_name = checkview.check_has_name()
    if existing_name == False:
        return "/web/?"+viewjsonstring
    else:
        return "/web/"+existing_name
def output_geojson_bzipped(index, streets):
    '''Write *streets* as a bz2-compressed GeoJSON FeatureCollection
    ("streets-NNNNNN.json.bz2"), truncating float tokens to 6 decimals.
    Returns (index, street count, exception) on failure, None on success.
    Python 2 (uses `except Exception, e`).
    '''
    try:
        # Each street tuple is (name, kind, highway, geometry).
        ids = [str(uuid1()) for (n, k, h, g) in streets]
        geometries = [geom.__geo_interface__ for (n, k, h, geom) in streets]
        properties = [dict(name=short_street_name(n), long_name=n, kind=k, highway=h)
                      for (n, k, h, g) in streets]
        features = [dict(type='Feature', id=id, properties=p, geometry=g)
                    for (id, p, g) in zip(ids, properties, geometries)]
        geojson = dict(type='FeatureCollection', features=features)
        encoder = JSONEncoder(separators=(',', ':'))
        encoded = encoder.iterencode(geojson)
        output = BZ2File('streets-%06d.json.bz2' % index, 'w')
        # Rewrite float tokens with fixed 6-digit precision to shrink output.
        for token in encoded:
            if charfloat_pat.match(token):
                # in python 2.7, we see a character followed by a float literal
                output.write(token[0] + '%.6f' % float(token[1:]))
            elif float_pat.match(token):
                # in python 2.6, we see a simple float literal
                output.write('%.6f' % float(token))
            else:
                output.write(token)
        output.close()
    except Exception, e:
        # Report failures to the caller instead of raising (worker-pool style).
        return index, len(streets), e
def json_encode(data, pretty=False):
    """Encode *data* as JSON, rewriting float literals to 7-digit precision.

    When *pretty* is true the output is indented and human-spaced; otherwise
    it is emitted in the most compact form.
    """
    indent, separators = (2, (', ', ': ')) if pretty else (None, (',', ':'))
    buffer = StringIO()
    write = buffer.write
    for token in JSONEncoder(indent=indent, separators=separators).iterencode(data):
        if charfloat_pat.match(token):
            # python 2.7 tokenization: a punctuation char fused with a float
            write(token[0] + '%.7f' % float(token[1:]))
        elif float_pat.match(token):
            # python 2.6 tokenization: a bare float literal
            write('%.7f' % float(token))
        else:
            write(token)
    return buffer.getvalue()
def reptree(node, charset = 'utf-8'):
    """Recursively render a binary code tree as a nested C-style conditional
    expression; leaves (nodes with a non-None symbol) are emitted as JSON
    strings.

    NOTE(review): JSONEncoder(encoding=...) is Python 2 only -- the keyword
    was removed in Python 3.
    """
    from json import JSONEncoder
    enc = JSONEncoder(encoding = charset)
    if node.symbol is not None:
        return enc.encode(node.symbol)
    else:
        # Non-leaf: right subtree becomes the "true" arm, left the "false"
        # arm -- presumably matched by the consumer of this expression;
        # confirm the ordering against the decoder.
        return "c()?" + reptree(node.right) + ":" + reptree(node.left)
def encode(self, data):
    """Thunk *data* into plain structures, then serialize those as JSON."""
    plain = JSONThunker().thunk(data)
    return JSONEncoder().encode(plain)
def inform_sns(arns: list, message: str, region):
    """Publish *message* (JSON-encoded) to every distinct SNS topic in *arns*."""
    encoded_message = JSONEncoder().encode(message)
    client = BotoClientProxy('sns', region_name=region)
    # De-duplicate so each topic is notified exactly once.
    for topic_arn in set(arns):
        client.publish(TopicArn=topic_arn,
                       Subject="SenzaTrafficRedirect",
                       Message=encoded_message)
def test_command_parser(self):
    """Exercise SearchCommandParser over a streaming command (all options
    required) and a reporting command (no options required).
    Python 2 era: uses dict.itervalues()."""
    from splunklib.searchcommands.search_command_internals import \
        SearchCommandParser
    parser = SearchCommandParser()
    encoder = JSONEncoder()
    file_path = TestSearchCommandsApp._data_file(os.path.join('input', 'counts.csv'))
    options = [
        'boolean=true',
        'duration=00:00:10',
        'fieldname=word_count',
        'file=%s' % encoder.encode(file_path),
        'integer=10',
        'optionname=foo_bar',
        'regularexpression="\\\\w+"',
        'set=foo']
    fields = ['field_1', 'field_2', 'field_3']
    command = StubbedStreamingCommand()  # All options are required
    parser.parse(options + fields, command)
    command_line = str(command)
    self.assertEqual(
        'stubbedstreaming boolean=true duration=10 fieldname="word_count" file=%s integer=10 optionname="foo_bar" regularexpression="\\\\w+" set="foo" field_1 field_2 field_3' % encoder.encode(file_path),
        command_line)
    # Removing any single required option must make parsing fail.
    for option in options:
        self.assertRaises(ValueError, parser.parse,
                          [x for x in options if x != option] + ['field_1', 'field_2', 'field_3'],
                          command)
    command = StubbedReportingCommand()  # No options are required
    parser.parse(options + fields, command)
    for option in options:
        try:
            parser.parse([x for x in options if x != option] + ['field_1', 'field_2', 'field_3'], command)
        except Exception as e:
            # NOTE(review): assertFalse on a non-empty string always fails --
            # it is used here as a fail(msg) idiom.
            self.assertFalse("Unexpected exception: %s" % e)
    try:
        parser.parse(options, command)
    except Exception as e:
        self.assertFalse("Unexpected exception: %s" % e)
    for option in command.options.itervalues():
        self.assertTrue(option.is_set)
    self.assertEqual(len(command.fieldnames), 0)
    try:
        parser.parse(fields, command)
    except Exception as e:
        self.assertFalse("Unexpected exception: %s" % e)
    # Parsing only fields must reset all options.
    for option in command.options.itervalues():
        self.assertFalse(option.is_set)
    self.assertListEqual(fields, command.fieldnames)
    return
def __init__(self, uid, name, command, tool_args, *args, **kwargs):
    """Initialize the tool encoder.

    :param uid: unique identifier for the tool
    :param name: tool display name
    :param command: command string the tool runs
    :param tool_args: list of extra arguments (non-lists are treated as empty)
    """
    self.uid = uid
    self.name = name
    self.command = command
    # Defensive copy; non-list values degrade to an empty argument list,
    # exactly as the original append-loop behaved.
    self.tool_args = list(tool_args) if isinstance(tool_args, list) else []
    self.kwargs = kwargs
    # BUG FIX: the base initializer was called as
    # JSONEncoder.__init__(self, Tool), which passed the Tool class as the
    # positional `skipkeys` argument (truthy -> keys silently dropped).
    # Initialize the base encoder with its defaults instead.
    JSONEncoder.__init__(self)
def __init__(self, arch, os, sysroot):
    """
    :type arch: str
    :type os: str
    :type sysroot: str
    """
    self.arch = arch
    self.os = os
    self.sysroot = sysroot
    # BUG FIX: the base initializer was called as
    # JSONEncoder.__init__(self, Host), which passed the Host class as the
    # positional `skipkeys` argument (truthy -> keys silently dropped).
    # Initialize the base encoder with its defaults instead.
    JSONEncoder.__init__(self)
def __init__(self, value):
    """Store *value*, first proving it is JSON-serializable by encoding it.

    WARNING: The value may change before it is encoded again, and may
    not be encodable after the change.

    Raises:
        TypeError: if *value* cannot be JSON-encoded.
    """
    try:
        JSONEncoder().encode(value)
    except TypeError:
        raise
    self.value = value
def __call__(self, instructions):
    """Decode the JSON *instructions* string; on failure, append a JSON
    error object to the response body and bail out.
    Python 2 (`except ValueError, e` syntax)."""
    jd = JSONDecoder()
    je = JSONEncoder()
    answer = {}
    try:
        instructions = jd.decode(instructions)
    except ValueError, e:
        # NOTE(review): `e` is captured but unused; the client only sees the
        # generic message below.
        answer['error'] = "could not decode instructions"
        self._core.response_body.append(je.encode(answer))
        return
def wrap2dasjson(data):
    """DAS JSON wrapper

    Sets the response Content-Type to text/json and returns *data* encoded
    as a JSON string; on encoding failure returns an error dict (behavior
    kept from the original).
    """
    encoder = JSONEncoder()
    cherrypy.response.headers['Content-Type'] = "text/json"
    try:
        jsondata = encoder.encode(data)
        return jsondata
    # BUG FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow it to Exception.
    except Exception:
        return dict(error="Failed to JSONtify obj '%s' type '%s'" \
            % (data, type(data)))
class JsonFormatter(Formatter):
    """Formatter that renders records as pretty-printed JSON (2-space indent)."""

    def begin(self, fields):
        # `fields` is accepted for interface compatibility but not used here.
        self.encoder = JSONEncoder(indent=2, separators=(', ', ': '))

    def iterout(self, iterable):
        self.begin([])
        # Materialize the iterable so the encoder can emit one JSON array.
        items = list(iterable)
        write = self.fp.write
        for chunk in self.encoder.iterencode(items):
            write(chunk)

    def out(self, d):
        encoded = self.encoder.encode(d)
        self.fp.write(encoded)
def wrapper (self, *args, **kwds):
    """Decorator wrapper: invoke *func*, mark the response as JSON, and
    return the JSON-encoded result.

    Raises:
        Exception: when the result cannot be JSON-encoded.
    """
    encoder = JSONEncoder()
    data = func(self, *args, **kwds)
    cherrypy.response.headers['Content-Type'] = "text/json"
    try:
        jsondata = encoder.encode(data)
        return jsondata
    # BUG FIX: the original used a bare `except:` and merely *constructed*
    # the Exception without raising it, so encoding failures silently
    # returned None.  Narrow the clause and actually raise.
    except TypeError:
        raise Exception("Failed to JSONtify obj '%s' type '%s'" \
            % (data, type(data)))
def encode(self, obj):
    """Serialize TwitterMention instances via an uppercase-keyed dict;
    defer everything else to the base JSONEncoder."""
    if not isinstance(obj, TwitterMention):
        return JSONEncoder.encode(self, obj)
    mention = dict(ID=obj.id,
                   FROM_ID=obj.from_id,
                   FROM_SCREEN_NAME=str(obj.from_screen_name),
                   MESSAGE_TEXT=str(obj.message_text))
    return JSONEncoder.encode(self, mention)
def default(self, o):
    """json `default` hook for Echo Nest audio objects.

    AudioQuantum/AudioAnalysis serialize via their mapped instance
    variables; Track and LocalAudioFile serialize as empty objects.
    """
    if isinstance(o, AudioQuantum):
        return self.map_instance_variables(o, "start", "duration", "confidence")
    elif isinstance(o, AudioAnalysis):
        return self.map_instance_variables(o, *AUDIO_ANALYSIS_KEYS)
    elif isinstance(o, Track) or isinstance(o, audio.LocalAudioFile):
        # Don't know what should be serialized
        return {}
    else:
        # BUG FIX: the base-class call was missing `return`, so unknown
        # types serialized as null instead of raising TypeError.
        return JSONEncoder.default(self, o)
class RestUtils(object):
    """Thin helper around `requests` for the forum test API: builds URLs
    from patterns, JSON-encodes bodies, and logs every request.
    Python 2 (print statements, `except Exception, e`)."""

    def __init__(self):
        """Initialization method """
        self.api_url = SERVER_ROOT
        print "Initialized API REST Utils"
        self.encoder = JSONEncoder()
        self.initialize_logging()

    def initialize_logging(self):
        # File-based DEBUG logger shared by all requests.
        self.logger = logging.getLogger('forum')
        hdlr = logging.FileHandler('forum_testing.log')
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        hdlr.setFormatter(formatter)
        self.logger.addHandler(hdlr)
        self.logger.setLevel(logging.DEBUG)

    def _call_api(self, pattern, method, body=None, headers=HEADERS, payload=None, auth=None, **kwargs):
        """Launch HTTP request to Policy Manager API with given arguments

        :param pattern: string pattern of API url with keyword arguments (format string syntax)
        :param method: HTTP method to execute (string)
        :param body: JSON body content (dict)
        :param headers: HTTP header request (dict)
        :param payload: Query parameters for the URL
        :param **kwargs: URL parameters (without url_root) to fill the pattern
        :returns: REST API response, or None when the request itself crashed
        """
        kwargs['url_root'] = self.api_url
        url = pattern.format(**kwargs)
        self.logger.info('NEW REQUEST TO SEND')
        self.logger.info('\nMETHOD: {}\nURL: {} \nHEADERS: {} \nBODY: {}'.format(method, url, headers, self.encoder.encode(body)))
        try:
            r = requests.request(method=method, url=url, data=self.encoder.encode(body), headers=headers, params=payload, auth=auth)
        except Exception, e:
            # Connection-level failures are reported, not raised.
            print "Request {} to {} crashed: {}".format(method, url, str(e))
            return None
        return r
def default(self, obj):
    """Serialize Django QuerySets by round-tripping through the JSON
    serializer; defer everything else to the base encoder."""
    if not isinstance(obj, QuerySet):
        return JSONEncoder.default(self, obj)
    # `default` must return a python serializable structure; the easiest
    # way is to load the JSON string produced by `serialize` and return it.
    return loads(serialize('json', obj))
def default(self, obj):
    """
    Returns instance of a `dict` from an ndb object.

    :param value: Value to get dictionary for.  If not encodable, will
        call the superclasses default method.
    """
    if isinstance(obj, query.Query):
        # Materialize query results so they serialize as a list.
        return list(obj)
    elif isinstance(obj, ndb.Cursor):
        return obj.urlsafe()
    elif isinstance(obj, ndb.BlobKey):
        return str(obj)
    elif isinstance(obj, ndb.Key):
        return obj.urlsafe()
    elif hasattr(obj, 'to_dict'):
        # Entities expose to_dict; honour the encoder's include/exclude lists.
        return getattr(obj, 'to_dict')(
            includes=self.__includes, excludes=self.__excludes)
    elif isinstance(obj, datetime):
        return to_epoch(obj)
    elif isinstance(obj, date):
        # Dates serialize as the epoch of their midnight.
        return to_epoch(datetime(obj.year, obj.month, obj.day))
    return JSONEncoder.default(self, obj)
def default(self, obj):
    """Render datetimes as ISO-8601 strings; defer everything else to the
    base encoder (which raises TypeError for unknown types)."""
    if not isinstance(obj, datetime):
        return JSONEncoder.default(self, obj)
    return obj.isoformat()

# Error handlers
#app.error_handlers[404] = handle_404
#app.error_handlers[500] = handle_500
def default(self, obj):
    """Serialize pyrsistent containers, FilePath, UUID and timezone-aware
    datetimes, tagging dict forms with _CLASS_MARKER so they can be
    reconstructed on decode.  Python 2 era (`unicode`, list-returning
    dict.items())."""
    if isinstance(obj, PRecord):
        result = dict(obj)
        result[_CLASS_MARKER] = obj.__class__.__name__
        return result
    elif isinstance(obj, PClass):
        result = obj.evolver().data
        result[_CLASS_MARKER] = obj.__class__.__name__
        return result
    elif isinstance(obj, PMap):
        return {_CLASS_MARKER: u"PMap", u"values": dict(obj).items()}
    elif isinstance(obj, (PSet, PVector, set)):
        return list(obj)
    elif isinstance(obj, FilePath):
        return {_CLASS_MARKER: u"FilePath",
                u"path": obj.path.decode("utf-8")}
    elif isinstance(obj, UUID):
        return {_CLASS_MARKER: u"UUID",
                "hex": unicode(obj)}
    elif isinstance(obj, datetime):
        # Naive datetimes are ambiguous; refuse them outright.
        if obj.tzinfo is None:
            raise ValueError(
                "Datetime without a timezone: {}".format(obj))
        return {_CLASS_MARKER: u"datetime",
                "seconds": timegm(obj.utctimetuple())}
    return JSONEncoder.default(self, obj)
def default(self, obj):
    """Convert WarningList instances to their dict form; defer everything
    else to the base encoder."""
    if not isinstance(obj, WarningList):
        return JSONEncoder.default(self, obj)
    return obj.to_dict()
def default(self, obj):
    """json `default` hook: render Decimal values as strings; defer
    everything else to the base encoder.

    FIX: dropped the original ``nonlocal Decimal, JSONEncoder`` declaration.
    `nonlocal` is only needed to *rebind* enclosing-scope names; these are
    only read, and the declaration made the function invalid outside a
    nested scope.
    """
    if isinstance(obj, Decimal):
        return str(obj)
    return JSONEncoder.default(self, obj)
def default(self, obj):
    """Pickle anything json cannot handle natively.  Python 2 (`unicode`).

    NOTE(review): json only calls `default` for objects it cannot already
    serialize, so the first branch (which would make the base class raise
    TypeError) should rarely, if ever, be taken.  Pickled payloads are a
    security hazard if ever loaded from an untrusted source.
    """
    if isinstance(
            obj, (list, dict, str, unicode, int, float, bool, type(None))):
        return JSONEncoder.default(self, obj)
    return {'_python_object': pickle.dumps(obj)}
class CTRex(object): def __init__(self): self.status = TRexStatus.Idle self.verbose_status = 'TRex is Idle' self.errcode = None self.session = None self.zmq_monitor = None self.zmq_dump = None self.seq = None self.expect_trex = threading.Event() self.encoder = JSONEncoder() def get_status(self): return self.status def set_status(self, new_status): self.status = new_status def get_verbose_status(self): return self.verbose_status def set_verbose_status(self, new_status): self.verbose_status = new_status def gen_seq(self): self.seq = randrange(1, 1000) def get_seq(self): return self.seq def get_running_info(self): if self.status == TRexStatus.Running: return self.encoder.encode(self.zmq_dump) else: logger.info( "TRex isn't running. Running information isn't available.") if self.status == TRexStatus.Idle: if self.errcode is not None: # some error occured logger.info( "TRex is in Idle state, with errors. returning fault") return Fault( self.errcode, self.verbose_status ) # raise at client relevant exception, depending on the reason the error occured else: logger.info( "TRex is in Idle state, no errors. returning {}") return u'{}' return Fault( -12, self.verbose_status ) # raise at client TRexWarning, indicating TRex is back to Idle state or still in Starting state def stop_trex(self): if self.status == TRexStatus.Idle: # t-rex isn't running, nothing to abort logger.info("TRex isn't running. 
No need to stop anything.") if self.errcode is not None: # some error occurred, notify client despite TRex already stopped return Fault( self.errcode, self.verbose_status ) # raise at client relevant exception, depending on the reason the error occured return False else: # handle stopping t-rex's run self.session.join() logger.info("TRex session has been successfully aborted.") return True def start_trex(self, trex_launch_path, trex_cmd): self.set_status(TRexStatus.Starting) logger.info("TRex running state changed to 'Starting'.") self.set_verbose_status('TRex is starting (data is not available yet)') self.errcode = None self.session = AsynchronousTRexSession(self, trex_launch_path, trex_cmd) self.session.start() self.expect_trex.set()
number += 256**i * num return number os.chdir("MinecraftTextures/TEXTURES") fs = os.listdir() dic = [] for f in fs: image = Image.open(f) pixels = [] if ("CORAL" in f): continue try: if (len(list(image.getpixel((0, 0)))) != 4): continue except (TypeError): continue for x in range(image.width): for y in range(image.height): g = list(image.getpixel((x, y)))[:-1] pixels.append(float(conv256to10(g))) d = {} d["id"] = f[:-4] d["num"] = vq.kmeans2(pixels, 10)[0][0] dic.append(d) js = JSONEncoder() f = open("../txts.json", "w") f.write(js.encode(dic)) f.flush() f.close()
def default(self, obj):
    """Format datetimes as "Mon-Year" (e.g. "Jan-2020"); defer everything
    else to the base encoder."""
    if not isinstance(obj, (datetime, )):
        return JSONEncoder.default(self, obj)
    return obj.strftime("%b-%Y")
def __init__(self):
    """Set up the SDC client: remember the server root and create the JSON
    encoder used for request bodies."""
    self.encoder = JSONEncoder()
    self.api_url = SDC_SERVER
def format(self):
    """Return self.value rendered as stable, human-readable JSON
    (2-space indent, keys sorted)."""
    return JSONEncoder(indent=2, sort_keys=True).encode(self.value)
def begin(self, fields):
    """Prepare the pretty-printing JSON encoder; *fields* is accepted for
    interface compatibility but unused."""
    pretty_separators = (', ', ': ')
    self.encoder = JSONEncoder(indent=2, separators=pretty_separators)
def send_edges(self, edges):
    """Ship *edges* to the master as a JSON history payload (with an empty
    immutable-objects list)."""
    payload = {'history': edges, 'immutableobjects': []}
    self.master.load_from_json(JSONEncoder().encode(payload))
def default(self, request):
    """Expose SimpleRequest objects via their attribute dict; defer
    everything else to the base encoder."""
    if not isinstance(request, SimpleRequest):
        return JSONEncoder.default(self, request)
    return request.__dict__
def default(self, obj):
    """Encode EventModel instances through their decode() representation;
    defer everything else to the base encoder."""
    if not isinstance(obj, EventModel):
        return JSONEncoder.default(self, obj)
    return obj.decode()
import json from json import JSONEncoder def _default(self, obj): return getattr(obj.__class__, "to_json", _default.default)(obj) _default.default = JSONEncoder().default JSONEncoder.default = _default class TimeZone (): def __init__(self, time_zone): time_zones = { "ACDT" : [10, 30], "ACST" : [9, 30], "ACT" : [-5, 0], "ADT" : [-3, 0], "AEDT" : [11, 0], "AEST" : [10, 0], "AFT" : [4, 30], "AKDT" : [-8, 0], "AKST" : [-9, 0], "ALMT" : [6, 0], "AMST" : [-3, 0], "AMT" : [-4, 0], "AMT2" : [4, 0], "ANAT" : [12, 0], "AQTT" : [5, 0], "ART" : [-3, 0], "AST" : [-4, 0], "AST2" : [3, 0], "AWST" : [8, 0],
def __init__(self):
    """Configure the base JSONEncoder with MetadataEncoder's class-level
    separator tuple."""
    seps = MetadataEncoder._separators
    JSONEncoder.__init__(self, separators=seps)
def cache_home_info(args):
    """Cache the JSON-encoded *args* under the 'home_info' key for three
    minutes."""
    encoded = JSONEncoder().encode(args)
    redis_client.set('home_info', encoded, 3 * 60)
# NOTE(review): this module-level patch replaces JSONEncoder.default for the
# whole process -- every json.dumps() call anywhere is affected.
from json import JSONEncoder
import datetime
import time

def _default(self, obj):
    """Fallback serializer: datetimes become POSIX timestamps; objects with
    a __json__ method serialize via that hook; everything else is returned
    unchanged."""
    if isinstance(obj, datetime.datetime):
        # NOTE(review): mktime() interprets the datetime as local time and
        # truncates microseconds -- confirm that is intended.
        timestamp = time.mktime(obj.timetuple())
        return timestamp
    if hasattr(obj, '__json__'):
        return getattr(obj, '__json__')()
    # NOTE(review): returning the object unchanged makes the encoder invoke
    # this hook again for the same value, which recurses for genuinely
    # unserializable objects -- confirm whether raising was intended.
    return obj

_default.default = JSONEncoder().default  # Save unmodified default.
JSONEncoder.default = _default  # replacement
def _iterencode(self, o, markers=None):
    """Emit Mongo ObjectId values as ObjectId("...") tokens.

    NOTE(review): this overrides JSONEncoder._iterencode, a private hook of
    the old (Python 2 era) pure-Python json implementation -- confirm the
    runtime still provides it.  The emitted ObjectId("...") token is Mongo
    shell syntax, not valid JSON.
    """
    if isinstance(o, ObjectId):
        return """ObjectId("%s")""" % str(o)
    else:
        return JSONEncoder._iterencode(self, o, markers)
def get_submission_status(self, session: LuoguSessionData, submission_id: str) -> dict: from bs4 import BeautifulSoup import re import ast from pprint import pprint as print from typing import Dict, Tuple, List from urllib.parse import unquote from json import JSONDecoder, JSONEncoder from datatypes.submission_fetch import TestcaseResult, SubmissionResult, SubtaskResult resp = requests.get("https://www.luogu.com.cn/record/"+submission_id, headers=self.headers, cookies=session.as_dict()) """ var flagMap = { 12: "AC", 3: "OLE", 4: "MLE", 5: "TLE", 6: "WA", 7: "RE" }; var longFlagMap = { 0: "Waiting", 1: "Judging", 2: "Compile Error", 12: "Accepted", 14: "Unaccepted", 21: "Hack Success", 22: "Hack Failure", 23: "Hack Skipped" }; """ hj2_status = { 0: "waiting", 1: "judging", 2: "compile_error", 3: "wrong_answer", 4: "memory_limit_exceed", 5: "time_limit_exceed", 6: "wrong_answer", 7: "runtime_error", 12: "accepted", 14: "unaccepted", } soup = BeautifulSoup(resp.text, "lxml") for item in soup.select("script"): script = item.contents[0] break regexpr = re.compile( r"""JSON.parse\(decodeURIComponent\(\"(.*)\"\)\)""") # print(regexpr.search(script).groups()[0]) content = unquote(regexpr.search(script).groups()[0]) # print(JSONDecoder().decode(content)) try: luogu_status: Dict[str, str] = JSONDecoder().decode(content)[ "currentData"] print(JSONEncoder().encode(luogu_status)) except Exception as ex: import traceback traceback.print_exc() return SubmissionResult( subtasks={}, message="", extra_status="waiting" ) # print(status) # print(luogu_status) result = SubmissionResult() if "record" not in luogu_status: return SubmissionResult( subtasks={}, message="", extra_status="waiting" ) result.extra_status = hj2_status[luogu_status["record"] ["status"]] if luogu_status["record"]["status"] in {0}: return result if not luogu_status["record"]["detail"]["compileResult"]["success"]: result.message = luogu_status["record"]["detail"]["compileResult"]["message"] return result 
subtask_count = sum((len(x) for x in luogu_status["testCaseGroup"])) for i, subtask in enumerate(luogu_status["record"]["detail"]["judgeResult"]["subtasks"]): testcases: List[TestcaseResult] = [] current_subtask = SubtaskResult( score=0, status="waiting", testcases=testcases ) all_ok = True has_any_waiting_or_judging = False for idx, current in (subtask["testCases"].items() if type(subtask["testCases"]) == dict else enumerate(subtask["testCases"])): testcases.append(TestcaseResult( memory_cost=current["memory"]*1024, time_cost=current["time"], status=hj2_status[current["status"]], input="NotAvailable", output="NotAvailable", message=current["description"], score=current["score"], full_score=100//subtask_count )) current_subtask.score += current["score"] all_ok = all_ok and (current["status"] == 12) if all_ok: current_subtask.status = "accepted" elif has_any_waiting_or_judging: current_subtask.status = "waiting" else: current_subtask.status = "unaccepted" result.subtasks["Subtask{}".format( subtask["id"]+1)] = current_subtask return result
class MicroblogApi(object): def __init__(self): """Initialization method """ self.api_url = SERVER_ROOT print("Initialized API REST Utils") self.encoder = JSONEncoder() self.initialize_logging() def initialize_logging(self): self.logger = logging.getLogger('forum') hdlr = logging.FileHandler('forum_testing.log') formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') hdlr.setFormatter(formatter) self.logger.addHandler(hdlr) self.logger.setLevel(logging.DEBUG) def _call_api(self, pattern, method, body=None, headers=HEADERS, payload=None, auth=None, **kwargs): """Launch HTTP request to Policy Manager API with given arguments :param pattern: string pattern of API url with keyword arguments (format string syntax) :param method: HTTP method to execute (string) :param body: JSON body content (dict) :param headers: HTTP header request (dict) :param payload: Query parameters for the URL :param **kwargs: URL parameters (without url_root) to fill the patters :returns: REST API response """ kwargs['url_root'] = self.api_url url = pattern.format(**kwargs) self.logger.info('NEW REQUEST TO SEND') self.logger.info( '\nMETHOD: {}\nURL: {} \nHEADERS: {} \nBODY: {}'.format( method, url, headers, self.encoder.encode(body))) if isinstance(body, dict): body = self.encoder.encode(body) try: r = requests.request(method=method, url=url, data=body, headers=headers, params=payload, auth=auth) except Exception as e: print("Request {} to {} crashed: {}".format(method, url, str(e))) return None return r def get_new_token(self, username=None, password=None): pass # CREATE THE AUTHENTICATION WITH USERNAME AND PASSWORD USING A TUPLA #CALL TO API WITH PATTERN, METHOD POST AND AUTHENTICATION def revoke_token(self, headers=None): pass #CALL TO API WITH PATTERN, METHOD DELETE AND HEADER WITH AUTHENTICATION def get_user_list(self, headers=None): pass #CALL TO API WITH PATTERN, METHOD GET AND HEADER WITH AUTHENTICATION def create_user(self, body=None): pass #CALL TO API WITH PATTERN, 
METHOD POST AND BODY TO CREATE USER def get_user_information(self, headers=None, user_id=None): pass
def default(self, o):
    """Serialize model instances via their dict() form; defer everything
    else to the base encoder."""
    if not isinstance(o, models.Model):
        return JSONEncoder.default(self, o)
    return o.dict()
def saveJson(fileName, object):
    """Append *object* to *fileName* as pretty-printed (indent=4) JSON.

    :param fileName: path of the file to append to
    :param object: JSON-serializable value
    :raises TypeError: before touching the file, if *object* cannot be
        JSON-encoded
    """
    # Validate serializability up front so an unencodable object does not
    # leave a partially-written file behind.
    JSONEncoder().encode(object)
    with open(fileName, "a") as file:
        # BUG FIX: the original called dumps(object, file, indent=4); the
        # second positional of json.dumps is `skipkeys` (and is keyword-only
        # in Python 3), so the file object was never a write target and
        # skipkeys was silently enabled.
        file.write(dumps(object, indent=4))
def main(argv=None): cm1 = cmdbuild() print("\n==> dir(cmdbuild): " + str(dir(cm1))) print("\n==> cmdbuild.info(): " + str(cm1.info())) if (cm1.connect('http://server:8080/cmdbuild', 'userid', 'password')): print(returnvalue) print("\n==> Returned token: " + str(cm1.token)) print "+++ Session_info()+++" pprint(cm1.session_info()) # print "+++ lookup_types()+++" pprint(cm1.lookup_types_info()) # print "+++ lookup_types_values()+++" pprint(cm1.lookup_type_values('APCT_SCFEWS_BBRANCH')) # print "+++ lookup_type_details()+++" pprint(cm1.lookup_type_details('APCT_SCFEWS_BBRANCH', '52106')) # print "+++ Domains()+++" pprint(cm1.domains_list()) # print "+++ Domains() details+++" pprint(cm1.domain_details('IPV4_002_DV')) # print "+++ Domains() attributes+++" pprint(cm1.domain_attributes('IPV4_002_DV')) # print "+++ Domain relations()+++" pprint(cm1.domain_relations('IPV4_002_DV')) # print "+++ Domain relation details()+++" pprint(cm1.domain_relation_details('IPV4_002_DV',46441)) # print "+++ Classes_total()+++" pprint(cm1.classes_total()) # print "+++ Classes_list()+++" pprint(cm1.classes_list()) # print "+++ Class_details()+++" pprint(cm1.class_details('CI_RS_PF_SVC_SW')) # pprint(cm1.class_details('CI_RS_PF_SVC_PGSQLRDBMS')) # print "+++ Class attributes()+++" pprint(cm1.class_get_attributes_of_type('CI_RS_PF_SVC_PGSQLRDBMS')) # # print "+++ Class all cards() of type+++" pprint(cm1.class_get_all_cards_of_type('CI_RS_PF_SVC_PGSQLRDBMS')) # print "+++ Class get cards details+++" pprint(cm1.class_get_card_details('CI_RS_PF_SVC_PGSQLRDBMS', 38182)) # print "+++ get object by id +++" pprint(cm1.get_id(52106)) print "+++ Insert Class+++" jsonString = JSONEncoder().encode({ 'Description': 'test-server', 'Notes': 'Imported from python script' }) classname='CI_RS_IFST_SVR' pprint(cm1.class_insert_card(classname, jsonString)) print("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++") print("dir cmdbuild") pprint(dir(cmdbuild)) print("vars 
cmdbuild instance") pprint(vars(cm1))
def default(self, obj):
    """Serialize AbstractMISP instances via their own ``jsonable()`` form.

    Anything else is deferred to the base encoder, which raises TypeError
    for unserializable objects.
    """
    is_misp = isinstance(obj, AbstractMISP)
    return obj.jsonable() if is_misp else JSONEncoder.default(self, obj)
def serialize_decimal(obj):
    """JSON ``default`` hook that renders ``decimal.Decimal`` values as strings.

    :param obj: the object ``json.dumps`` could not serialize natively.
    :returns: ``str(obj)`` when *obj* is a Decimal.
    :raises TypeError: for any other type, via the base encoder.
    """
    if isinstance(obj, decimal.Decimal):
        return str(obj)
    # Bug fix: JSONEncoder.default is an instance method; the original called
    # it unbound as JSONEncoder.default(obj), which treated obj as `self` and
    # raised a confusing "missing argument" TypeError.  Instantiating the
    # encoder yields the standard "is not JSON serializable" TypeError.
    return JSONEncoder().default(obj)
def default(self, obj):
    """Encode ``set`` objects as JSON arrays; defer everything else.

    Non-set objects fall through to the base encoder, which raises
    TypeError for unserializable types.
    """
    if not isinstance(obj, set):
        return JSONEncoder.default(self, obj)
    return list(obj)
class RecordWriter(object):
    """Buffers search-result records and writes them as CSV to an output file.

    Records are accumulated into an in-memory CSV buffer (with Splunk's
    paired ``field`` / ``__mv_field`` multivalue column encoding) and flushed
    once ``_maxresultrows`` records have been collected.  ``_inspector``
    collects out-of-band messages to accompany a chunk.

    NOTE(review): ``flush`` here only validates its arguments and writer
    state; the actual emit-to-``ofile`` logic is presumably provided by a
    subclass — confirm against the rest of the project.
    """

    def __init__(self, ofile, maxresultrows=None):
        # Default chunk size of 50,000 records when the caller gives none.
        self._maxresultrows = 50000 if maxresultrows is None else maxresultrows
        self._ofile = ofile
        # Field names are captured lazily from the first record written.
        self._fieldnames = None
        self._buffer = StringIO()
        self._writer = csv.writer(self._buffer, dialect=CsvDialect)
        # Bind the writerow method once; _write_record is a hot path.
        self._writerow = self._writer.writerow
        self._finished = False
        self._flushed = False
        # Ordered so that inspector messages are emitted in insertion order.
        self._inspector = OrderedDict()
        self._chunk_count = 0
        self._record_count = 0
        self._total_record_count = 0

    @property
    def is_flushed(self):
        return self._flushed

    @is_flushed.setter
    def is_flushed(self, value):
        # Normalize any truthy value to a bool.
        self._flushed = True if value else False

    @property
    def ofile(self):
        return self._ofile

    @ofile.setter
    def ofile(self, value):
        self._ofile = value

    def flush(self, finished=None, partial=None):
        """Validate flush arguments and writer state.

        Exactly one of *finished* / *partial* must be given, and it must be
        a bool.  Raises RuntimeError if the writer is already closed.
        """
        assert finished is None or isinstance(finished, bool)
        assert partial is None or isinstance(partial, bool)
        assert not (finished is None and partial is None)
        assert finished is None or partial is None
        self._ensure_validity()

    def write_message(self, message_type, message_text, *args, **kwargs):
        """Queue an out-of-band message; *args*/*kwargs* are format params."""
        self._ensure_validity()
        self._inspector.setdefault('messages', []).append(
            (message_type, message_text.format(*args, **kwargs)))

    def write_record(self, record):
        """Write a single record (a mapping of fieldname -> value)."""
        self._ensure_validity()
        self._write_record(record)

    def write_records(self, records):
        """Write an iterable of records."""
        self._ensure_validity()
        # Hoist the bound-method lookup out of the loop.
        write_record = self._write_record
        for record in records:
            write_record(record)

    def _clear(self):
        # Reset buffer, messages, and per-chunk record count after a flush.
        self._buffer.seek(0)
        self._buffer.truncate()
        self._inspector.clear()
        self._record_count = 0
        self._flushed = False

    def _ensure_validity(self):
        # Once finished, any further write is a programming error.
        if self._finished is True:
            assert self._record_count == 0 and len(self._inspector) == 0
            raise RuntimeError('I/O operation on closed record writer')

    def _write_record(self, record):
        """Encode one record into the CSV buffer.

        Each field produces two CSV columns: the single-value rendering and
        a ``__mv_``-prefixed multivalue rendering (``$v1$;$v2$...`` with
        ``$`` doubled), or None when the field has no multivalue form.
        """
        fieldnames = self._fieldnames
        if fieldnames is None:
            # First record seen: fix the column set and emit the header row.
            self._fieldnames = fieldnames = list(record.keys())
            value_list = imap(lambda fn: (str(fn), str('__mv_') + str(fn)), fieldnames)
            self._writerow(list(chain.from_iterable(value_list)))
        get_value = record.get
        values = []
        for fieldname in fieldnames:
            value = get_value(fieldname, None)
            if value is None:
                values += (None, None)
                continue
            value_t = type(value)
            if issubclass(value_t, (list, tuple)):
                if len(value) == 0:
                    values += (None, None)
                    continue
                if len(value) > 1:
                    # Genuine multivalue field: build both the newline-joined
                    # single-value column and the $-delimited multivalue column.
                    value_list = value
                    sv = ''
                    mv = '$'
                    for value in value_list:
                        if value is None:
                            sv += '\n'
                            mv += '$;$'
                            continue
                        value_t = type(value)
                        if value_t is not bytes:
                            if value_t is bool:
                                # bool.real is 0/1, so True/False render as "1"/"0".
                                value = str(value.real)
                            elif value_t is six.text_type:
                                value = value
                            # NOTE(review): `value_t is int` appears twice —
                            # probably a py2->py3 artifact of `int or long`.
                            elif value_t is int or value_t is int or value_t is float or value_t is complex:
                                value = str(value)
                            elif issubclass(value_t, (dict, list, tuple)):
                                # Nested containers are embedded as JSON text.
                                value = str(''.join(
                                    RecordWriter._iterencode_json(value, 0)))
                            else:
                                # NOTE(review): under py3 this yields bytes and
                                # the `sv += value` below would fail — confirm
                                # this path is py2-only.
                                value = repr(value).encode(
                                    'utf-8', errors='backslashreplace')
                        sv += value + '\n'
                        # Escape literal '$' in the multivalue encoding.
                        mv += value.replace('$', '$$') + '$;$'
                    # Trim the trailing '\n' and '$;' delimiters.
                    values += (sv[:-1], mv[:-2])
                    continue
                # Single-element list/tuple: unwrap and fall through to the
                # scalar handling below.
                value = value[0]
                value_t = type(value)
            if value_t is bool:
                values += (str(value.real), None)
                continue
            if value_t is bytes:
                values += (value, None)
                continue
            if value_t is six.text_type:
                if six.PY2:
                    value = value.encode('utf-8')
                values += (value, None)
                continue
            # NOTE(review): duplicated `value_t is int` as above.
            if value_t is int or value_t is int or value_t is float or value_t is complex:
                values += (str(value), None)
                continue
            if issubclass(value_t, dict):
                values += (str(''.join(RecordWriter._iterencode_json(value, 0))), None)
                continue
            # Fallback: repr for anything unrecognized.
            values += (repr(value), None)
        self._writerow(values)
        self._record_count += 1
        if self._record_count >= self._maxresultrows:
            self.flush(partial=True)

    # Bind a compact JSON iterencoder at class-definition time, preferring the
    # C-accelerated _json.make_encoder when available.
    try:
        # noinspection PyUnresolvedReferences
        from _json import make_encoder
    except ImportError:
        # We may be running under PyPy 2.5 which does not include the _json module
        _iterencode_json = JSONEncoder(separators=(',', ':')).iterencode
    else:
        # Creating _iterencode_json this way yields a two-fold performance improvement on Python 2.7.9 and 2.7.10
        from json.encoder import encode_basestring_ascii

        @staticmethod
        def _default(o):
            raise TypeError(repr(o) + ' is not JSON serializable')

        _iterencode_json = make_encoder(
            {},                       # markers (for detecting circular references)
            _default,                 # object_encoder
            encode_basestring_ascii,  # string_encoder
            None,                     # indent
            ':', ',',                 # separators
            False,                    # sort_keys
            False,                    # skip_keys
            True                      # allow_nan
        )

        del make_encoder
def default(self, o):
    """Encode NotEqual wrappers by their underlying ``value``.

    Other objects are deferred to the base encoder, which raises
    TypeError for unserializable types.
    """
    return o.value if isinstance(o, NotEqual) else JSONEncoder.default(self, o)
def default(self, o):
    """Serialize ObjectView instances via their attribute dictionary.

    Anything else falls through to the base encoder, which raises
    TypeError for unserializable objects.
    """
    if isinstance(o, ObjectView):
        return o.__dict__
    return JSONEncoder.default(self, o)
def default(self, o):  # pylint: disable=E0202
    """JSON-encode NumPy scalar types as native Python scalars.

    Fix: ``np.asscalar`` was deprecated in NumPy 1.16 and removed in 1.23;
    ``ndarray.item()`` / ``generic.item()`` is the documented replacement
    and returns the identical native-Python value.

    :param o: the object json.dumps could not serialize natively.
    :returns: the native Python equivalent when *o* is a numpy scalar.
    :raises TypeError: for any other type, via the base encoder.
    """
    if isinstance(o, np.generic):
        return o.item()
    return JSONEncoder.default(self, o)
def default(self, obj):
    """Render NumPy arrays as (possibly nested) Python lists for JSON output.

    Non-array objects are deferred to the base encoder, which raises
    TypeError for unserializable types.
    """
    if not isinstance(obj, numpy.ndarray):
        return JSONEncoder.default(self, obj)
    return obj.tolist()