def post_docker_sync_hook(self, obj, cloudid):
    """Handle a Jenkins post-build notification for a docker image *sync* job.

    Fetches the job's XML config from the Jenkins instance selected by
    *cloudid*, recovers the ``callback`` object jsonpickled into the job
    description and builds a ``postimagesync`` result from it.  When the
    build succeeded and ``config.JENKINS_IMAGEOPTJOB_DELETE`` is 'true',
    the Jenkins job is deleted.

    :param obj: Jenkins notification payload (reads ``obj['name']`` and
        ``obj['build']['status']``).
    :param cloudid: key into the parsed ``config.CLOUD_CONFIG`` mapping.
    :return: raises ``gen.Return`` with the result object, or ``None``
        on any failure (tornado-coroutine return convention).
    """
    jn = obj['name']
    jsonUtil = JsonUtil()
    c = jsonUtil.parseJsonString(config.CLOUD_CONFIG)
    j = Jenkins(c[cloudid]['jenkins_url'],
                username=c[cloudid]['jenkins_name'],
                password=c[cloudid]['jenkins_token'])
    re = postimagesync()
    try:
        if j.job_exists(jn):
            ss = xmltodict.parse(j.get_job_config(jn))
            jsonpickle.set_preferred_backend('json')
            # Decode the description once (the original decoded it a
            # second time after the isinstance check).
            desobj = jsonpickle.decode(ss['project']['description'])
            if isinstance(desobj, callback):
                re.image_name = desobj.image_name
                re.status = self.getstatus(obj['build']['status'])
                # NOTE(review): repo_name is stored as sync_cloud_id --
                # looks intentional, but confirm against consumers.
                re.sync_cloud_id = desobj.repo_name
                re.tag = desobj.tag
                re.time = datetime.now()
                re.post_callback_url = desobj.callback_url
                if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true':
                    j.delete_job(jn)
    except Exception as e:
        # e.message is deprecated and absent on many exception types,
        # which would raise AttributeError here and hide the real error.
        print(e)
        re = None
    raise gen.Return(re)
def set_backend(self, *args):
    """Install and activate a jsonpickle backend.

    All positional arguments are forwarded verbatim to
    ``jsonpickle.load_backend``; the first one is the backend name,
    which is verified as installed and then made the preferred backend.
    """
    backend_name = args[0]
    self._is_installed(backend_name)
    jsonpickle.load_backend(*args)
    jsonpickle.set_preferred_backend(backend_name)
def post_docker_load_hook(self, obj):
    """Handle a Jenkins post-build notification for a docker image *load* job.

    Recovers the ``callback`` object jsonpickled into the job's
    description and builds a ``postimage`` result from it; deletes the
    job on success when configured to do so.

    :param obj: Jenkins notification payload (reads ``obj['name']`` and
        ``obj['build']['status']``).
    :return: raises ``gen.Return`` with the result object, or ``None``
        on any failure (tornado-coroutine return convention).
    """
    jn = obj['name']
    # Dropped the unused 'bid' local (build_id comes from the decoded
    # description, not from the payload).
    re = postimage()
    try:
        if self.j.job_exists(jn):
            ss = xmltodict.parse(self.j.get_job_config(jn))
            jsonpickle.set_preferred_backend('json')
            # Decode the description once instead of twice.
            desobj = jsonpickle.decode(ss['project']['description'])
            if isinstance(desobj, callback):
                re.image_name = desobj.image_name
                re.status = self.getstatus(obj['build']['status'])
                re.tag = desobj.tag
                re.export_file_url = desobj.des
                re.time = datetime.now()
                re.build_id = desobj.build_id
                re.post_callback_url = desobj.callback_url
                if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true':
                    self.j.delete_job(jn)
    except Exception as e:
        # e.message is deprecated/missing on many exceptions; printing
        # the exception itself is always safe.
        print(e)
        re = None
    raise gen.Return(re)
def posthook(self, obj):
    """Build a ``hook`` result from a Jenkins post-build notification.

    Reads the build info and job config from Jenkins, recovers the
    ``callback`` object jsonpickled into the job description, and copies
    its fields plus build status/duration into the result.

    :param obj: Jenkins notification payload (reads ``obj['name']``,
        ``obj['build']['number']`` and ``obj['build']['status']``).
    :return: raises ``gen.Return`` with the ``hook`` result, or ``None``
        on any failure (tornado-coroutine return convention).
    """
    jn = obj['name']
    bid = str(obj['build']['number'])
    re = hook()
    try:
        info = self.j.get_build_info(jn, int(bid))
        if self.j.job_exists(jn):
            ss = xmltodict.parse(self.j.get_job_config(jn))
            jsonpickle.set_preferred_backend('json')
            # Decode the description once instead of twice.
            desobj = jsonpickle.decode(ss['project']['description'])
            if isinstance(desobj, callback):
                re.namespace = desobj.namespace
                re.repo_name = desobj.repo_name
                # Reuse the already-computed build number string.
                re.build_id = bid
                re.status = self.getstatus(obj['build']['status'])
                re.duration = info['duration']
                re.tag = desobj.tag
                re.time = datetime.now()
                re.callurl = desobj.callback_url
    except Exception as e:
        # e.message is deprecated/missing on many exceptions.
        print(e)
        re = None
    raise gen.Return(re)
def to_json(params):
    """Serialize *params* to a JSON string via jsonpickle.

    Forces the stdlib json backend with ``ensure_ascii`` disabled and
    rewrites bare ``None`` values to JSON ``null`` in the output text.
    """
    jsonpickle.load_backend('json', 'dumps', 'loads', ValueError)
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', ensure_ascii=False)
    encoded = jsonpickle.encode(params, unpicklable=False)
    # NOTE(review): this textual replace would also touch ': None'
    # occurring inside string values -- presumably acceptable for the
    # data serialized here; confirm.
    return encoded.replace(': None', ': null')
def transform(my_object):
    """Encode *my_object* as plain JSON via the simplejson backend.

    NaN values are dropped (``ignore_nan``) and keys are sorted for a
    deterministic output; type information is stripped.
    """
    jsonpickle.enable_fallthrough(False)
    jsonpickle.set_preferred_backend('simplejson')
    jsonpickle.set_encoder_options('simplejson',
                                   sort_keys=True,
                                   ignore_nan=True)
    return jsonpickle.encode(my_object, unpicklable=False)
def get(request):
    """Return the comments of one run as JSON.

    Auth: a user without the ejudge comment capability may only view
    comments on their own run.  If the requester owns the run, all its
    comments are marked read as a side effect.

    :param request: pyramid-style request with ``run_id`` and
        ``contest_id`` in ``matchdict``.
    :return: JSON string of a ``CommentRes``, or a JSON error object on
        any exception.
    """
    try:
        # NOTE(review): options are set for the 'json' backend while
        # 'demjson' is preferred -- these options may never take
        # effect; confirm intent.
        jsonpickle.set_preferred_backend('demjson')
        jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
        r_id = request.matchdict['run_id']
        c_id = request.matchdict['contest_id']
        run = Run.get_by(run_id=r_id, contest_id=c_id)
        if (not RequestCheckUserCapability(request, 'moodle/ejudge_submits:comment')):
            if (int(run.user.id) != int(RequestGetUserId(request))):
                raise Exception("Auth Error")
        comments = run.comments
        res = CommentRes()
        res.comments = comments
        res.run_id = request.matchdict['run_id']
        res.contest_id = request.matchdict['contest_id']
        # Owner is reading their own comments: mark them all as read.
        if (int(run.user.id) == int(RequestGetUserId(request))):
            with transaction.manager:
                DBSession.query(Comment).filter(
                    Comment.run_id == r_id,
                    Comment.contest_id == c_id).update({'is_read': True})
                transaction.commit()
        return jsonpickle.encode(res, unpicklable=False, max_depth=5)
        # return json.dumps(res, skipkeys = True)
    except Exception as e:
        # All failures (including the auth check) surface as a JSON
        # error payload rather than an HTTP error.
        return json.dumps({
            "result": "error",
            "message": e.__str__(),
            "stack": traceback.format_exc()})
def get(request):
    """Return the comments on a run visible to the requester, as JSON.

    Superusers (ejudge comment capability) see every comment on the
    run; other users only see comments they authored or that are
    addressed to them.

    :param request: pyramid-style request with ``run_id`` in matchdict.
    :return: JSON string of the comment list, or a JSON error object.
    """
    try:
        run_id = int(request.matchdict['run_id'])
        is_superuser = RequestCheckUserCapability(
            request, 'moodle/ejudge_submits:comment')
        user_id = RequestGetUserId(request)
        comment_q = DBSession.query(Comment) \
            .filter(Comment.py_run_id == run_id)
        if not is_superuser:
            # BUG FIX: Query.filter() returns a NEW query object; the
            # original discarded it, so non-superusers saw ALL comments.
            comment_q = comment_q.filter(
                or_(Comment.author_user_id == user_id,
                    Comment.user_id == user_id))
        comments = comment_q.all()
        jsonpickle.set_preferred_backend('demjson')
        # NOTE(review): options target the 'json' backend while
        # 'demjson' is preferred -- confirm these options take effect.
        jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
        return jsonpickle.encode(comments, unpicklable=False, max_depth=5)
    except Exception as e:
        return json.dumps({
            "result": "error",
            "message": e.__str__(),
            "stack": traceback.format_exc()})
def parseJsonString(self, jsonstring):
    """Decode a JSON string into a Python object.

    Pins jsonpickle to the stdlib json backend (with non-ASCII output
    allowed) before decoding.

    :param jsonstring: JSON document to decode.
    :return: the decoded object.
    """
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json',
                                   ensure_ascii=False,
                                   separators=(',', ': '))
    return jsonpickle.decode(jsonstring)
def get_schedule():
    """Fetch the IBS-51 schedule and return it encoded as plain JSON."""
    aggregated = SstuScheduleAggregator().get_schedule(
        Constants.IBS_51_SCHEDULE_LINK)
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', ensure_ascii=False)
    return jsonpickle.encode(aggregated, unpicklable=False)
def toJSON(self, human_readable=True):
    """Serialize this object to JSON via the configured backend.

    :param human_readable: when True, pretty-print with sorted keys and
        4-space indentation.
    :return: the jsonpickle-encoded string.
    """
    backend = Config.__pickle_backend
    jsonpickle.set_preferred_backend(backend)
    if human_readable:
        jsonpickle.set_encoder_options(backend, sort_keys=True, indent=4)
    return jsonpickle.encode(self)
def parseJsonAll(self, obj):
    """Encode *obj* to a jsonpickle string, keeping type information.

    ``unpicklable=True`` preserves class metadata so the object can be
    fully reconstructed by a later decode.

    :param obj: object to serialize.
    :return: the encoded JSON string.
    """
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json',
                                   ensure_ascii=False,
                                   separators=(',', ': '))
    return jsonpickle.encode(obj, unpicklable=True)
def main():
    """Load expression and GTF data for every sample and dump the
    aggregated transcript database to *dbfile* as pretty JSON.

    Relies on module-level configuration: ``sampleFolders``, ``base``,
    ``expressionName``, ``gtfName``, ``dbfile``.
    """
    # Dropped the dead locals 'outfiles' and the sorted 'namesA' list,
    # neither of which was ever read.
    dataset = {}
    datas = transcript.transcriptdict()
    for sampleName, sampleDirectory in sampleFolders:
        dataset[sampleName] = {
            'expression': os.path.join(base, sampleDirectory, expressionName),
            'exons': os.path.join(base, sampleDirectory, gtfName),
            'name': sampleName,
        }
        loadExpFile(dataset[sampleName]['expression'], sampleName, datas)
        loadGtfFile(dataset[sampleName]['exons'], sampleName, datas)
    jsonpickle.set_preferred_backend('simplejson')
    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1)
    jsonp = jsonpickle.encode([datas,
                               transcript.transcriptdata.headersPos,
                               transcript.transcriptdata.keys])
    with open(dbfile, 'w') as f:
        f.write(jsonp)
def create_item_queue(self, queue_name: str, queue_item: QueueItem):
    """POST *queue_item* to the James queue create-item endpoint.

    The item is serialized with jsonpickle, logged, and posted; a
    non-200 response is logged and raised as an exception.

    :param queue_name: target queue name (path component).
    :param queue_item: item to enqueue.
    :return: the response body text on success.
    :raises Exception: when the service answers with a non-200 status.
    """
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', ensure_ascii=False)
    jsonObj = json.loads(jsonpickle.encode(queue_item))
    # Build the endpoint URL once instead of repeating the
    # concatenation in every message.
    url = self.jamesUrl + "/v1/queue/create-item/" + queue_name + "/"
    self.logger.writeLog(
        'INFO',
        'Performing POST Request on ' + url +
        " with the following Headers: " + str(self.headers) +
        " with the following Request Body " + str(jsonObj))
    resp = requests.post(url, headers=self.headers, json=jsonObj)
    if resp.status_code != 200:
        failure = ('POST Request on ' + url +
                   " FAILED with the following status " +
                   ' {}'.format(resp.status_code))
        self.logger.writeLog('ERROR', failure)
        raise Exception(failure)
    return resp.text
def getJson(self):
    """Return this object serialized as pretty-printed, UTF-8-friendly
    JSON (4-space indent, type info included)."""
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json',
                                   ensure_ascii=False,
                                   indent=4,
                                   separators=(',', ': '))
    return jsonpickle.encode(self)
def get_count_unread(request):
    """Return the requesting user's unread-comment count, JSON-encoded.

    NOTE(review): encoder options target the 'json' backend while
    'demjson' is preferred -- confirm the options take effect.
    """
    jsonpickle.set_preferred_backend('demjson')
    jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
    unread_count = (
        DBSession.query(Comment)
        .filter(Comment.user_id == RequestGetUserId(request))
        .filter(Comment.is_read == False)  # noqa: E712 (SQLAlchemy needs ==)
        .count())
    return jsonpickle.encode(unread_count, unpicklable=False, max_depth=5)
def encode(values: Any):
    """Encode *values* to plain JSON with simplejson, keeping Decimal
    precision via ``use_decimal`` and the registered decimal handler."""
    jsonpickle.set_preferred_backend('simplejson')
    jsonpickle.set_encoder_options('simplejson',
                                   use_decimal=True,
                                   sort_keys=True)
    SimpleDecimalHandler.handles(decimal.Decimal)
    return jsonpickle.encode(values, unpicklable=False)
def get_release_info(self):
    """Read the jsonpickle-encoded release document from the 'release'
    input target and return the 'release' entry of its first (and
    presumably only) top-level item.
    """
    with self.input()["release"].open("r") as f:
        jsonpickle.set_preferred_backend('simplejson')
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
        # Renamed from 'object' (shadowed the builtin); next(iter(...))
        # replaces the roundabout object.keys().__iter__().
        info = jsonpickle.decode(f.read())
        return info[next(iter(info))]["release"]
def save(self, file_name: str):
    """Write this configuration's data to *file_name* as pretty JSON.

    :param file_name: destination path (created or truncated).
    """
    with open(file_name, 'w+') as config_file:
        jsonpickle.set_preferred_backend('simplejson')
        jsonpickle.set_encoder_options('simplejson',
                                       sort_keys=True,
                                       indent=4)
        config_file.write(jsonpickle.encode(self.data))
def get_count_unread(request):
    """Count the requesting user's unread comments and return the count
    as a jsonpickle-encoded value.

    NOTE(review): options are set on the 'json' backend while 'demjson'
    is preferred -- confirm they apply.
    """
    jsonpickle.set_preferred_backend('demjson')
    jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
    query = DBSession.query(Comment)
    query = query.filter(Comment.user_id == RequestGetUserId(request))
    query = query.filter(Comment.is_read == False)  # noqa: E712
    return jsonpickle.encode(query.count(), unpicklable=False, max_depth=5)
def loadJSON(json_str_or_path):
    """Decode JSON from a string or from a file path (Python 2 code).

    :param json_str_or_path: either a JSON document itself, or a path
        to a file containing one.
    :return: the decoded object, or ``None`` for non-string input.
    """
    jsonpickle.set_preferred_backend(Config.__pickle_backend)
    if isinstance(json_str_or_path, basestring):
        if os.path.exists(json_str_or_path):
            # Context manager closes the handle; the original
            # file(path).read() leaked it.
            with open(json_str_or_path, 'r') as fh:
                json_str_or_path = fh.read()
        return jsonpickle.decode(json_str_or_path)
    else:
        return None
def toJSON(self):
    """Persist this object to config.json (via resource_path) using
    jsonpickle's simplejson backend."""
    # Renamed locals: 'json' shadowed the stdlib module and 'file'
    # shadowed the builtin; the explicit close() was redundant inside
    # the 'with' block and has been removed.
    config_path = resource_path('config.json')
    with open(config_path, 'w') as fh:
        jsonpickle.set_preferred_backend('simplejson')
        fh.write(jsonpickle.encode(self))
def get_test_container_image_info(
        self, input: Dict[str, LocalTarget]) -> ImageInfo:
    """Decode the test-container image info from the given input target.

    :param input: mapping of luigi targets; reads the
        'test_container_image' entry, whose content is
        jsonpickle-encoded.
    :return: the nested "test-container" image info object.
    """
    with input["test_container_image"].open("r") as f:
        jsonpickle.set_preferred_backend('simplejson')
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
        # 'info' instead of 'object', which shadowed the builtin.
        info = jsonpickle.decode(f.read())
        return info["test-container"]["test-container"]
def encode(ob):
    """Best-effort JSON encoding of *ob*; returns '' when encoding
    fails (the error is printed, not raised)."""
    try:
        jsonpickle.set_preferred_backend('json')
        jsonpickle.set_encoder_options('json', ensure_ascii=False)
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
        return jsonpickle.encode(ob, unpicklable=False)
    except Exception as e:
        print(str(e))
        return ""
def decode(js):
    """Best-effort jsonpickle decode of *js*; returns '' when decoding
    fails (the error is printed, not raised)."""
    try:
        jsonpickle.set_preferred_backend('json')
        jsonpickle.set_encoder_options('json', ensure_ascii=False)
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
        return jsonpickle.decode(js)
    except Exception as e:
        print(str(e))
        return ""
def save_json(parser, path):
    """Write the parser object and its children to JSON.

    :param parser: AltoParser object.
    :param path: Full path to the target file.
    """
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', indent=4, ensure_ascii=False)
    with open(path, 'w', encoding='utf-8') as out:
        out.write(jsonpickle.dumps(parser.document, unpicklable=False))
def main(): jsonpickle.set_preferred_backend('simplejson') jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1) print "loading data %s" % dbfile datas, transcript.transcriptdata.headersPos, transcript.transcriptdata.keys = jsonpickle.decode(open(dbfile, 'r').read()) print "creating index %s" % indexfile jsonq = jsonpickle.encode(transcript.getIndex(datas)) with open(indexfile, 'w') as f: f.write(jsonq)
def print_json(data):
    """Pretty-print *data* as human-readable JSON and return the JSON
    string that was printed."""
    jsonpickle.set_encoder_options('json', ensure_ascii=False)
    jsonpickle.set_preferred_backend('json')
    rendered = jsonpickle.encode(data,
                                 unpicklable=False,
                                 fail_safe=None,
                                 indent=2,
                                 separators=(',', ':'))
    print(rendered)
    return rendered
def save_json(self, json_file_path):
    """Save this configuration to a JSON text file.

    :param str json_file_path: the path to the JSON file in which to
        save this configuration.
    """
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', indent=4, sort_keys=False)
    with open(json_file_path, 'w') as json_file:
        json_file.write(jsonpickle.encode(self))
def test_set_preferred_backend_allows_magic(self):
    """The pluggable-backend machinery accepts arbitrary modules:
    os.path's split/join stand in for loads/dumps here."""
    backend = 'os.path'
    jsonpickle.load_backend(backend, 'split', 'join', AttributeError)
    jsonpickle.set_preferred_backend(backend)
    # 'encoding' now runs os.path.split, so it returns a (head, tail) pair.
    head, tail = jsonpickle.encode('/hello/world')
    jsonpickle.remove_backend(backend)
    self.assertEqual(head, '/hello')
    self.assertEqual(tail, 'world')
def test_set_preferred_backend_allows_magic(self):
    """Registering os.path as a backend makes encode() behave like
    os.path.split -- proves backends are fully pluggable."""
    backend = "os.path"
    jsonpickle.load_backend(backend, "split", "join", AttributeError)
    jsonpickle.set_preferred_backend(backend)
    head, tail = jsonpickle.encode("/hello/world")
    jsonpickle.remove_backend(backend)
    self.assertEqual(head, "/hello")
    self.assertEqual(tail, "world")
def save(self, fileName, myObject, make_backup=False):
    """Serialize *myObject* to *fileName* as pretty-printed JSON.

    :param fileName: destination path.
    :param myObject: object to encode with jsonpickle.
    :param make_backup: when True, first copy any existing file to a
        timestamped backup (best effort).
    """
    if make_backup:  # idiomatic truth test instead of '== True'
        try:
            shutil.copy(
                fileName,
                fileName + "." +
                datetime.datetime.now().strftime("%Y-%m-%d-%H-%M."))
        except Exception:
            # Best effort only: the file may not exist at all.  Narrowed
            # from a bare 'except:' so KeyboardInterrupt/SystemExit are
            # no longer swallowed.
            pass
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', sort_keys=False, indent=4)
    with open(fileName, 'w') as outfile:
        outfile.write(jsonpickle.encode(myObject))
def openLog(self, filePath):
    """Decode a jsonpickle log file and return the stored object.

    :param filePath: path to the JSON log file (coerced to str).
    :raises RuntimeError: when the file cannot be opened or decoded.
    """
    import jsonpickle
    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
    jsonpickle.set_preferred_backend('simplejson')
    filePath = str(filePath)
    try:
        with open(filePath, 'r') as f:
            return jsonpickle.decode(f.read(), keys=True)
    except Exception:
        # Narrowed from a bare 'except:' which also caught SystemExit
        # and KeyboardInterrupt.
        raise RuntimeError('Unable to open and decode json file at {0}'.format(filePath))
def download_story(url: str, pages_file: str): print("Downloading story at", url, "and saving as", pages_file, flush=True) # configure jsonpickle to preserve utf-8 strings instead of \u escape sequences jsonpickle.load_backend('simplejson', 'dumps', 'loads', ValueError) jsonpickle.set_preferred_backend('simplejson') jsonpickle.set_encoder_options('simplejson', ensure_ascii=False) # configure Selenium chrome_options = Options() chrome_options.headless = True chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--disable-dev-shm-usage') driver = webdriver.Chrome(options=chrome_options) driver.implicitly_wait(1) driver.request_interceptor = request_interceptor print("Opening", url) driver.get(url) page_driver = PageDriver(driver) print("Opening", pages_file, "for writing") with open(pages_file, 'w') as f: print("Starting main loop", flush=True) queue: "Queue[PagePath]" = Queue() queue.put(()) while not queue.empty(): page_path = queue.get() # open page print(page_path, end=' ', flush=True) page_driver = page_driver.navigate(*page_path) # save page to file page = page_driver.serialize(page_path) page_json = jsonpickle.encode(page, unpicklable=True, indent=2 * ' ') f.write(page_json) f.write('\n') # add answer pages to queue for answer_id in range(len(page_driver.answers())): new_page_path = (*page_path, answer_id) queue.put(new_page_path) # return to root page print(page_driver.article[:50], flush=True) page_driver = page_driver.restart()
def init_json():
    """Initialize JSON encoder.

    Registers datetime/timedelta flatteners with jsonpickle, selects
    the stdlib json backend and configures pretty, UTF-8-friendly
    output.
    """
    # Register custom flatteners for temporal types.
    jsonpickle.handlers.registry.register(datetime, DatetimeHandler)
    jsonpickle.handlers.registry.register(timedelta, TimedeltaHandler)
    jsonpickle.set_preferred_backend('json')
    # NOTE(review): 'encoding' is only accepted by Python 2's
    # json.dumps; on Python 3 this option would fail -- confirm the
    # target interpreter version.
    jsonpickle.set_encoder_options(
        'json',
        sort_keys=True,
        indent=4,
        separators=(',', ': '),
        ensure_ascii=False,
        encoding="utf8")
def init_db(dbfile, indexfile): with app.app_context(): print "initializing db" if not os.path.exists(dbfile): print "NO DATABASE FILE %s" % dbfile sys.exit(1) if not os.path.exists(indexfile): print "NO INDEX FILE %s" % indexfile sys.exit(1) global db global headers global queries jsonpickle.set_preferred_backend('simplejson') jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1) dataDb = open(dbfile, 'r').read() dataIn = open(indexfile, 'r').read() db, lHeadersPos, lKeys = jsonpickle.decode(dataDb) headers = jsonpickle.decode(dataIn) transcript.transcriptdata.headersPos, transcript.transcriptdata.keys = lHeadersPos, lKeys if db is None: print "no data in database" sys.exit(1) if len(db) == 0: print "database is empty" sys.exit(1) if headers is None: print "no data in index" sys.exit(1) if len(headers) == 0: print "index is empty" sys.exit(1) print "db loaded. %d entries" % len(db)
def get(request):
    """Return the run's comments visible to the requester as JSON.

    Superusers (ejudge comment capability) see all comments on the run;
    everyone else only the ones they wrote or received.

    :param request: pyramid-style request with ``run_id`` in matchdict.
    :return: JSON string of the comment list, or a JSON error object.
    """
    try:
        run_id = int(request.matchdict['run_id'])
        is_superuser = RequestCheckUserCapability(request, 'moodle/ejudge_submits:comment')
        user_id = RequestGetUserId(request)
        comment_q = DBSession.query(Comment) \
            .filter(Comment.py_run_id == run_id)
        if not is_superuser:
            # BUG FIX: filter() returns a new Query; the original threw
            # the filtered query away, exposing every comment to
            # non-superusers.
            comment_q = comment_q.filter(or_(Comment.author_user_id == user_id,
                                             Comment.user_id == user_id))
        comments = comment_q.all()
        jsonpickle.set_preferred_backend('demjson')
        # NOTE(review): options target the 'json' backend while
        # 'demjson' is preferred -- confirm they take effect.
        jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
        return jsonpickle.encode(comments, unpicklable=False, max_depth=5)
    except Exception as e:
        return json.dumps(
            {"result": "error",
             "message": e.__str__(),
             "stack": traceback.format_exc()})
def writeConfig():
    """ Writes the global configuration. """
    global Configuration
    global ConfigFilename
    print("Storing configuration...")
    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
    jsonpickle.set_preferred_backend('json')
    try:
        # 'with' guarantees the handle is closed even when encode or
        # write raises (the original open/write/close could leak it).
        with open(ConfigFilename, "w") as configfile:
            configfile.write(jsonpickle.encode(Configuration))
        print("Configuration successfully stored.")
        return True
    except Exception as error:
        # TODO: Handle this better, friendlier
        print("Configuration error: %s" % error)
def set_preferred_backend(self, backend):
    """Make *backend* the preferred jsonpickle backend.

    Verifies the backend module is installed before activating it.

    :param backend: name of the backend module.
    """
    self._is_installed(backend)
    jsonpickle.set_preferred_backend(backend)
def __init__(self):
    """Initialize the base serializer and pin jsonpickle to the stdlib
    json backend."""
    Serializer.__init__(self)
    jsonpickle.set_preferred_backend('json')
print "count not find setup file %s" % setupfile sys.exit(1) exec( open(setupfile, 'r').read() ) DATA_URL_PATH = "/%s" % DATA_URL dbPath = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), SERVER_DB) #VARIABLES app = Flask(__name__) app.config.from_object(__name__) app.jinja_env.globals['trim_blocks' ] = True app.jinja_env.add_extension('jinja2.ext.do') jsonpickle.set_preferred_backend('simplejson') jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1) data = None def tojason( data ): resp = Response( response=jsonpickle.encode( data ), status=200, mimetype='application/json' ) return resp
import jsonpickle

# pylint: disable=unused-variable, unused-argument

jsonpickle.set_preferred_backend("json")


def dumps(obj, **args):
    """Encode *obj* as plain JSON (type information stripped).

    Extra keyword arguments are accepted for API compatibility with
    the stdlib json module and ignored.
    """
    return jsonpickle.encode(obj, unpicklable=False)


def loads(json, **args):
    """Decode a JSON string; extra keyword arguments are ignored."""
    return jsonpickle.decode(json)
#-*- coding: utf8 -*-
import jsonpickle

# Prefer the fast yajl backend when available; jsonpickle raises
# AssertionError when the module is not importable, so fall back to
# the default backend silently.
try:
    jsonpickle.set_preferred_backend('yajl')
except AssertionError:
    pass


def dumps(data):
    """Encode *data* with jsonpickle."""
    return jsonpickle.encode(data)


def loads(data):
    """Decode a jsonpickle-encoded string."""
    return jsonpickle.decode(data)
def tearDown(self):
    """Restore the default jsonpickle backend after each test so later
    tests are not affected by backend changes made here."""
    jsonpickle.set_preferred_backend('json')
it[i] = formatAll(it[i],formatDic,{},formatter) elif isinstance(it,str): if formatter is not None: it = formatter(it).evaluate(formatDic) else: it = it.format(**formatDic) return it # register common extension and backend demjson import jsonpickle.ext.numpy import demjson jsonBackend = "demjson" jsonpickle.ext.numpy.register_handlers() jsonpickle.load_backend(jsonBackend,"encode","decode",CE.MyValueError) jsonpickle.set_preferred_backend(jsonBackend) jsonpickle.set_decoder_options(jsonBackend,decode_float=float) def jsonDump(obj,file,compact=False,*args,**kargs): global jsonBackend jsonpickle.set_encoder_options(jsonBackend,compactly=compact,*args,**kargs) file.write(jsonpickle.encode(obj)) def jsonParse(s): return jsonpickle.decode(s) def jsonLoad(filePath): f = open(filePath) return jsonParse(f.read())
Библиотека классов, используемых в оболочке, запуск кина из файлов, имена файлов записаны в списке, хранимом в виде файла; для каждого файла типа "кино" создается кнопка, нажатие на которую приводит к запуску кина. Через ключ командной строки можно запустить сканирование хранилища с записью результатов в файл. ''' import sys, os, time, re, subprocess import jsonpickle jsonpickle.set_preferred_backend('json') jsonpickle.set_encoder_options('json', ensure_ascii=False) import const moviesListFN = const.MDB_FILENAME CP = const.CP class VMovie: ''' параметры одного кина основной параметр - имя файла, записывается относительно корня хранилища; корнем считаю каталог в котором каталог bin\ в котором этот скрипт ''' fileName = '' # mandatory enTitle = ''
def add_backend(self, backend_string):
    """Register *backend_string* with jsonpickle and make it the
    preferred backend.

    :param backend_string: name of the backend module to load.
    """
    jsonpickle.load_backend(backend_string)
    jsonpickle.set_preferred_backend(backend_string)