def _action(fake_src_data, fake_dst_data):
    """Build a WriteVolumesDb action wired to mocked clouds for testing.

    :param fake_src_data: dict serialized into the namespace args as the
        source-cloud database payload.
    :param fake_dst_data: dict returned (JSON-serialized) by the mocked
        destination storage's ``read_db_info``.
    :returns: tuple ``(action, args)`` ready to pass to ``action.run(**args)``.
    """
    # Minimal migration config: SSH settings plus src/dst credentials and
    # cinder.conf locations (credentials are redacted placeholders).
    fake_config = utils.ext_dict(
        migrate=utils.ext_dict({
            'ssh_connection_attempts': 3,
            'key_filename': 'key_filename',
        }),
        src=utils.ext_dict({'ssh_user': '******',
                            'ssh_sudo_password': '******',
                            'host': SRC_CINDER_HOST,
                            }),
        dst=utils.ext_dict({'ssh_user': '******',
                            'ssh_sudo_password': '******',
                            'host': DST_CINDER_HOST,
                            'conf': '/etc/cinder.conf',
                            }),
        src_storage=utils.ext_dict({'conf': '/etc/cinder.conf'}),
        dst_storage=utils.ext_dict({'conf': '/etc/cinder.conf'}),
    )
    # Source cloud exposes a mocked 'storage' resource.
    fake_src_cloud = mock.Mock()
    fake_src_storage = mock.Mock()
    fake_src_cloud.resources = {'storage': fake_src_storage}
    # Destination storage answers read_db_info with the serialized fake data.
    fake_dst_cloud = mock.Mock()
    fake_dst_storage = mock.Mock()
    fake_dst_storage.read_db_info = \
        mock.Mock(return_value=jsondate.dumps(fake_dst_data))
    fake_dst_cloud.resources = {'storage': fake_dst_storage}
    fake_init = {
        'src_cloud': fake_src_cloud,
        'dst_cloud': fake_dst_cloud,
        'cfg': fake_config
    }
    action = cinder_database_manipulation.WriteVolumesDb(fake_init)
    # Stub out every filesystem/SSH touchpoint of the action with the
    # module-level helpers, so run() exercises only the decision logic.
    action.dst_mount = get_dst_mount(fake_dst_data)
    action.mount_dirs = mock.MagicMock(side_effect=mount_dirs)
    action.find_dir = mock.MagicMock(side_effect=find_dir(fake_dst_data))
    action.volume_size = mock.MagicMock(side_effect=volume_size)
    action.free_space = mock.MagicMock(side_effect=free_space)
    action.dst_hosts = [
        'dst_cinder',
        'dst_cinder@nfs1',
        'dst_cinder@nfs2',
        'dst_cinder@nfs3',
    ]
    action.run_repeat_on_errors = mock.Mock()
    # Namespace args carry the source payload under the well-known key.
    args = {
        cinder_database_manipulation.NAMESPACE_CINDER_CONST:
            jsondate.dumps(fake_src_data)
    }
    return action, args
def setupDatasetsTest(unit_test, httpretty):
    """Register mocked Quandl dataset endpoints and fixtures on *unit_test*.

    Attaches factory-built dataset payloads and ``Dataset`` objects to the
    test instance, then registers httpretty responses for the metadata and
    data endpoints. NOTE: the order of the ``responses`` list below matters —
    httpretty replays them in registration order.
    """
    httpretty.reset()
    httpretty.enable()
    unit_test.dataset_data = {'dataset_data': DatasetDataFactory.build()}
    # mock out calls with column_index query param
    # NOTE: this will always return 'column.1' as the column name
    single_col_data = DatasetDataFactory.build(
        column_names=[six.u('Date'), six.u('column.1')],
        data=[['2015-07-11', 444.3], ['2015-07-13', 433.3],
              ['2015-07-14', 437.5], ['2015-07-15', 440.0]])
    unit_test.single_dataset_data = {'dataset_data': single_col_data}
    # Split a payload into values and metadata for the DataList fixture.
    dataset_data = DatasetDataFactory.build()
    d_values = dataset_data.pop('data')
    d_metadata = dataset_data
    unit_test.data_list_obj = DataList(Data, d_values, d_metadata)
    unit_test.nse_oil = {'dataset': DatasetFactory.build(
        database_code='NSE', dataset_code='OIL')}
    unit_test.wiki_aapl = {'dataset': DatasetFactory.build(
        database_code='WIKI', dataset_code='AAPL')}
    unit_test.wiki_msft = {'dataset': DatasetFactory.build(
        database_code='WIKI', dataset_code='MSFT',
        newest_available_date='2015-07-30',
        oldest_available_date='2013-01-01')}
    unit_test.single_col = {'dataset': DatasetFactory.build(
        database_code='SINGLE', dataset_code='COLUMN',
        newest_available_date='2015-07-30',
        oldest_available_date='2013-01-01')}
    unit_test.oil_obj = Dataset('NSE/OIL', unit_test.nse_oil['dataset'])
    unit_test.aapl_obj = Dataset('WIKI/AAPL', unit_test.wiki_aapl['dataset'])
    unit_test.wiki_obj = Dataset('WIKI/MSFT', unit_test.wiki_msft['dataset'])
    unit_test.single_col_obj = Dataset('SINGLE/COLUMN',
                                       unit_test.single_col['dataset'])
    # Metadata endpoint: one canned response per dataset, in this order.
    httpretty.register_uri(
        httpretty.GET,
        re.compile('https://www.quandl.com/api/v3/datasets/.*/metadata'),
        responses=[httpretty.Response(body=json.dumps(dataset))
                   for dataset in [unit_test.nse_oil,
                                   unit_test.wiki_aapl,
                                   unit_test.wiki_msft]])
    # mock our query param column_index request
    httpretty.register_uri(
        httpretty.GET,
        "https://www.quandl.com/api/v3/datasets/SINGLE/COLUMN/data",
        body=json.dumps(unit_test.single_dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        "https://www.quandl.com/api/v3/datasets/WIKI/AAPL/data",
        body=json.dumps(unit_test.dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        re.compile('https://www.quandl.com/api/v3/datasets/NSE/OIL/data'),
        body=json.dumps(unit_test.dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        re.compile('https://www.quandl.com/api/v3/datasets/WIKI/MSFT/data'),
        body=json.dumps(unit_test.dataset_data))
def read_db_info(self, **kwargs):
    """Serialize the contents of every known table to JSON.

    When a 'tenant_id' filter is supplied via kwargs, its first element is
    remembered on the instance (``self.filter_tenant_id``) before dumping.
    """
    tenant_ids = kwargs.get('tenant_id')
    if tenant_ids:
        self.filter_tenant_id = tenant_ids[0]
    snapshot = {}
    for table in self.list_of_tables:
        snapshot[table] = self.list_of_dicts_for_table(table)
    return jsondate.dumps(snapshot)
def run(self, *args, **kwargs):
    """Run WriteVolumesDb Action.

    Reads the serialized source-cloud database payload from the task
    namespace, snapshots the destination database, copies volumes, then
    deploys the (modified) source data to the destination storage.

    :raises AbortMigrationError: if the namespace payload is missing/empty.
    """
    data_from_namespace = kwargs.get(NAMESPACE_CINDER_CONST)
    if not data_from_namespace:
        raise AbortMigrationError(
            "Cannot read attribute {attribute} from namespace".format(
                attribute=NAMESPACE_CINDER_CONST))
    # Cache per-side (SRC/DST) handles: cloud object, its storage resource,
    # and the matching config section.
    self.cloud = {
        SRC: {
            CLOUD: self.src_cloud,
            RES: self.src_cloud.resources.get(utils.STORAGE_RESOURCE),
            CFG: self.cfg.src,
        },
        DST: {
            CLOUD: self.dst_cloud,
            RES: self.dst_cloud.resources.get(utils.STORAGE_RESOURCE),
            CFG: self.cfg.dst,
        }
    }
    self.data[SRC] = jsondate.loads(data_from_namespace)
    # Optional tenant filter forwarded to the destination DB snapshot.
    search_opts = kwargs.get('search_opts_tenant', {})
    self.data[DST] = jsondate.loads(
        self.cloud[DST][RES].read_db_info(**search_opts))
    LOG.debug('Cloud info: %s', str(self.cloud))
    # Order matters: volumes are copied before the source data is rewritten
    # and deployed onto the destination.
    self._copy_volumes()
    self.data[SRC] = _modify_data(self.data[SRC])
    self.cloud[DST][RES].deploy(jsondate.dumps(self.data[SRC]))
def to_json(obj):
    """Serialize *obj* as pretty-printed JSON.

    Keys are emitted in sorted order and nesting is indented by four
    spaces, giving a stable, human-readable representation.

    :param obj: dictionary (or any JSON-serializable value) to convert.
    :returns: the JSON string.
    """
    return json.dumps(obj, indent=4, sort_keys=True)
def log_event_to_channel(bot, msg, reason, chid, formats):
    """Log a moderated message to the log channel *chid*.

    Depending on *formats* ('forward', 'json', 'simple') the original
    message is forwarded verbatim, dumped as pretty-printed JSON, and/or
    summarized as escaped HTML text. Failures that are expected with
    Telegram (vanished source message, over-long message) are logged and
    swallowed; anything else is re-raised.
    """
    # Build an HTML link describing the originating chat.
    if msg.chat.username:
        from_chatname = '<a href="https://t.me/%s">@%s</a>' % (
            msg.chat.username, msg.chat.username
        )
    else:
        from_chatname = '#%d' % msg.chat.id
    user_display_name = format_user_display_name(msg.from_user)
    from_info = (
        'Chat: %s\nUser: <a href="tg://user?id=%d">%s</a>'
        % (from_chatname, msg.from_user.id, user_display_name)
    )
    if 'forward' in formats:
        try:
            bot.forward_message(
                chid, msg.chat.id, msg.message_id
            )
        except Exception as ex:
            # Persist the failure for later inspection.
            db.fail.save({
                'date': datetime.utcnow(),
                'reason': str(ex),
                'traceback': format_exc(),
                'chat_id': msg.chat.id,
                'msg_id': msg.message_id,
            })
            # A source message that no longer exists is expected
            # occasionally; any other error is a real problem.
            if (
                'MESSAGE_ID_INVALID' in str(ex)
                or 'message to forward not found' in str(ex)
            ):
                logging.error(
                    'Failed to forward spam message: %s' % ex
                )
            else:
                raise
    if 'json' in formats:
        msg_dump = msg.to_dict()
        msg_dump['meta'] = {
            'reason': reason,
            'date': datetime.utcnow(),
        }
        # jsondate handles the datetime embedded in the meta block.
        dump = jsondate.dumps(msg_dump, indent=4, ensure_ascii=False)
        dump = html.escape(dump)
        content = '%s\n<pre>%s</pre>' % (from_info, dump)
        try:
            bot.send_message(chid, content, parse_mode=ParseMode.HTML)
        except Exception as ex:
            # Telegram rejects over-long messages; log and move on.
            if 'message is too long' in str(ex):
                logging.error('Failed to log message to channel: %s' % ex)
            else:
                raise
    if 'simple' in formats:
        # NOTE(review): html.escape(None) would raise if both text and
        # caption are None — presumably callers guarantee one is set.
        text = html.escape(msg.text or msg.caption)
        content = (
            '%s\nReason: %s\nContent:\n<pre>%s</pre>'
            % (from_info, reason, text)
        )
        bot.send_message(chid, content, parse_mode=ParseMode.HTML)
def test_dumps_datelike_string_does_not_roundtrip(self):
    """A string that looks like a date *will* be interpreted as a date.

    If for whatever reason, you don't want that to happen, you'll need
    to do some pre or post-processing to fixup the results.
    """
    source = dict(created_at='2011-01-01')
    roundtripped = jsondate.loads(jsondate.dumps(source))
    self.assertEqual(dict(created_at=datetime.date(2011, 1, 1)),
                     roundtripped)
def _put(self, key, data):
    """JSON-serialize *data* and store it under *key* via the parent class.

    Non-dict objects are converted via their ``__dict__``. Any key starting
    with an underscore is treated as private and stripped before
    serialization.

    Fix: iterate over a snapshot of the keys — deleting entries while
    iterating the live ``dict.keys()`` view raises RuntimeError on
    Python 3 (it happened to work on Python 2 where keys() was a list).
    """
    import jsondate as json
    if not isinstance(data, dict):
        data = dict(data.__dict__)
    # any key starting with underscore isn't serialized
    for k in list(data.keys()):
        if k.startswith('_'):
            del data[k]
    return super(TaskDb, self)._put(key, '%s\n' % json.dumps(data))
def test_dump_unicode_roundtrips(self):
    """jsondate must hand back unicode for every string, unlike plain json."""
    source = {u'foo': u'bar', 'empty': u''}
    # Plain json is broken on Python 2: empty-string values come back as str.
    decoded = json.loads(json.dumps(source))
    self.assertTypeAndValue(unicode, u'bar', decoded[u'foo'])
    self.assertTypeAndValue(str, '', decoded[u'empty'])
    # jsondate fixes this: every string decodes as a unicode object.
    decoded = jsondate.loads(jsondate.dumps(source))
    self.assertTypeAndValue(unicode, u'bar', decoded[u'foo'])
    self.assertTypeAndValue(unicode, u'', decoded[u'empty'])
def error(self, request, job=None, error=None):
    """Build an HTTP 500 JSON response.

    Includes job metadata under 'meta' when *job* is given; 'data' carries
    the stringified error, or a generic message when none is supplied.
    """
    payload = {}
    if job:
        payload['meta'] = self.format_meta_for_response(job)
    payload['data'] = unicode(error) if error else 'There was an unknown error'
    return Response(json.dumps(payload), status=500,
                    content_type='application/json')
def run(self, *args, **kwargs):
    """Detach every in-use volume in the namespace payload and deploy it.

    :raises RuntimeError: if the namespace payload is missing/empty.
    """
    serialized = kwargs.get(NAMESPACE_CINDER_CONST)
    if not serialized:
        raise RuntimeError(
            "Cannot read attribute {attribute} from namespace".format(
                attribute=NAMESPACE_CINDER_CONST))
    payload = jsondate.loads(serialized)
    # Mark attached volumes as available so they can be migrated.
    for volume in payload['volumes']:
        if volume['status'] != 'in-use':
            continue
        volume['mountpoint'] = None
        volume['status'] = 'available'
        volume['instance_uuid'] = None
        volume['attach_status'] = 'detached'
    self.get_resource().deploy(jsondate.dumps(payload))
def set_likert_choices(question_id):
    """POST the seven standard Likert-scale choices for *question_id*.

    Each choice is created via the /choice endpoint; the outcome of every
    request is logged and passed to post_created().
    """
    labels = [
        'Disagree strongly',
        'Disagree moderately',
        'Disagree a little',
        'Neither agree nor disagree',
        'Agree a little',
        'Agree moderately',
        'Agree strongly',
    ]
    for nr, label in enumerate(labels, 1):
        choice = {'choice_text': label,
                  'question_id': question_id,
                  'sort_nr': str(nr)}
        r = requests.post('{}/choice'.format(BASE_URL),
                          data=json.dumps(choice),
                          headers={'content-type': 'application/json'})
        if r.status_code == 201:
            log.info('Likert {} with status code {}'.format(
                choice['choice_text'], r.status_code))
        else:
            log.error('Likert {} with status code {}'.format(
                choice['choice_text'], r.status_code))
        post_created(r)
def return_response(self, resp, status=200, content_type=None):
    """Wrap *resp* as a JSON-serialized Response.

    Falls back to the handler's default content type when none is given.
    """
    ctype = content_type or self.content_type
    return Response(json.dumps(resp), status=status, content_type=ctype)
def test_dumps_none_roundtrips(self):
    """None values survive a dumps/loads roundtrip.

    Regression check: previously generated a TypeError from
    _datetime_object_hook.
    """
    payload = dict(foo=None)
    self.assertEqual(payload, jsondate.loads(jsondate.dumps(payload)))
log_stderrhandler = StderrHandler() def post_created(response): if response.status_code == 201: return True else: print response.text print response.status_code return False # Questionnaire questionnaire = {'name': 'FMR Continuation Desire Questionnaire'} r = requests.post('{}/questionnaire'.format(BASE_URL), data=json.dumps(questionnaire), headers={'content-type': 'application/json'}) if r.status_code == 201: log.info('Questionnaire {} with status code {}'.format(questionnaire['name'], r.status_code)) else: log.error('Questionnaire {} with status code {}'.format(questionnaire['name'], r.status_code)) # QuestionSet question_sets = [ {'name': 'Before the experience', 'sort_nr': '1', 'info_text': 'Questions that need to be filled out before you start playing', 'questionnaire_id': '1'}, {'name': 'During the experience', 'sort_nr': '2', 'info_text': 'Questions regarding playing the game', 'questionnaire_id': '1'},
def test_dumps_date_roundtrips(self):
    """A datetime.date value survives a dumps/loads roundtrip unchanged."""
    payload = {'created_at': datetime.date(2011, 1, 1)}
    self.assertEqual(payload, jsondate.loads(jsondate.dumps(payload)))
def setupDatasetsTest(unit_test, httpretty):
    """Register mocked Quandl dataset endpoints and fixtures on *unit_test*.

    Attaches factory-built dataset payloads and ``Dataset`` objects to the
    test instance, then registers httpretty responses for the metadata and
    data endpoints. NOTE: the order of the ``responses`` list below matters —
    httpretty replays them in registration order.
    """
    httpretty.reset()
    httpretty.enable()
    unit_test.dataset_data = {'dataset_data': DatasetDataFactory.build()}
    # mock out calls with column_index query param
    # NOTE: this will always return 'column.1' as the column name
    single_col_data = DatasetDataFactory.build(
        column_names=[six.u('Date'), six.u('column.1')],
        data=[['2015-07-11', 444.3], ['2015-07-13', 433.3],
              ['2015-07-14', 437.5], ['2015-07-15', 440.0]])
    unit_test.single_dataset_data = {'dataset_data': single_col_data}
    # Split a payload into values and metadata for the DataList fixture.
    dataset_data = DatasetDataFactory.build()
    d_values = dataset_data.pop('data')
    d_metadata = dataset_data
    unit_test.data_list_obj = DataList(Data, d_values, d_metadata)
    unit_test.nse_oil = {
        'dataset': DatasetFactory.build(database_code='NSE',
                                        dataset_code='OIL')
    }
    unit_test.goog_aapl = {
        'dataset': DatasetFactory.build(database_code='GOOG',
                                        dataset_code='NASDAQ_AAPL')
    }
    unit_test.goog_msft = {
        'dataset': DatasetFactory.build(database_code='GOOG',
                                        dataset_code='NASDAQ_MSFT',
                                        newest_available_date='2015-07-30',
                                        oldest_available_date='2013-01-01')
    }
    unit_test.single_col = {
        'dataset': DatasetFactory.build(database_code='SINGLE',
                                        dataset_code='COLUMN',
                                        newest_available_date='2015-07-30',
                                        oldest_available_date='2013-01-01')
    }
    unit_test.oil_obj = Dataset('NSE/OIL', unit_test.nse_oil['dataset'])
    # NOTE(review): codes here ('GOOG/AAPL', 'GOOG/MSFT') differ from the
    # NASDAQ_* dataset codes built above — presumably intentional for these
    # fixtures; confirm against the tests that use them.
    unit_test.aapl_obj = Dataset('GOOG/AAPL', unit_test.goog_aapl['dataset'])
    unit_test.goog_obj = Dataset('GOOG/MSFT', unit_test.goog_msft['dataset'])
    unit_test.single_col_obj = Dataset('SINGLE/COLUMN',
                                       unit_test.single_col['dataset'])
    # Metadata endpoint: one canned response per dataset, in this order.
    httpretty.register_uri(
        httpretty.GET,
        re.compile('https://www.quandl.com/api/v3/datasets/.*/metadata'),
        responses=[
            httpretty.Response(body=json.dumps(dataset))
            for dataset in [unit_test.nse_oil,
                            unit_test.goog_aapl,
                            unit_test.goog_msft]
        ])
    # mock our query param column_index request
    httpretty.register_uri(
        httpretty.GET,
        "https://www.quandl.com/api/v3/datasets/SINGLE/COLUMN/data",
        body=json.dumps(unit_test.single_dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        "https://www.quandl.com/api/v3/datasets/GOOG/NASDAQ_AAPL/data",
        body=json.dumps(unit_test.dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        re.compile('https://www.quandl.com/api/v3/datasets/NSE/OIL/data'),
        body=json.dumps(unit_test.dataset_data))
    httpretty.register_uri(
        httpretty.GET,
        re.compile(
            'https://www.quandl.com/api/v3/datasets/GOOG/NASDAQ_MSFT/data'),
        body=json.dumps(unit_test.dataset_data))
def test_unexpected_type_raises(self):
    """Values with no JSON encoding (e.g. a set) raise TypeError."""
    payload = {'foo': set(['a'])}
    with self.assertRaises(TypeError):
        jsondate.dumps(payload)
def handle_any_msg(msg):
    """Moderate an incoming group message according to per-chat settings.

    Scans entities, plain-text mentions, forwards and captions for
    disallowed content (links, emails, @-links to groups/channels). When
    something matches, the message is deleted, the event saved, the chat
    optionally notified (rate-limited per user), and the event logged to
    the configured log channel.
    """
    to_delete = False
    # Pass 1: Telegram-parsed entities in the message text.
    for ent in (msg.entities or []):
        if ent.type in ('url', 'text_link') and get_setting(group_config, msg.chat.id, 'links', True):
            url = msg.text[ent.offset:ent.offset + ent.length]
            # Normalize scheme-less URLs so urlparse yields a netloc.
            if not url.startswith('//') and not url.startswith('http'):
                url = '//' + url
            if urlparse(url).netloc.lower() in LINKS_EXCEPTIONS:
                continue
            to_delete = True
            reason = 'external link'
            break
        if ent.type in ('email',) and get_setting(group_config, msg.chat.id, 'emails', True):
            to_delete = True
            reason = 'email'
            break
        if ent.type == 'mention':
            username = msg.text[ent.offset:ent.offset + ent.length].lstrip('@')
            if username.lower() in USERNAME_EXCEPTIONS:
                continue
            user_type = process_user_type(db, username)
            if user_type == 'group' and get_setting(group_config, msg.chat.id, 'groups', True):
                reason = '@-link to group'
                to_delete = True
                break
            elif user_type == 'channel' and get_setting(group_config, msg.chat.id, 'channels', True):
                reason = '@-link to channel'
                to_delete = True
                break
    # Pass 2: catch obfuscated mentions written as "@ username".
    if not to_delete:
        mention = re.search(r'(?:^|\W)\@\s([a-zA-Z]+)(?:$|\W)', msg.text)
        if mention:
            username = mention.group(1)
            if username.lower() not in USERNAME_EXCEPTIONS:
                user_type = process_user_type(db, username)
                if user_type == 'group' and get_setting(group_config, msg.chat.id, 'groups', True):
                    reason = '@-link to group'
                    to_delete = True
                if user_type == 'channel' and get_setting(group_config, msg.chat.id, 'channels', True):
                    reason = '@-link to channel'
                    to_delete = True
    # Pass 3: forwarded messages.
    if (msg.forward_from or msg.forward_from_chat) and get_setting(group_config, msg.chat.id, 'forwarded', True):
        reason = 'forwarded'
        to_delete = True
    # Pass 4: @-links hidden in media captions.
    if not to_delete:
        usernames = find_username_links(msg.caption or '')
        for username in usernames:
            username = username.lstrip('@')
            user_type = process_user_type(db, username)
            if user_type == 'group' and get_setting(group_config, msg.chat.id, 'groups', True):
                reason = 'caption @-link to group'
                to_delete = True
                break
            elif user_type == 'channel' and get_setting(group_config,
                                                        msg.chat.id, 'channels', True):
                reason = 'caption @-link to channel'
                to_delete = True
                break
    # Pass 5: plain external links in captions.
    if not to_delete:
        if find_external_links(msg.caption or '') and get_setting(group_config, msg.chat.id, 'links', True):
            reason = 'caption external link'
            to_delete = True
    if to_delete:
        # Administrators are exempt from moderation.
        if msg.from_user.id in [x.user.id for x in bot.get_chat_administrators(msg.chat.id)]:
            return
        try:
            save_event(db, 'delete_msg', msg, reason=reason)
            # Build a display name for the offending user.
            if msg.from_user.first_name and msg.from_user.last_name:
                from_user = '******' % (
                    msg.from_user.first_name,
                    msg.from_user.last_name,
                )
            elif msg.from_user.first_name:
                from_user = msg.from_user.first_name
            elif msg.from_user.username:
                # NOTE(review): this branch tests username but assigns
                # first_name — looks like a copy-paste slip; confirm.
                from_user = msg.from_user.first_name
            else:
                from_user = '******' % msg.from_user.id
            event_key = (msg.chat.id, msg.from_user.id)
            if get_setting(group_config, msg.chat.id, 'publog', True):
                # Notify about spam from same user only one time per hour
                if (
                    event_key not in delete_events
                    or delete_events[event_key] < datetime.utcnow() - timedelta(hours=1)
                ):
                    # NOTE(review): the %s placeholders below are never
                    # filled in — the raw template string is sent as-is.
                    # Compare the sibling handler which applies
                    # ``% (from_user, reason)``; likely a bug, confirm.
                    ret = 'Removed msg from %s. Reason: %s\nMessages containing links to these websites will not be deleted: steemit.com, golos.io'
                    bot.send_message(msg.chat.id, ret, parse_mode='HTML')
                    delete_events[event_key] = datetime.utcnow()
            # Log to the configured channel(s), if any.
            ids = set()
            channel_id = get_setting(group_config, msg.chat.id, 'log_channel_id')
            if channel_id:
                ids.add(channel_id)
            for chid in ids:
                formats = get_setting(group_config, chid, 'logformat', default=['simple'])
                from_chatname = (
                    '@%s' % msg.chat.username
                    if msg.chat.username else '#%d' % msg.chat.id
                )
                if msg.from_user.username:
                    from_username = '******' % (
                        msg.from_user.username,
                        msg.from_user.first_name
                    )
                else:
                    from_username = msg.from_user.first_name
                from_info = (
                    'Chat: %s\nUser: <a href="tg://user?id=%d">%s</a>'
                    % (from_chatname, msg.from_user.id, from_username)
                )
                try:
                    if 'forward' in formats:
                        bot.forward_message(chid, msg.chat.id, msg.message_id)
                    if 'json' in formats:
                        msg_dump = dump_telegram_object(msg)
                        msg_dump['meta'] = {
                            'reason': reason,
                            'date': datetime.utcnow(),
                        }
                        dump = jsondate.dumps(msg_dump, indent=4, ensure_ascii=False)
                        dump = html.escape(dump)
                        # NOTE(review): the trailing comma makes `content`
                        # a one-element tuple, not a string — send_message
                        # likely receives the wrong type; confirm and fix.
                        content = (
                            '%s\n<pre>%s</pre>' % (from_info, dump),
                        )
                        bot.send_message(chid, content, parse_mode='HTML')
                    if 'simple' in formats:
                        text = html.escape(msg.text or msg.caption)
                        content = (
                            '%s\nReason: %s\nContent:\n<pre>%s</pre>'
                            % (from_info, reason, text)
                        )
                        bot.send_message(chid, content, parse_mode='HTML')
                except Exception as ex:
                    logging.error(
                        'Failed to send notification to channel [%d]' % chid,
                        exc_info=ex
                    )
        finally:
            # Delete the offending message even if logging failed.
            bot.delete_message(msg.chat.id, msg.message_id)
def handle_any_msg(msg):
    """Moderate a message from a recently-joined ("unsafe") user.

    Users who joined more than ``safe_hours`` ago are trusted and skipped.
    Otherwise links, emails, @-links to groups/channels, forwards and
    caption links all trigger deletion, event logging, an optional
    rate-limited in-chat notice, and logging to the configured channel(s).
    """
    to_delete = False
    # Debug hook: the maintainer can force deletion with the text 'del'.
    if msg.from_user.username == 'madspectator' and (msg.text == 'del' or msg.caption == 'del'):
        reason = 'debug delete'
        to_delete = True
    if not to_delete:
        try:
            join_date = joined_users[(msg.chat.id, msg.from_user.id)]
        except KeyError:
            # Unknown join date: user predates the bot, treat as trusted.
            return
        safe_hours = get_setting(group_config, msg.chat.id, 'safe_hours',
                                 DEFAULT_SAFE_HOURS)
        # Members older than safe_hours are trusted; skip moderation.
        if datetime.utcnow() - timedelta(hours=safe_hours) > join_date:
            return
    for ent in (msg.entities or []):
        if ent.type in ('url', 'text_link'):
            to_delete = True
            reason = 'external link'
            break
        if ent.type in ('email', ):
            to_delete = True
            reason = 'email'
            break
        if ent.type == 'mention':
            username = msg.text[ent.offset:ent.offset + ent.length].lstrip('@')
            user_type = process_user_type(db, username)
            if user_type in ('group', 'channel'):
                to_delete = True
                reason = '@-link to group/channel'
                break
    if not to_delete:
        if msg.forward_from or msg.forward_from_chat:
            reason = 'forwarded'
            to_delete = True
    # @-links hidden in media captions.
    if not to_delete:
        usernames = find_username_links(msg.caption or '')
        for username in usernames:
            username = username.lstrip('@')
            user_type = process_user_type(db, username)
            if user_type in ('group', 'channel'):
                reason = 'caption @-link to group/channel'
                to_delete = True
                break
    if not to_delete:
        if find_external_links(msg.caption or ''):
            reason = 'caption external link'
            to_delete = True
    if to_delete:
        try:
            save_event(db, 'delete_msg', msg, reason=reason)
            # Build a display name for the offending user.
            if msg.from_user.first_name and msg.from_user.last_name:
                from_user = '******' % (
                    msg.from_user.first_name,
                    msg.from_user.last_name,
                )
            elif msg.from_user.first_name:
                from_user = msg.from_user.first_name
            elif msg.from_user.username:
                # NOTE(review): this branch tests username but assigns
                # first_name — looks like a copy-paste slip; confirm.
                from_user = msg.from_user.first_name
            else:
                from_user = '******' % msg.from_user.id
            event_key = (msg.chat.id, msg.from_user.id)
            if get_setting(group_config, msg.chat.id, 'publog', True):
                # Notify about spam from same user only one time per hour
                if (event_key not in delete_events
                        or delete_events[event_key]
                        < datetime.utcnow() - timedelta(hours=1)):
                    ret = 'Removed msg from %s. Reason: new user + %s' % (
                        from_user, reason)
                    bot.send_message(msg.chat.id, ret, parse_mode='HTML')
                    delete_events[event_key] = datetime.utcnow()
            # Always log to the global channel plus any per-chat channel.
            ids = set([GLOBAL_CHANNEL_ID])
            channel_id = get_setting(group_config, msg.chat.id, 'log_channel_id')
            if channel_id:
                ids.add(channel_id)
            for chid in ids:
                formats = get_setting(group_config, chid, 'logformat',
                                      default=['simple'])
                from_chatname = ('@%s' % msg.chat.username
                                 if msg.chat.username else '#%d' % msg.chat.id)
                if msg.from_user.username:
                    from_username = '******' % (msg.from_user.username,
                                                msg.from_user.first_name)
                else:
                    from_username = msg.from_user.first_name
                from_info = (
                    'Chat: %s\nUser: <a href="tg://user?id=%d">%s</a>'
                    % (from_chatname, msg.from_user.id, from_username))
                try:
                    if 'forward' in formats:
                        bot.forward_message(chid, msg.chat.id, msg.message_id)
                    if 'json' in formats:
                        msg_dump = dump_telegram_object(msg)
                        msg_dump['meta'] = {
                            'reason': reason,
                            'date': datetime.utcnow(),
                        }
                        dump = jsondate.dumps(msg_dump, indent=4, ensure_ascii=False)
                        dump = html.escape(dump)
                        # NOTE(review): the trailing comma makes `content`
                        # a one-element tuple, not a string — send_message
                        # likely receives the wrong type; confirm and fix.
                        content = ('%s\n<pre>%s</pre>' % (from_info, dump), )
                        bot.send_message(chid, content, parse_mode='HTML')
                    if 'simple' in formats:
                        text = html.escape(msg.text or msg.caption)
                        content = (
                            '%s\nReason: %s\nContent:\n<pre>%s</pre>'
                            % (from_info, reason, text))
                        bot.send_message(chid, content, parse_mode='HTML')
                except Exception as ex:
                    logging.error(
                        'Failed to send notification to channel [%d]' % chid,
                        exc_info=ex)
        finally:
            # Delete the offending message even if logging failed.
            bot.delete_message(msg.chat.id, msg.message_id)
# Seed script (Python 2): creates participant records via the FMR REST API.
# jsondate is used as a drop-in json replacement that handles dates.
import jsondate as json
import requests
from logbook import Logger, FileHandler, StderrHandler

BASE_URL = 'http://fmr-api-507.herokuapp.com/api'

log = Logger('Logbook')
log_filehandler = FileHandler('application.log')
log_stderrhandler = StderrHandler()

# Participants: one record per scenario assignment.
participants = [
    {'scenario': '0'},
    {'scenario': '1'},
    {'scenario': '0'}
]
# Collects the server-assigned ids of successfully created participants.
participants_ids = []
for p in participants:
    r = requests.post('{}/participant'.format(BASE_URL),
                      data=json.dumps(p),
                      headers={'content-type': 'application/json'})
    if r.status_code == 201:
        log.info('Participant with scenario {} with status code {}'.format(p['scenario'], r.status_code))
        participants_ids.append(json.loads(r.text)['id'])
    else:
        log.error('Participant with scenario {} with status code {}'.format(p['scenario'], r.status_code))
        print r.text
#!/usr/bin/env python # # Takes an .html file on the command line, parses it using the PACER # Docket Report parser, and outputs json to stdout. import jsondate as json import sys from juriscraper.pacer.http import PacerSession from juriscraper.pacer import DocketReport pacer_session = PacerSession(username='******', password='******') report = DocketReport('psc', pacer_session) for path in sys.argv[1:]: with open(path, 'r') as f: report._parse_text(f.read().decode('utf-8')) data = report.data print json.dumps(data, indent=2, sort_keys=True, separators=(',', ': '))
def read_db_info(self, **_):
    """Serialize the contents of every known table to JSON.

    Extra keyword arguments are accepted for interface compatibility but
    ignored.
    """
    snapshot = {}
    for table in self.list_of_tables:
        snapshot[table] = self.list_of_dicts_for_table(table)
    return jsondate.dumps(snapshot)
def serializar_datetime(self, payload):
    """Serialize *payload* to JSON, with date/datetime values encoded."""
    return jsondate.dumps(payload)
def roundtrip(input):
    """Dump *input* through jsondate and parse the result back."""
    encoded = jsondate.dumps(input)
    return jsondate.loads(encoded)
def test_dumps_str_roundtrips(self):
    """Plain strings survive a dumps/loads roundtrip.

    Regression check: previously generated a ValueError from
    _datetime_object_hook.
    """
    payload = dict(foo='bar')
    self.assertEqual(payload, jsondate.loads(jsondate.dumps(payload)))
def __str__(self):
    """Render the instance's attributes as a single JSON line."""
    return '%s\n' % json.dumps(self.__dict__)
def hash(self, block):
    """Return the SHA-256 hex digest of *block*.

    The block is serialized with sorted keys so the digest is stable
    regardless of dict insertion order.
    """
    serialized = json.dumps(block, sort_keys=True)
    return hashlib.sha256(serialized.encode()).hexdigest()
def test_dumps_empty_roundtrips(self):
    """An empty dict survives a dumps/loads roundtrip."""
    roundtripped = jsondate.loads(jsondate.dumps({}))
    self.assertEqual({}, roundtripped)