Example #1
def test_unserializable(self):
    # Hide an unserializable object as deep as we can in supported collections
    value = ['a', ('b', {'c': {'d', object()}})]
    with pytest.raises(TypeError):
        serialization.serialize(value)
    with pytest.raises(TypeError):
        serialization.dumps(value)
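The test buries the object() as deeply as the supported collections allow, to prove the TypeError propagates up from any nesting depth. The standard-library json module fails the same way for unknown types; a minimal illustration (a tuple stands in for the set, since stdlib json cannot encode sets at all):

import json

value = ['a', ('b', {'c': ('d', object())})]
try:
    json.dumps(value)
except TypeError as err:
    print(f'raised as expected: {err}')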
Example #2
def upgrade(ver, session):
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                logger.error(
                    'Error upgrading input_cache pickle object due to {}',
                    str(e))
        ver = 1
    if ver == 1:
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 2
    return ver
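The encode_datetime=True and decode_datetime=True flags belong to the project's own json wrapper, not the standard library. A rough stand-in for what such a round trip involves, using only stdlib hooks (the '__datetime__' tag key is an invented detail):

import datetime
import json

def encode_datetime(obj):
    # default= fallback: tag datetimes so they survive the round trip
    if isinstance(obj, datetime.datetime):
        return {'__datetime__': obj.isoformat()}
    raise TypeError(f'{type(obj)!r} is not JSON serializable')

def decode_datetime(dct):
    # object_hook: undo the tagging applied above
    if '__datetime__' in dct:
        return datetime.datetime.fromisoformat(dct['__datetime__'])
    return dct

payload = {'title': 'blah', 'expire': datetime.datetime(2019, 12, 12, 12, 12)}
text = json.dumps(payload, default=encode_datetime)
assert json.loads(text, object_hook=decode_datetime) == payload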
Example #3
def upgrade(ver: int, session: DBSession) -> int:
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as ex:
                logger.error(
                    f'Error upgrading input_cache pickle object due to {ex}'
                )
        ver = 1
    if ver == 1:
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            # Had a weird case of an entry without a title: https://github.com/Flexget/Flexget/issues/2636
            title = data.pop('title', None)
            entry = partial(Entry, **data)
            e = entry(title=str(title)) if title else entry()
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 2
    return ver
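Compared with Example #2, this version guards against entries that have no title at all (the linked issue #2636): functools.partial binds the shared keyword arguments once, and the title keyword is only added when a truthy title was present. The same pattern with a stand-in constructor (make_record is hypothetical):

from functools import partial

def make_record(title=None, **fields):
    return {'title': title, **fields}

build = partial(make_record, url='http://blah')
print(build(title='1999-09-09'))  # {'title': '1999-09-09', 'url': 'http://blah'}
print(build())                    # {'title': None, 'url': 'http://blah'}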
Example #4
def test_entry_serialization(self):
    entry1 = entry.Entry(
        {
            'title': 'blah',
            'url': 'http://blah',
            'listfield': ['a', 'b', 1, 2],
            'dictfield': {'a': 1, 'b': 2},
            'intfield': 5,
            'floatfield': 5.5,
            'datefield': datetime.date(1999, 9, 9),
            'datetimefield': datetime.datetime(1999, 9, 9, 9, 9),
            'qualityfield': qualities.Quality('720p hdtv'),
            'nestedlist': [qualities.Quality('1080p')],
            'nesteddict': {'a': datetime.date(1999, 9, 9)},
        }
    )
    entry1.add_lazy_fields('lazy function', ['lazyfield'])
    assert entry1.is_lazy('lazyfield')
    serialized = serialization.dumps(entry1)
    print(serialized)
    entry2 = serialization.loads(serialized)
    assert entry2.is_lazy('lazyfield')
    # Compare the underlying dicts, so that every field is checked
    assert dict(entry1) == dict(entry2)
    assert entry2['lazyfield'] == 'value a'
Example #5
def test_builtin_serialization(self):
    # Also test that these types nest properly
    value = {
        'a': 'aoeu',
        'b': [1, 2, 3.5],
        'c': (1, datetime.datetime(2019, 12, 12, 12, 12)),
        'd': {'a', 1, datetime.date(2019, 11, 11)},
    }
    out = serialization.dumps(value)
    backin = serialization.loads(out)
    assert backin == value
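The final equality assertion is stricter than it looks: a plain stdlib json round trip could not pass it, because tuples come back as lists, and sets and datetimes are not encodable at all, so the custom serializer must be carrying type information. A quick demonstration of the lossy stdlib behaviour:

import json

value = {'b': [1, 2, 3.5], 'c': (1, 2)}
back = json.loads(json.dumps(value))
assert back['c'] == [1, 2]  # the tuple has silently become a list
assert back != value        # so the naive round trip is not faithful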
Example #6
def setter(self, entry):
    if isinstance(entry, dict):
        if entry.get('serializer') == 'Entry' and 'version' in entry and 'value' in entry:
            # This is already a serialized form of entry
            setattr(self, name, json.dumps(entry))
            return
        entry = Entry(entry)
    if isinstance(entry, Entry):
        setattr(self, name, serialization.dumps(entry))
    else:
        raise TypeError(f'{type(entry)!r} is not type Entry or dict.')
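This setter closes over an outer variable, name, which suggests it is built inside a property factory and attached to a model class. A minimal sketch of that closure pattern with plain json (json_property and Record are assumptions for illustration, not Flexget API):

import json

def json_property(name):
    # Mirrors the closure over `name` in the setter above: the stored
    # attribute name is fixed once, when the property is created.
    def getter(self):
        return json.loads(getattr(self, name))

    def setter(self, value):
        if not isinstance(value, dict):
            raise TypeError(f'{type(value)!r} is not type dict.')
        setattr(self, name, json.dumps(value))

    return property(getter, setter)

class Record:
    data = json_property('_data')

r = Record()
r.data = {'title': 'blah'}
print(r.data['title'])  # blah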
Example #7
File: db.py Project: x572b/Flexget
def upgrade(ver, session):
    if ver is None:
        # Make sure there is no data we can't load in the backlog table
        backlog_table = table_schema('backlog', session)
        try:
            for item in session.query('entry').select_from(
                    backlog_table).all():
                pickle.loads(item.entry)
        except (ImportError, TypeError):
            # If there were problems, we can drop the data.
            logger.info(
                'Backlog table contains unloadable data, clearing old data.')
            session.execute(backlog_table.delete())
        ver = 0
    if ver == 0:
        backlog_table = table_schema('backlog', session)
        logger.info('Creating index on backlog table.')
        Index('ix_backlog_feed_expire', backlog_table.c.feed,
              backlog_table.c.expire).create(bind=session.bind)
        ver = 1
    if ver == 1:
        table = table_schema('backlog', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('backlog', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                logger.error(
                    'Error upgrading backlog pickle object due to {}',
                    str(e))

        ver = 2
    if ver == 2:
        table = table_schema('backlog', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 3
    return ver
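All of these upgrade functions share one shape: a chain of if ver == n: blocks, each performing a single migration step and bumping ver, so a database at any old version replays every later step, in order, within a single call. The skeleton, reduced to its control flow (bodies elided):

def upgrade(ver, session):
    if ver is None:   # schema predates versioning
        ...           # clean or seed as needed
        ver = 0
    if ver == 0:      # step 0 -> 1
        ...
        ver = 1
    if ver == 1:      # step 1 -> 2
        ...
        ver = 2
    return ver        # persisted as the new schema version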
Example #8
def upgrade(ver, session):
    if ver == 0:
        table = table_schema('pending_entries', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 1
    return ver
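The str(data.pop('title')) coercion exists because decode_datetime=True can turn a date-shaped title back into a date object, and the title must stay a string when rebuilding the Entry. A tiny illustration:

import datetime

# A decoded row whose title round-tripped into a date object
data = {'title': datetime.date(1999, 9, 9), 'url': 'http://blah'}
title = str(data.pop('title'))
assert title == '1999-09-09'   # back to a plain string
assert 'title' not in data     # popped, so it is not passed twice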
Example #9
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table; a better attempt follows in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        failures = 0
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update()
                    .where(table.c.id == row['id'])
                    .values(json=json.dumps(p, encode_datetime=True))
                )
            except (KeyError, ImportError):
                failures += 1
        if failures > 0:
            logger.error(
                'Error upgrading {} pickle objects. Some delay information has been lost.',
                failures,
            )
        ver = 2
    if ver == 2:
        table = table_schema('delay', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(
                table.update().where(table.c.id == row['id']).values(json=serialization.dumps(e))
            )
        ver = 3

    return ver
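Unlike the earlier examples, this upgrade counts per-row failures and logs once after the loop, rather than emitting one error line per bad row. The same shape with a stand-in migration step (migrate is hypothetical, and stdlib logging replaces Flexget's logger):

import logging

logger = logging.getLogger(__name__)

def migrate(blob):
    # Hypothetical per-row step that fails the way pickle.loads can
    if blob == b'bad':
        raise KeyError('missing field')
    return blob.decode()

failures = 0
for blob in (b'good', b'bad'):
    try:
        migrate(blob)
    except (KeyError, ImportError):
        failures += 1
if failures:
    logger.error('Error upgrading %s pickle objects. Some data has been lost.', failures)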