Example #1
    def getCredentials(self, **kwargs):
        try:
            oauth_token = kwargs.get("oauth")
        except:
            return "redirect", Env.get("web_base") + "settings/automation/"
        log.debug("oauth_token is: %s", oauth_token)
        self.conf("automation_oauth_token", value=oauth_token)
        return "redirect", Env.get("web_base") + "settings/automation/"
Example #2
    def startup_compact(self):
        from couchpotato import Env

        db = self.getDB()
        size = db.get_db_details().get('size')
        prop_name = 'last_db_compact'
        last_check = int(Env.prop(prop_name, default = 0))
        if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days
            self.compact()
            Env.prop(prop_name, value = int(time.time()))
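The gate in startup_compact uses Env.prop both as a getter (with default = 0) and, after compacting, as a setter (with value = ...). The inline comment decodes the magic numbers: 26214400 bytes is 25 MiB and 604800 seconds is 7 days. A small sketch with named constants (the constant names are ours, not CouchPotato's):

import time

COMPACT_MIN_SIZE = 25 * 1024 * 1024  # 26214400 bytes = 25 MiB
COMPACT_INTERVAL = 7 * 24 * 60 * 60  # 604800 seconds = 7 days

def should_compact(size, last_compact):
    # Compact only when the database is big enough and enough time has passed
    return size > COMPACT_MIN_SIZE and last_compact < time.time() - COMPACT_INTERVAL

print(should_compact(30 * 1024 * 1024, time.time() - 8 * 24 * 60 * 60))  # True
print(should_compact(10 * 1024 * 1024, 0))                               # False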
Example #3
    def ignoreView(self, imdb = None, **kwargs):

        ignored = splitString(Env.prop('charts_ignore', default = ''))

        if imdb:
            ignored.append(imdb)
            Env.prop('charts_ignore', ','.join(set(ignored)))

        return {
            'result': True
        }
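ignoreView persists the ignore list as a single comma-separated string property, deduplicating with set before writing it back. A rough round-trip sketch; split_string below is our stand-in for CouchPotato's splitString helper (assumed to split on commas and drop empty entries):

def split_string(value, sep = ','):
    # Stand-in for CouchPotato's splitString helper (assumption)
    return [x.strip() for x in value.split(sep) if x.strip()] if value else []

stored = 'tt0111161,tt0068646'      # what Env.prop('charts_ignore') might hold
ignored = split_string(stored)
ignored.append('tt0068646')         # duplicate append, as in ignoreView
print(','.join(set(ignored)))       # set() removes the duplicate; order may vary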
Example #4
    def getCredentials(self, **kwargs):
        try:
            oauth_token = kwargs.get('oauth')
            refresh_token = kwargs.get('refresh')

            log.debug('oauth_token is: %s', oauth_token)
            self.conf('automation_oauth_token', value = oauth_token)
            self.conf('automation_oauth_refresh', value = refresh_token)

            Env.prop('last_trakt_refresh', value = int(time.time()))
        except:
            log.error('Failed setting trakt token: %s', traceback.format_exc())

        return 'redirect', Env.get('web_base') + 'settings/automation/'
Example #5
    def search(self, q, limit = 12):

        if self.isSearchDisabled():
            return []

        name_year = fireEvent('scanner.name_year', q, single = True)

        if not name_year or (name_year and not name_year.get('name')):
            name_year = {
                'name': q
            }

        cache_key = 'omdbapi.cache.%s' % q
        url = self.urls['search'] % tryUrlencode({'t': name_year.get('name'), 'y': name_year.get('year', '')})
        cached = self.getCache(cache_key, url, timeout = 3, headers = {'User-Agent': Env.getIdentifier()})

        if cached:
            result = self.parseMovie(cached)
            if result.get('titles') and len(result.get('titles')) > 0:
                log.info('Found: %s', result['titles'][0] + ' (' + str(result.get('year')) + ')')
                return [result]

            return []

        return []
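search first asks the scanner.name_year event to split a query such as 'The Prestige 2006' into a name and a year; when that fails, it falls back to using the raw query as the name, and tryUrlencode then sends an empty y parameter. A rough regex stand-in for that split (the real scanner is certainly more elaborate):

import re

def name_year(q):
    # Naive stand-in for the 'scanner.name_year' event (assumption)
    match = re.match(r'^(?P<name>.+?)[\s.(]+(?P<year>19\d{2}|20\d{2})\)?$', q)
    if match:
        return {'name': match.group('name').strip(), 'year': int(match.group('year'))}
    return {}

print(name_year('The Prestige 2006'))  # {'name': 'The Prestige', 'year': 2006}
print(name_year('Inception'))          # {} -> search() falls back to {'name': q}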
Example #6
    def getAuthorizationUrl(self, host=None, **kwargs):
        callback_url = cleanHost(host) + "%sautomation.trakt.credentials/" % (Env.get("api_base").lstrip("/"))
        log.debug("callback_url is %s", callback_url)

        target_url = self.urls["oauth"] + "?target=" + callback_url
        log.debug("target_url is %s", target_url)

        return {"success": True, "url": target_url}
Example #7
    def getAuthorizationUrl(self, host = None, **kwargs):
        callback_url = cleanHost(host) + '%sautomation.trakt.credentials/' % (Env.get('api_base').lstrip('/'))
        log.debug('callback_url is %s', callback_url)

        target_url = self.urls['oauth'] + "?target=" + callback_url
        log.debug('target_url is %s', target_url)

        return {
            'success': True,
            'url': target_url,
        }
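Examples #6 and #7 are the same getAuthorizationUrl with different quoting. In both, callback_url (which contains ':', '/' and a trailing '/') is appended to the query string verbatim. A more defensive variant, our suggestion rather than CouchPotato's code, percent-encodes the target:

try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote        # Python 2, which CouchPotato targets

def build_target_url(oauth_url, callback_url):
    # Percent-encode the callback so reserved characters survive as one query value
    return oauth_url + '?target=' + quote(callback_url, safe = '')

print(build_target_url('https://example.com/oauth',
                       'http://localhost:5050/api/automation.trakt.credentials/'))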
Example #8
    def automationView(self, force_update = False, **kwargs):

        db = get_db()

        charts = fireEvent('automation.get_chart_list', merge = True)
        ignored = splitString(Env.prop('charts_ignore', default = ''))

        # Create a list the movie/list.js can use
        for chart in charts:
            medias = []
            for media in chart.get('list', []):

                identifier = media.get('imdb')
                if identifier in ignored:
                    continue

                try:
                    try:
                        in_library = db.get('media', 'imdb-%s' % identifier)
                        if in_library:
                            continue
                    except RecordNotFound:
                        pass
                except:
                    pass

                # Cache poster
                posters = media.get('images', {}).get('poster', [])
                poster = [x for x in posters if 'tmdb' in x]
                posters = poster if len(poster) > 0 else posters

                cached_poster = fireEvent('file.download', url = posters[0], single = True) if len(posters) > 0 else False
                files = {'image_poster': [cached_poster] } if cached_poster else {}

                medias.append({
                    'status': 'chart',
                    'title': getTitle(media),
                    'type': 'movie',
                    'info': media,
                    'files': files,
                    'identifiers': {
                        'imdb': identifier
                    }
                })

            chart['list'] = medias

        return {
            'success': True,
            'count': len(charts),
            'charts': charts,
            'ignored': ignored,
        }
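The poster handling inside automationView prefers images whose URL contains 'tmdb' and only falls back to the full list when no such image exists. Compactly:

def pick_posters(posters):
    # Prefer tmdb-hosted posters, as automationView does; otherwise keep all
    preferred = [x for x in posters if 'tmdb' in x]
    return preferred if preferred else posters

print(pick_posters(['http://image.tmdb.org/a.jpg', 'http://elsewhere.example/b.jpg']))
# ['http://image.tmdb.org/a.jpg']
print(pick_posters(['http://elsewhere.example/b.jpg']))
# ['http://elsewhere.example/b.jpg']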
Example #9
    def startup_compact(self):
        from couchpotato import Env

        db = self.getDB()

        # Try fix for migration failures on desktop
        if Env.get('desktop'):
            try:
                list(db.all('profile', with_doc = True))
            except RecordNotFound:

                failed_location = '%s_failed' % db.path
                old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')

                if not os.path.isdir(failed_location) and os.path.isfile(old_db):
                    log.error('Corrupt database, trying migrate again')
                    db.close()

                    # Rename database folder
                    os.rename(db.path, '%s_failed' % db.path)

                    # Rename .old database to try another migrate
                    os.rename(old_db, old_db[:-4])

                    fireEventAsync('app.restart')
                else:
                    log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
                    db.reindex()

                return

        # Check size and compact if needed
        size = db.get_db_details().get('size')
        prop_name = 'last_db_compact'
        last_check = int(Env.prop(prop_name, default = 0))

        if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days
            self.compact()
            Env.prop(prop_name, value = int(time.time()))
Example #10
    def refreshToken(self):

        token = self.conf('automation_oauth_token')
        refresh_token = self.conf('automation_oauth_refresh')
        if token and refresh_token:

            prop_name = 'last_trakt_refresh'
            last_refresh = int(Env.prop(prop_name, default = 0))

            if last_refresh < time.time()-4838400:  # refresh every 8 weeks
                log.debug('Refreshing trakt token')

                url = self.urls['refresh_token'] + '?token=' + self.conf('automation_oauth_refresh')
                data = fireEvent('cp.api_call', url, cache_timeout = 0, single = True)
                if data and 'oauth' in data and 'refresh' in data:
                    log.debug('Oauth refresh: %s', data)
                    self.conf('automation_oauth_token', value = data.get('oauth'))
                    self.conf('automation_oauth_refresh', value = data.get('refresh'))
                    Env.prop(prop_name, value = int(time.time()))
                else:
                    log.error('Failed refreshing Trakt token, please re-register in settings')

        elif token and not refresh_token:
            log.error('Refresh token is missing, please re-register Trakt for autorefresh of the token in the future')
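refreshToken distinguishes three states: both tokens present (refresh if stale), an access token without a refresh token (log an error asking the user to re-register), and no token at all (do nothing). The 4838400-second threshold is exactly 8 weeks; a sketch with the derivation spelled out (the constant name is ours):

import time

TRAKT_REFRESH_INTERVAL = 8 * 7 * 24 * 60 * 60  # 4838400 seconds = 8 weeks

def needs_refresh(last_refresh):
    # Stale once the stored 'last_trakt_refresh' timestamp is over 8 weeks old
    return last_refresh < time.time() - TRAKT_REFRESH_INTERVAL

print(needs_refresh(0))            # True: Env.prop default of 0 forces a refresh
print(needs_refresh(time.time()))  # False: just refreshed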
Example #11
    def getInfo(self, identifier = None, **kwargs):
        if self.isDisabled() or not identifier:
            return {}

        cache_key = 'omdbapi.cache.%s' % identifier
        url = self.urls['info'] % (self.getApiKey(), identifier)
        cached = self.getCache(cache_key, url, timeout = 3, headers = {'User-Agent': Env.getIdentifier()})

        if cached:
            result = self.parseMovie(cached)
            if result.get('titles') and len(result.get('titles')) > 0:
                log.info('Found: %s', result['titles'][0] + ' (' + str(result['year']) + ')')
                return result

        return {}
Example #12
    def migrate(self):

        from couchpotato import Env
        old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db')
        if not os.path.isfile(old_db): return

        log.info('=' * 30)
        log.info('Migrating database, hold on..')
        time.sleep(1)

        if os.path.isfile(old_db):

            migrate_start = time.time()

            import sqlite3
            conn = sqlite3.connect(old_db)

            migrate_list = {
                'category': ['id', 'label', 'order', 'required', 'preferred', 'ignored', 'destination'],
                'profile': ['id', 'label', 'order', 'core', 'hide'],
                'profiletype': ['id', 'order', 'finish', 'wait_for', 'quality_id', 'profile_id'],
                'quality': ['id', 'identifier', 'order', 'size_min', 'size_max'],
                'movie': ['id', 'last_edit', 'library_id', 'status_id', 'profile_id', 'category_id'],
                'library': ['id', 'identifier', 'info'],
                'librarytitle': ['id', 'title', 'default', 'libraries_id'],
                'library_files__file_library': ['library_id', 'file_id'],
                'release': ['id', 'identifier', 'movie_id', 'status_id', 'quality_id', 'last_edit'],
                'releaseinfo': ['id', 'identifier', 'value', 'release_id'],
                'release_files__file_release': ['release_id', 'file_id'],
                'status': ['id', 'identifier'],
                'properties': ['id', 'identifier', 'value'],
                'file': ['id', 'path', 'type_id'],
                'filetype': ['identifier', 'id']
            }

            migrate_data = {}
            rename_old = False

            try:

                c = conn.cursor()

                for ml in migrate_list:
                    migrate_data[ml] = {}
                    rows = migrate_list[ml]

                    try:
                        c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
                    except:
                        # ignore faulty destination_id database
                        if ml == 'category':
                            migrate_data[ml] = {}
                        else:
                            rename_old = True
                            raise

                    for p in c.fetchall():
                        columns = {}
                        for row in migrate_list[ml]:
                            columns[row] = p[rows.index(row)]

                        if not migrate_data[ml].get(p[0]):
                            migrate_data[ml][p[0]] = columns
                        else:
                            if not isinstance(migrate_data[ml][p[0]], list):
                                migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                            migrate_data[ml][p[0]].append(columns)

                conn.close()

                log.info('Getting data took %s', time.time() - migrate_start)

                db = self.getDB()
                if not db.opened:
                    return

                # Use properties
                properties = migrate_data['properties']
                log.info('Importing %s properties', len(properties))
                for x in properties:
                    property = properties[x]
                    Env.prop(property.get('identifier'), property.get('value'))

                # Categories
                categories = migrate_data.get('category', [])
                log.info('Importing %s categories', len(categories))
                category_link = {}
                for x in categories:
                    c = categories[x]

                    new_c = db.insert({
                        '_t': 'category',
                        'order': c.get('order', 999),
                        'label': toUnicode(c.get('label', '')),
                        'ignored': toUnicode(c.get('ignored', '')),
                        'preferred': toUnicode(c.get('preferred', '')),
                        'required': toUnicode(c.get('required', '')),
                        'destination': toUnicode(c.get('destination', '')),
                    })

                    category_link[x] = new_c.get('_id')

                # Profiles
                log.info('Importing profiles')
                new_profiles = db.all('profile', with_doc = True)
                new_profiles_by_label = {}
                for x in new_profiles:

                    # Remove default non core profiles
                    if not x['doc'].get('core'):
                        db.delete(x['doc'])
                    else:
                        new_profiles_by_label[x['doc']['label']] = x['_id']

                profiles = migrate_data['profile']
                profile_link = {}
                for x in profiles:
                    p = profiles[x]

                    exists = new_profiles_by_label.get(p.get('label'))

                    # Update existing with order only
                    if exists and p.get('core'):
                        profile = db.get('id', exists)
                        profile['order'] = tryInt(p.get('order'))
                        profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
                        db.update(profile)

                        profile_link[x] = profile.get('_id')
                    else:

                        new_profile = {
                            '_t': 'profile',
                            'label': p.get('label'),
                            'order': int(p.get('order', 999)),
                            'core': p.get('core', False),
                            'qualities': [],
                            'wait_for': [],
                            'finish': []
                        }

                        types = migrate_data['profiletype']
                        for profile_type in types:
                            p_type = types[profile_type]
                            if types[profile_type]['profile_id'] == p['id']:
                                if p_type['quality_id']:
                                    new_profile['finish'].append(p_type['finish'])
                                    new_profile['wait_for'].append(p_type['wait_for'])
                                    new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])

                        if len(new_profile['qualities']) > 0:
                            new_profile.update(db.insert(new_profile))
                            profile_link[x] = new_profile.get('_id')
                        else:
                            log.error('Corrupt profile list for "%s", using default.', p.get('label'))

                # Qualities
                log.info('Importing quality sizes')
                new_qualities = db.all('quality', with_doc = True)
                new_qualities_by_identifier = {}
                for x in new_qualities:
                    new_qualities_by_identifier[x['doc']['identifier']] = x['_id']

                qualities = migrate_data['quality']
                quality_link = {}
                for x in qualities:
                    q = qualities[x]
                    q_id = new_qualities_by_identifier[q.get('identifier')]

                    quality = db.get('id', q_id)
                    quality['order'] = q.get('order')
                    quality['size_min'] = tryInt(q.get('size_min'))
                    quality['size_max'] = tryInt(q.get('size_max'))
                    db.update(quality)

                    quality_link[x] = quality

                # Titles
                titles = migrate_data['librarytitle']
                titles_by_library = {}
                for x in titles:
                    title = titles[x]
                    if title.get('default'):
                        titles_by_library[title.get('libraries_id')] = title.get('title')

                # Releases
                releaseinfos = migrate_data['releaseinfo']
                for x in releaseinfos:
                    info = releaseinfos[x]

                    # Skip if release doesn't exist for this info
                    if not migrate_data['release'].get(info.get('release_id')):
                        continue

                    if not migrate_data['release'][info.get('release_id')].get('info'):
                        migrate_data['release'][info.get('release_id')]['info'] = {}

                    migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')

                releases = migrate_data['release']
                releases_by_media = {}
                for x in releases:
                    release = releases[x]
                    if not releases_by_media.get(release.get('movie_id')):
                        releases_by_media[release.get('movie_id')] = []

                    releases_by_media[release.get('movie_id')].append(release)

                # Type ids
                types = migrate_data['filetype']
                type_by_id = {}
                for t in types:
                    type = types[t]
                    type_by_id[type.get('id')] = type

                # Media
                log.info('Importing %s media items', len(migrate_data['movie']))
                statuses = migrate_data['status']
                libraries = migrate_data['library']
                library_files = migrate_data['library_files__file_library']
                releases_files = migrate_data['release_files__file_release']
                all_files = migrate_data['file']
                poster_type = migrate_data['filetype']['poster']
                medias = migrate_data['movie']
                for x in medias:
                    m = medias[x]

                    status = statuses.get(m['status_id']).get('identifier')
                    l = libraries.get(m['library_id'])

                    # Only migrate wanted movies, Skip if no identifier present
                    if not l or not getImdb(l.get('identifier')): continue

                    profile_id = profile_link.get(m['profile_id'])
                    category_id = category_link.get(m['category_id'])
                    title = titles_by_library.get(m['library_id'])
                    releases = releases_by_media.get(x, [])
                    info = json.loads(l.get('info', ''))

                    files = library_files.get(m['library_id'], [])
                    if not isinstance(files, list):
                        files = [files]

                    added_media = fireEvent('movie.add', {
                        'info': info,
                        'identifier': l.get('identifier'),
                        'profile_id': profile_id,
                        'category_id': category_id,
                        'title': title
                    }, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)

                    if not added_media:
                        log.error('Failed adding media %s: %s', (l.get('identifier'), info))
                        continue

                    added_media['files'] = added_media.get('files', {})
                    for f in files:
                        ffile = all_files[f.get('file_id')]

                        # Only migrate posters
                        if ffile.get('type_id') == poster_type.get('id'):
                            if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
                                added_media['files']['image_poster'] = [ffile.get('path')]
                                break

                    if 'image_poster' in added_media['files']:
                        db.update(added_media)

                    for rel in releases:

                        empty_info = False
                        if not rel.get('info'):
                            empty_info = True
                            rel['info'] = {}

                        quality = quality_link.get(rel.get('quality_id'))
                        if not quality:
                            continue

                        release_status = statuses.get(rel.get('status_id')).get('identifier')

                        if rel['info'].get('download_id'):
                            status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
                            rel['info']['download_info'] = {
                                'id': rel['info'].get('download_id'),
                                'downloader': rel['info'].get('download_downloader'),
                                'status_support': status_support,
                            }

                        # Add status to keys
                        rel['info']['status'] = release_status
                        if not empty_info:
                            fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
                        else:
                            release = {
                                '_t': 'release',
                                'identifier': rel.get('identifier'),
                                'media_id': added_media.get('_id'),
                                'quality': quality.get('identifier'),
                                'status': release_status,
                                'last_edit': int(time.time()),
                                'files': {}
                            }

                            # Add downloader info if provided
                            try:
                                release['download_info'] = rel['info']['download_info']
                                del rel['download_info']
                            except:
                                pass

                            # Add files
                            release_files = releases_files.get(rel.get('id'), [])
                            if not isinstance(release_files, list):
                                release_files = [release_files]

                            if len(release_files) == 0:
                                continue

                            for f in release_files:
                                rfile = all_files.get(f.get('file_id'))
                                if not rfile:
                                    continue

                                file_type = type_by_id.get(rfile.get('type_id')).get('identifier')

                                if not release['files'].get(file_type):
                                    release['files'][file_type] = []

                                release['files'][file_type].append(rfile.get('path'))

                            try:
                                rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
                                rls.update(release)
                                db.update(rls)
                            except:
                                db.insert(release)

                log.info('Total migration took %s', time.time() - migrate_start)
                log.info('=' * 30)

                rename_old = True

            except OperationalError:
                log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
                
                rename_old = True
            except:
                log.error('Migration failed: %s', traceback.format_exc())


            # rename old database
            if rename_old:
                random = randomString()
                log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
                os.rename(old_db, '%s.%s_old' % (old_db, random))

                if os.path.isfile(old_db + '-wal'):
                    os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
                if os.path.isfile(old_db + '-shm'):
                    os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
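Examples #12 and #13 appear to be two revisions of the same migrate method. The version above guards the import loop with rename_old and an OperationalError handler, and renames the old database with a random suffix so a repeated migration attempt cannot clobber an earlier backup; the version below always renames to a fixed '.old' suffix, which is the exact file (couchpotato.db.old) that the desktop recovery path in Example #9 looks for. A rough sketch of the random-suffix rename, with uuid4 standing in for CouchPotato's randomString helper:

import os
import uuid

def rename_old_database(old_db):
    # Append a random suffix so earlier backups are never overwritten
    suffix = uuid.uuid4().hex[:10]  # stand-in for randomString() (assumption)
    for ext in ('', '-wal', '-shm'):
        path = old_db + ext
        if os.path.isfile(path):
            os.rename(path, '%s%s.%s_old' % (old_db, ext, suffix))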
Example #13
    def migrate(self):

        from couchpotato import Env
        old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db')
        if not os.path.isfile(old_db): return

        log.info('=' * 30)
        log.info('Migrating database, hold on..')
        time.sleep(1)

        if os.path.isfile(old_db):

            migrate_start = time.time()

            import sqlite3
            conn = sqlite3.connect(old_db)

            migrate_list = {
                'category': [
                    'id', 'label', 'order', 'required', 'preferred', 'ignored',
                    'destination'
                ],
                'profile': ['id', 'label', 'order', 'core', 'hide'],
                'profiletype': [
                    'id', 'order', 'finish', 'wait_for', 'quality_id',
                    'profile_id'
                ],
                'quality': ['id', 'identifier', 'order', 'size_min', 'size_max'],
                'movie': [
                    'id', 'last_edit', 'library_id', 'status_id', 'profile_id',
                    'category_id'
                ],
                'library': ['id', 'identifier', 'info'],
                'librarytitle': ['id', 'title', 'default', 'libraries_id'],
                'library_files__file_library': ['library_id', 'file_id'],
                'release': [
                    'id', 'identifier', 'movie_id', 'status_id', 'quality_id',
                    'last_edit'
                ],
                'releaseinfo': ['id', 'identifier', 'value', 'release_id'],
                'release_files__file_release': ['release_id', 'file_id'],
                'status': ['id', 'identifier'],
                'properties': ['id', 'identifier', 'value'],
                'file': ['id', 'path', 'type_id'],
                'filetype': ['identifier', 'id']
            }

            migrate_data = {}

            c = conn.cursor()

            for ml in migrate_list:
                migrate_data[ml] = {}
                rows = migrate_list[ml]

                try:
                    c.execute('SELECT %s FROM `%s`' %
                              ('`' + '`,`'.join(rows) + '`', ml))
                except:
                    # ignore faulty destination_id database
                    if ml == 'category':
                        migrate_data[ml] = {}
                    else:
                        raise

                for p in c.fetchall():
                    columns = {}
                    for row in migrate_list[ml]:
                        columns[row] = p[rows.index(row)]

                    if not migrate_data[ml].get(p[0]):
                        migrate_data[ml][p[0]] = columns
                    else:
                        if not isinstance(migrate_data[ml][p[0]], list):
                            migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                        migrate_data[ml][p[0]].append(columns)

            conn.close()

            log.info('Getting data took %s', time.time() - migrate_start)

            db = self.getDB()
            if not db.opened:
                return

            # Use properties
            properties = migrate_data['properties']
            log.info('Importing %s properties', len(properties))
            for x in properties:
                property = properties[x]
                Env.prop(property.get('identifier'), property.get('value'))

            # Categories
            categories = migrate_data.get('category', [])
            log.info('Importing %s categories', len(categories))
            category_link = {}
            for x in categories:
                c = categories[x]

                new_c = db.insert({
                    '_t': 'category',
                    'order': c.get('order', 999),
                    'label': toUnicode(c.get('label', '')),
                    'ignored': toUnicode(c.get('ignored', '')),
                    'preferred': toUnicode(c.get('preferred', '')),
                    'required': toUnicode(c.get('required', '')),
                    'destination': toUnicode(c.get('destination', '')),
                })

                category_link[x] = new_c.get('_id')

            # Profiles
            log.info('Importing profiles')
            new_profiles = db.all('profile', with_doc=True)
            new_profiles_by_label = {}
            for x in new_profiles:

                # Remove default non core profiles
                if not x['doc'].get('core'):
                    db.delete(x['doc'])
                else:
                    new_profiles_by_label[x['doc']['label']] = x['_id']

            profiles = migrate_data['profile']
            profile_link = {}
            for x in profiles:
                p = profiles[x]

                exists = new_profiles_by_label.get(p.get('label'))

                # Update existing with order only
                if exists and p.get('core'):
                    profile = db.get('id', exists)
                    profile['order'] = tryInt(p.get('order'))
                    profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
                    db.update(profile)

                    profile_link[x] = profile.get('_id')
                else:

                    new_profile = {
                        '_t': 'profile',
                        'label': p.get('label'),
                        'order': int(p.get('order', 999)),
                        'core': p.get('core', False),
                        'qualities': [],
                        'wait_for': [],
                        'finish': []
                    }

                    types = migrate_data['profiletype']
                    for profile_type in types:
                        p_type = types[profile_type]
                        if types[profile_type]['profile_id'] == p['id']:
                            if p_type['quality_id']:
                                new_profile['finish'].append(p_type['finish'])
                                new_profile['wait_for'].append(p_type['wait_for'])
                                new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])

                    if len(new_profile['qualities']) > 0:
                        new_profile.update(db.insert(new_profile))
                        profile_link[x] = new_profile.get('_id')
                    else:
                        log.error('Corrupt profile list for "%s", using default.', p.get('label'))

            # Qualities
            log.info('Importing quality sizes')
            new_qualities = db.all('quality', with_doc=True)
            new_qualities_by_identifier = {}
            for x in new_qualities:
                new_qualities_by_identifier[x['doc']['identifier']] = x['_id']

            qualities = migrate_data['quality']
            quality_link = {}
            for x in qualities:
                q = qualities[x]
                q_id = new_qualities_by_identifier[q.get('identifier')]

                quality = db.get('id', q_id)
                quality['order'] = q.get('order')
                quality['size_min'] = tryInt(q.get('size_min'))
                quality['size_max'] = tryInt(q.get('size_max'))
                db.update(quality)

                quality_link[x] = quality

            # Titles
            titles = migrate_data['librarytitle']
            titles_by_library = {}
            for x in titles:
                title = titles[x]
                if title.get('default'):
                    titles_by_library[title.get('libraries_id')] = title.get('title')

            # Releases
            releaseinfos = migrate_data['releaseinfo']
            for x in releaseinfos:
                info = releaseinfos[x]

                # Skip if release doesn't exist for this info
                if not migrate_data['release'].get(info.get('release_id')):
                    continue

                if not migrate_data['release'][info.get('release_id')].get('info'):
                    migrate_data['release'][info.get('release_id')]['info'] = {}

                migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')

            releases = migrate_data['release']
            releases_by_media = {}
            for x in releases:
                release = releases[x]
                if not releases_by_media.get(release.get('movie_id')):
                    releases_by_media[release.get('movie_id')] = []

                releases_by_media[release.get('movie_id')].append(release)

            # Type ids
            types = migrate_data['filetype']
            type_by_id = {}
            for t in types:
                type = types[t]
                type_by_id[type.get('id')] = type

            # Media
            log.info('Importing %s media items', len(migrate_data['movie']))
            statuses = migrate_data['status']
            libraries = migrate_data['library']
            library_files = migrate_data['library_files__file_library']
            releases_files = migrate_data['release_files__file_release']
            all_files = migrate_data['file']
            poster_type = migrate_data['filetype']['poster']
            medias = migrate_data['movie']
            for x in medias:
                m = medias[x]

                status = statuses.get(m['status_id']).get('identifier')
                l = libraries.get(m['library_id'])

                # Only migrate wanted movies, Skip if no identifier present
                if not l or not getImdb(l.get('identifier')): continue

                profile_id = profile_link.get(m['profile_id'])
                category_id = category_link.get(m['category_id'])
                title = titles_by_library.get(m['library_id'])
                releases = releases_by_media.get(x, [])
                info = json.loads(l.get('info', ''))

                files = library_files.get(m['library_id'], [])
                if not isinstance(files, list):
                    files = [files]

                added_media = fireEvent('movie.add', {
                    'info': info,
                    'identifier': l.get('identifier'),
                    'profile_id': profile_id,
                    'category_id': category_id,
                    'title': title
                }, force_readd=False, search_after=False, update_after=False, notify_after=False, status=status, single=True)

                if not added_media:
                    log.error('Failed adding media %s: %s', (l.get('identifier'), info))
                    continue

                added_media['files'] = added_media.get('files', {})
                for f in files:
                    ffile = all_files[f.get('file_id')]

                    # Only migrate posters
                    if ffile.get('type_id') == poster_type.get('id'):
                        if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
                            added_media['files']['image_poster'] = [ffile.get('path')]
                            break

                if 'image_poster' in added_media['files']:
                    db.update(added_media)

                for rel in releases:

                    empty_info = False
                    if not rel.get('info'):
                        empty_info = True
                        rel['info'] = {}

                    quality = quality_link.get(rel.get('quality_id'))
                    if not quality:
                        continue

                    release_status = statuses.get(rel.get('status_id')).get('identifier')

                    if rel['info'].get('download_id'):
                        status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
                        rel['info']['download_info'] = {
                            'id': rel['info'].get('download_id'),
                            'downloader': rel['info'].get('download_downloader'),
                            'status_support': status_support,
                        }

                    # Add status to keys
                    rel['info']['status'] = release_status
                    if not empty_info:
                        fireEvent('release.create_from_search', [rel['info']], added_media, quality, single=True)
                    else:
                        release = {
                            '_t': 'release',
                            'identifier': rel.get('identifier'),
                            'media_id': added_media.get('_id'),
                            'quality': quality.get('identifier'),
                            'status': release_status,
                            'last_edit': int(time.time()),
                            'files': {}
                        }

                        # Add downloader info if provided
                        try:
                            release['download_info'] = rel['info']['download_info']
                            del rel['download_info']
                        except:
                            pass

                        # Add files
                        release_files = releases_files.get(rel.get('id'), [])
                        if not isinstance(release_files, list):
                            release_files = [release_files]

                        if len(release_files) == 0:
                            continue

                        for f in release_files:
                            rfile = all_files.get(f.get('file_id'))
                            if not rfile:
                                continue

                            file_type = type_by_id.get(rfile.get('type_id')).get('identifier')

                            if not release['files'].get(file_type):
                                release['files'][file_type] = []

                            release['files'][file_type].append(rfile.get('path'))

                        try:
                            rls = db.get('release_identifier', rel.get('identifier'), with_doc=True)['doc']
                            rls.update(release)
                            db.update(rls)
                        except:
                            db.insert(release)

            log.info('Total migration took %s', time.time() - migrate_start)
            log.info('=' * 30)

            # rename old database
            log.info('Renaming old database to %s ', old_db + '.old')
            os.rename(old_db, old_db + '.old')

            if os.path.isfile(old_db + '-wal'):
                os.rename(old_db + '-wal', old_db + '-wal.old')
            if os.path.isfile(old_db + '-shm'):
                os.rename(old_db + '-shm', old_db + '-shm.old')