Example #1
import re
from urllib.parse import urlparse, uses_netloc
# Connection here is SQLAlchemy's Connection class (assumed import):
from sqlalchemy.engine import Connection


def parse_bind(bind):
    """Parses a connection string and creates SQL trace metadata"""
    if isinstance(bind, Connection):
        engine = bind.engine
    else:
        engine = bind
    sql = None
    m = re.match(r"Engine\((.*?)\)", str(engine))
    if m is not None:
        u = urlparse(m.group(1))
        # Add Scheme to uses_netloc or // will be missing from url.
        uses_netloc.append(u.scheme)
        safe_url = ""
        if u.password is None:
            safe_url = u.geturl()
        else:
            # Strip password from URL
            host_info = u.netloc.rpartition('@')[-1]
            parts = u._replace(netloc='{}@{}'.format(u.username, host_info))
            safe_url = parts.geturl()
        sql = {}
        sql['database_type'] = u.scheme
        sql['url'] = safe_url
        if u.username is not None:
            sql['user'] = "******".format(u.username)
    return sql
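A minimal usage sketch for the function above, assuming SQLAlchemy is installed; create_engine and the URL are illustrative, not part of the original:

from sqlalchemy import create_engine

engine = create_engine("postgresql://scott:tiger@localhost:5432/mydb")
meta = parse_bind(engine)
print(meta["database_type"])  # scheme of the parsed URL, e.g. 'postgresql'
print(meta["url"])            # connection URL with any password portion stripped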
Example #2
import psycopg2
from urllib.parse import urlparse, uses_netloc


def initialize():
    # this function will get called once, when the application starts.

    uses_netloc.append("postgres")
    url = urlparse(
        "postgres://*****:*****@isilo.db.elephantsql.com:5432/mcpxvutz"
    )

    conn = psycopg2.connect(database=url.path[1:],
                            user=url.username,
                            password=url.password,
                            host=url.hostname,
                            port=url.port)

    with conn.cursor() as cursor:
        #cursor.execute('drop table if exists customers; drop table if exists orders; drop table if exists products;')
        cursor.execute(
            "CREATE TABLE if NOT EXISTS customers (id  serial PRIMARY KEY, firstName Varchar(15), lastName Varchar(15), street Varchar(35), city Varchar(15), state VarChar(15), zip Text)"
        )
        cursor.execute(
            "CREATE TABLE if not exists products (id serial PRIMARY KEY , name Varchar(15), price INTEGER)"
        )
        cursor.execute(
            "CREATE TABLE if not exists orders (id  serial  PRIMARY KEY, customerId INTEGER, productId INTEGER, date Varchar (10), Foreign key(customerId) References customers (id) ON Update Cascade ON Delete Cascade, Foreign key(productID) References Products(id) ON Update Cascade ON Delete Cascade)"
        )

    conn.commit()

    return conn
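A hypothetical follow-up, assuming initialize() above succeeded; the sample values are invented for illustration:

conn = initialize()
with conn.cursor() as cursor:
    cursor.execute(
        "INSERT INTO products (name, price) VALUES (%s, %s) RETURNING id",
        ("widget", 5),
    )
    product_id = cursor.fetchone()[0]
conn.commit()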
Example #3
def initialize():
    # this function will get called once, when the application starts.
    # this would be a good place to initialize your connection!
    uses_netloc.append("postgres")
    url = urlparse(connection_string)  # connection_string is defined at module level (see Example #16)

    conn = psycopg2.connect(database=url.path[1:],
                            user=url.username,
                            password=url.password,
                            port=url.port,
                            host=url.hostname)

    cursor = conn.cursor()
    cursor.execute(
        'CREATE TABLE IF NOT EXISTS customers( id SERIAL PRIMARY KEY, firstName VARCHAR(50), lastName VARCHAR(50), street VARCHAR(100), city VARCHAR(50), state VARCHAR(20), zip VARCHAR(5))'
    )

    cursor.execute(
        'CREATE TABLE IF NOT EXISTS products( id SERIAL PRIMARY KEY, name VARCHAR(50), price FLOAT)'
    )
    cursor.execute(
        'CREATE TABLE IF NOT EXISTS orders(id SERIAL PRIMARY KEY, customerId INT, productId INT, date DATE, FOREIGN KEY (customerId) REFERENCES customers(id) ON DELETE CASCADE ON UPDATE CASCADE, FOREIGN KEY (productId) REFERENCES products(id) ON DELETE CASCADE ON UPDATE CASCADE)'
    )
    conn.commit()

    return conn
Example #4
def connect_to_db(conn_str):
    uses_netloc.append('postgres')
    url = urlparse(conn_str)
    conn = psycopg2.connect(database=url.path[1:],
                            user=url.username,
                            password=url.password,
                            host=url.hostname,
                            port=url.port)
    return conn
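Hypothetical usage of the helper above; the connection string is illustrative:

conn = connect_to_db("postgres://user:secret@localhost:5432/mydb")
with conn.cursor() as cur:
    cur.execute("SELECT version()")
    print(cur.fetchone()[0])
conn.close()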
Example #5
def connect_db():
    """Connects to the specific database."""
    uses_netloc.append("postgres")
    url = urlparse(app.config["DATABASE_URL"])
    conn = psycopg2.connect(database=url.path[1:],
                            user=url.username,
                            password=url.password,
                            host=url.hostname,
                            port=url.port)
    return conn
Example #6
    def connect_to_db(conn_str):
        uses_netloc.append('postgres')
        url = urlparse(conn_str)

        conn = psycopg2.connect(database=url.path[1:],
                                user=url.username,
                                password=url.password,
                                host=url.hostname,
                                port=url.port)

        return conn
Example #7
def update_activity():

    uses_netloc.append("postgres")
    url = urlparse(os.environ["DATABASE_URL"])
    db_connection = None

    try:
        db_connection = psycopg2.connect(database=url.path[1:],
                                         user=url.username,
                                         password=url.password,
                                         host=url.hostname,
                                         port=url.port)
        cursor = db_connection.cursor()
        cursor.execute("select user_id from vk_users")
        user_ids = [str(user_id[0]) for user_id in cursor.fetchall()]
        users = get_users(user_ids)
        current_minute = get_minute()

        for user in users:
            last_seen_ut = user['last_seen']['time']
            user_id = user['id']

            if is_last_seen_today(last_seen_ut):
                last_seen_minute = get_minute(last_seen_ut)
                state = '{{"{}":{}, "{}":{}}}'.format(current_minute,
                                                      user['online'],
                                                      last_seen_minute,
                                                      CHECKPOINT)
            else:
                state = '{{"{}":{}}}'.format(current_minute, user['online'])

            cursor.execute("select update_activity(%s, %s::json)",
                           (user_id, state))

        db_connection.commit()

    except psycopg2.DatabaseError as e:
        print('Error {}'.format(e))
        sys.exit(1)

    finally:
        if db_connection:
            db_connection.close()
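As an aside, the state strings above are hand-built JSON; an equivalent, less error-prone construction (illustrative, not part of the original) would be:

import json

state = json.dumps({str(current_minute): user['online'],
                    str(last_seen_minute): CHECKPOINT})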
Example #8
def initialize():
    # this function will get called once, when the application starts.
    # this would be a good place to initialize your connection!
    uses_netloc.append("postgres")
    url = urlparse('postgres://*****:*****@isilo.db.elephantsql.com:5432/ldhticnp')

    conn = psycopg2.connect(database=url.path[1:],
                            user=url.username,
                            password=url.password,
                            host=url.hostname,
                            port=url.port)
    cursor = conn.cursor()
    #cursor.execute('drop table if exists customers; drop table if exists orders; drop table if exists products;')
    cursor.execute('CREATE TABLE IF NOT EXISTS Customers (id SERIAL PRIMARY KEY, firstName Varchar(25), lastName Varchar(25), street Varchar(30), city Varchar(15), state Varchar(15), zip INTEGER);')
    cursor.execute('CREATE TABLE IF NOT EXISTS Products (id SERIAL PRIMARY KEY, name Varchar(25), price REAL);')
    cursor.execute('CREATE TABLE IF NOT EXISTS Orders (id SERIAL PRIMARY KEY, customerId INTEGER, productId INTEGER, date Date, FOREIGN KEY(customerId) REFERENCES Customers(id) ON DELETE CASCADE ON UPDATE CASCADE, FOREIGN KEY(productId) REFERENCES Products(id) ON DELETE CASCADE ON UPDATE CASCADE)')
    conn.commit()

    return conn
Example #9
from psycopg2 import connect, Error  # assumed imports, based on the calls below
from urllib.parse import urlparse, uses_netloc


def connect_db(autocommit=True):
    """
    Creates a connection to the PostgreSQL database server.

    :return: Database connection
    """
    uses_netloc.append('postgres')
    url = urlparse(DATABASE_URL)

    try:
        conn = connect(
            database = url.path[1:],
            user     = url.username,
            password = url.password,
            host     = url.hostname,
            port     = url.port
        )
        conn.autocommit = autocommit
        return conn

    except Error as e:
        print(e)
Example #10
def _sql_meta(engine_instance, args):
    try:
        metadata = {}
        url = urlparse(str(engine_instance.engine.url))
        # Add Scheme to uses_netloc or // will be missing from url.
        uses_netloc.append(url.scheme)
        if url.password is None:
            metadata['url'] = url.geturl()
            name = url.netloc
        else:
            # Strip password from URL
            host_info = url.netloc.rpartition('@')[-1]
            parts = url._replace(netloc='{}@{}'.format(url.username, host_info))
            metadata['url'] = parts.geturl()
            name = host_info
        metadata['user'] = url.username
        metadata['database_type'] = engine_instance.engine.name
        try:
            version = getattr(engine_instance.dialect, '{}_version'.format(engine_instance.engine.driver))
            version_str = '.'.join(map(str, version))
            metadata['driver_version'] = "{}-{}".format(engine_instance.engine.driver, version_str)
        except AttributeError:
            metadata['driver_version'] = engine_instance.engine.driver
        if engine_instance.dialect.server_version_info is not None:
            metadata['database_version'] = '.'.join(map(str, engine_instance.dialect.server_version_info))
        if xray_recorder.stream_sql:
            try:
                if isinstance(args[0], ClauseElement):
                    metadata['sanitized_query'] = str(args[0].compile(engine_instance.engine))
                else:
                    metadata['sanitized_query'] = str(args[0])
            except Exception:
                logging.getLogger(__name__).exception('Error getting the sanitized query')
    except Exception:
        metadata = None
        name = None
        logging.getLogger(__name__).exception('Error parsing sql metadata.')
    return name, metadata
Example #11
def connect_to_database():
    if 'LYRICS_MIXER_DATABASE_URL' in os.environ:
        uses_netloc.append('postgres')
        url = urlparse(os.environ["LYRICS_MIXER_DATABASE_URL"])
        database = PostgresqlDatabase(database=url.path[1:],
                                      user=url.username,
                                      password=url.password,
                                      host=url.hostname,
                                      port=url.port,
                                      autoconnect=False)
    else:
        database = SqliteDatabase(':memory:')

    while database.is_closed():
        try:
            logger.info('Connecting to Database')
            database.connect()
        except Exception:
            pass
        time.sleep(1)

    database.bind([StreamCursor])
    database.create_tables([StreamCursor], safe=True)
Example #12
    def __init__(self, base_url: str):
        logger.debug('Adding s3 to urllib supported protocols for urljoin')
        uses_netloc.append('s3')
        uses_relative.append('s3')

        self.base_url = base_url.rstrip('/') + '/'

        self.mcf = {
            'mcf': {
                'version': '1.0'
            },
            'metadata': {
                'language': LANGUAGE,
                'charset': 'utf8',
                'parentidentifier': 'TBD'
            },
            'spatial': {
                'datatype': 'grid',
                'geomtype': 'solid'
            },
            'identification': {
                'charset': 'utf8',
                'language': 'missing',
                'keywords': {},
                'status': 'onGoing',
                'maintenancefrequency': 'continual'
            },
            'content_info': {
                'type': 'image',
                'dimensions': []
            },
            'contact': {
                'pointOfContact': {},
                'distributor': {}
            },
            'distribution': {}
        }
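Why register 's3'? urljoin() resolves relative references only for schemes listed in uses_relative, and inherits the base URL's authority only for schemes in uses_netloc; a quick illustration with invented paths:

from urllib.parse import urljoin, uses_netloc, uses_relative

uses_relative.append('s3')
uses_netloc.append('s3')
print(urljoin('s3://bucket/collection/', 'item.json'))
# -> 's3://bucket/collection/item.json'
# without the registration, urljoin() would return 'item.json' unchanged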
Example #13
import vim
from urllib.parse import urljoin, uses_relative, uses_netloc
uses_relative.append('gemini')
uses_netloc.append('gemini')


def resolve(path, base=None):
    if base is None:
        base = vim.eval('@%')
        base = vim.current.buffer.vars.get('netfind_basehref', base)
    return urljoin(base, path)
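Illustrative only (the URLs are invented); with 'gemini' registered above, urljoin() resolves relative links the same way it does for http:

resolve('about.gmi', base='gemini://example.org/index.gmi')
# -> 'gemini://example.org/about.gmi'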
Example #14
from flask import Flask
from flask_cors import CORS
from peewee import PostgresqlDatabase
from urllib.parse import uses_netloc, urlparse
import os

uses_netloc.append('postgres')
url = urlparse(os.environ['DATABASE_URL'])

DATABASE = {
    'engine': 'peewee.PostgresqlDatabase',
    'name': url.path[1:],
    'user': url.username,
    'password': url.password,
    'host': url.hostname,
    'port': url.port,
}

app = Flask(__name__)
app.config.from_object(os.environ['APP_SETTINGS'])
db = PostgresqlDatabase(database=url.path[1:],
                        user=url.username,
                        password=url.password,
                        host=url.hostname,
                        port=url.port)
cors = CORS(app)

if __name__ == '__main__':
    app.run()
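A hypothetical model bound to the database configured above:

from peewee import Model, CharField

class User(Model):
    name = CharField()

    class Meta:
        database = db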
Example #15
    def start(self):
        self.started = True

        # thread name
        threading.currentThread().setName('CORE')

        # init sentry
        self.init_sentry()

        # scheduler
        self.scheduler = TornadoScheduler({'apscheduler.timezone': 'UTC'})

        # init core classes
        self.api = API()
        self.config = Config(self.db_type, self.db_prefix, self.db_host,
                             self.db_port, self.db_username, self.db_password)
        self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host,
                              self.db_port, self.db_username, self.db_password)
        self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host,
                                self.db_port, self.db_username,
                                self.db_password)
        self.notification_providers = NotificationProviders()
        self.metadata_providers = MetadataProviders()
        self.search_providers = SearchProviders()
        self.series_providers = SeriesProviders()
        self.log = Logger()
        self.alerts = Notifications()
        self.wserver = WebServer()
        self.show_queue = ShowQueue()
        self.search_queue = SearchQueue()
        self.postprocessor_queue = PostProcessorQueue()
        self.version_updater = VersionUpdater()
        self.show_updater = ShowUpdater()
        self.tz_updater = TimeZoneUpdater()
        self.rsscache_updater = RSSCacheUpdater()
        self.daily_searcher = DailySearcher()
        self.failed_snatch_searcher = FailedSnatchSearcher()
        self.backlog_searcher = BacklogSearcher()
        self.proper_searcher = ProperSearcher()
        self.trakt_searcher = TraktSearcher()
        self.subtitle_searcher = SubtitleSearcher()
        self.auto_postprocessor = AutoPostProcessor()
        self.upnp_client = UPNPClient()
        self.announcements = Announcements()
        self.amqp_client = AMQPClient()

        # authorization sso client
        self.auth_server = AuthServer()

        # check available space
        try:
            self.log.info("Performing disk space checks")
            total_space, available_space = get_free_space(self.data_dir)
            if available_space < 100:
                self.log.warning(
                    'Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                    available_space)
                return
        except Exception:
            self.log.error('Failed getting disk space: %s',
                           traceback.format_exc())

        # check if we need to perform a restore first
        if os.path.exists(
                os.path.abspath(os.path.join(self.data_dir, 'restore'))):
            self.log.info('Performing restore of backup files')
            success = restore_app_data(
                os.path.abspath(os.path.join(self.data_dir, 'restore')),
                self.data_dir)
            self.log.info("Restoring SiCKRAGE backup: %s!" %
                          ("FAILED", "SUCCESSFUL")[success])
            if success:
                # remove restore files
                shutil.rmtree(os.path.abspath(
                    os.path.join(self.data_dir, 'restore')),
                              ignore_errors=True)

        # migrate old database file names to new ones
        if os.path.isfile(
                os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
            if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
                helpers.move_file(
                    os.path.join(self.data_dir, 'sickrage.db'),
                    os.path.join(
                        self.data_dir, '{}.bak-{}'.format(
                            'sickrage.db',
                            datetime.datetime.now().strftime(
                                '%Y%m%d_%H%M%S'))))

            helpers.move_file(
                os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

        # setup databases
        self.main_db.setup()
        self.config.db.setup()
        self.cache_db.setup()

        # load config
        self.config.load()

        # migrate config
        self.config.migrate_config_file(self.config_file)

        # add server id tag to sentry
        sentry_sdk.set_tag('server_id', self.config.general.server_id)

        # add user to sentry
        sentry_sdk.set_user({
            'id': self.config.user.sub_id,
            'username': self.config.user.username,
            'email': self.config.user.email
        })

        # config overrides
        if self.web_port:
            self.config.general.web_port = self.web_port
        if self.web_root:
            self.config.general.web_root = self.web_root

        # set language
        change_gui_lang(self.config.gui.gui_lang)

        # set socket timeout
        socket.setdefaulttimeout(self.config.general.socket_timeout)

        # set ssl cert/key filenames
        self.https_cert_file = os.path.abspath(
            os.path.join(self.data_dir, 'server.crt'))
        self.https_key_file = os.path.abspath(
            os.path.join(self.data_dir, 'server.key'))

        # setup logger settings
        self.log.logSize = self.config.general.log_size
        self.log.logNr = self.config.general.log_nr
        self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
        self.log.debugLogging = self.debug or self.config.general.debug
        self.log.consoleLogging = not self.quiet

        # start logger
        self.log.start()

        # user agent
        if self.config.general.random_user_agent:
            self.user_agent = UserAgent().random

        uses_netloc.append('scgi')
        FancyURLopener.version = self.user_agent

        # set torrent client web url
        torrent_webui_url(True)

        if self.config.general.default_page not in DefaultHomePage:
            self.config.general.default_page = DefaultHomePage.HOME

        # attempt to help prevent users from breaking links by using a bad url
        if not self.config.general.anon_redirect.endswith('?'):
            self.config.general.anon_redirect = ''

        if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*',
                        self.config.general.root_dirs):
            self.config.general.root_dirs = ''

        self.naming_force_folders = check_force_season_folders()

        if self.config.general.nzb_method not in NzbMethod:
            self.config.general.nzb_method = NzbMethod.BLACKHOLE

        if self.config.general.torrent_method not in TorrentMethod:
            self.config.general.torrent_method = TorrentMethod.BLACKHOLE

        if self.config.general.auto_postprocessor_freq < self.min_auto_postprocessor_freq:
            self.config.general.auto_postprocessor_freq = self.min_auto_postprocessor_freq

        if self.config.general.daily_searcher_freq < self.min_daily_searcher_freq:
            self.config.general.daily_searcher_freq = self.min_daily_searcher_freq

        if self.config.general.backlog_searcher_freq < self.min_backlog_searcher_freq:
            self.config.general.backlog_searcher_freq = self.min_backlog_searcher_freq

        if self.config.general.version_updater_freq < self.min_version_updater_freq:
            self.config.general.version_updater_freq = self.min_version_updater_freq

        if self.config.general.subtitle_searcher_freq < self.min_subtitle_searcher_freq:
            self.config.general.subtitle_searcher_freq = self.min_subtitle_searcher_freq

        if self.config.failed_snatches.age < self.min_failed_snatch_age:
            self.config.failed_snatches.age = self.min_failed_snatch_age

        if self.config.general.proper_searcher_interval not in CheckPropersInterval:
            self.config.general.proper_searcher_interval = CheckPropersInterval.DAILY

        if self.config.general.show_update_hour < 0 or self.config.general.show_update_hour > 23:
            self.config.general.show_update_hour = 0

        # add app updater job
        self.scheduler.add_job(
            self.version_updater.task,
            IntervalTrigger(hours=1,
                            start_date=datetime.datetime.now() +
                            datetime.timedelta(minutes=4),
                            timezone='utc'),
            name=self.version_updater.name,
            id=self.version_updater.name)

        # add show updater job
        self.scheduler.add_job(
            self.show_updater.task,
            IntervalTrigger(days=1,
                            start_date=datetime.datetime.now().replace(
                                hour=self.config.general.show_update_hour),
                            timezone='utc'),
            name=self.show_updater.name,
            id=self.show_updater.name)

        # add rss cache updater job
        self.scheduler.add_job(self.rsscache_updater.task,
                               IntervalTrigger(minutes=15, timezone='utc'),
                               name=self.rsscache_updater.name,
                               id=self.rsscache_updater.name)

        # add daily search job
        self.scheduler.add_job(
            self.daily_searcher.task,
            IntervalTrigger(minutes=self.config.general.daily_searcher_freq,
                            start_date=datetime.datetime.now() +
                            datetime.timedelta(minutes=4),
                            timezone='utc'),
            name=self.daily_searcher.name,
            id=self.daily_searcher.name)

        # add failed snatch search job
        self.scheduler.add_job(
            self.failed_snatch_searcher.task,
            IntervalTrigger(hours=1,
                            start_date=datetime.datetime.now() +
                            datetime.timedelta(minutes=4),
                            timezone='utc'),
            name=self.failed_snatch_searcher.name,
            id=self.failed_snatch_searcher.name)

        # add backlog search job
        self.scheduler.add_job(
            self.backlog_searcher.task,
            IntervalTrigger(minutes=self.config.general.backlog_searcher_freq,
                            start_date=datetime.datetime.now() +
                            datetime.timedelta(minutes=30),
                            timezone='utc'),
            name=self.backlog_searcher.name,
            id=self.backlog_searcher.name)

        # add auto-postprocessing job
        self.scheduler.add_job(
            self.auto_postprocessor.task,
            IntervalTrigger(
                minutes=self.config.general.auto_postprocessor_freq,
                timezone='utc'),
            name=self.auto_postprocessor.name,
            id=self.auto_postprocessor.name)

        # add find proper job
        self.scheduler.add_job(
            self.proper_searcher.task,
            IntervalTrigger(
                minutes=self.config.general.proper_searcher_interval.value,
                timezone='utc'),
            name=self.proper_searcher.name,
            id=self.proper_searcher.name)

        # add trakt.tv checker job
        self.scheduler.add_job(self.trakt_searcher.task,
                               IntervalTrigger(hours=1, timezone='utc'),
                               name=self.trakt_searcher.name,
                               id=self.trakt_searcher.name)

        # add subtitles finder job
        self.scheduler.add_job(
            self.subtitle_searcher.task,
            IntervalTrigger(hours=self.config.general.subtitle_searcher_freq,
                            timezone='utc'),
            name=self.subtitle_searcher.name,
            id=self.subtitle_searcher.name)

        # add upnp client job
        self.scheduler.add_job(
            self.upnp_client.task,
            IntervalTrigger(seconds=self.upnp_client._nat_portmap_lifetime,
                            timezone='utc'),
            name=self.upnp_client.name,
            id=self.upnp_client.name)

        # start queues
        self.search_queue.start_worker(self.config.general.max_queue_workers)
        self.show_queue.start_worker(self.config.general.max_queue_workers)
        self.postprocessor_queue.start_worker(
            self.config.general.max_queue_workers)

        # start web server
        self.wserver.start()

        # start scheduler service
        self.scheduler.start()

        # perform server checkup
        IOLoop.current().add_callback(self.server_checkup)

        # load shows
        IOLoop.current().add_callback(self.load_shows)

        # load network timezones
        IOLoop.current().spawn_callback(
            self.tz_updater.update_network_timezones)

        # load search provider urls
        IOLoop.current().spawn_callback(self.search_providers.update_urls)

        # startup message
        IOLoop.current().add_callback(self.startup_message)

        # launch browser
        IOLoop.current().add_callback(self.launch_browser)

        # perform server checkups every hour
        PeriodicCallback(self.server_checkup, 1 * 60 * 60 * 1000).start()

        # perform shutdown trigger check every 5 seconds
        PeriodicCallback(self.shutdown_trigger, 5 * 1000).start()

        # start ioloop
        IOLoop.current().start()
Example #16
############################################################
# This module allows easy use of .ini files
# for configuration data.  You should import this into
# your own (user interactive) program too.
import configparser
from urllib.parse import urlparse, uses_netloc
############################################################

############################################################
# We can load the ini file, and find the postgres_connection
# to use.
config = configparser.ConfigParser()
config.read('config.ini')
connection_string = config['database']['postgres_connection']
############################################################
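# A matching config.ini might look like this (values are illustrative):
#
#   [database]
#   postgres_connection = postgres://user:secret@localhost:5432/mydb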

uses_netloc.append("postgres")

##############################################################################
# Now use the connection string found in the ini file to construct the url used
# to connect to the database.
url = urlparse(connection_string)
##############################################################################


def insert_customer(cursor, name):
    ############################################################################################
    # Note that the second parameter is a tuple - and in Python, if you have a tuple with just
    # one item, you need to use a trailing comma - so Python knows it's a tuple and not just
    # a value surrounded by parentheses.  One of the few very unintuitive parts of the Python syntax!
    ############################################################################################
    cursor.execute("insert into Customers (name) values (%s) returning id",
Example #17
import six
import socket
import select
import ssl
import os
#from pprint import pprint
from urllib.parse import urlparse, uses_netloc
uses_netloc.append('rendezvous')


class InvalidResponseFromRendezVous(Exception):
    pass


class Rendezvous():
    def __init__(self, url, printout=False):
        self.url = url
        urlp = urlparse(url)
        self.hostname = urlp.hostname
        self.port = urlp.port
        self.secret = urlp.path[1:]
        if six.PY3:
            self.secret = str.encode(self.secret)
        path = os.path.dirname(os.path.realpath(__file__))
        self.cert = os.path.abspath("{0}/data/cacert.pem".format(path))
        self.data = six.b("")
        self.printout = printout

    def start(self):

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
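Illustrative construction only (host, port, and secret are invented); the URL shape follows the parsing in __init__ above:

rv = Rendezvous('rendezvous://relay.example.net:8999/sharedsecret')
print(rv.hostname, rv.port, rv.secret)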
Example #18
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

import os
from urllib.parse import urlparse, uses_netloc

import dj_database_url  # assumed import, based on the config() call below

PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# DATABASES = settings.DATABASES

# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
# DATABASES['default'].update(db_from_env)

# Register database schemes in URLs.
uses_netloc.append('mysql')

try:

    # Check to make sure DATABASES is set in settings.py file.
    # If not default to {}

    if 'DATABASES' not in locals():
        DATABASES = {}

    if 'DATABASE_URL' in os.environ:
        url = urlparse(os.environ['DATABASE_URL'])

        # Ensure default database exists.
        DATABASES['default'] = DATABASES.get('default', {})
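        # A hypothetical continuation (not in the original excerpt): map the
        # parsed URL onto Django's documented DATABASES keys.
        DATABASES['default'].update({
            'ENGINE': 'django.db.backends.mysql',
            'NAME': url.path[1:],
            'USER': url.username,
            'PASSWORD': url.password,
            'HOST': url.hostname,
            'PORT': url.port or '',
        })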
Example #19
from collections import namedtuple  # needed for AnncResult below

from openmtc_onem2m.exc import CSEError
from openmtc_onem2m.model import AnnounceableResource, get_onem2m_type, \
    CSEBase, RemoteCSE

from openmtc_onem2m.transport import OneM2MRequest, MetaInformation, \
    OneM2MOperation
from openmtc_server.Plugin import Plugin
from copy import deepcopy
from openmtc_server.util.async_ import async_all
from re import sub
from urllib.parse import urlparse
# url join with coap compatibility
from urllib.parse import urljoin, uses_relative, uses_netloc

uses_relative.append('coap')
uses_netloc.append('coap')

AnncResult = namedtuple('AnncResult', ['cse_uri', 'res_con'])


class AnnouncementHandler(Plugin):
    # method constants
    _CREATE = 'create'
    _UPDATE = 'update'
    _DELETE = 'delete'

    def __init__(self, api, config, *args, **kw):
        super(AnnouncementHandler, self).__init__(api, config, *args, **kw)
        self._announcements = {}
        self._cse_base = urlparse(self.config['global']['cse_base']).path
        # TODO_oneM2M: self._cse_links should be filled with registration plugin, using a static value in the meantime
Example #20
    def start(self):
        self.started = True
        self.io_loop = IOLoop.current()

        # thread name
        threading.currentThread().setName('CORE')

        # init core classes
        self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
        self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
        self.notifier_providers = NotifierProviders()
        self.metadata_providers = MetadataProviders()
        self.search_providers = SearchProviders()
        self.log = Logger()
        self.config = Config()
        self.alerts = Notifications()
        self.scheduler = TornadoScheduler({'apscheduler.timezone': 'UTC'})
        self.wserver = WebServer()
        self.name_cache = NameCache()
        self.show_queue = ShowQueue()
        self.search_queue = SearchQueue()
        self.postprocessor_queue = PostProcessorQueue()
        self.version_updater = VersionUpdater()
        self.show_updater = ShowUpdater()
        self.tz_updater = TimeZoneUpdater()
        self.rsscache_updater = RSSCacheUpdater()
        self.daily_searcher = DailySearcher()
        self.failed_snatch_searcher = FailedSnatchSearcher()
        self.backlog_searcher = BacklogSearcher()
        self.proper_searcher = ProperSearcher()
        self.trakt_searcher = TraktSearcher()
        self.subtitle_searcher = SubtitleSearcher()
        self.auto_postprocessor = AutoPostProcessor()
        self.upnp_client = UPNPClient()
        self.quicksearch_cache = QuicksearchCache()

        # setup oidc client
        realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
        self.oidc_client = realm.open_id_connect(client_id=self.oidc_client_id, client_secret=self.oidc_client_secret)

        # Check if we need to perform a restore first
        if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
            success = restore_app_data(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
            self.log.info("Restoring SiCKRAGE backup: %s!" % ("FAILED", "SUCCESSFUL")[success])
            if success:
                shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

        # migrate old database file names to new ones
        if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
            if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
                helpers.move_file(os.path.join(self.data_dir, 'sickrage.db'),
                                  os.path.join(self.data_dir, '{}.bak-{}'
                                               .format('sickrage.db',
                                                       datetime.datetime.now().strftime(
                                                           '%Y%m%d_%H%M%S'))))

            helpers.move_file(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                              os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

        # init encryption public and private keys
        encryption.initialize()

        # load config
        self.config.load()

        # set language
        self.config.change_gui_lang(self.config.gui_lang)

        # set socket timeout
        socket.setdefaulttimeout(self.config.socket_timeout)

        # setup logger settings
        self.log.logSize = self.config.log_size
        self.log.logNr = self.config.log_nr
        self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
        self.log.debugLogging = self.config.debug
        self.log.consoleLogging = not self.quiet

        # start logger
        self.log.start()

        # user agent
        if self.config.random_user_agent:
            self.user_agent = UserAgent().random

        uses_netloc.append('scgi')
        FancyURLopener.version = self.user_agent

        # set torrent client web url
        torrent_webui_url(True)

        # Check available space
        try:
            total_space, available_space = get_free_space(self.data_dir)
            if available_space < 100:
                self.log.warning('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left', available_space)
                return
        except Exception:
            self.log.error('Failed getting disk space: %s', traceback.format_exc())

        # perform database startup actions
        for db in [self.main_db, self.cache_db]:
            # perform integrity check
            db.integrity_check()

            # migrate database
            db.migrate()

            # sync database repo
            db.sync_db_repo()

            # cleanup
            db.cleanup()

        # load name cache
        self.name_cache.load()

        if self.config.default_page not in ('schedule', 'history', 'IRC'):
            self.config.default_page = 'home'

        # cleanup cache folder
        for folder in ['mako', 'sessions', 'indexers']:
            try:
                shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder), ignore_errors=True)
            except Exception:
                continue

        if self.config.web_port < 21 or self.config.web_port > 65535:
            self.config.web_port = 8081

        if not self.config.web_cookie_secret:
            self.config.web_cookie_secret = generate_secret()

        # attempt to help prevent users from breaking links by using a bad url
        if not self.config.anon_redirect.endswith('?'):
            self.config.anon_redirect = ''

        if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
            self.config.root_dirs = ''

        self.config.naming_force_folders = check_force_season_folders()

        if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
            self.config.nzb_method = 'blackhole'

        if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                              'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
            self.config.torrent_method = 'blackhole'

        if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
            self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq
        if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
            self.config.daily_searcher_freq = self.config.min_daily_searcher_freq
        if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
            self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq
        if self.config.version_updater_freq < self.config.min_version_updater_freq:
            self.config.version_updater_freq = self.config.min_version_updater_freq
        if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
            self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq
        if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
            self.config.failed_snatch_age = self.config.min_failed_snatch_age
        if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
            self.config.proper_searcher_interval = 'daily'
        if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
            self.config.showupdate_hour = 0

        # add API token refresh job
        self.scheduler.add_job(
            API().refresh_token,
            IntervalTrigger(
                hours=1,
            ),
            name='SR-API',
            id='SR-API'
        )

        # add version checker job
        self.scheduler.add_job(
            self.version_updater.run,
            IntervalTrigger(
                hours=self.config.version_updater_freq,
            ),
            name=self.version_updater.name,
            id=self.version_updater.name
        )

        # add network timezones updater job
        self.scheduler.add_job(
            self.tz_updater.run,
            IntervalTrigger(
                days=1,
            ),
            name=self.tz_updater.name,
            id=self.tz_updater.name
        )

        # add show updater job
        self.scheduler.add_job(
            self.show_updater.run,
            IntervalTrigger(
                days=1,
                start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour)
            ),
            name=self.show_updater.name,
            id=self.show_updater.name
        )

        # add rss cache updater job
        self.scheduler.add_job(
            self.rsscache_updater.run,
            IntervalTrigger(
                minutes=15,
            ),
            name=self.rsscache_updater.name,
            id=self.rsscache_updater.name
        )

        # add daily search job
        self.scheduler.add_job(
            self.daily_searcher.run,
            IntervalTrigger(
                minutes=self.config.daily_searcher_freq,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
            ),
            name=self.daily_searcher.name,
            id=self.daily_searcher.name
        )

        # add failed snatch search job
        self.scheduler.add_job(
            self.failed_snatch_searcher.run,
            IntervalTrigger(
                hours=1,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
            ),
            name=self.failed_snatch_searcher.name,
            id=self.failed_snatch_searcher.name
        )

        # add backlog search job
        self.scheduler.add_job(
            self.backlog_searcher.run,
            IntervalTrigger(
                minutes=self.config.backlog_searcher_freq,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=30)
            ),
            name=self.backlog_searcher.name,
            id=self.backlog_searcher.name
        )

        # add auto-postprocessing job
        self.scheduler.add_job(
            self.auto_postprocessor.run,
            IntervalTrigger(
                minutes=self.config.autopostprocessor_freq
            ),
            name=self.auto_postprocessor.name,
            id=self.auto_postprocessor.name
        )

        # add find proper job
        self.scheduler.add_job(
            self.proper_searcher.run,
            IntervalTrigger(
                minutes={
                    '15m': 15,
                    '45m': 45,
                    '90m': 90,
                    '4h': 4 * 60,
                    'daily': 24 * 60
                }[self.config.proper_searcher_interval]
            ),
            name=self.proper_searcher.name,
            id=self.proper_searcher.name
        )

        # add trakt.tv checker job
        self.scheduler.add_job(
            self.trakt_searcher.run,
            IntervalTrigger(
                hours=1
            ),
            name=self.trakt_searcher.name,
            id=self.trakt_searcher.name
        )

        # add subtitles finder job
        self.scheduler.add_job(
            self.subtitle_searcher.run,
            IntervalTrigger(
                hours=self.config.subtitle_searcher_freq
            ),
            name=self.subtitle_searcher.name,
            id=self.subtitle_searcher.name
        )

        # add upnp client job
        self.scheduler.add_job(
            self.upnp_client.run,
            IntervalTrigger(
                seconds=self.upnp_client._nat_portmap_lifetime
            ),
            name=self.upnp_client.name,
            id=self.upnp_client.name
        )

        # add namecache update job
        self.scheduler.add_job(
            self.name_cache.build_all,
            IntervalTrigger(
                days=1,
            ),
            name=self.name_cache.name,
            id=self.name_cache.name
        )

        # start scheduler service
        self.scheduler.start()

        # start queues
        self.io_loop.add_callback(self.search_queue.watch)
        self.io_loop.add_callback(self.show_queue.watch)
        self.io_loop.add_callback(self.postprocessor_queue.watch)

        # fire off startup events
        self.io_loop.run_in_executor(None, self.quicksearch_cache.run)
        self.io_loop.run_in_executor(None, self.name_cache.run)
        self.io_loop.run_in_executor(None, self.version_updater.run)
        self.io_loop.run_in_executor(None, self.tz_updater.run)

        # start web server
        self.wserver.start()

        # launch browser window
        if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
            self.io_loop.run_in_executor(None, functools.partial(launch_browser, ('http', 'https')[sickrage.app.config.enable_https],
                                                                 sickrage.app.config.web_host, sickrage.app.config.web_port))

        def started():
            self.log.info("SiCKRAGE :: STARTED")
            self.log.info("SiCKRAGE :: APP VERSION:[{}]".format(sickrage.version()))
            self.log.info("SiCKRAGE :: CONFIG VERSION:[v{}]".format(self.config.config_version))
            self.log.info("SiCKRAGE :: DATABASE VERSION:[v{}]".format(self.main_db.version))
            self.log.info("SiCKRAGE :: DATABASE TYPE:[{}]".format(self.db_type))
            self.log.info("SiCKRAGE :: URL:[{}://{}:{}{}]".format(('http', 'https')[self.config.enable_https], self.config.web_host, self.config.web_port,
                                                                  self.config.web_root))

        # start io_loop
        self.io_loop.add_callback(started)
        self.io_loop.start()
Example #21
from os import environ as env
from urllib.parse import uses_netloc, urlparse
from redis import StrictRedis
from rq import Queue

queue_name = env['OUT_QUEUE']
url = env['REDIS_URL']

uses_netloc.append('redis')
url = urlparse(url)
# use StrictRedis to support ssl later on
conn = StrictRedis(host=url.hostname,
                   port=url.port,
                   db=0,
                   password=url.password)

q = Queue(queue_name, connection=conn)

for i in range(1, 11):
    print("Producing job %d" % i)
    q.enqueue('consumer1.consume.consume_func', i)
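A hypothetical matching consumer module (consumer1/consume.py); the dotted path must match the string enqueued above, and a worker is started separately with the rq CLI (e.g. rq worker <queue name>):

def consume_func(i):
    print("Consuming job %d" % i)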
Example #22
from peewee import *
import config

import os
from urllib.parse import urlparse, uses_netloc

if config.database is True:
    db = SqliteDatabase("db.sqlite3")
else:
    # when not True, config.database holds the URL scheme name (e.g. 'postgres')
    uses_netloc.append(config.database)
    url = urlparse(config.database_url)
    db = PostgresqlDatabase(
        database=url.path[1:],
        user=url.username,
        password=url.password,
        host=url.hostname,
        port=url.port,
    )


class Freer(Model):
    freer_address = CharField(
        max_length=34,
        unique=True,
    )
    freer_user_name = CharField(max_length=32)
    freer_tags = CharField(max_length=220)
    freer_suffix = CharField(max_length=33)
    freer_pubkey = CharField(max_length=66)
    freer_quit = IntegerField()
    freer_CID = CharField(max_length=67)
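Note that Freer is not bound to db in this excerpt; one way to bind it and create the table, using the same peewee API seen in Example #11:

db.bind([Freer])
db.connect()
db.create_tables([Freer], safe=True)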
Example #23
    def start(self):
        self.started = True

        # thread name
        threading.currentThread().setName('CORE')

        # event loop policy that allows loop creation on any thread.
        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())

        # scheduler
        self.scheduler = BackgroundScheduler({'apscheduler.timezone': 'UTC'})

        # init core classes
        self.api = API()
        self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
        self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
        self.notifier_providers = NotifierProviders()
        self.metadata_providers = MetadataProviders()
        self.search_providers = SearchProviders()
        self.log = Logger()
        self.config = Config()
        self.alerts = Notifications()
        self.wserver = WebServer()
        self.show_queue = ShowQueue()
        self.search_queue = SearchQueue()
        self.postprocessor_queue = PostProcessorQueue()
        self.version_updater = VersionUpdater()
        self.show_updater = ShowUpdater()
        self.tz_updater = TimeZoneUpdater()
        self.rsscache_updater = RSSCacheUpdater()
        self.daily_searcher = DailySearcher()
        self.failed_snatch_searcher = FailedSnatchSearcher()
        self.backlog_searcher = BacklogSearcher()
        self.proper_searcher = ProperSearcher()
        self.trakt_searcher = TraktSearcher()
        self.subtitle_searcher = SubtitleSearcher()
        self.auto_postprocessor = AutoPostProcessor()
        self.upnp_client = UPNPClient()
        self.announcements = Announcements()

        # authorization sso client
        self.auth_server = AuthServer()

        # check available space
        try:
            self.log.info("Performing disk space checks")
            total_space, available_space = get_free_space(self.data_dir)
            if available_space < 100:
                self.log.warning('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left', available_space)
                return
        except Exception:
            self.log.error('Failed getting disk space: %s', traceback.format_exc())

        # check if we need to perform a restore first
        if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
            self.log.info('Performing restore of backup files')
            success = restore_app_data(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
            self.log.info("Restoring SiCKRAGE backup: %s!" % ("FAILED", "SUCCESSFUL")[success])
            if success:
                # self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
                # self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username, self.db_password)
                shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

        # migrate old database file names to new ones
        if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
            if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
                helpers.move_file(os.path.join(self.data_dir, 'sickrage.db'),
                                  os.path.join(self.data_dir, '{}.bak-{}'
                                               .format('sickrage.db',
                                                       datetime.datetime.now().strftime(
                                                           '%Y%m%d_%H%M%S'))))

            helpers.move_file(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                              os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

        # init encryption public and private keys
        encryption.initialize()

        # load config
        self.config.load()

        # set language
        self.config.change_gui_lang(self.config.gui_lang)

        # set socket timeout
        socket.setdefaulttimeout(self.config.socket_timeout)

        # setup logger settings
        self.log.logSize = self.config.log_size
        self.log.logNr = self.config.log_nr
        self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
        self.log.debugLogging = self.config.debug
        self.log.consoleLogging = not self.quiet

        # start logger
        self.log.start()

        # perform database startup actions
        for db in [self.main_db, self.cache_db]:
            # perform integrity check
            self.log.info("Performing integrity check on {} database".format(db.name))
            db.integrity_check()

            # migrate database
            self.log.info("Performing migrations on {} database".format(db.name))
            db.migrate()

            # upgrade database
            self.log.info("Performing upgrades on {} database".format(db.name))
            db.upgrade()

            # cleanup
            self.log.info("Performing cleanup on {} database".format(db.name))
            db.cleanup()

        # user agent
        if self.config.random_user_agent:
            self.user_agent = UserAgent().random

        uses_netloc.append('scgi')
        FancyURLopener.version = self.user_agent

        # set torrent client web url
        torrent_webui_url(True)

        if self.config.default_page not in ('schedule', 'history', 'IRC'):
            self.config.default_page = 'home'

        # attempt to help prevent users from breaking links by using a bad url
        if not self.config.anon_redirect.endswith('?'):
            self.config.anon_redirect = ''

        if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
            self.config.root_dirs = ''

        self.config.naming_force_folders = check_force_season_folders()

        if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
            self.config.nzb_method = 'blackhole'

        if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                              'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
            self.config.torrent_method = 'blackhole'

        if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
            self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

        if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
            self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

        if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
            self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

        if self.config.version_updater_freq < self.config.min_version_updater_freq:
            self.config.version_updater_freq = self.config.min_version_updater_freq

        if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
            self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

        if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
            self.config.failed_snatch_age = self.config.min_failed_snatch_age

        if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
            self.config.proper_searcher_interval = 'daily'

        if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
            self.config.showupdate_hour = 0

        # add version checker job
        self.scheduler.add_job(
            self.version_updater.task,
            IntervalTrigger(
                hours=self.config.version_updater_freq,
                timezone='utc'
            ),
            name=self.version_updater.name,
            id=self.version_updater.name
        )

        # add network timezones updater job
        self.scheduler.add_job(
            self.tz_updater.task,
            IntervalTrigger(
                days=1,
                timezone='utc'
            ),
            name=self.tz_updater.name,
            id=self.tz_updater.name
        )

        # add show updater job
        self.scheduler.add_job(
            self.show_updater.task,
            IntervalTrigger(
                days=1,
                start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour),
                timezone='utc'
            ),
            name=self.show_updater.name,
            id=self.show_updater.name
        )

        # add rss cache updater job
        self.scheduler.add_job(
            self.rsscache_updater.task,
            IntervalTrigger(
                minutes=15,
                timezone='utc'
            ),
            name=self.rsscache_updater.name,
            id=self.rsscache_updater.name
        )

        # add daily search job
        self.scheduler.add_job(
            self.daily_searcher.task,
            IntervalTrigger(
                minutes=self.config.daily_searcher_freq,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=4),
                timezone='utc'
            ),
            name=self.daily_searcher.name,
            id=self.daily_searcher.name
        )

        # add failed snatch search job
        self.scheduler.add_job(
            self.failed_snatch_searcher.task,
            IntervalTrigger(
                hours=1,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=4),
                timezone='utc'
            ),
            name=self.failed_snatch_searcher.name,
            id=self.failed_snatch_searcher.name
        )

        # add backlog search job
        self.scheduler.add_job(
            self.backlog_searcher.task,
            IntervalTrigger(
                minutes=self.config.backlog_searcher_freq,
                start_date=datetime.datetime.now() + datetime.timedelta(minutes=30),
                timezone='utc'
            ),
            name=self.backlog_searcher.name,
            id=self.backlog_searcher.name
        )

        # add auto-postprocessing job
        self.scheduler.add_job(
            self.auto_postprocessor.task,
            IntervalTrigger(
                minutes=self.config.autopostprocessor_freq,
                timezone='utc'
            ),
            name=self.auto_postprocessor.name,
            id=self.auto_postprocessor.name
        )

        # add find proper job
        self.scheduler.add_job(
            self.proper_searcher.task,
            IntervalTrigger(
                minutes={
                    '15m': 15,
                    '45m': 45,
                    '90m': 90,
                    '4h': 4 * 60,
                    'daily': 24 * 60
                }[self.config.proper_searcher_interval],
                timezone='utc'
            ),
            name=self.proper_searcher.name,
            id=self.proper_searcher.name
        )

        # add trakt.tv checker job
        self.scheduler.add_job(
            self.trakt_searcher.task,
            IntervalTrigger(
                hours=1,
                timezone='utc'
            ),
            name=self.trakt_searcher.name,
            id=self.trakt_searcher.name
        )

        # add subtitles finder job
        self.scheduler.add_job(
            self.subtitle_searcher.task,
            IntervalTrigger(
                hours=self.config.subtitle_searcher_freq,
                timezone='utc'
            ),
            name=self.subtitle_searcher.name,
            id=self.subtitle_searcher.name
        )

        # add upnp client job
        self.scheduler.add_job(
            self.upnp_client.task,
            IntervalTrigger(
                seconds=self.upnp_client._nat_portmap_lifetime,
                timezone='utc'
            ),
            name=self.upnp_client.name,
            id=self.upnp_client.name
        )

        # add announcements job
        self.scheduler.add_job(
            self.announcements.task,
            IntervalTrigger(
                minutes=15,
                timezone='utc'
            ),
            name=self.announcements.name,
            id=self.announcements.name
        )

        # add provider URL update job
        self.scheduler.add_job(
            self.search_providers.task,
            IntervalTrigger(
                hours=1,
                timezone='utc'
            ),
            name=self.search_providers.name,
            id=self.search_providers.name
        )

        # start queues
        self.search_queue.start_worker(self.config.max_queue_workers)
        self.show_queue.start_worker(self.config.max_queue_workers)
        self.postprocessor_queue.start_worker(self.config.max_queue_workers)

        # start web server
        self.wserver.start()

        # fire off jobs now
        self.scheduler.get_job(self.version_updater.name).modify(next_run_time=datetime.datetime.utcnow())
        self.scheduler.get_job(self.tz_updater.name).modify(next_run_time=datetime.datetime.utcnow())
        self.scheduler.get_job(self.announcements.name).modify(next_run_time=datetime.datetime.utcnow())
        self.scheduler.get_job(self.search_providers.name).modify(next_run_time=datetime.datetime.utcnow())

        # start scheduler service
        self.scheduler.start()

        # load shows
        self.scheduler.add_job(self.load_shows)

        # launch browser window
        if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
            self.scheduler.add_job(launch_browser, args=[('http', 'https')[sickrage.app.config.enable_https],
                                                         sickrage.app.config.web_host, sickrage.app.config.web_port])

        self.log.info("SiCKRAGE :: STARTED")
        self.log.info("SiCKRAGE :: APP VERSION:[{}]".format(sickrage.version()))
        self.log.info("SiCKRAGE :: CONFIG VERSION:[v{}]".format(self.config.config_version))
        self.log.info("SiCKRAGE :: DATABASE VERSION:[v{}]".format(self.main_db.version))
        self.log.info("SiCKRAGE :: DATABASE TYPE:[{}]".format(self.db_type))
        self.log.info("SiCKRAGE :: URL:[{}://{}:{}/{}]".format(('http', 'https')[self.config.enable_https],
                                                               (self.config.web_host, get_lan_ip())[self.config.web_host == '0.0.0.0'],
                                                               self.config.web_port,
                                                               self.config.web_root))
Example #24
import os
from urllib.parse import uses_netloc, urlparse

import psycopg2
from flask import Flask

from blueprints import user_api, admin_api
from model.internal_config import DATABASE_CONNECTION, DATABASE_URL

app = Flask(__name__)

app.register_blueprint(user_api)
app.register_blueprint(admin_api)

uses_netloc.append("postgres")
url = urlparse(DATABASE_URL)

conn = psycopg2.connect(
    database=url.path[1:],
    user=url.username,
    password=url.password,
    host=url.hostname,
    port=url.port
)

app.config[DATABASE_CONNECTION] = conn


@app.errorhandler(AttributeError)
def err_handler(err):
    # Python 3 exceptions have no .message attribute; use str() instead
    return str(err), 400
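A hypothetical view showing how the stored connection might be used (the route and query are invented):

from flask import current_app

@app.route('/health')
def health():
    with current_app.config[DATABASE_CONNECTION].cursor() as cur:
        cur.execute('SELECT 1')
    return 'ok'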