Example No. 1
def do():
    """
    Main method: It checks the system's current free-space, and if it has passed a set threshold (30% of free-memory
    remaining), then it will delete the necessary (unused) file(s) to bring the free-space percentage below the
    threshold again. The order of deletion is given by the oldest files that are not being used by any current schedule.

    - It also deletes any expired schedule(s) and marquee(s) from the database.

    Warning:
        - In the case that after removing all possible files (files not being used by any schedule) the free-space \
        percentage is smaller than the threshold, it prompts an error message.

        - In the extreme case that there is no enough memory to store a single file the downloader might have problems \
        downloading files for new schedule(s). So any new schedule that tries to download files will fail and all its \
        file will get removed from the database (after trying to download them *x* times).
    """
    # Getting free space in the system
    free_space_ratio = get_free_space_ratio()
    logger.info("Currently, there is {0}% of free memory. Threshold: {1}%".format(free_space_ratio, MEM_THRESHOLD))

    if free_space_ratio <= MEM_THRESHOLD:
        db = DatabaseManager()  # Getting database manager object

        logger.info("Removing all expired schedules and marquees from database...")
        db.delete_expired_schedules()
        db.delete_expired_marquees()
        db.delete_orphan_files()

        # Listing all files used by all schedules
        files_being_used = db.get_list_all_files()

        # Get all files present in the DOWNLOAD_PATH
        all_files_metadata = dict()
        all_files = os.listdir(DOWNLOAD_PATH)

        # Get the modification time of each file and sort files oldest-first
        for f in all_files:
            all_files_metadata[f] = os.path.getmtime(os.path.join(DOWNLOAD_PATH, f))

        all_files.sort(key=lambda filename: all_files_metadata[filename])

        # Delete a file and check if threshold is met
        for f in all_files:
            if f not in files_being_used:
                logger.info("Removing file [%s]" % f)
                try:
                    os.remove(os.path.join(DOWNLOAD_PATH, f))
                except Exception as err:
                    logger.error("Error trying to delete file %s. Details: %s" % (f, err))

                free_space_ratio = get_free_space_ratio()
                if free_space_ratio > MEM_THRESHOLD:
                    break

        free_space_ratio = get_free_space_ratio()
        if free_space_ratio <= MEM_THRESHOLD:
            logger.error("Removed all possible files (not used by any active schedule). "
                         "Free space: {0}% <= {1}% (threshold)".format(free_space_ratio, MEM_THRESHOLD))
        else:
            logger.info("Deleted the minimum number of files necessary. Free memory: {0}%".format(free_space_ratio))
Example No. 2
    def get_info(self, file_id):
        """Get all info about a file."""
        with DatabaseManager() as db:
            sql = """
                SELECT files.id, files.name, files.desc, files.category_name
                FROM files
                JOIN file_tags
                    ON files.id=file_tags.file_id
                WHERE files.id=?
            """

            cursor = db.execute(sql, (file_id,))

            result = cursor.fetchone()

            if not result:
                result = {}
            else:
                result = dict(result)

                sql = """
                    SELECT tag_name
                    FROM file_tags
                    WHERE file_id=?
                """
                result["tags"] = []
                cursor = db.execute(sql, (file_id,))
                for row in cursor.fetchall():
                    result["tags"].append(dict(row)["tag_name"])

            return json.dumps(result, indent=2)
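Converting rows with `dict(result)` and `dict(row)` only works when the connection's `row_factory` is `sqlite3.Row`. A minimal sketch of a `DatabaseManager` context manager consistent with this usage (the class internals and database path are assumptions; the real implementation is not shown):

import sqlite3

class DatabaseManager:
    """Hypothetical minimal version of the context manager used above."""

    def __init__(self, db_path="files.db"):
        self.db_path = db_path

    def __enter__(self):
        self.conn = sqlite3.connect(self.db_path)
        self.conn.row_factory = sqlite3.Row  # rows become dict()-convertible
        return self

    def execute(self, sql, args=()):
        return self.conn.execute(sql, args)

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            self.conn.commit()
        self.conn.close()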
Example No. 3
    def mod_file(self, file_id, name, category, description, tags):
        """Modify file."""
        with DatabaseManager() as db:
            cursor = db.execute("SELECT * FROM files where id=?", (file_id,))
            result = cursor.fetchone()

            if not name:
                name = result["name"]
            if not category:
                category = result["category_name"]
            if not description:
                description = result["desc"]
            if not tags:
                cur = db.execute(
                    "SELECT tag_name FROM file_tags WHERE file_id=?",
                    (file_id,))
                tags = [t["tag_name"] for t in cur.fetchall()]
            letter_tag = category[0].lower()
            tags.append(letter_tag)

            sql_args = (name, category, description, file_id)

            db.execute("")
            # insert or ignore if exists
            db.execute("INSERT OR IGNORE INTO categories(name) values(?)",
                       (category,))

            db.execute("UPDATE files SET name = ?, " +
                       "category_name = ?, desc = ? WHERE id = ?", sql_args)

            db.execute("DELETE FROM file_tags WHERE file_id = ?", (file_id,))

            if tags:
                self.insert_tags(db, tags, file_id)
Example No. 4
def getdata(offer, time_frame, fxc):
    # Resume from the minute after the last date stored in the database
    db_date = DatabaseManager().return_date(offer, time_frame)
    fm_date = db_date + datetime.timedelta(minutes=1)
    tdn = datetime.datetime.now() + datetime.timedelta(minutes=1)
    to_date = tdn.replace(second=0, microsecond=0)
    data = fxc.get_historical_prices(str(offer), fm_date, to_date,
                                     str(time_frame))
    data = [d.__getstate__()[0] for d in data]
    # Drop any bar that duplicates the date already stored
    data = [x for x in data if db_date not in x.values()]
    return data
Example No. 5
def run_main_app():
    conf = {
        '/': {
            'tools.staticdir.root': os.getcwd(),
            'tools.encode.on': True,
            'tools.encode.encoding': 'utf-8',
            'tools.sessions.on': True,
            'tools.sessions.timeout': 60,  # session timeout in minutes (60 minutes = 1 hour)

            # The default session backend is in RAM. Another option is 'file'.
        },
        '/static': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'static',
        },
    }

    cherrypy.site = {'base_path': os.getcwd()}
    database = DatabaseManager()
    database.init_db(DB_NAME)

    main_app = server.MainApp()
    main_app.api = api.base_api.BaseApi(database)
    cherrypy.tree.mount(main_app, "/", conf)

    cherrypy.config.update({
        'server.socket_host': LISTEN_IP,
        'server.socket_port': LISTEN_PORT,
        'engine.autoreload.on': True,
    })

    print("========================================")
    print("           Softeng 701 Server")
    print("========================================")

    cherrypy.engine.start()

    cherrypy.engine.block()
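As the comment in the config notes, sessions are kept in RAM by default. A hedged sketch of switching to the 'file' backend (these storage keys apply to classic CherryPy releases; CherryPy 18+ uses 'tools.sessions.storage_class' instead, so verify against the version in use):

# Hypothetical variant of the '/' entry for file-backed sessions
conf['/'].update({
    'tools.sessions.storage_type': 'file',
    'tools.sessions.storage_path': '/tmp/cherrypy_sessions',
})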
Example No. 6
    def __init__(self):
        self.db = DatabaseManager()

        logging.basicConfig(format=get_format(), level=get_level())
        self.logger = logging.getLogger(__name__)

        self.id_current_sched = None  # ID of the schedule currently being played
        self.current_marquee = ""  # marquee text currently being displayed
        self.db.set_active_schedules()  # mark all schedules active if they are ready to be played

        turn_tv_on()  # assume TV is off when starting player
        self._setup_omxd()  # set up omxd with correct options for omxplayer
        self._flush_omxd_playlist()  # flush any previous playlist
        self._stop_streaming()
Example No. 7
    def _hist_mining(self):
        """
        Collect events from the queue.
        """
        while True:
            try:
                event = self.hist_queue.get(False)
            except queue.Empty:
                sleep(0.1)
            else:
                if event.type == 'HISTDATA':
                    mp.Process(target=DatabaseManager().write_data,
                               args=(event,)).start()
                elif event.type == 'DBREADY':
                    mp.Process(target=HistoricalCollector().historical_prices,
                               args=(self.hist_queue, self.live_queue,
                                     event,)).start()
                elif event.type == 'OFFER':
                    mp.Process(target=DatabaseManager().database_check,
                               args=(self.hist_queue, event,)).start()
Example No. 8
    def find_file(self, name, category, description, tags):
        """Find files."""
        sql = """
            SELECT files.id, files.name, files.desc, files.category_name
            FROM files
            JOIN file_tags
                ON files.id=file_tags.file_id
        """
        args = []

        if name or category or tags or description:
            sql += " WHERE "
            multiple = False
            if name:
                if multiple:
                    sql += "AND "
                sql += "name LIKE ? "
                name = self.surround(name)
                args.append(name)
                multiple = True
            if description:
                if multiple:
                    sql += "AND "
                sql += "desc LIKE ? "
                description = self.surround(description)
                args.append(description)
                multiple = True
            if category:
                if multiple:
                    sql += "AND "
                sql += "category_name LIKE ? "
                category = self.surround(category)
                args.append(category)
                multiple = True
            if tags:
                if multiple:
                    sql += "AND "
                sql += "file_tags.tag_name REGEXP ? "
                tags = [tag.lower() for tag in tags]
                tags = '|'.join(tags)
                args.append(tags)

        sql += "GROUP BY files.id "

        dict_list = []
        with DatabaseManager() as db:
            cursor = db.execute(sql, tuple(args))
            for row in cursor.fetchall():
                dict_list.append(dict(row))
        return json.dumps(dict_list, indent=2)
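SQLite recognizes the REGEXP operator but ships no implementation for it, so the query above only works if a `regexp` function is registered on the connection, presumably inside `DatabaseManager`. A minimal sketch of such a registration:

import re
import sqlite3

def regexp(pattern, value):
    """User-defined REGEXP: SQLite rewrites `x REGEXP y` as regexp(y, x)."""
    return value is not None and re.search(pattern, value) is not None

conn = sqlite3.connect("files.db")  # path assumed for illustration
conn.create_function("REGEXP", 2, regexp)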
Example No. 9
    def add_file(self, name, category_name, desc="", tags=None):
        """Add new file."""
        with DatabaseManager() as db:
            db.setup()
            # insert or ignore if exists
            db.execute("INSERT OR IGNORE INTO categories(name) values(?)",
                       (category_name,))

            cursor = db.execute(
                "INSERT INTO files(name, category_name, desc) values(?, ?, ?)",
                (name, category_name, desc))
            letter_tag = category_name[0].lower()
            if tags:
                tags.append(letter_tag)
            else:
                tags = [letter_tag]
            self.insert_tags(db, tags, cursor.lastrowid)
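`insert_tags` itself is not shown. A plausible sketch, assuming a `file_tags(tag_name, file_id)` table and the lowercase-tag convention used elsewhere in these examples:

def insert_tags(self, db, tags, file_id):
    """Hypothetical helper: link each tag to the file, ignoring duplicates."""
    for tag in tags:
        db.execute(
            "INSERT OR IGNORE INTO file_tags(tag_name, file_id) values(?, ?)",
            (tag.lower(), file_id))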
Example No. 10
    def _live_data_session(self, fxc):
        """
        Dispatch live-data events from the queue.
        """
        live_offers = []
        while True:
            try:
                event = self.live_queue.get(False)
            except queue.Empty:
                sleep(0.1)
            else:
                if event.type == 'LIVEDATA':
                    mp.Process(target=DatabaseManager().write_data,
                               args=(event,)).start()
                elif event.type == 'GETLIVE':
                    mp.Process(target=self._get_live,
                               args=(event, live_offers)).start()
                elif event.type == 'LIVEREADY':
                    if event.offer not in live_offers:
                        print("[oo] Live Started %s" % event.offer)
                        live_offers.append(event.offer)
Example No. 11
	settings_wrapper = Wrapper(None)

	managers_answers = {1: [], 2: [], 3: []}
	managers_answers_wrapper = Wrapper(managers_answers)

	bot = VK_Bot(TOKEN, ID, VERSION, settings_wrapper, managers_answers_wrapper)

	l = Loader(sys.argv[1])
	l.dump_file_name = sys.argv[2]
	settings_wrapper.payload = l.settings
	logging.debug('Settings loaded.')

	conn_settings = settings_wrapper.payload['connection_settings']
	dbm = DatabaseManager(conn_settings['host'], conn_settings['username'],
	                      conn_settings['passwd'], conn_settings['database'])

	bot_thread = threading.Thread(target=bot.polling, daemon=True)
	bot_thread.start()
	logging.debug('Bot started.')

	prog = Program()
	main_thread = threading.Thread(target=prog.mainloop,
	                               args=(bot, dbm, settings_wrapper,
	                                     managers_answers_wrapper, l),
	                               daemon=True)
	main_thread.start()

	input()
	bot.stop()
	prog.stop()
Example No. 12
                    apps.get_app(apps.current_app).stop_timer()

                # check if the app doesn't exist yet
                if not apps.is_registered(app_name):
                    app = apps.register(app_name)
                else:
                    app = apps.get_app(app_name)

                # start the timer
                if not app.timer_started():
                    app.start_timer()

                time.sleep(.2)

except KeyboardInterrupt:
    longest_name = max([len(app.name) for app in apps] + [len('Application')]) + 2
    print('')
    print("%s | %s" % ('Application'.ljust(longest_name), 'Session Time'))
    print("-" * (longest_name + 18))

    db_man = DatabaseManager()

    for app in apps:
        print "%s | %s" % (
            app.name.ljust(longest_name),
            app.elapse_time(),
        )
        db_man.enter_usage_to_db(app.name.ljust(longest_name), app.total_time)

    db_man.print_total_usage()
Example No. 13
def analyze(snd_pipe, db_path, pp_cfg, parser_cfg, srcFiles, use_pipeline=False, analyzer_process=1, pp_process=1, parser_process=1):
    db = DatabaseManager()
    pp_list = [Preprocessor(**pp_cfg) for i in range(pp_process if use_pipeline else analyzer_process)]
    parser_list = [Parser(**parser_cfg) for i in range(parser_process if use_pipeline else analyzer_process)]
    numFiles = len(srcFiles)

    t_0 = datetime.datetime.now()

    projInfo = {}
    projInfo['predefined'] = pp_list[0].preprocess_predef()

    task_queue = Queue()
    done_queue = Queue()

    for i, srcFile in enumerate(srcFiles):
        task_queue.put(srcFile)
    for i in range(len(pp_list)):
        task_queue.put('STOP')

    if not use_pipeline:
        analyzer_p_list = [Process(target=analyzer_worker, args=(pp, parser, task_queue, done_queue)) for pp, parser in zip(pp_list, parser_list)]
        for analyzer_p in analyzer_p_list:
            analyzer_p.start()

        for i, srcFile in enumerate(srcFiles):
            #print 'analyze: [%d/%d]' % (i,numFiles), srcFile
            projInfo[srcFile] = done_queue.get()
            snd_pipe.send((i, numFiles, srcFile))
            if snd_pipe.poll():
                for analyzer_p in analyzer_p_list:
                    analyzer_p.terminate()
                for analyzer_p in analyzer_p_list:
                    analyzer_p.join()
                Preprocessor.clearTokenCache()
                snd_pipe.send('STOPPED')
                print('analyze: canceled')
                return
        for analyzer_p in analyzer_p_list:
            analyzer_p.join()
    else:
        pp_queue = Queue()

        pp_p_list = [Process(target=preprocessor_worker, args=(pp, task_queue, pp_queue)) for pp in pp_list]
        for pp_p in pp_p_list:
            pp_p.start()

        parser_p_list = [Process(target=parser_worker, args=(parser, pp_queue, done_queue)) for parser in parser_list]
        for parser_p in parser_p_list:
            parser_p.start()

        for i, srcFile in enumerate(srcFiles):
            #print 'analyze: [%d/%d]' % (i,numFiles), srcFile
            projInfo[srcFile] = done_queue.get()
            snd_pipe.send((i, numFiles, srcFile))
            if snd_pipe.poll():
                for pp_p in pp_p_list:
                    pp_p.terminate()
                for parser_p in parser_p_list:
                    parser_p.terminate()
                for pp_p in pp_p_list:
                    pp_p.join()
                for parser_p in parser_p_list:
                    parser_p.join()
                Preprocessor.clearTokenCache()
                snd_pipe.send('STOPPED')
                print('analyze: canceled')
                return

        for i in range(len(parser_p_list)):
            pp_queue.put('STOP')
        for pp_p in pp_p_list:
            pp_p.join()
        for parser_p in parser_p_list:
            parser_p.join()

    t_1 = datetime.datetime.now()

    db.createDB(db_path)
    db.addData(projInfo)
    db.saveDB()

    db.closeDB()

    print('analyze: done', t_1 - t_0)
    snd_pipe.send((numFiles, numFiles, 'Generating Database ... done'))
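The `analyzer_worker`, `preprocessor_worker`, and `parser_worker` targets are not shown. A minimal sketch of the 'STOP'-sentinel worker pattern they appear to follow (the Preprocessor/Parser method names and the result shape are assumptions):

def analyzer_worker(pp, parser, task_queue, done_queue):
    """Hypothetical worker: preprocess and parse files until 'STOP' arrives."""
    for src_file in iter(task_queue.get, 'STOP'):
        tokens = pp.preprocess(src_file)      # assumed Preprocessor API
        done_queue.put(parser.parse(tokens))  # assumed Parser API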
Example No. 14
class Dispatcher:
    """
    Class is responsible for notifying users when
    their subscribed channels going online
    """

    def __init__(self, app):
        self.db_manager = DatabaseManager(app)
        self.post_data_queue = Queue()
        self.newly_online_channels = []

    def run(self):
        """
        loops through duties of the dispatcher
        for as long as the server is running
        """
        self.running = True

        last_clean = time()
        while self.running:
            logging.info("Beginning update cycle")

            cycle_start = time()
            extra_time = 0

            try:
                self.process_user_post()
                self.update_channel_status()
                self.notify_users()

                extra_time = CYCLE_TIME - (time() - cycle_start)

                if (time() - last_clean) > CLEAN_PERIOD:
                    self.db_manager.clean()
                    last_clean = time()
            except Exception as e:
                logging.error("Unexpected error encountered:" + traceback.format_exc())

            if extra_time > 0:
                logging.info("waiting %s seconds before next refresh" % int(extra_time))
                sleep(extra_time)

    def process_user_post(self):

        user_requests = []
        processed_users = []

        while not self.post_data_queue.empty():
            user_requests.append(self.post_data_queue.get())

        while len(user_requests) > 0:
            data = json.loads(user_requests.pop())

            reg_id = data["regID"]
            if reg_id not in processed_users:
                processed_users.append(reg_id)
                data["Channels"] = self.sanitize_channels(data["Channels"])
                self.db_manager.add_channels(data["Channels"])

                user_id = self.db_manager.add_user(reg_id)

                self.db_manager.add_subs(user_id, data["Channels"])

        logging.info("all post data processed")

    def sanitize_channels(self, channels):
        return ["".join(re.findall("[a-zA-Z0-9_]", name)).lower() for name in channels]

    def update_channel_status(self):
        logging.info("updating channel status")

        channels = self.db_manager.get_all_channels()

        db_status = {channel.name.lower(): channel.status for channel in channels}
        if len(db_status) > 0:
            server_status = rate_limit_check(list(db_status.keys()))

        else:
            logging.error("database failed to return any channels")
            server_status = []

        new_online = []
        new_offline = []

        try:
            for channel_name in db_status.keys():
                if db_status[channel_name] < server_status[channel_name]:
                    new_online.append(channel_name)
                    self.newly_online_channels.append(channel_name)

                elif db_status[channel_name] > server_status[channel_name]:
                    new_offline.append(channel_name)

        except ValueError:
            logging.error("could not retrieve channel status from api")

        logging.info("{} channels just came online".format(len(new_online)))
        logging.info(("{} channels just went offline").format(len(new_offline)))

        logging.info(self.newly_online_channels)

        self.db_manager.update_channels_status(new_offline, 0)
        self.db_manager.update_channels_status(new_online, 1)

    def notify_users(self):

        users = self.db_manager.get_subbed_users(self.newly_online_channels)

        if len(users) == 0:
            logging.info("no users need to be notified")
            self.newly_online_channels = []
            return

        logging.info("notifying users of channel status")
        reg_ids = []
        for user in users:
            logging.info(("notifying user {user.user_id} of status").format(user=user))
            reg_ids.append(user.reg_id)

        data = {"registration_ids": reg_ids}

        content = json.dumps(data)
        headers = {"Content-type": "application/json", "Authorization": "key=" + API_KEY}

        response = post(url="https://android.googleapis.com/gcm/send", data=content, headers=headers)
        logging.info(response.text)
        self.remove_old_users(json.loads(response.text), reg_ids)

        self.newly_online_channels = []

    def remove_old_users(self, json_response, reg_ids):
        for i, result in enumerate(json_response["results"]):
            if ("registration_id" in result) or ("error" in result):
                self.db_manager.remove_user_sub(reg_ids[i])
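A hedged sketch of how this dispatcher might be wired up (the `app` object comes from the surrounding application, which is not shown):

from threading import Thread

dispatcher = Dispatcher(app)  # `app` assumed from context
Thread(target=dispatcher.run, daemon=True).start()
# Incoming registration posts would then be fed via
# dispatcher.post_data_queue.put(json_payload)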
Example No. 15
    def rm_file(self, file_id):
        """Remove file."""
        with DatabaseManager() as db:
            db.execute("DELETE FROM files where id=?", (file_id,))
Example No. 16
    def __init__(self, app):
        self.db_manager = DatabaseManager(app)
        self.post_data_queue = Queue()
        self.newly_online_channels = []
Example No. 17
from flask import Flask, render_template, jsonify, request
from flask_cors import CORS, cross_origin
from db_manager import DatabaseManager
import requests, os, json

# Statics
app = Flask(__name__)
CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'

db = DatabaseManager()

# Helper functions
def null_parameter():
    return jsonify({ "error": "parameters cannot be null" }), 400

def check_country(code):
    return code in ["FI", "FRA", "SPA"]

def invalid_country_code():
    return jsonify({"error": "country code is not valid"}), 400

def check_screen(screen_type):
    return screen_type in ["billboard", "standing", "small"]

def invalid_screen_type():
    return jsonify({"error": "screen type is not valid"}), 400

# signup a new agency
@app.route("/sign_up", methods=['POST'])
@cross_origin()
def signup():
    username = request.args.get("username", default=None)
Example No. 18
    def historical_prices(hist_queue, live_queue, event):
        """
        Contacts the database and retrieves the latest date, then continues
        with the historical data mining to the present date.
        """
        def collect_data(fxc, instrument, time_frame, dbdate):
            """
            Gets the data
            """      
            time_delta = TimeDelta().get_delta(time_frame)

            to_date = None
            fm_date, to_date = DateRange().get_date_block(
                                           time_delta, dbdate, to_date)
            log(instrument).debug("[>>] Starting Block   : %s %s %s %s" % \
                                (instrument, str(fm_date), str(to_date), time_frame))
            breakout = 0
            while True:
                breakdate = datetime.datetime.now() # - datetime.timedelta(minutes = 5)
                if to_date >= breakdate or fm_date >= breakdate:
                    breakout = 1
                    d = datetime.datetime.now()
                    to_date = d.replace(second=0, microsecond=0)

                try:
                    data = fxc.get_historical_prices(
                        str(instrument), fm_date,
                        to_date, str(time_frame)) 
                    data = [d.__getstate__()[0] for d in data]
                    data = [x for x in data if dbdate not in x.values()]

                except (KeyError, IndexError):
                    data = []

                if data != []:
                    hist_queue.put(HistDataEvent(
                        data, instrument, time_frame))

                    log(instrument).debug("[:)] Data Collected   : %s %s %s %s" % \
                        (instrument, str(fm_date), str(to_date), time_frame))
                    fm_date, to_date = DateRange().get_date_block(
                                       time_delta, fm_date, to_date)

                else:
                    log(instrument).debug("[??] Skipping Block   : %s %s %s %s" % \
                                (instrument, str(fm_date), str(to_date), time_frame))
                    fm_date, to_date = DateRange().get_date_block(
                                       time_delta, fm_date, to_date)

                del data
                
                if breakout == 1:
                    break

        fxoffer = event.fxoffer

        while True:
            try:
                fxc = fx.ForexConnectClient(s.FX_USER, s.FX_PASS,
                                            s.FX_ENVR, s.URL)
                if fxc.is_connected():
                    break
            except RuntimeError:
                pass

        for offer, time_frames in fxoffer.items():
            for time_frame in time_frames:
                dbdate = DatabaseManager().return_date(offer, time_frame)
                collect_data(fxc, offer, time_frame, dbdate)
                log(offer).debug("[^^] TFrame Complete  : %s |%s|" % (offer, time_frame))

            log(offer).debug("[<>] Offer Complete   : %s |%s|" % (offer, time_frame))
        print("[^^] Hist complete : %s" % offer)
        live_queue.put(LiveReadyEvent(offer))
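`TimeDelta` and `DateRange` are helpers not shown here. A plausible sketch of the sliding-window logic `get_date_block` appears to implement, inferred purely from how it is called above:

class DateRange:
    """Hypothetical helper: slide a fixed-size window over historical dates."""

    def get_date_block(self, time_delta, fm_date, to_date):
        # First call: to_date is None, so open the window at fm_date.
        if to_date is None:
            return fm_date, fm_date + time_delta
        # Subsequent calls: advance the window by one block.
        return to_date, to_date + time_delta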
Example No. 19
class Player:
    """
    Plays (in an infinite loop) the current schedule's playlist. A current schedule is defined by the following
    three properties:

    1. Its play time, date, and day of the week fall within the current system date and time.

    2. It has the highest priority among the set of schedules that satisfy (1).

    3. It is the newest* schedule among the set of schedules that satisfy both (1) and (2).

    In a nutshell, the player module performs the following functions:

    - Constantly checks for a new schedule to play.

    - Constantly checks for an urgent marquee that needs to overwrite a schedule's (optional) marquee text.

    - Checks for future schedules, and attempts to turn the HDMI output off when there is no current or upcoming schedule.

    (*) Newest refers to the schedule whose date and time of **creation** is closest to the current system time.
    """

    def __init__(self):
        self.db = DatabaseManager()

        logging.basicConfig(format=get_format(), level=get_level())
        self.logger = logging.getLogger(__name__)

        self.id_current_sched = None  # ID of the schedule currently being played
        self.current_marquee = ""  # marquee text currently being displayed
        self.db.set_active_schedules()  # mark all schedules active if they are ready to be played

        turn_tv_on()  # assume TV is off when starting player
        self._setup_omxd()  # set up omxd with correct options for omxplayer
        self._flush_omxd_playlist()  # flush any previous playlist
        self._stop_streaming()

    def start_playing(self):
        """
        Computes the schedule that should be playing right now. Based on that schedule, checks whether it is
        a regular schedule or a streaming schedule. If it is a regular schedule, it creates a playlist using
        ``omxd`` and starts it. If it is a streaming schedule, it starts playback of the streaming video using
        ``livestreamer``.

        It also checks every ``SLEEP_WAIT`` seconds for a new schedule; if there is none, it continues playing
        the current schedule. Moreover, if there is no schedule to play, it waits ``SLEEP_WAIT`` seconds before
        checking for a new schedule again.
        """
        self.logger.debug('start_playing')
        self.streaming_status = STREAM_NOT_PLAYING

        while True:

            # Get schedule that should be played right now and in the next five minutes
            id_sched_to_play = self.db.get_sched_to_play(curr_day=time.strftime("%a").upper(),
                                                         curr_datetime=time.strftime("%Y-%m-%d %H:%M:%S"))
            new_curr_time = (datetime.now() + timedelta(minutes=5)).strftime("%Y-%m-%d %H:%M:%S")
            next_5_min_sched = self.db.get_sched_to_play(curr_day=time.strftime("%a").upper(),
                                                         curr_datetime=new_curr_time)

            # If no schedule to play sleep for SLEEP_TIME before checking again
            # If no sched in the next 5 minutes try to turn the tv off
            if id_sched_to_play is None:
                self.id_current_sched = None  # If No schedule to play then current schedule is also None
                self.current_marquee = ""  # If No schedule to play then there is no marquee
                if next_5_min_sched is None:
                    self.logger.debug("No schedule in the next 5 minutes")
                    turn_tv_off()
                self._kill_java_processes()  # remove any marquee
                self._flush_omxd_playlist()  # remove any playlist
                self._stop_streaming()       # remove any on going streaming playback
                time.sleep(SLEEP_TIME)                  
                continue
            
            # Display marquee if any
            marquee_text = self.db.get_sched_marquee(id_sched_to_play)
            urgent_marquee_text = self.db.get_urgent_marquee_to_play(curr_datetime=time.strftime("%Y-%m-%d %H:%M:%S"))
            if urgent_marquee_text != "":  # Check if urgent marquee text exists
                marquee_text = urgent_marquee_text

            # Update marquee text
            if self.current_marquee != marquee_text:
                self.current_marquee = marquee_text
                self.logger.debug("Using [%s] as marquee text." % self.current_marquee)
                self._display_marquee(self.current_marquee)

            # Update active schedule
            if self.id_current_sched != id_sched_to_play:
                turn_tv_on()                  
                self._flush_omxd_playlist()
                self._stop_streaming() 
                self.id_current_sched = id_sched_to_play

                # Update the playlist for regular schedules
                if self.db.get_sched_type(self.id_current_sched) == TYPE_REGULAR:
                    playlist = self.db.get_list_files(id_sched_to_play)
                    self.logger.debug('Schedule: [%s]. Play list: %s' % (id_sched_to_play, playlist))
                    self._add_playlist(playlist)
                else:
                    self._stop_streaming()
                    streaming_link = self.db.get_sched_streaming_link(self.id_current_sched)
                    self.logger.debug("Streaming link: " + streaming_link)
                    
                    global streaming_thread

                    try:
                        streaming_thread = threading.Thread(target=self._start_streaming, args=(streaming_link,))
                        streaming_thread.start()
                    except threading.ThreadError:
                        self.logger.error("Error starting streaming thread")
                    
                    self.streaming_status = STREAM_PLAYING
            
            # No update detected for schedule
            else:
                self.logger.debug("No NEW schedule to play. Check again in %d seconds" % SLEEP_TIME)
                time.sleep(SLEEP_TIME)
                
                if self.db.get_sched_type(self.id_current_sched) == TYPE_STREAM:
                    self._check_streaming_integrity()
                    if self.streaming_status == STREAM_NOT_PLAYING:
                        self._stop_streaming()
                        # streaming_thread.wait()
                        try:
                            streaming_thread = threading.Thread(target=self._start_streaming, args=(streaming_link,))
                            streaming_thread.start()
                        except threading.ThreadError:
                            self.logger.error("Error starting streaming thread")

                        self.streaming_status = STREAM_PLAYING
                else:
Example No. 20
__author__ = 'College of Engineering, Seoul National University'
# Contact: Camilo Celis Guzman <*****@*****.**>

DEFAULT_LINE_N = 5  # Default number of lines to get from logs in case of a query-parameter error
TIME_OUT = 5  # Max. timeout in seconds for getting schedule info as JSON in a POST request

app = Flask(__name__)

# Debugging information, ignore otherwise
if not app.debug:
    basicConfig(format=get_format(), level=get_level())
    app.logger.setLevel(get_level())
    app.logger.info('Starting flask webapp...')

# creating instance of database object
db = DatabaseManager()

# get the JSON file from the server
@app.route('/schedule', methods=['POST'])
def get_sched():
    """
    Listen for POST request at ``/schedule`` URL. It receives a JSON file corresponding to a schedule metadata.
    It then calls the database manager to store this metadata.

    Returns:
        String: 'Status: OK' if successfully saved metadata in the database.

    Warning:
        It logs an error if it was **not** possible to save the schedule metadata in the database
    """
    req_json = request.get_json()