def delete_tweet_log_in_db_log():
    """delete old logs in the DB log"""
    lib_cm.message_write_to_console(ac, u"delete_tweet_log_in_db_log")
    date_log_back = (datetime.datetime.now() + datetime.timedelta(days=-1))
    c_date_log_back = date_log_back.strftime("%Y-%m-%d %H:%M")
    ACTION = ("DELETE FROM TWITTER_LOGS WHERE TW_LOG_TIME < '"
        + c_date_log_back + "'")
    db.dbase_log_connect(ac)
    if db.db_log_con is None:
        err_message = u"No connect to db for delete_tweet_log_in_db_log"
        lib_cm.error_write_to_file(ac, err_message)
        return None
    # define the message up front so the except block can use it as well
    log_message = (u"Loeschen der Tweetlogs "
        "in DB-Log-Tabelle die von gestern sind")
    try:
        db_log_cur = db.db_log_con.cursor()
        db_log_cur.execute(ACTION)
        db.db_log_con.commit()
        db.db_log_con.close()
        db.write_log_to_db(ac, log_message, "e")
    except Exception, e:
        lib_cm.message_write_to_console(ac, log_message
            + u"Error 2 delete_tweet_log_in_db_log: %s" % str(e))
        err_message = (log_message
            + u"Error 2 delete_tweet_log_in_db: %s" % str(e))
        lib_cm.error_write_to_file(ac, err_message)
        db.db_log_con.rollback()
        db.db_log_con.close()
        return None
def concatenate_media(filename):
    """concatenate audio files"""
    lib_cm.message_write_to_console(ac, u"mp3-Files kombinieren")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[2].encode(ac.app_encode_out_strings)
    #cmd = "sox"
    news_file = ac.app_file_orig_temp + ".wav"
    news_file_temp = news_file.replace(".wav", "_temp.wav")
    source_path = ac.app_homepath + lib_cm.check_slashes(ac,
        db.ac_config_1[10])
    source_file_intro = source_path + ac.app_file_intro
    source_file_closer = source_path + ac.app_file_closer
    dest_path = lib_cm.check_slashes(ac, db.ac_config_servpath_a[1])
    dest_path_file = dest_path + filename
    lib_cm.message_write_to_console(ac, cmd)
    # start subprocess
    try:
        p = subprocess.Popen([cmd, u"-S", source_file_intro, news_file_temp,
            source_file_closer, u"-C", u"192.2", dest_path_file],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[7] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def erase_files_prepaere(roboting_sgs):
    """prepare erasing files"""
    date_back = (datetime.datetime.now()
        + datetime.timedelta(days=-int(db.ac_config_1[3])))
    c_date_back = date_back.strftime("%Y_%m_%d")
    db.write_log_to_db_a(ac, u"Sendedatum muss aelter sein als: "
        + c_date_back, "t", "write_also_to_console")
    for item in roboting_sgs:
        if item[1].strip() == "T":  # on cloud
            msg = "Veraltete Dateien in Cloud loeschen: " + item[2]
            db.write_log_to_db_a(ac, msg, "p", "write_also_to_console")
            path_dest = lib_cm.check_slashes(ac, db.ac_config_servpath_b[5])
            path_cloud = lib_cm.check_slashes(ac, item[2])
            path_dest_cloud = (path_dest + path_cloud)
            try:
                files_sendung_dest = os.listdir(path_dest_cloud)
            except Exception, e:
                log_message = ac.app_errorslist[12] + item[2] + ": %s" % str(e)
                lib_cm.message_write_to_console(ac, log_message)
                db.write_log_to_db(ac, log_message, "x")
                return
            erase_files_from_cloud(path_dest_cloud, files_sendung_dest,
                c_date_back)
        if item[3].strip() == "T":  # on ftp
            msg = "Veraltete Dateien auf ftp loeschen: " + item[4]
            db.write_log_to_db_a(ac, msg, "p", "write_also_to_console")
            path_dest = lib_cm.check_slashes(ac, db.ac_config_1[6])
            path_ftp = lib_cm.check_slashes(ac, item[4])
            path_dest_ftp = (path_dest + path_ftp)
            erase_files_from_ftp(path_dest_ftp, c_date_back)
def write_listeners_to_db(peaklisteners, sql_time):
    """write peak listeners to db"""
    # is there already a record for this day?
    c_time = db.read_tbl_row_with_cond(ac, db, "ST_WEB_STREAM_LISTENERS",
        "ST_WEB_STREAM_LIS_DAY",
        "ST_WEB_STREAM_LIS_DAY='" + sql_time + "'")
    if c_time is None:
        # nothing there yet, insert a new record
        sql_command = ("INSERT INTO ST_WEB_STREAM_LISTENERS"
            "(ST_WEB_STREAM_LIS_DAY, ST_WEB_STREAM_LIS_NUMBER)"
            " values ('" + sql_time + "', '" + peaklisteners + "')")
        lib_cm.message_write_to_console(ac, "Datensatz neu: " + peaklisteners)
        log_message = (u"Statistik Webstream-Hoerer "
            "nach neuer Verbindung aktualisiert. Anzahl: " + peaklisteners)
    else:
        # record exists, update it
        sql_command = ("UPDATE ST_WEB_STREAM_LISTENERS "
            "SET ST_WEB_STREAM_LIS_NUMBER ='" + peaklisteners
            + "' where ST_WEB_STREAM_LIS_DAY='" + sql_time + "'")
        lib_cm.message_write_to_console(ac,
            "Datensatz aktualisiert: " + peaklisteners)
        log_message = (u"Statistik Webstream-Hoerer "
            "bei bestehender Verbindung aktualisiert. Anzahl: "
            + peaklisteners)
    db_op_success = db.exec_sql(ac, db, sql_command)
    if db_op_success is None:
        # Error 005: failed to register the webstream listeners in the db
        err_message = ac.app_errorslist[6] + " " + peaklisteners
        db.write_log_to_db_a(ac, err_message, "x", "write_also_to_console")
    else:
        db.write_log_to_db(ac, log_message, "i")
    return
def load_studio_sendungen():
    """seek for live shows"""
    lib_cm.message_write_to_console(ac, "load_studio_live_sendungen")
    # zfill pads single-digit hours with a leading zero
    c_date_time_from = (str(ac.time_target.date()) + " "
        + str(ac.time_target.hour).zfill(2))
    c_date_time_to = (str(ac.time_target.date()) + " "
        + str(ac.time_target.hour + 1).zfill(2))
    db_tbl_condition = ("A.SG_HF_SOURCE_ID <> '03' "
        "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) >= '"
        + c_date_time_from + "' "
        "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) <= '"
        + c_date_time_to + "' "
        "AND A.SG_HF_INFOTIME='F' AND A.SG_HF_MAGAZINE='F'")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(ac, db,
        db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Studio-Sendungen fuer: " + c_date_time_from
            + u" bis " + c_date_time_to)
    else:
        log_message = (u"Studio-Sendungen vorhanden von: " + c_date_time_from
            + u" bis " + c_date_time_to + " Uhr")
    db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
    return sendung_data
def delete_failed_sg_in_db(main_id_sg):
    """delete sg-main if registering sg-content fails"""
    lib_cm.message_write_to_console(ac,
        u"delete_failed_sg_in_db with nr: " + str(main_id_sg))
    ACTION = ("DELETE FROM SG_HF_MAIN WHERE SG_HF_ID = " + str(main_id_sg)
        + " ROWS 1")
    db.dbase_connect(ac)
    if db.db_con is None:
        err_message = (u"Error General: "
            "No connection to db for delete_failed_sg_in_db")
        lib_cm.error_write_to_file(ac, err_message)
        return None
    try:
        db_cur = db.db_con.cursor()
        db_cur.execute(ACTION)
        db.db_con.commit()
        db.db_con.close()
        log_message = u"Loeschen der Sendung bei Fehlschlag SG_CONT.. "
        db.write_log_to_db_a(ac, log_message, "e", "write_also_to_console")
    except Exception, e:
        err_message = u"Error 2 delete_failed_sg_in_db: %s" % str(e)
        lib_cm.error_write_to_file(ac, err_message)
        db.write_log_to_db(ac, ac.app_errorslist[3], "x")
        db.db_con.rollback()
        db.db_con.close()
        return None
def load_notis(c_time_back):
    """load Notifications from Log"""
    db_tbl = "USER_LOGS A "
    db_tbl_fields = ("A.USER_LOG_ID, A.USER_LOG_TIME, A.USER_LOG_ACTION, "
        "A.USER_LOG_ICON, A.USER_LOG_MODUL_ID ")
    db_tbl_condition = ("SUBSTRING( A.USER_LOG_ICON FROM 1 FOR 1 ) = 'n' AND "
        "SUBSTRING( A.USER_LOG_TIME FROM 1 FOR 19) >= '" + c_time_back
        + "' AND A.USER_LOG_ID > " + str(ac.log_id)
        + " ORDER BY A.USER_LOG_ID")
    log_data = db.read_tbl_rows_with_cond_log(ac, db, db_tbl, db_tbl_fields,
        db_tbl_condition)
    if log_data is None:
        return
    # loop through data
    for row in log_data:
        tweetet_log = load_tweet_logs(row[0])
        if tweetet_log is not None:
            lib_cm.message_write_to_console(ac, "schon getwittert: " + row[2])
            continue
        item_log_current = (row[2] + " - "
            + row[1].strftime("%Y-%m-%d %H:%M:%S"))
        lib_cm.message_write_to_console(ac, item_log_current)
        tweet_message(item_log_current)
def load_actions(c_time_back):
    """load actions from log"""
    db_tbl = "USER_LOGS A "
    db_tbl_fields = ("A.USER_LOG_ID, A.USER_LOG_TIME, A.USER_LOG_ACTION, "
        "A.USER_LOG_ICON, A.USER_LOG_MODUL_ID ")
    db_tbl_condition = ("SUBSTRING( A.USER_LOG_ICON FROM 1 FOR 1 ) = 'i' "
        "AND SUBSTRING( A.USER_LOG_TIME FROM 1 FOR 19) >= '" + c_time_back
        + "' AND A.USER_LOG_ID > " + str(ac.log_id)
        + " ORDER BY A.USER_LOG_ID")
    log_data = db.read_tbl_rows_with_cond_log(ac, db, db_tbl, db_tbl_fields,
        db_tbl_condition)
    if log_data is None:
        return
    # loop through data
    item_log_last = ""
    for row in log_data:
        tweetet_log = load_tweet_logs(row[0])
        if tweetet_log is not None:
            lib_cm.message_write_to_console(ac, "schon getwittert: " + row[2])
            continue
        item_log_current = (row[2] + " - "
            + row[1].strftime("%Y-%m-%d %H:%M:%S"))
        if row[2] == "Log-Tweeter gestartet":
            continue
        if item_log_last == item_log_current:
            db.write_log_to_db(ac, ac.app_desc + " doppelte Meldung: "
                + item_log_last, "x")
            continue
        lib_cm.message_write_to_console(ac, item_log_current)
        tweet_log(item_log_current)
        item_log_last = item_log_current
def check_mairlist_log(self, source_id, time_now, log_data):
    """load data from the mAirlist logfile"""
    # concatenate the filename of the mAirlist logfile
    file_mairlist_log = (ac.app_homepath + db.ac_config_1[8] + "_"
        + source_id + ".log")
    lib_cm.message_write_to_console(ac, file_mairlist_log)
    # fetch data from the mAirlist logfile
    mairlist_log_data = lib_cm.read_file_first_line(ac, db, file_mairlist_log)
    lib_cm.message_write_to_console(ac, mairlist_log_data)
    if mairlist_log_data is None:
        # error while reading the logfile
        ac.error_counter_read_log_file += 1
        log_meldung_1 = ac.app_errorslist[1] + " \n"
        if ac.error_counter_read_log_file == 1:
            # register the error message only once
            db.write_log_to_db_a(ac, ac.app_errorslist[2], "x",
                "write_also_to_console")
            # transmit the outage message only once
            ac.log_start = (str(time_now.date()) + " "
                + str(time_now.time())[0:8])
            ac.log_author = db.ac_config_1[3]
            ac.log_title = db.ac_config_1[4]
            web = upload_data_prepare()
            if web is not None:
                self.display_logging(log_meldung_1, web)
            else:
                self.display_logging(log_meldung_1, None)
        else:
            self.display_logging(log_meldung_1, None)
        return None
    else:
        ac.error_counter_read_log_file = 0
        # comparing the logfile content (mairlist_log_data) directly makes
        # the following comparison fail outside the IDE, hence the variable
        mairlist_log_time = mairlist_log_data[6:25]
        if ac.log_start == mairlist_log_time:
            # no change of the playing title, so go back
            log_meldung_1 = ("Keine Aenderung in mAirlist-Log... \n"
                + ac.log_start + " - " + ac.log_author + " - " + ac.log_title)
            self.display_logging(log_meldung_1, None)
            return None
        else:
            # 4. determine data from logfiles or db
            ac.log_start = mairlist_log_data[6:25]
            log_data = mairlist_log_data
            # determine whether it is a booked show or music
            log_author_title = work_on_data_from_log(time_now, log_data,
                "mairlist")
            ac.log_author = log_author_title[0]
            ac.log_title = log_author_title[1]
            return True
def filepaths(d_pattern, l_path_title, item, sendung):
    """concatenate paths and filenames"""
    success_file = True
    try:
        # shift relative to the date of the first broadcast
        new_date = sendung[2] + datetime.timedelta(days=-item[5])
        lib_cm.message_write_to_console(ac, new_date.strftime(d_pattern))
        if item[1].strip() == "T":
            # from dropbox
            path_source = lib_cm.check_slashes(ac, db.ac_config_servpath_b[5])
            path_file_source = (path_source + l_path_title[0]
                + new_date.strftime(d_pattern) + l_path_title[1].rstrip())
        if item[3].strip() == "T":
            # from ftp
            path_source = lib_cm.check_slashes(ac, db.ac_config_1[3])
            path_file_source = (path_source + l_path_title[0]
                + new_date.strftime(d_pattern) + l_path_title[1].rstrip())
        # infotime or magazine, otherwise regular show
        if sendung[4].strip() == "T" or sendung[5].strip() == "T":
            path_dest = lib_cm.check_slashes(ac, db.ac_config_servpath_a[1])
        else:
            path_dest = lib_cm.check_slashes(ac, db.ac_config_servpath_a[2])
        # replace special characters
        path_file_dest = (path_dest + str(sendung[8]) + "_"
            + lib_cm.replace_sonderzeichen_with_latein(sendung[16]) + "_"
            + lib_cm.replace_sonderzeichen_with_latein(sendung[13]) + ".mp3")
    except Exception, e:
        log_message = (ac.app_errorslist[5] + " fuer: "
            + sendung[11].encode("ascii", "ignore") + " " + str(e))
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        success_file = None
def check_file_source_cloud(path_file_source):
    """check if file exists in dropbox"""
    lib_cm.message_write_to_console(ac, "check_files_cloud")
    file_is_online = False
    if os.path.isfile(path_file_source):
        filename = lib_cm.extract_filename(ac, path_file_source)
        lib_cm.message_write_to_console(ac, "vorhanden: " + path_file_source)
        db.write_log_to_db_a(ac,
            "Vorproduktion von extern in Cloud vorhanden: " + filename,
            "k", "write_also_to_console")
        file_is_online = True
    return file_is_online
def check_file_dest_play_out(path_file_dest, sendung):
    """check if file exists in play-out"""
    lib_cm.message_write_to_console(ac, "check if file exist in play-out")
    success_file = None
    if os.path.isfile(path_file_dest):
        filename = lib_cm.extract_filename(ac, path_file_dest)
        lib_cm.message_write_to_console(ac, "vorhanden: " + filename)
        db.write_log_to_db_a(ac,
            "Vorproduktion von extern bereits in Play_Out vorhanden: "
            + sendung[12], "f", "write_also_to_console")
        success_file = True
    return success_file
def check_file_source(path_f_source, sendung):
    """check if file exists in source"""
    lib_cm.message_write_to_console(ac, "check if file exist in source")
    success_file = True
    if not os.path.isfile(path_f_source):
        filename = lib_cm.extract_filename(ac, path_f_source)
        lib_cm.message_write_to_console(ac, u"nicht vorhanden: " + filename)
        db.write_log_to_db_a(ac,
            u"Vorproduktion fuer extern noch nicht in Play_Out vorhanden: "
            + sendung[12], "f", "write_also_to_console")
        success_file = False
    return success_file
def load_roboting_sgs(dub_way):
    """search for shows"""
    lib_cm.message_write_to_console(ac,
        u"Sendungen suchen, die bearbeitet werden sollen")
    sendungen_data = db.read_tbl_rows_with_cond(ac, db, "SG_HF_ROBOT",
        "SG_HF_ROB_TITEL, SG_HF_ROB_STICHWORTE",
        "SG_HF_ROB_DUB_ID ='" + dub_way + "'")
    if sendungen_data is None:
        log_message = u"Keine Sendungen zur Duplizierung vorgesehen.. "
        db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
    return sendungen_data
def compand_voice():
    """compand voice"""
    lib_cm.message_write_to_console(ac, u"Sprache komprimieren")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[2].encode(ac.app_encode_out_strings)
    #cmd = "sox"
    lib_cm.message_write_to_console(ac, cmd)
    source_file = ac.app_file_orig_temp + ".wav"
    dest_file = ac.app_file_orig_temp + "_comp.wav"
    lib_cm.message_write_to_console(ac, source_file)
    compand_prams = db.ac_config_1[12].split()
    lib_cm.message_write_to_console(ac, compand_prams)
    # start subprocess
    # example parameters: compand 0.3,1 6:-70,-60,-20 -5 -90
    try:
        p = subprocess.Popen([cmd, u"-S", source_file, dest_file,
            u"compand", compand_prams[0], compand_prams[1],
            compand_prams[2], compand_prams[3]],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[3] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def write_files_to_archive_prepare(sendung_art):
    """preparing for archiving"""
    lib_cm.message_write_to_console(ac, u"write_files_to_archive_prepare")
    # paths
    if sendung_art == "Info-Time":
        path_sendung_source = db.ac_config_servpath_a[5]
        path_sendung_dest = db.ac_config_servpath_a[9]
        log_message = u"Infotime und Magazin archivieren..."
    if sendung_art == "Sendung normal":
        path_sendung_source = db.ac_config_servpath_a[6]
        path_sendung_dest = db.ac_config_servpath_a[10]
        log_message = u"Sendungen archivieren..."
    db.write_log_to_db(ac, log_message, "p")
    # check slashes
    path_sendung_source = lib_cm.check_slashes(ac, path_sendung_source)
    path_sendung_dest = lib_cm.check_slashes(ac, path_sendung_dest)
    lib_cm.message_write_to_console(ac, path_sendung_source)
    lib_cm.message_write_to_console(ac, path_sendung_dest)
    # read archive folders
    try:
        files_sendung_source = os.listdir(path_sendung_source)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message + path_sendung_source)
        db.write_log_to_db(ac, log_message, "x")
        return None
def load_manuskript(sendung):
    """search manuscript"""
    lib_cm.message_write_to_console(ac, u"Manuskript suchen")
    manuskript_data = db.read_tbl_row_with_cond(ac, db, "SG_MANUSKRIPT",
        "SG_MK_TEXT", "SG_MK_SG_CONT_ID =" + str(sendung[1]))
    if manuskript_data is None:
        log_message = u"Kein Manuskript fuer externe VP gefunden.. "
        db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
        return manuskript_data
    return manuskript_data
def write_files_to_archive(files_sendung_source, path_sendung_source,
        path_sendung_dest, dir_year, sendung_art):
    """write files to archive"""
    lib_cm.message_write_to_console(ac, u"write_files_to_archive " + dir_year)
    # times
    lib_cm.message_write_to_console(ac, db.ac_config_1[2])
    date_back = (datetime.datetime.now()
        + datetime.timedelta(days=-int(db.ac_config_1[2])))
    lib_cm.message_write_to_console(ac, db.ac_config_1[4])
    nr_of_files_to_archive = int(db.ac_config_1[4])
    c_date_back = date_back.strftime("%Y-%m-%d")
    db.write_log_to_db_a(ac, u"Sendedatum muss vor " + c_date_back
        + " liegen", "t", "write_also_to_console")
    # adjust path
    path_sendung_dest += lib_cm.check_slashes(ac, dir_year)
    log_message = u"Dateien archivieren nach: " + path_sendung_dest
    db.write_log_to_db(ac, log_message, "f")
    try:
        files_sendung_dest = os.listdir(path_sendung_dest)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
        return
def reg_lenght(sendung_data, path_file_dest):
    """calc length of news file and register in db"""
    lib_cm.message_write_to_console(ac, u"Laenge der VP von extern ermitteln")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[3].encode(ac.app_encode_out_strings)
    #cmd = "soxi"
    lib_cm.message_write_to_console(ac, cmd)
    # start subprocess
    try:
        p = subprocess.Popen([cmd, u"-d", path_file_dest],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[8] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return
def erase_files_from_play_out_prepare(sendung_art):
    """delete files in play-out, preparation"""
    lib_cm.message_write_to_console(ac, u"erase_files_from_play_out_prepare")
    # paths
    if sendung_art == "Info-Time":
        path_sendung_source = db.ac_config_servpath_a[5]
        path_sendung_dest = db.ac_config_servpath_a[9]
        log_message = u"Infotime und Magazin in Play_Out loeschen..."
    if sendung_art == "Sendung normal":
        path_sendung_source = db.ac_config_servpath_a[6]
        path_sendung_dest = db.ac_config_servpath_a[10]
        log_message = u"Sendungen in Play_Out loeschen..."
    db.write_log_to_db(ac, log_message, "p")
    # adjust paths
    path_sendung_source = lib_cm.check_slashes(ac, path_sendung_source)
    path_sendung_dest = lib_cm.check_slashes(ac, path_sendung_dest)
    lib_cm.message_write_to_console(ac, path_sendung_source)
    lib_cm.message_write_to_console(ac, path_sendung_dest)
    # source: files in list
    try:
        files_sendung_source = os.listdir(path_sendung_source)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
        return None
def erase_files_from_play_out(files_sendung_source, path_sendung_source,
        path_sendung_dest, dir_year, sendung_art):
    """delete files in play-out"""
    lib_cm.message_write_to_console(ac, u"erase_files_from_play_out")
    # times
    lib_cm.message_write_to_console(ac, db.ac_config_1[3])
    date_back = (datetime.datetime.now()
        + datetime.timedelta(days=-int(db.ac_config_1[3])))
    lib_cm.message_write_to_console(ac, db.ac_config_1[4])
    nr_of_files_to_archive = int(db.ac_config_1[4])
    # adjust path
    path_sendung_dest += lib_cm.check_slashes(ac, dir_year)
    log_message = (u"Dateien von " + dir_year + u" loeschen aus: "
        + path_sendung_source)
    db.write_log_to_db(ac, log_message, "v")
    c_date_back = date_back.strftime("%Y-%m-%d")
    db.write_log_to_db_a(ac, u"Sendedatum muss vor " + c_date_back
        + " liegen", "t", "write_also_to_console")
    try:
        files_sendung_dest = os.listdir(path_sendung_dest)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
        return
def check_mpd_log(self, time_now, log_data):
    """load data from mpd"""
    # 1. determine the playing file
    # 2. if there is an id, fetch the data from the db, otherwise use the tags
    # load current song
    mpd_result = mpd.connect(db, ac)
    if mpd_result is None:
        db.write_log_to_db_a(ac, ac.app_errorslist[6], "x",
            "write_also_to_console")
        self.display_logging("No MPD-Connect", None)
        return None
    current_song = mpd.exec_command(db, ac, "song", None)
    if current_song is None:
        db.write_log_to_db_a(ac, ac.app_errorslist[7], "x",
            "write_also_to_console")
        mpd.disconnect()
        self.display_logging("Aktueller Song nicht von MPD ermittelbar", None)
        return None
    mpd.disconnect()
    if "id" not in current_song:
        self.display_logging("Aktuelle id von MPD nicht ermittelbar", None)
        return None
    # with a stream the id can stay equal while the title changes;
    # several ids in a stream are a mess, so we do not display this
    lib_cm.message_write_to_console(ac, current_song["id"])
    if current_song["id"] == ac.log_songid:
        log_meldung_1 = ("Keine Aenderung des MPD-Song-Status... \n"
            + ac.log_author + " - " + ac.log_title)
        self.display_logging(log_meldung_1, None)
        return None
    else:
        log_author_title = work_on_data_from_log(time_now, current_song,
            "mpd")
        ac.log_author = log_author_title[0]
        ac.log_title = log_author_title[1]
        ac.log_songid = current_song["id"]
        ac.log_start = (str(time_now.date()) + " "
            + str(time_now.time())[0:8])
        return True
def check_lenght(source_file):
    """calc length of news file"""
    lib_cm.message_write_to_console(ac, u"Laenge der News ermitteln")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[3].encode(ac.app_encode_out_strings)
    #cmd = "soxi"
    lib_cm.message_write_to_console(ac, cmd)
    # start subprocess
    try:
        p = subprocess.Popen([cmd, u"-d", source_file],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[4] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def copy_media_to_play_out(path_file_source, dest_file):
    """copy audiofile"""
    success_copy = None
    try:
        shutil.move(path_file_source, dest_file)
        db.write_log_to_db_a(ac, u"Audio Vorproduktion: "
            + path_file_source.encode("ascii", "ignore"),
            "v", "write_also_to_console")
        db.write_log_to_db_a(ac, u"Audio kopiert: " + dest_file, "c",
            "write_also_to_console")
        success_copy = True
    except Exception, e:
        db.write_log_to_db_a(ac, ac.app_errorslist[1], "x",
            "write_also_to_console")
        log_message = u"copy_files_to_dir_retry Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
def load_sg_first(sg_cont_nr):
    """search first-sg for repeat"""
    lib_cm.message_write_to_console(ac, u"Erstsendung zur WH suchen")
    db_tbl_condition = ("A.SG_HF_FIRST_SG = 'T' AND A.SG_HF_CONTENT_ID='"
        + str(sg_cont_nr) + "' ")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(ac, db,
        db_tbl_condition)
    if sendung_data is None:
        log_message = u"Keine Erstsendung zu Wiederholungssendung gefunden "
        db.write_log_to_db(ac, log_message, "t")
        return sendung_data
    return sendung_data
def load_sg(sg_titel):
    """search show"""
    lib_cm.message_write_to_console(ac, u"Sendung suchen")
    db_tbl_condition = ("SUBSTRING(A.SG_HF_TIME FROM 1 FOR 10) >= '"
        + str(ac.time_target.date()) + "' "
        + "AND A.SG_HF_FIRST_SG='T' "
        + "AND B.SG_HF_CONT_TITEL='" + sg_titel + "' ")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_b(ac, db,
        db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Sendung mit diesem Titel gefunden: "
            + sg_titel.encode('ascii', 'ignore'))
        db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
        return sendung_data
    return sendung_data
def load_roboting_sgs():
    """search shows"""
    lib_cm.message_write_to_console(ac,
        u"Sendungen suchen, die bearbeitet werden sollen")
    sendungen_data = db.read_tbl_rows_with_cond(ac, db, "SG_HF_ROBOT",
        "SG_HF_ROB_TITEL, SG_HF_ROB_OUT_DROPB, SG_HF_ROB_FILE_OUT_DB, "
        "SG_HF_ROB_OUT_FTP, SG_HF_ROB_FILE_OUT_FTP",
        "SG_HF_ROB_VP_OUT ='T'")
    if sendungen_data is None:
        log_message = u"Keine Sendungen fuer externe VP vorgesehen.. "
        db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
        return sendungen_data
    return sendungen_data
def extract_time(website, match_string_2, charackters_forwards):
    """extract uptime from area"""
    index_a_ident = string.find(website, match_string_2)
    if index_a_ident == -1:
        t_uptime_current = None
        return t_uptime_current
    index_a_begin = index_a_ident + int(charackters_forwards)
    index_a_end = index_a_begin + 20
    # determine the website uptime for the filename or the db record id
    c_uptime_current = website[index_a_begin:index_a_end]
    lib_cm.message_write_to_console(ac, c_uptime_current)
    t_uptime_current = time.strptime(c_uptime_current, "%d %b %Y %H:%M:%S")
    return t_uptime_current
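# A minimal, hypothetical self-check for extract_time() above (not part of the
# original module): it assumes the matched area carries a timestamp such as
# "01 Jan 2024 12:00:00", which is what the 20-character slice and the
# "%d %b %Y %H:%M:%S" pattern expect. The sample text and the marker
# "uptime since " are made up for illustration.
def _example_extract_time():
    """illustrative only: show the uptime format extract_time() expects"""
    sample = "... uptime since 01 Jan 2024 12:00:00 ..."
    marker = "uptime since "
    # passing len(marker) as charackters_forwards jumps right to the timestamp
    return extract_time(sample, marker, len(marker))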
def check_file_dest_ftp(path_ftp, filename_dest):
    """check if file exists on the destination ftp"""
    lib_cm.message_write_to_console(ac, "check_files_online_ftp")
    file_online = False
    ftp = ftp_connect_and_dir(path_ftp)
    if ftp is None:
        return
    files_online = []
    try:
        files_online = ftp.nlst()
    except ftplib.error_perm, resp:
        if str(resp) == "550 No files found":
            lib_cm.message_write_to_console(ac,
                u"ftp: no files in this directory")
        else:
            log_message = (ac.app_errorslist[9])
            db.write_log_to_db_a(ac, log_message, "x",
                "write_also_to_console")
def copy_to_cloud(path_file_source, path_file_dest):
    """copy audiofile and infofile"""
    success_copy = None
    try:
        shutil.copy(path_file_source, path_file_dest)
        db.write_log_to_db_a(ac, u"Audio Vorproduktion: "
            + path_file_source.encode('ascii', 'ignore'),
            "v", "write_also_to_console")
        db.write_log_to_db_a(ac, u"Audio kopiert nach: " + path_file_dest,
            "c", "write_also_to_console")
        success_copy = True
    except Exception, e:
        db.write_log_to_db_a(ac, ac.app_errorslist[1], "x",
            "write_also_to_console")
        log_message = u"copy_files_to_dir_retry Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
def erase_files_from_ftp(c_date_back):
    """erase files from ftp"""
    lib_cm.message_write_to_console(ac, "erase files from ftp")
    path_ftp = lib_cm.check_slashes(ac, db.ac_config_1[6])
    ftp = ftp_connect_and_dir(path_ftp)
    if ftp is None:
        return
    files_online = []
    try:
        files_online = ftp.nlst()
    except ftplib.error_perm, resp:
        if str(resp) == "550 No files found":
            lib_cm.message_write_to_console(ac,
                u"ftp: no files in this directory")
        else:
            log_message = (ac.app_errorslist[9])
            db.write_log_to_db_a(ac, log_message, "x",
                "write_also_to_console")
            return None
def delete_files_online_ftp():
    """delete old files on the webspace"""
    lib_cm.message_write_to_console(ac, u"delete_files_online ftp")
    ftp = ftp_connect_and_dir()
    if ftp is None:
        return None
    # read online files
    files_online = []
    try:
        files_online = ftp.nlst()
    except ftplib.error_perm, resp:
        if str(resp) == "550 No files found":
            lib_cm.message_write_to_console(ac,
                u"ftp: no files in this directory")
        else:
            log_message = (ac.app_errorslist[8] + " - " + str(resp))
            db.write_log_to_db_a(ac, log_message, "x",
                "write_also_to_console")
def sftp_connect():
    """connect to sftp, login"""
    try:
        # open a transport
        transport = paramiko.Transport(
            (db.ac_config_1[5], int(db.ac_config_1[6])))
        # auth
        transport.connect(
            username=db.ac_config_1[7], password=db.ac_config_1[8])
        # go!
        sftp = paramiko.SFTPClient.from_transport(transport)
        return sftp, transport
    except Exception as e:
        lib_cm.message_write_to_console(ac, e)
        db.write_log_to_db_a(ac, ac.app_errorslist[1], "x",
            "write_also_to_console")
        return None
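# A minimal, hypothetical usage sketch for sftp_connect() above (not part of
# the original module): the helper returns a (sftp, transport) tuple on
# success or None on failure, so a caller has to unpack the pair and close
# both handles when done. The file names are placeholders.
def _example_sftp_upload(local_file, remote_file):
    """illustrative only: upload one file via sftp_connect() and clean up"""
    result = sftp_connect()
    if result is None:
        return None
    sftp, transport = result
    try:
        sftp.put(local_file, remote_file)
    finally:
        sftp.close()
        transport.close()
    return True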
def check_live_sendungen(sg_id):
    """check if show is live"""
    lib_cm.message_write_to_console(ac, "check_live_sendungen")
    db_tbl_condition = ("A.SG_HF_ID= '" + sg_id + "' AND A.SG_HF_LIVE='T'")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Live-Sendungen: " + sg_id)
    else:
        log_message = (u"Live-Sendungen: " + sg_id)
    db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
    return sendung_data
def erase_files_from_cloud(c_date_back):
    """erase files from cloud"""
    db.write_log_to_db_a(ac, u"Veraltete Dateien in Cloud loeschen", "p",
        "write_also_to_console")
    # paths
    path_sendung_dest_base = lib_cm.check_slashes(ac,
        db.ac_config_servpath_b[5])
    path_sendung_dest = (path_sendung_dest_base
        + lib_cm.check_slashes(ac, db.ac_config_1[5]))
    lib_cm.message_write_to_console(ac, path_sendung_dest)
    try:
        files_sendung_dest = os.listdir(path_sendung_dest)
    except Exception, e:
        log_message = ac.app_errorslist[3] + u": %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
        return
def calk_date_month(option):
    """calc diff between weekdays for the upcoming month"""
    # thanks to
    # http://code.activestate.com/recipes/425607-findng-the-xth-day-in-a-month/
    lib_cm.message_write_to_console(ac, ac.time_target_start.date())
    n_month = ac.time_target_start.month + 1
    if n_month > 12:
        # December rolls over to January of the following year
        n_month = 1
        n_year = ac.time_target_start.year + 1
    else:
        n_year = ac.time_target_start.year
    bom, days = monthrange(n_year, n_month)
    firstmatch = (int(option[1]) - bom) % 7 + 1
    try:
        n_day = xrange(firstmatch, days + 1, 7)[int(option[2])]
    except Exception, e:
        lib_cm.message_write_to_console(ac, "Fehler: %s" % str(e))
        return None
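# A small, hypothetical sanity check for the month rollover handled above
# (not part of the original module): for a December start date the "next
# month" must wrap to January of the following year before monthrange() is
# called.
def _example_next_month(start_date):
    """illustrative only: return (year, month) of the month after start_date"""
    n_month = start_date.month + 1
    if n_month > 12:
        return start_date.year + 1, 1
    return start_date.year, n_month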
def write_sg_main(l_data_sg_main):
    """register main-sg"""
    lib_cm.message_write_to_console(ac, u"Sendung eintragen")
    sql_command_sg_main = (
        "INSERT INTO SG_HF_MAIN( SG_HF_ID, "
        "SG_HF_CONTENT_ID, SG_HF_TIME, "
        "SG_HF_DURATION, SG_HF_INFOTIME, SG_HF_MAGAZINE, "
        "SG_HF_PODCAST, SG_HF_ON_AIR, "
        "SG_HF_SOURCE_ID, SG_HF_REPEAT_PROTO, SG_HF_FIRST_SG)"
        " values ('" + str(l_data_sg_main[0]) + "', '"
        + str(l_data_sg_main[1]) + "', '"
        + str(l_data_sg_main[2]) + "', '"
        + l_data_sg_main[3] + "', '"
        + l_data_sg_main[4] + "', '"
        + l_data_sg_main[5] + "', '"
        + l_data_sg_main[6] + "', '"
        + l_data_sg_main[7] + "', '"
        + l_data_sg_main[8] + "', '"
        + l_data_sg_main[9] + "', '"
        + l_data_sg_main[10] + "')")
    lib_cm.message_write_to_console(ac, sql_command_sg_main)
    db_op_success_main = db.exec_sql(ac, db, sql_command_sg_main)
    return db_op_success_main
def ftp_connect_and_dir(path_ftp):
    """connect to ftp, login and change dir"""
    try:
        ftp = ftplib.FTP(db.ac_config_1[7])
    except (socket.error, socket.gaierror):
        lib_cm.message_write_to_console(ac, u"ftp: no connect to: "
            + db.ac_config_1[7])
        db.write_log_to_db_a(ac, ac.app_errorslist[6], "x",
            "write_also_to_console")
        return None
    try:
        ftp.login(db.ac_config_1[8], db.ac_config_1[9])
    except ftplib.error_perm, resp:
        lib_cm.message_write_to_console(ac, "ftp: no login to: "
            + db.ac_config_1[7])
        log_message = (ac.app_errorslist[7] + " - " + db.ac_config_1[7])
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def load_prev_sendungen():
    """search for shows from the upcoming hour"""
    lib_cm.message_write_to_console(ac, "load_prev_sendungen")
    # zfill pads single-digit hours with a leading zero
    c_date_time = (str(ac.time_target.date()) + " "
        + str(ac.time_target.hour).zfill(2))
    db_tbl_condition = ("SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) >= '"
        + c_date_time + "' "
        "AND A.SG_HF_INFOTIME='F' AND A.SG_HF_MAGAZINE='F'")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = u"Keine Sendungen für: " + str(ac.time_target.date())
        db.write_log_to_db(ac, log_message, "t")
        return sendung_data
    return sendung_data
def fetch_media_ftp(dest_file):
    """fetch the mp3 file from the server"""
    lib_cm.message_write_to_console(ac, u"mp3-File von Server holen")
    # all cmds must be in the right charset
    cmd = db.ac_config_etools[1].encode(ac.app_encode_out_strings)
    #cmd = "wget"
    url_source_file = db.ac_config_1[7].encode(ac.app_encode_out_strings)
    url_user = "******" + db.ac_config_1[8].encode(ac.app_encode_out_strings)
    # the credentials and the wget call are redacted in the source ("******");
    # only the tail of the error handling survives
    url_pw = "--password="******": %s" % str(e)
    db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
    return
def write_sg_cont(l_data_sg_content):
    """register content-sg"""
    sql_command_sg_cont = (
        "INSERT INTO SG_HF_CONTENT( SG_HF_CONT_ID, "
        "SG_HF_CONT_SG_ID, "
        "SG_HF_CONT_AD_ID, SG_HF_CONT_TITEL, "
        "SG_HF_CONT_FILENAME, SG_HF_CONT_STICHWORTE, "
        "SG_HF_CONT_GENRE_ID, SG_HF_CONT_SPEECH_ID, "
        "SG_HF_CONT_TEAMPRODUCTION, "
        "SG_HF_CONT_UNTERTITEL, SG_HF_CONT_REGIEANWEISUNG, SG_HF_CONT_WEB )"
        " values ('" + str(l_data_sg_content[0]) + "', '"
        + str(l_data_sg_content[1]) + "', '"
        + str(l_data_sg_content[2]) + "', '"
        + l_data_sg_content[3] + "', '"
        + l_data_sg_content[4] + "', '"
        + l_data_sg_content[5] + "', '"
        + l_data_sg_content[6] + "', '"
        + l_data_sg_content[7] + "', '"
        + l_data_sg_content[8] + "', '"
        + l_data_sg_content[9] + "', '"
        + l_data_sg_content[10] + "', '"
        + l_data_sg_content[11] + "')")
    lib_cm.message_write_to_console(ac, sql_command_sg_cont)
    db_op_success_cont = db.exec_sql(ac, db, sql_command_sg_cont)
    return db_op_success_cont
def load_sg(sg_titel, dub_way):
    """search first-sg for template"""
    lib_cm.message_write_to_console(ac, u"Sendung als Vorlage suchen")
    db_tbl_condition = ("A.SG_HF_FIRST_SG ='T' "
        "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 19) >= '"
        + ac.time_target_start.strftime("%Y-%m-%d %T") + "' "
        "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 19) <= '"
        + ac.time_target_end.strftime("%Y-%m-%d %T") + "' "
        "AND B.SG_HF_CONT_TITEL='" + sg_titel + "' ")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_c(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Erst-Sendung mit diesem Titel "
            "in diesem Zeitraum gefunden: " + sg_titel + " - "
            + ac.time_target_start.strftime("%Y-%m-%d %T") + " bis "
            + ac.time_target_end.strftime("%Y-%m-%d %T"))
        db.write_log_to_db_a(ac, log_message, "e", "write_also_to_console")
        return sendung_data
    return sendung_data
def erase_files_from_play_out_old_year(files_sendung_source,
        path_sendung_source, path_sendung_dest, dir_year, sendung_art):
    """delete files from previous years in play-out"""
    lib_cm.message_write_to_console(ac, u"erase_files_from_play_out_old_year")
    # times
    lib_cm.message_write_to_console(ac, db.ac_config_1[3])
    days_back = int(db.ac_config_1[3])
    date_back = (datetime.datetime.now()
        + datetime.timedelta(days=-days_back))
    nr_of_files_to_archive = int(db.ac_config_1[4])
    # adjust path
    path_sendung_dest += lib_cm.check_slashes(ac, dir_year)
    log_message = ("Dateien von " + dir_year
        + " loeschen, die in Folgejahren erneut gesendet wurden: "
        + path_sendung_source)
    db.write_log_to_db(ac, log_message, "v")
    c_date_back = date_back.strftime("%Y-%m-%d")
    db.write_log_to_db_a(ac, u"Wiederholungs-Sendedatum muss vor "
        + c_date_back + " liegen", "t", "write_also_to_console")
    try:
        files_sendung_dest = os.listdir(path_sendung_dest)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message)
        db.write_log_to_db(ac, log_message, "x")
        return
def load_live_sendungen(up_to_target_hour, live):
    """search for live shows for the upcoming hour"""
    lib_cm.message_write_to_console(ac, "load_live_sendungen")
    # zfill pads single-digit hours with a leading zero
    c_date_time_from = (str(ac.time_target.date()) + " "
        + str(ac.time_target.hour).zfill(2))
    c_date_time_to = (str(ac.time_target.date()) + " "
        + str(ac.time_target.hour + up_to_target_hour).zfill(2))
    if live is True:
        db_tbl_condition = (
            "A.SG_HF_ON_AIR = 'T' "
            "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) >= '"
            + c_date_time_from + "' "
            "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) <= '"
            + c_date_time_to + "' "
            "AND A.SG_HF_INFOTIME='F' AND A.SG_HF_MAGAZINE='F' "
            "AND A.SG_HF_LIVE='T'")
    else:
        db_tbl_condition = (
            "A.SG_HF_ON_AIR = 'T' "
            "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) >= '"
            + c_date_time_from + "' "
            "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 13) <= '"
            + c_date_time_to + "' "
            "AND A.SG_HF_INFOTIME='F' AND A.SG_HF_MAGAZINE='F'")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Live-Sendungen fuer: " + c_date_time_from
            + u" bis " + c_date_time_to)
    else:
        log_message = (u"Live-Sendungen vorhanden von: " + c_date_time_from
            + u" bis " + c_date_time_to + " Uhr")
    db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
    return sendung_data
def check_listeners(listeners_number, listeners_option, sql_time):
    """compare peak listeners with the last registration"""
    # number from the last registration
    reg_listeners = db.read_tbl_row_with_cond(ac, db,
        "ST_WEB_STREAM_LISTENERS",
        "FIRST 1 ST_WEB_STREAM_LIS_ID, ST_WEB_STREAM_LIS_NUMBER ",
        "ST_WEB_STREAM_LIS_OPTION = '" + listeners_option
        + "' ORDER BY ST_WEB_STREAM_LIS_ID DESC")
    if reg_listeners is None:
        sql_command = ("INSERT INTO ST_WEB_STREAM_LISTENERS"
            "(ST_WEB_STREAM_LIS_NUMBER, ST_WEB_STREAM_LIS_OPTION)"
            " values ('" + listeners_number + "', '"
            + listeners_option + "')")
        lib_cm.message_write_to_console(ac, "D-satz neu: " + listeners_number)
        log_message = (u"Statistik Webstream-Hoerer "
            "nach neuer Verbindung aktualisiert. Anzahl: " + listeners_number)
        write_listeners_to_db_1(sql_command, log_message)
        return
    if int(listeners_number) < int(reg_listeners[1]):
        # insert a new record
        sql_command = ("INSERT INTO ST_WEB_STREAM_LISTENERS"
            "(ST_WEB_STREAM_LIS_NUMBER, ST_WEB_STREAM_LIS_OPTION)"
            " values ('" + listeners_number + "', '"
            + listeners_option + "')")
        lib_cm.message_write_to_console(ac, "D-satz neu: " + listeners_number)
        if listeners_option == "C":
            log_message = (u"Webstream-Hoerer aktuell "
                "aktualisiert. Anzahl: " + listeners_number)
        else:
            log_message = (u"Webstream-Hoerer max "
                "neu registriert. Anzahl: " + listeners_number)
        write_listeners_to_db_1(sql_command, log_message)
    elif int(listeners_number) > int(reg_listeners[1]):
        # update the existing record
        sql_command = ("UPDATE ST_WEB_STREAM_LISTENERS "
            "SET ST_WEB_STREAM_LIS_NUMBER = '" + listeners_number
            + "' where ST_WEB_STREAM_LIS_ID = " + str(reg_listeners[0]))
        lib_cm.message_write_to_console(
            ac, "Datensatz aktualisiert: " + listeners_number)
        if listeners_option == "C":
            log_message = (u"Webstream-Hoerer aktuell "
                "aktualisiert. Anzahl: " + listeners_number)
        else:
            log_message = (u"Webstream-Hoerer max "
                "aktualisiert. Anzahl: " + listeners_number)
        write_listeners_to_db_1(sql_command, log_message)
    elif int(listeners_number) == int(reg_listeners[1]):
        # nothing to do
        log_message = (u"Webstream scheint zu laufen, "
            "keine Aenderung der " + listeners_option + " Hoererzahl")
        db.write_log_to_db(ac, log_message, "p")
    return
def load_podcast():
    """check for Podcasts in db"""
    lib_cm.message_write_to_console(ac, u"load_podcast")
    db_tbl_condition = ("A.SG_HF_ON_AIR = 'T' AND "
        + "SUBSTRING(A.SG_HF_TIME FROM 1 FOR 10) = '"
        + str(ac.time_target.date()) + "' "
        + "AND A.SG_HF_PODCAST='T' ")
    # ORDER BY A.SG_HF_TIME is added in read_tbl_rows_sg_cont_ad_with_cond_a
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(ac, db,
        db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine Podcast-Sendungen für: "
            + str(ac.time_target.date()))
        db.write_log_to_db(ac, log_message, "t")
        return sendung_data
    log_message = (u"Podcast-Sendungen vorhanden für: "
        + str(ac.time_target.date()))
    db.write_log_to_db(ac, log_message, "t")
    return sendung_data
def lets_rock():
    """main function"""
    print "lets_rock "
    # read the active producers in the address table
    user_active_number = db.count_rows(ac, db, "AD_MAIN",
        "AD_USER_OK_AKTIV='T'")
    if user_active_number is None:
        # Error 000: failed to determine the active producers
        db.write_log_to_db_a(ac, ac.app_errorslist[0], "x",
            "write_also_to_console")
        return
    log_message = "Aktive Macher: " + str(user_active_number)
    db.write_log_to_db(ac, log_message, "t")
    lib_cm.message_write_to_console(ac, log_message)
    # save the number
    sql_command = (
        "INSERT INTO ST_USER_OK_ACTIVE ( ST_USER_OK_ACTIVE_NUMBER ) "
        "VALUES ( '" + str(user_active_number) + "')")
    db_ok = db.exec_sql(ac, db, sql_command)
    if db_ok is None:
        # Error 001: failed to register the active producers in the database
        err_message = (ac.app_desc + " " + ac.app_errorslist[1] + " "
            + str(user_active_number))
        db.write_log_to_db_a(ac, err_message, "x", "write_also_to_console")
        return
    # reset user_active
    sql_command = "UPDATE AD_MAIN SET AD_USER_OK_AKTIV='F' "
    db_ok_1 = db.exec_sql(ac, db, sql_command)
    if db_ok_1 is None:
        # Error 002: failed to reset the active producers in the database
        db.write_log_to_db_a(ac, ac.app_errorslist[2], "x",
            "write_also_to_console")
        return
    return
def load_sg():
    """search news in db"""
    lib_cm.message_write_to_console(ac, "search news in db")
    db_tbl_condition = ("A.SG_HF_ON_AIR = 'T' "
        "AND SUBSTRING(A.SG_HF_TIME FROM 1 FOR 16) = '"
        + ac.time_target_start.strftime("%Y-%m-%d %H") + ":"
        + db.ac_config_1[3].strip() + "' "
        "AND A.SG_HF_INFOTIME = 'T' "
        "AND B.SG_HF_CONT_TITEL = '" + db.ac_config_1[2].strip() + "'")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_b(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Keine externen News "
            "fuer diese Zeit vorgesehen: "
            + ac.time_target_start.strftime("%Y-%m-%d %H") + ":"
            + db.ac_config_1[3].strip() + " Uhr")
        db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
        return sendung_data
    return sendung_data
def lets_rock():
    """main function"""
    print "lets_rock "
    # extended params
    load_extended_params_ok = load_extended_params()
    if load_extended_params_ok is None:
        return
    lib_cm.message_write_to_console(ac, u"lets_rock check_and_work_on_files")
    path_source = lib_cm.check_slashes(ac, db.ac_config_servpath_b[3])
    path_dest = lib_cm.check_slashes(ac, db.ac_config_servpath_b[4])
    # read the mp3gain folder
    try:
        files_source = os.listdir(path_source)
    except Exception, e:
        log_message = u"read_files_from_dir Error: %s" % str(e)
        lib_cm.message_write_to_console(ac, log_message + path_source)
        db.write_log_to_db(ac, log_message, "x")
        return None
def fetch_media_ftp(dest_file, url_source_file):
    """fetch the mp3 file from the server"""
    lib_cm.message_write_to_console(ac, u"mp3-File von Server holen")
    # all cmds must be in the right charset
    cmd = db.ac_config_etools[1].encode(ac.app_encode_out_strings)
    #cmd = "wget"
    if url_source_file[0:7] == "http://":
        # download via http needs a different wget syntax
        # the credentials and the wget call are redacted in the source
        # ("******"); only the tail of the error handling survives
        url_user = ("--user="******"--password="******": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return
def add_id3(sendung_data):
    """write id3 tag into the mp3 file"""
    lib_cm.message_write_to_console(ac, u"id3-Tag in mp3-File schreiben")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[4].encode(ac.app_encode_out_strings)
    #cmd = "id3v2"
    dest_path = lib_cm.check_slashes(ac, db.ac_config_servpath_a[1])
    dest_path_file = dest_path + sendung_data[12]
    c_author = (sendung_data[15].encode(ac.app_encode_out_strings) + " "
        + sendung_data[16].encode(ac.app_encode_out_strings))
    c_title = sendung_data[11].encode(ac.app_encode_out_strings)
    lib_cm.message_write_to_console(ac, cmd)
    # start subprocess
    try:
        p = subprocess.Popen(
            [cmd, u"-a", c_author, u"-t", c_title, dest_path_file],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[8] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def ftp_upload(path_f_source, path_ftp, filename_dest):
    """upload file"""
    success_upload = False
    lib_cm.message_write_to_console(ac, u"upload_file")
    ftp = ftp_connect_and_dir(path_ftp)
    if ftp is None:
        return
    if os.path.isfile(path_f_source):
        if ac.app_windows == "yes":
            f = open(path_f_source, "rb")
        else:
            f = open(path_f_source, "r")
    else:
        return success_upload
    try:
        c_ftp_cmd = "STOR " + filename_dest
        ftp.storbinary(c_ftp_cmd, f)
    except ftplib.error_perm, resp:
        log_message = (ac.app_errorslist[9] + " - " + db.ac_config_1[7])
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        f.close()
        return success_upload
def mix_bed():
    """add the soundbed underneath"""
    lib_cm.message_write_to_console(ac, u"Soundbed drunter legen")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[2].encode(ac.app_encode_out_strings)
    #cmd = "sox"
    lib_cm.message_write_to_console(ac, cmd)
    news_file = ac.app_file_orig_temp + "_comp.wav"
    news_file_temp = news_file.replace("_comp.wav", "_temp.wav")
    # start subprocess
    try:
        p = subprocess.Popen([cmd, u"-S", u"-m", ac.app_file_bed_trim,
            news_file, news_file_temp],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[6] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return
def search_sg(sg_titel, t_sg_time):
    """check if show already exists"""
    lib_cm.message_write_to_console(
        ac, u"Pruefen ob geplante Sendebuchung schon vorhanden")
    db_tbl_condition = ("A.SG_HF_FIRST_SG ='T' AND "
        "SUBSTRING(A.SG_HF_TIME FROM 1 FOR 19) = '" + str(t_sg_time) + "' "
        + "AND B.SG_HF_CONT_TITEL='" + sg_titel + "' ")
    sendung_data = db.read_tbl_rows_sg_cont_ad_with_cond_a(
        ac, db, db_tbl_condition)
    if sendung_data is None:
        log_message = (u"Noch keine Sendung mit diesem Titel "
            "zu dieser Zeit gefunden: " + sg_titel.encode('ascii', 'ignore')
            + " " + str(t_sg_time))
    else:
        log_message = (u"Sendung bereits gebucht mit diesem Titel "
            "zu dieser Zeit: " + sg_titel.encode('ascii', 'ignore')
            + " " + str(t_sg_time))
    db.write_log_to_db_a(ac, log_message, "t", "write_also_to_console")
    return sendung_data
def trim_bed(c_lenght):
    """trim the soundbed to the length of the news"""
    lib_cm.message_write_to_console(ac, u"Soundbed auf News trimmen")
    # use the right char-encoding for subprocesses
    cmd = db.ac_config_etools[2].encode(ac.app_encode_out_strings)
    #cmd = "sox"
    lib_cm.message_write_to_console(ac, c_lenght)
    source_path = ac.app_homepath + lib_cm.check_slashes(
        ac, db.ac_config_1[10])
    source_file = source_path + ac.app_file_bed
    dest_file = ac.app_file_bed_trim
    # start subprocess
    l = c_lenght[0:7]
    try:
        p = subprocess.Popen(
            [cmd, u"-S", source_file, dest_file, u"trim", u"0", l],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    except Exception, e:
        log_message = ac.app_errorslist[5] + u": %s" % str(e)
        db.write_log_to_db_a(ac, log_message, "x", "write_also_to_console")
        return None
def log_duration(list_live_sendungen):
    """log recordings and calc length"""
    duration = 0
    title = list_live_sendungen[0][11]
    for item in list_live_sendungen:
        if item[11] == title:
            live = check_live_sendungen(str(list_live_sendungen[0][0]))
            if live is None:
                # it's not live, so we must go away here
                break
            # sum duration
            duration += lib_cm.get_seconds_from_tstr(item[3])
            lib_cm.message_write_to_console(ac, duration)
            # logging
            db.write_log_to_db(ac,
                u"Mitschnitt vorbereiten: " + str(item[2]) + u" - "
                + lib_cm.replace_uchar_sonderzeichen_with_latein(item[15])
                + u" - "
                + lib_cm.replace_uchar_sonderzeichen_with_latein(item[11]),
                "t")
            db.write_log_to_db(ac, u"Live-Sendung: " + str(item[0]), "t")
        else:
            break
    return duration
def load_errors(c_time_back):
    """load errors from log"""
    twitter_errors = None
    db_tbl = "USER_LOGS A "
    db_tbl_fields = ("A.USER_LOG_ID, A.USER_LOG_TIME, A.USER_LOG_ACTION, "
        "A.USER_LOG_ICON, A.USER_LOG_MODUL_ID ")
    db_tbl_condition = ("SUBSTRING( A.USER_LOG_ICON FROM 1 FOR 1 ) = 'x' AND "
        "SUBSTRING( A.USER_LOG_TIME FROM 1 FOR 19) >= '" + c_time_back
        + "' AND A.USER_LOG_ID > " + str(ac.log_id)
        + " ORDER BY A.USER_LOG_ID")
    log_data = db.read_tbl_rows_with_cond_log(ac, db, db_tbl, db_tbl_fields,
        db_tbl_condition)
    if log_data is None:
        return
    # loop through data
    item_log_last = ""
    for row in log_data:
        if (row[2][33:94] == "-message-: -Not authorized to use this endpoint."
                "-, -code-: 37"):
            # ignore
            lib_cm.message_write_to_console(ac, "Twitter-Error-Code 37: ")
            continue
        tweetet_log = load_tweet_logs(row[0])
        if tweetet_log is not None:
            lib_cm.message_write_to_console(ac, "schon getwittert: " + row[2])
            continue
        item_log_current = (row[2] + " - "
            + row[1].strftime("%Y-%m-%d %H:%M:%S"))
        # don't bomb twitter
        item_log_current_a = row[2]
        if item_log_last == item_log_current_a:
            # duplicate
            continue
        if (row[2] == "001 Fehler beim Twittern "
                "User is over daily status update limit."):
            twitter_errors = "yes"
            return twitter_errors
        if (row[2] == "001 Fehler beim Twittern "
                "Failed to send request: [Errno -2] "
                "Name or service not known"):
            twitter_errors = "yes"
            return twitter_errors
        if row[2] == "Vorhergehende Twitter-Fehler,":
            twitter_errors = "yes"
            return twitter_errors
        lib_cm.message_write_to_console(ac, item_log_current)
        twitter_errors = tweet_message(item_log_current)
        item_log_last = row[2]
    return twitter_errors
def erase_files_from_cloud(path_dest_cloud, files_sendung_dest, c_date_back):
    """erase files from dropbox"""
    lib_cm.message_write_to_console(ac, u"erase_files_from_cloud")
    x = 0
    z = 0
    for item in files_sendung_dest:
        if item == ".dropbox":
            # don't delete the dropbox marker file
            continue
        # filenames are expected to start with the broadcast date (YYYY_MM_DD)
        if item[0:10] < c_date_back:
            try:
                file_to_delete = path_dest_cloud + item
                os.remove(file_to_delete)
                log_message = u"geloescht: " + item
                db.write_log_to_db(ac, log_message, "e")
                z += 1
            except Exception, e:
                log_message = ac.app_errorslist[4] + u": %s" % str(e)
                lib_cm.message_write_to_console(ac, log_message)
                db.write_log_to_db(ac, log_message, "x")
                x += 1