def get_sdr_data(sdr_name):
    """Look up the configured details for the named SDR.

    Reads sdr.json from CONFIG_PATH and returns a 9-tuple of
    (antenna, chipset, sdr type, centre frequency, frequency range,
    modules, sdr active, serial number, bias t). Every element is the
    placeholder '???' when no entry matches sdr_name.
    """
    MY_LOGGER.debug('get sdr data for %s', sdr_name)
    # placeholder values, returned unchanged if the SDR is not in config
    antenna = chipset = sdr_type = '???'
    centre_frequency = frequency_range = modules = '???'
    sdr_active = serial_number = bias_t = '???'
    config_data = wxcutils.load_json(CONFIG_PATH, 'sdr.json')
    for entry in config_data['sdr']:
        MY_LOGGER.debug('L1 = %s', entry)
        if entry['name'] == sdr_name:
            antenna = entry['antenna']
            chipset = entry['chipset']
            sdr_type = entry['sdr type']
            centre_frequency = entry['centre frequency']
            frequency_range = entry['frequency range']
            modules = entry['modules']
            sdr_active = entry['sdr active']
            serial_number = entry['serial number']
            bias_t = entry['bias t']
    MY_LOGGER.debug('%s %s %s %s %s %s %s %s %s',
                    antenna, chipset, sdr_type, centre_frequency,
                    frequency_range, modules, sdr_active,
                    serial_number, bias_t)
    return antenna, chipset, sdr_type, centre_frequency, frequency_range, \
        modules, sdr_active, serial_number, bias_t
def webhooks(w_config_path, w_config_file, w_site_config_file, w_imagesfile,
             w_satellite, w_location, w_colour, w_elevation, w_duration,
             w_pass_start, w_channel_a, w_channel_b, w_description):
    """send data to webhooks as configured"""
    MY_UTIL_LOGGER.debug('webhooks called with %s %s %s %s %s %s %s %s %s %s %s %s %s',
                         w_config_path, w_config_file, w_site_config_file,
                         w_imagesfile, w_satellite, w_location, w_colour,
                         w_elevation, w_duration, w_pass_start,
                         w_channel_a, w_channel_b, w_description)
    # convert w_colour from hex string to an int
    w_colour = int(w_colour, 16)
    hook_config = wxcutils.load_json(w_config_path, w_config_file)
    site_config = wxcutils.load_json(w_config_path, w_site_config_file)
    MY_UTIL_LOGGER.debug('Iterate through webhooks')
    for hook_url in hook_config['webhooks']:
        # only log the URL tail - the full URL is a secret
        MY_UTIL_LOGGER.debug('webhook last 3 chars = %s',
                             hook_url[len(hook_url) - 3:])
        hook = DiscordWebhook(url=hook_url)
        # create embed object for webhook
        embed = DiscordEmbed(title=w_satellite, description=w_location,
                             color=w_colour)
        # set image
        embed.set_image(url=w_imagesfile)
        # set footer
        embed.set_footer(text=hook_config['footer'].replace('[SITE]',
                                                            site_config['website']))
        # add fields to embed
        embed.add_embed_field(name='Satellite',
                              value=':satellite_orbital:' + w_satellite)
        embed.add_embed_field(name='Max Elevation', value=(w_elevation + '°'))
        embed.add_embed_field(name='Duration', value=(w_duration + ' seconds'))
        embed.add_embed_field(name='Pass start', value=w_pass_start)
        # optional fields - only added when non-empty
        if w_channel_a != '':
            embed.add_embed_field(name='Channel A', value=w_channel_a)
        if w_channel_b != '':
            embed.add_embed_field(name='Channel B', value=w_channel_b)
        if w_description != '':
            embed.add_embed_field(name='Pass Description', value=w_description)
        # add embed object to webhook and fire it
        hook.add_embed(embed)
        hook_response = hook.execute()
        MY_UTIL_LOGGER.debug('response = %s', hook_response)
def send_email(se_config_file, se_subject):
    """Send a notification email using settings from se_config_file.

    Builds a multipart (plain + html) message from se_subject and the
    module-level ALERT_INFO text, then delivers it over SMTP with
    STARTTLS to every address in the config's comma-separated 'notify'
    list.

    Returns True when the email was sent, False on any sending error.
    """
    se_ok_status = True
    MY_LOGGER.debug('Using config file %s', se_config_file)
    # load email config
    email_info = wxcutils.load_json(CONFIG_PATH, se_config_file)
    # don't log ALL the email config, it includes a password
    # setup the message
    message = MIMEMultipart('alternative')
    message['Subject'] = se_subject
    message['From'] = email_info['from']
    message['To'] = email_info['notify']
    MY_LOGGER.debug('Sending (header) to = %s', email_info['notify'])
    MY_LOGGER.debug('Sending (deliver) to:')
    for email_address in email_info['notify'].split(','):
        MY_LOGGER.debug('EMAIL TO -----> %s', email_address)
    # plain text
    se_text = se_subject + ALERT_INFO + NEWLINE
    MY_LOGGER.debug('se_text = %s', se_text)
    # html text
    se_html = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">' + \
        '<html><head>' + \
        '<title>' + se_subject + '</title></head>' + NEWLINE + \
        '<body><h2>' + se_subject + ' - ' + ALERT_INFO + '</h2>' + NEWLINE + \
        '</body></html>'
    MY_LOGGER.debug('se_html = %s', se_html)
    # build email
    message.attach(MIMEText(se_text, "plain"))
    message.attach(MIMEText(se_html, "html"))
    # send email
    try:
        MY_LOGGER.debug('Trying to send email')
        context = ssl.create_default_context()
        with smtplib.SMTP(email_info['smtp server'],
                          email_info['smtp server port']) as server:
            server.ehlo()
            server.starttls(context=context)
            server.ehlo()
            server.login(email_info['username'], email_info['password'])
            server.sendmail(email_info['from'],
                            email_info['notify'].split(','),
                            message.as_string())
        MY_LOGGER.debug('Email sent')
    except Exception:
        # fix: was a bare except:, which would also swallow SystemExit /
        # KeyboardInterrupt - narrowed so only real errors are caught
        se_ok_status = False
        MY_LOGGER.error('Email sending error - %s %s %s',
                        sys.exc_info()[0],
                        sys.exc_info()[1],
                        sys.exc_info()[2])
    return se_ok_status
def sat_validation():
    """Poll each configured satellite receiver status feed.

    For every server listed in sat-servers.json, dial its status socket,
    read one JSON status message (retrying up to 10 times with a 2s
    pause), and collect the lock / error statistics. Results are written
    to satellite-receivers.json in OUTPUT_PATH.
    """
    sat_servers = wxcutils.load_json(CONFIG_PATH, 'sat-servers.json')
    result = []
    for sat_server in sat_servers:
        MY_LOGGER.debug('Processing %s', sat_server)
        address = 'tcp://' + sat_server['ip'] + ':' + sat_server['port']
        sub0 = Sub0(dial=address, recv_timeout=100, topics="")
        # make sure everyone is connected
        time.sleep(0.1)
        retry_counter = 1
        op = 'unset'
        while retry_counter <= 10:
            try:
                op = json.loads(sub0.recv().decode("utf-8"))
                result.append({
                    'timestamp': op['timestamp'],
                    'skipped_symbols': op['skipped_symbols'],
                    'viterbi_errors': op['viterbi_errors'],
                    'reed_solomon_errors': op['reed_solomon_errors'],
                    'ok': 'Locked' if op['ok'] else 'Unlocked',
                    'label': sat_server['label'],
                    'when': str(time.time())
                })
                break
            except Exception:
                # fix: was a bare except:, which would also swallow
                # SystemExit / KeyboardInterrupt; any receive / decode /
                # key error triggers a retry after a short sleep
                MY_LOGGER.debug('Attempt %d', retry_counter)
                MY_LOGGER.debug(
                    'Unexpected error connecting to %s : 0 %s 1 %s 2 %s',
                    address, sys.exc_info()[0], sys.exc_info()[1],
                    sys.exc_info()[2])
                retry_counter += 1
                MY_LOGGER.debug('Sleeping 2 seconds...')
                time.sleep(2)
        MY_LOGGER.debug('op %s', op)
        MY_LOGGER.debug('result = %s', result)
        # close the socket now we've finished with it - closed inside the
        # loop so each per-server socket is released, not just the last one
        sub0.close()
    wxcutils.save_json(OUTPUT_PATH, 'satellite-receivers.json', result)
def tweet_text(tt_config_path, tt_config_file, tt_text):
    """tweet text using info from the config file"""
    # read the Twitter API credentials
    credentials = wxcutils.load_json(tt_config_path, tt_config_file)
    # authentication
    MY_UTIL_LOGGER.debug('Authenticating to Twitter API')
    oauth = tweepy.OAuthHandler(credentials['consumer key'],
                                credentials['consumer secret'])
    oauth.set_access_token(credentials['access token'],
                           credentials['access token secret'])
    # get api
    twitter_api = tweepy.API(oauth)
    # send tweet
    MY_UTIL_LOGGER.debug('Sending tweet with text = %s', tt_text)
    twitter_api.update_status(tt_text)
    MY_UTIL_LOGGER.debug('Tweet sent')
def send_tweet(tt_config_path, tt_config_file, tt_text, tt_file):
    """Tweet text with an attached media file using the config file.

    Image files (.jpg / .gif / .png) are uploaded via a plain media
    upload; .mp4 files use a chunked 'tweet_video' upload. Any other
    file type is logged and skipped - the tweet is not sent.
    """
    # get the Twitter API config
    tt_config = wxcutils.load_json(tt_config_path, tt_config_file)
    # authentication
    MY_LOGGER.debug('Authenticating to Twitter API')
    tt_auth = tweepy.OAuthHandler(tt_config['consumer key'],
                                  tt_config['consumer secret'])
    tt_auth.set_access_token(tt_config['access token'],
                             tt_config['access token secret'])
    # get api
    tt_api = tweepy.API(tt_auth)
    # fix: tt_status was previously unbound when the file type was
    # unknown, raising UnboundLocalError at the final debug call
    tt_status = None
    # upload the file
    MY_LOGGER.debug('Uploading file = %s', tt_file)
    if '.jpg' in tt_file or '.gif' in tt_file or '.png' in tt_file:
        MY_LOGGER.debug('Image file upload')
        # upload file
        tt_media = tt_api.media_upload(tt_file)
        # send tweet
        MY_LOGGER.debug('Sending tweet with text = %s, image = %s',
                        tt_text, tt_file)
        tt_status = tt_api.update_status(status=tt_text,
                                         media_ids=[tt_media.media_id])
    elif '.mp4' in tt_file:
        MY_LOGGER.debug('Video file upload')
        # upload file
        # using Tweepy API v4
        MY_LOGGER.debug('media_upload')
        tt_media_id = tt_api.media_upload(tt_file,
                                          media_category='tweet_video',
                                          chunked=True)
        # send tweet
        MY_LOGGER.debug('Sending tweet with text = %s, image = %s',
                        tt_text, tt_file)
        tt_status = tt_api.update_status(status=tt_text,
                                         media_ids=[tt_media_id.media_id])
    else:
        MY_LOGGER.debug('Unknown file type - can' 't upload')
    MY_LOGGER.debug('Tweet sent with status = %s', tt_status)
def backup_servers():
    """backup code and config on servers"""
    # all server code backups are full
    server_list = wxcutils.load_json(CONFIG_PATH, 'servers.json')
    for server_entry in server_list:
        MY_LOGGER.debug('-' * 40)
        MY_LOGGER.debug('server = %s', server_entry['server'])
        for dir_info in server_entry['directories']:
            MY_LOGGER.debug('-' * 10)
            MY_LOGGER.debug(dir_info['title'])
            # run the rsync and record any errors against this
            # server / directory pair in the module-level errors list
            rsync_errors = do_rsync('caWv', dir_info['exclude'],
                                    dir_info['source'],
                                    dir_info['destination'])
            errors.append({
                'type': server_entry['server'] + ' - ' + dir_info['title'],
                'errors': rsync_errors
            })
            # optional post-backup cleanup command (a path to remove)
            if dir_info['cmd']:
                MY_LOGGER.debug('cmd = %s', dir_info['cmd'])
                wxcutils.run_cmd('rm -rf ' + dir_info['cmd'])
    MY_LOGGER.debug('-' * 40)
def tweet_text_image(tt_config_path, tt_config_file, tt_text, tt_image_file):
    """Tweet text with an attached image using the config file.

    Uploads tt_image_file to Twitter, then posts tt_text with the
    uploaded media attached.
    """
    tt_config = wxcutils.load_json(tt_config_path, tt_config_file)
    # authentication
    MY_UTIL_LOGGER.debug('Authenticating to Twitter API')
    tt_auth = tweepy.OAuthHandler(tt_config['consumer key'],
                                  tt_config['consumer secret'])
    tt_auth.set_access_token(tt_config['access token'],
                             tt_config['access token secret'])
    # get api
    tt_api = tweepy.API(tt_auth)
    # upload file
    tt_media = tt_api.media_upload(tt_image_file)
    # send tweet
    # fix: this message was previously logged twice in a row - once is enough
    MY_UTIL_LOGGER.debug('Sending tweet with text = %s, image = %s',
                         tt_text, tt_image_file)
    tt_status = tt_api.update_status(status=tt_text,
                                     media_ids=[tt_media.media_id])
    MY_UTIL_LOGGER.debug('Tweet sent with status = %s', tt_status)
# start logging MODULE = 'watchdog' MY_LOGGER = wxcutils.get_logger(MODULE, LOG_PATH, MODULE + '.log') MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+') MY_LOGGER.debug('Execution start') MY_LOGGER.debug('APP_PATH = %s', APP_PATH) MY_LOGGER.debug('CODE_PATH = %s', CODE_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('OUTPUT_PATH = %s', OUTPUT_PATH) MY_LOGGER.debug('IMAGE_PATH = %s', IMAGE_PATH) MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) # load config NETCONFIG = wxcutils.load_json(OUTPUT_PATH, 'network.json') MY_LOGGER.debug('attempt = %s', NETCONFIG['attempt']) MY_LOGGER.debug('timeout = %s', NETCONFIG['timeout']) FILE_BASE = '/home/pi/goes/' MY_LOGGER.debug('FILE_BASE = %s', FILE_BASE) # test for network connectivity for key, value in NETCONFIG.items(): if key == 'addresses': for nc in NETCONFIG[key]: if nc['Active'] == 'yes': MY_LOGGER.debug('-' * 20) MY_LOGGER.debug(nc) # need to fix updating the NETCONFIG part! nc['status'] = test_connection(nc, NETCONFIG['attempt'],
try: # extract parameters STATION = sys.argv[1] BASE_DIR = sys.argv[2] except: MY_LOGGER.critical('Exception whilst parsing command line parameters: %s %s %s', sys.argv[1], sys.argv[2], sys.argv[3]) # re-throw it as this is fatal raise MY_LOGGER.debug('station = %s', STATION) # load current master MASTER = wxcutils.load_json(WORKING_PATH, 'master.json') # load new set NEW = wxcutils.load_json(WORKING_PATH, STATION + '-filefound.json') # find what is in the master but not in the new one # DELTA = [x for x in MASTER + NEW if x not in MASTER or x not in NEW] DELTA = [_dict for _dict in NEW if _dict not in MASTER] NUM_DIFFERENCES = len(DELTA) MY_LOGGER.debug('Number of differences = %d', NUM_DIFFERENCES) # save out request from station list wxcutils.save_json(WORKING_PATH, STATION + '-filerequest.json', DELTA) if NUM_DIFFERENCES > 0: KEYS = DELTA[0].KEYS()
MODULE = 'sync' MY_LOGGER = wxcutils.get_logger(MODULE, LOG_PATH, MODULE + '.log') MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+') MY_LOGGER.debug('Execution start') MY_LOGGER.debug('APP_PATH = %s', APP_PATH) MY_LOGGER.debug('CODE_PATH = %s', CODE_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('OUTPUT_PATH = %s', OUTPUT_PATH) MY_LOGGER.debug('IMAGE_PATH = %s', IMAGE_PATH) MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) MY_LOGGER.debug('QUEUE_PATH = %s', QUEUE_PATH) try: # load data for rsync RSYNC_CONFIG = wxcutils.load_json(CONFIG_PATH, 'config-rsync.json') # log drive space free to file drive_validation() # check for files to process NO_FILES_TO_PROCESS = True for file_name in glob.glob(QUEUE_PATH + '*.json'): NO_FILES_TO_PROCESS = False MY_LOGGER.debug('File to process - %s', file_name.replace(QUEUE_PATH, '')) process_file(file_name.replace(QUEUE_PATH, '')) if NO_FILES_TO_PROCESS: MY_LOGGER.debug('No file(s) to process') except:
MY_LOGGER.debug('CODE_PATH = %s', CODE_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('OUTPUT_PATH = %s', OUTPUT_PATH) MY_LOGGER.debug('IMAGE_PATH = %s', IMAGE_PATH) MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) URL_BASE = 'https://kiwiweather.com/goes/' MY_LOGGER.debug('URL_BASE = %s', URL_BASE) THRESHOLD = 25 MY_LOGGER.debug('THRESHOLD = %d', THRESHOLD) MAXAGE = 3600 MY_LOGGER.debug('MAXAGE = %d', MAXAGE) # load tweet text strings TWEETTEXT = wxcutils.load_json(CONFIG_PATH, 'twitter-text.json') # load tweet config TWEETS = wxcutils.load_json(CONFIG_PATH, 'twitter-config.json') THRESHOLD = int(TWEETS['Light threshold']) MY_LOGGER.debug('THRESHOLD = %d', THRESHOLD) MAXAGE = int(TWEETS['Max age']) MY_LOGGER.debug('MAXAGE = %d', MAXAGE) HOURS = int(TWEETS['Run hours']) MY_LOGGER.debug('HOURS = %d', HOURS) # determine delay between tweets based on number to tweet TWEET_COUNT = 0 for key, value in TWEETS.items():
MODULE = 'satellite_status' MY_LOGGER = wxcutils.get_logger(MODULE, LOG_PATH, MODULE + '.log') MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+') MY_LOGGER.debug('Execution start') MY_LOGGER.debug('APP_PATH = %s', APP_PATH) MY_LOGGER.debug('CODE_PATH = %s', CODE_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('OUTPUT_PATH = %s', OUTPUT_PATH) MY_LOGGER.debug('IMAGE_PATH = %s', IMAGE_PATH) MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) try: # load config CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'config.json') # grap the status web pages METEOR_STATUS_PAGE = get_page( 'http://happysat.nl/Meteor/html/Meteor_Status.html') NOAA_STATUS_PAGE = get_page( 'https://www.ospo.noaa.gov/Operations/POES/status.html') ISS_STATUS_PAGE = get_page('http://ariss-sstv.blogspot.com/') # output as html MY_LOGGER.debug('Build webpage') with open(OUTPUT_PATH + 'satellitestatus.html', 'w') as html: # html header html.write('<!DOCTYPE html>') html.write( '<html lang=\"en\"><head>'
def send_email(se_text0, se_html0, se_text1, se_html1, se_text2, se_html2,
               se_text3, se_html3, se_text4, se_html4, se_text5, se_html5,
               se_config_file):
    """Send the watchdog status email.

    Assembles the message from six pre-rendered plain-text / html section
    pairs (satellites web server, satellites data server, servers,
    network, satellite lock, pings) plus the module-level EMAIL_SUBJECT
    and ALERT_INFO, then delivers it over SMTP with STARTTLS to every
    address in the config's comma-separated 'notify' list.

    Returns True when the email was sent, False on any sending error.
    """
    se_ok_status = True
    MY_LOGGER.debug('Using config file %s', se_config_file)
    # load email config
    email_info = wxcutils.load_json(CONFIG_PATH, se_config_file)
    # don't log ALL the email config, it includes a password
    # setup the message
    message = MIMEMultipart('alternative')
    message['Subject'] = 'Watchdog - ' + EMAIL_SUBJECT
    message['From'] = email_info['from']
    message['To'] = email_info['notify']
    MY_LOGGER.debug('Sending (header) to = %s', email_info['notify'])
    MY_LOGGER.debug('Sending (deliver) to:')
    for email_address in email_info['notify'].split(','):
        MY_LOGGER.debug('EMAIL TO -----> %s', email_address)
    # plain text
    se_text = EMAIL_SUBJECT + ' - ' + ALERT_INFO + NEWLINE + \
        se_text0 + NEWLINE + \
        se_text1 + NEWLINE + \
        se_text2 + NEWLINE + \
        se_text3 + NEWLINE + \
        se_text4 + NEWLINE + \
        se_text5 + NEWLINE + \
        'Last status change on ' + ALERT_INFO
    MY_LOGGER.debug('se_text = %s', se_text)
    # html text
    se_html = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">' + \
        '<html><head>' + \
        '<title>Watchdog - ' + EMAIL_SUBJECT + '</title></head>' + NEWLINE + \
        '<body><h2>' + EMAIL_SUBJECT + ' - ' + ALERT_INFO + '</h2>' + NEWLINE + \
        '<h3>Satellites - Web Server</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Satellite</th><th>Threshold (min)</th><th>Age (min)</th><th>Delta (min)</th></tr>' + \
        se_html0 + \
        '</table>' + NEWLINE + \
        '<h3>Satellites - Data Server</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Satellite</th><th>Threshold (min)</th><th>Age (min)</th><th>Delta (min)</th></tr>' + \
        se_html1 + \
        '</table>' + NEWLINE + \
        '<h3>Servers</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Server</th><th>Max Used (percent)</th><th>Used (percent)</th><th>Delta (percent)</th></tr>' + \
        se_html2 + \
        '</table>' + NEWLINE + \
        '<h3>Network</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Connection</th><th>Information</th><th>Date</th></tr>' + \
        se_html3 + \
        '</table>' + NEWLINE + \
        '<h3>Satellite Lock</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Connection</th><th>Lock?</th><th>Skipped Symbols</th><th>Reed Solomon Errors</th><th>Viterbi Errors</th><th>Date</th></tr>' + \
        se_html4 + \
        '</table>' + NEWLINE + \
        '<h3>Pings</h3>' + \
        '<table border="1">' + \
        '<tr><th>Status</th><th>Status Change?</th><th>Connection</th><th>Information</th><th>Date</th></tr>' + \
        se_html5 + \
        '</table>' + NEWLINE + \
        '<p>Last status change on ' + ALERT_INFO + '</p>' + \
        '</body></html>'
    MY_LOGGER.debug('se_html = %s', se_html)
    # build email
    message.attach(MIMEText(se_text, "plain"))
    message.attach(MIMEText(se_html, "html"))
    # send email
    try:
        MY_LOGGER.debug('Trying to send email')
        context = ssl.create_default_context()
        with smtplib.SMTP(email_info['smtp server'],
                          email_info['smtp server port']) as server:
            server.ehlo()
            server.starttls(context=context)
            server.ehlo()
            server.login(email_info['username'], email_info['password'])
            server.sendmail(email_info['from'],
                            email_info['notify'].split(','),
                            message.as_string())
        MY_LOGGER.debug('Email sent')
    except Exception:
        # fix: was a bare except:, which would also swallow SystemExit /
        # KeyboardInterrupt - narrowed so only real errors are caught
        se_ok_status = False
        MY_LOGGER.error('Email sending error - %s %s %s',
                        sys.exc_info()[0],
                        sys.exc_info()[1],
                        sys.exc_info()[2])
    return se_ok_status
HOURS = int(time.strftime('%H')) MINUTES = int(time.strftime('%M')) # send status update on first run in a day # otherwise only send on a status change if (HOURS == 0) and (1 < MINUTES < 14): MY_LOGGER.debug('Daily update email') EMAIL_SUBJECT = 'Daily Update' EMAIL_REQUIRED = True else: MY_LOGGER.debug('Status change email') EMAIL_SUBJECT = 'Status Change' EMAIL_REQUIRED = False # load satellite info SATELLITE_INFO = wxcutils.load_json(CONFIG_PATH, 'config-watchdog.json') SATELLITE_DATA = wxcutils.load_json(WEB_PATH + 'goes/', 'last-received.json') # MY_LOGGER.debug('current SATELLITE_INFO = %s', SATELLITE_INFO) # get the run time, to use for status update date time ALERT_INFO = get_local_date_time() + ' ' + LOCAL_TIME_ZONE + \ ' [' + get_utc_date_time() + ' UTC].' MY_LOGGER.debug('ALERT_INFO = %s', ALERT_INFO) # iterate through satellites on this server MY_LOGGER.debug('-' * 20) MY_LOGGER.debug('Iterate through satellites on this server') for si in SATELLITE_INFO: MY_LOGGER.debug('-' * 20) MY_LOGGER.debug('Processing - %s', si['Display Name']) if si['Active'] == 'yes':
if number_processes('web.py') == 1: # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date"). \ decode('utf-8').split(' ')[-2] MY_LOGGER.debug('LOCAL_TIME_ZONE = %s', LOCAL_TIME_ZONE) FILE_BASE = '/home/pi/goes/' MY_LOGGER.debug('FILE_BASE = %s', FILE_BASE) WEB_PATH = FILE_BASE + 'web/' # load logo and branding info LOGOBLACK = cv2.imread(CONFIG_PATH + 'logo-black.jpg') LOGOWHITE = cv2.imread(CONFIG_PATH + 'logo-white.jpg') BRANDING = wxcutils.load_json(CONFIG_PATH, 'branding.json') # get the last directory name used for a sync MY_LOGGER.debug('Loading config') SATELLITE_INFO = wxcutils.load_json(CONFIG_PATH, 'web.json') # loop through active satellites for key, value in SATELLITE_INFO.items(): MY_LOGGER.debug('key = %s, value = %s', key, value) for si in SATELLITE_INFO[key]: if si['Active'] == 'yes': MY_LOGGER.debug('-' * 20) MY_LOGGER.debug(si) try: proccess_satellite(si) except:
def process_file(pf_file_name):
    """Process one queue file: rsync its listed files to the remote
    server, suffixing each with .LOCK.<lock id> so the remote side only
    acts once the matching .UNLOCK marker arrives.

    Per-file 'copied' state is tracked in the queue JSON; partially
    completed work is saved back to the queue for a later run, and the
    queue file is removed only after everything (including the unlock
    marker) has been transferred.
    """
    def do_sync(ds_source, ds_destination):
        """rsync a single file over; True on success (empty stderr)."""
        MY_LOGGER.debug('rsync %s %s %s', '-avz', ds_source, ds_destination)
        pf_cmd = Popen(['rsync', '-avz', ds_source, ds_destination],
                       stdout=PIPE, stderr=PIPE)
        pf_stdout, pf_stderr = pf_cmd.communicate()
        pf_stdout = pf_stdout.decode('utf-8')
        pf_stderr = pf_stderr.decode('utf-8')
        MY_LOGGER.debug('stdout:%s', pf_stdout)
        # any stderr output at all is treated as a failed transfer
        if pf_stderr == '':
            MY_LOGGER.debug('rsync successful')
            return True
        MY_LOGGER.debug('rsync error = %s', pf_stderr)
        return False

    # load the queue file
    pf_file_data = wxcutils.load_json(QUEUE_PATH, pf_file_name)
    # recover the lock_id
    pf_lock_id = pf_file_data['lock']
    MY_LOGGER.debug('pf_lock_id = %s', pf_lock_id)
    # iterate through the files, copying any not yet marked as copied
    for pf_file in pf_file_data['files']:
        if pf_file['copied'] == 'no':
            MY_LOGGER.debug('To copy - %s %s %s %s',
                            pf_file['source path'], pf_file['source file'],
                            pf_file['destination path'], pf_file['copied'])
            # destination gets a .LOCK.<id> suffix; the remote ignores the
            # file until the unlock marker for this lock id is present
            pf_result = do_sync(pf_file['source path'] + '/' +
                                pf_file['source file'],
                                RSYNC_CONFIG['remote user'] + '@' +
                                RSYNC_CONFIG['remote host'] +
                                ':' + RSYNC_CONFIG['remote directory'] + '/' +
                                pf_file['destination path'] + '/' +
                                pf_file['source file'] + '.LOCK.' + pf_lock_id)
            if pf_result:
                pf_file['copied'] = 'yes'
    # check if any files left to be copied
    pf_files_to_copy = False
    for pf_file in pf_file_data['files']:
        if pf_file['copied'] == 'no':
            pf_files_to_copy = True
            break
    if pf_files_to_copy:
        # at least one transfer failed - persist progress and retry later
        MY_LOGGER.debug('Files still to copy')
        MY_LOGGER.debug('Work still to be done, save file for future processing')
        wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
    else:
        # all files across - create and transfer the unlock marker so the
        # remote side can start processing the set
        MY_LOGGER.debug('All files copied over, copy the unlock over')
        pf_unlock_file = pf_lock_id + '.UNLOCK'
        wxcutils.run_cmd('touch ' + QUEUE_PATH + pf_unlock_file)
        pf_result = do_sync(QUEUE_PATH + pf_unlock_file,
                            RSYNC_CONFIG['remote user'] + '@' +
                            RSYNC_CONFIG['remote host'] +
                            ':' + RSYNC_CONFIG['remote directory'] + '/' +
                            pf_unlock_file)
        if pf_result:
            # clean up local marker and remove the now-complete queue file
            MY_LOGGER.debug('lock file copied over successfully')
            wxcutils.run_cmd('rm ' + QUEUE_PATH + pf_unlock_file)
            wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
            wxcutils.run_cmd('rm ' + QUEUE_PATH + pf_file_name)
        else:
            # unlock transfer failed - keep the queue file for a retry
            MY_LOGGER.debug('Work still to be done, save file for future processing')
            wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) # check if web is already running, if so exit this code if number_processes('electro-l-2.py') == 1: FILE_BASE = '/home/pi/goes/electro-l-2/' MY_LOGGER.debug('FILE_BASE = %s', FILE_BASE) # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date"). \ decode('utf-8').split(' ')[-2] MY_LOGGER.debug('LOCAL_TIME_ZONE = %s', LOCAL_TIME_ZONE) # get the configuration information CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'electro.json') MY_LOGGER.debug('Last sync data - note Moscow date / time, not UTC') MY_LOGGER.debug('Last Year = %s', CONFIG_INFO['Last Year']) MY_LOGGER.debug('Last Month = %s', CONFIG_INFO['Last Month']) MY_LOGGER.debug('Last Day = %s', CONFIG_INFO['Last Day']) MY_LOGGER.debug('Last Time = %s', CONFIG_INFO['Last Time']) MY_LOGGER.debug('FTP Server Info') MY_LOGGER.debug('ftp site = %s', CONFIG_INFO['ftp site']) MY_LOGGER.debug('port = %s', CONFIG_INFO['port']) MY_LOGGER.debug('username = %s', CONFIG_INFO['username']) MY_LOGGER.debug('password = -not logged-') # create FTP connection and log in ftp = FTP() ftp.connect(CONFIG_INFO['ftp site'], int(CONFIG_INFO['port']))
MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) # try: # check if find_files is already running, if so exit this code if number_processes('find_files.py') == 1: # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date"). \ decode('utf-8').split(' ')[-2] base_dir = '/home/pi/gk-2a/xrit-rx/received/LRIT/' MY_LOGGER.debug('base_dir = %s', base_dir) # load latest times data latest_timestamps = wxcutils.load_json(OUTPUT_PATH, 'gk2a_info.json') # find latest directory date_directory = find_latest_directory(base_dir) MY_LOGGER.debug('latest directory = %s', date_directory) date_base_dir = os.path.join(base_dir, date_directory) data_directories = find_directories(date_base_dir) # data store for files list # currently just for FD FILES = [] # find latest file in each directory and copy to output directory for directory in data_directories: MY_LOGGER.debug('---------------------------------------------')
def build_capture_pages(): """build the capture pages""" # sort ALL_PASSES by local_sort to get them in the right order # for the local time zone MY_LOGGER.debug('Sort passes') passes = sorted(ALL_PASSES, key=lambda k: k['local sort']) # find the start of time move_config = wxcutils.load_json(CONFIG_PATH, 'config-move.json') MY_LOGGER.debug('Start of time is %s %s', move_config['Start Month'], move_config['Start Year']) date_start = datetime.strptime('01 ' + move_config['Start Month'] + ' ' + \ move_config['Start Year'], '%d %m %Y') date_now = datetime.now() # get the historic links data to include in all pages historic_links = get_links(date_start, date_now) # if between 1:00:00am and 1:01:59 - rebuild all previous content pages # not perfectly efficient, but means that all pages have the link list for all # months / years recorded after that month hours = int(time.strftime('%H')) minutes = int(time.strftime('%M')) if ((hours == 1) and (minutes in (0, 1))) or REBUILD == 'rebuild': # rebuilding all pages overnight MY_LOGGER.debug('Building pages for all mmonths / years overnight') for d_t in rrule.rrule(rrule.MONTHLY, dtstart=date_start, until=date_now): month = d_t.strftime('%m') month_name = d_t.strftime('%B') year = d_t.strftime('%Y') file_path = TARGET + CONFIG_INFO[ 'Link Base'] + year + '/' + month + '/' MY_LOGGER.debug('Building captures page = %s for %s %s', file_path, month_name, year) build_month_page(passes, file_path, CAPTURES_PAGE, month, month_name, year, historic_links) # rebuild the page for this month # do this every time we run to get latest pass included day = date_now.strftime('%d') month = date_now.strftime('%m') month_name = date_now.strftime('%B') year = date_now.strftime('%Y') MY_LOGGER.debug('Local date = %s %s (%s) %s', day, month, month_name, year) file_path = TARGET + CONFIG_INFO['Link Base'] + year + '/' + month + '/' MY_LOGGER.debug('Building captures page = %s for %s %s (current month)', file_path, month_name, year) 
build_month_page(passes, file_path, CAPTURES_PAGE, month, month_name, year, historic_links) # build current page which redirects to current month page # MY_LOGGER.debug('Page data = %s', PAGE_DATA) current_link = CONFIG_INFO[ 'Link Base'] + year + '/' + month + '/' + CAPTURES_PAGE with open(TARGET + CAPTURES_PAGE, 'w') as html: # html header label = month_name + ' ' + year html.write('<!DOCTYPE html>') html.write( '<html lang=\"en\"><head>' '<meta charset=\"UTF-8\">' '<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">' '<meta name=\"description\" content=\"WxCapture redirection page to current captures plus historic monthly / yearly captures\">' '<meta name=\"keywords\" content=\"' + CONFIG_INFO['webpage keywords'] + '\">' '<meta name=\"author\" content=\"WxCapture\">' '<title>Captures</title>' '<link rel=\"stylesheet\" href=\"css/styles.css\">' '<link rel=\"shortcut icon\" type=\"image/png\" href=\"' + CONFIG_INFO['Link Base'] + 'favicon.png\"/>') html.write('<meta http-equiv = \"refresh\" content=\"0; url=\'' + current_link + '\'\" />') html.write('</head>') html.write('<body>') html.write('<section class=\"content-section container\">') html.write('<h2 class=\"section-header\">Redirect Page</h2>') html.write( '<p>Your browser should be redirecting you to the page for the current month - ' ) html.write('<a href=\"' + current_link + '\">' + label + '</a>.</p>') html.write('<p>Click the link if you have not been redirected.</p>') html.write('</section>') html.write('</body></html>')
MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('OUTPUT_PATH = %s', OUTPUT_PATH) MY_LOGGER.debug('IMAGE_PATH = %s', IMAGE_PATH) MY_LOGGER.debug('WORKING_PATH = %s', WORKING_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date"). \ decode('utf-8').split(' ')[-2] MY_LOGGER.debug('LOCAL_TIME_ZONE = %s', LOCAL_TIME_ZONE) FILE_BASE = '/home/pi/goes/EWS-G1/' MY_LOGGER.debug('FILE_BASE = %s', FILE_BASE) # get the last directory name used for a sync CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'ews-g1.json') URL_BASE = CONFIG_INFO['URL'] MY_LOGGER.debug('URL_BASE = %s', URL_BASE) LAST_DIRECTORY = CONFIG_INFO['Last Directory'] MY_LOGGER.debug('LAST_DIRECTORY = %s', LAST_DIRECTORY) directories = sorted(listFD(URL_BASE, '')) last_read = '' # loop through directories for directory in directories: directory_datetime = directory.split('/')[5]
LOG_PATH = APP_PATH + 'logs/' CONFIG_PATH = APP_PATH + 'config/' # start logging MODULE = 'config_server' MY_LOGGER = wxcutils.get_logger(MODULE, LOG_PATH, MODULE + '.log') MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+') MY_LOGGER.debug('Execution start') MY_LOGGER.debug('APP_PATH = %s', APP_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) try: # load config CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'config.json') TARGET = CONFIG_INFO['web doc root location'] MY_LOGGER.debug('TARGET = %s', TARGET) # config validation CONFIG_ERRORS, CONFIG_HTML = config_validation() # drive space validation DRIVE_ERRORS, DRIVE_HTML = drive_validation(CONFIG_INFO) # output as html MY_LOGGER.debug('Build webpage') with open(TARGET + 'config_server.html', 'w') as html: # html header html.write('<!DOCTYPE html>')
def get_last_backup_data():
    """Return the contents of last_backup.json from CONFIG_PATH."""
    last_backup = wxcutils.load_json(CONFIG_PATH, 'last_backup.json')
    return last_backup
APP_PATH = HOME + '/wxcapture/web/' LOG_PATH = APP_PATH + 'logs/' CONFIG_PATH = APP_PATH + 'config/' # start logging MODULE = 'move_modal' MY_LOGGER = wxcutils.get_logger(MODULE, LOG_PATH, MODULE + '.log') MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+') MY_LOGGER.debug('Execution start') MY_LOGGER.debug('APP_PATH = %s', APP_PATH) MY_LOGGER.debug('LOG_PATH = %s', LOG_PATH) MY_LOGGER.debug('CONFIG_PATH = %s', CONFIG_PATH) # load config CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'config.json') # set up paths MY_PATH = '/home/mike/wxcapture/output/' TARGET = CONFIG_INFO['web doc root location'] CAPTURES_PAGE = 'captures.html' try: # see if args passed try: REBUILD = sys.argv[1] except: REBUILD = '' # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date").decode('utf-8').split( ' ')[-2]
def config_validation():
    """Validate the configuration JSON files against config-validation.json.

    Performs two passes:
      1. File-level checks: each configured file must exist (if required)
         and contain valid JSON.
      2. Content checks: each field in each file is checked for presence,
         required values, and valid values ('-pattern-', '-number-',
         '-any-', or a pipe-separated list of allowed values).
         sdr.json and satellites.json hold a list of entities, so their
         fields are checked once per entity.

    Returns:
        tuple[bool, str]: (errors_found, results_html) where results_html is
        a set of HTML tables summarising the checks, with failing rows
        carrying class "row-highlight".
    """
    def is_number(test_num):
        """Return True if test_num parses as a float, else False."""
        try:
            float(test_num)
            return True
        except ValueError:
            return False

    cv_errors_found = False
    cv_results = ''
    cv_files_info = wxcutils.load_json(CONFIG_PATH, 'config-validation.json')

    # pass 1 - file level checks
    cv_results += '<h3>File Level Checks</h3><table><tr><th>Filename</th><th>Description</th><th>Required</th><th>Found</th><th>Valid JSON</th><th>Errors</th></tr>'
    for cv_filename in cv_files_info:
        MY_LOGGER.debug('cv_filename = %s', cv_filename)
        MY_LOGGER.debug('description %s', cv_files_info[cv_filename]['description'])
        MY_LOGGER.debug('required %s', cv_files_info[cv_filename]['required'])
        cv_error = ''
        cv_files_info[cv_filename]['exists'] = 'no'
        if os.path.isfile(CONFIG_PATH + cv_filename):
            cv_files_info[cv_filename]['exists'] = 'yes'
        if cv_files_info[cv_filename]['exists'] != 'yes' and \
                cv_files_info[cv_filename]['required'] == 'yes':
            cv_error = 'Required file is missing'
        cv_files_info[cv_filename]['valid json'] = 'no'
        if valid_json_file(cv_filename):
            cv_files_info[cv_filename]['valid json'] = 'yes'
        if cv_error == '':
            cv_error = '(none)'
            cv_results += '<tr>'
        else:
            cv_results += '<tr class=\"row-highlight\">'
            cv_errors_found = True
        cv_results += '<td>' + cv_filename + '</td><td>' + \
            cv_files_info[cv_filename]['description'] + '</td><td>' + \
            cv_files_info[cv_filename]['required'] + '</td><td>' + \
            cv_files_info[cv_filename]['exists'] + '</td><td>' + \
            cv_files_info[cv_filename]['valid json'] + '</td><td>' + \
            cv_error + '</td></tr>'
    cv_results += '</table>'

    # pass 2 - content checks per file
    for cv_filename in cv_files_info:
        cv_results += '<h3>Content Checks - ' + cv_filename + '</h3><table><tr><th>Key</th><th>Value</th><th>Description</th><th>Required</th><th>Found</th><th>Valid Values</th><th>Errors</th></tr>'
        cv_test_file = wxcutils.load_json(CONFIG_PATH, cv_filename)
        MY_LOGGER.debug('Parse = %s', cv_files_info[cv_filename])
        if cv_filename in ('sdr.json', 'satellites.json'):
            # these files contain a list of entities under a single key,
            # so each entity is validated against the field rules
            MY_LOGGER.debug('sdr and satellites specific checking - %s', cv_filename)
            cv_field_entry = ''
            if cv_filename == 'sdr.json':
                cv_field_entry = 'sdr'
            if cv_filename == 'satellites.json':
                cv_field_entry = 'satellites'
            # for every entity
            MY_LOGGER.debug('looping through entities - %s', cv_field_entry)
            for cv_file_row1 in cv_test_file[cv_field_entry]:
                MY_LOGGER.debug('cv_file_row1 = %s', cv_file_row1)
                cv_name = ''
                for cv_row in cv_files_info[cv_filename]['field validation']:
                    MY_LOGGER.debug('%s req = %s vv = %s desc = %s', cv_row,
                                    cv_files_info[cv_filename]['field validation'][cv_row]['required'],
                                    cv_files_info[cv_filename]['field validation'][cv_row]['valid values'],
                                    cv_files_info[cv_filename]['field validation'][cv_row]['description'])
                    cv_error = ''
                    try:
                        cv_value = str(cv_file_row1[cv_row])
                        if cv_row == 'name':
                            # remember the entity name for row labelling
                            cv_name = cv_value
                        cv_found = 'yes'
                        cv_error = ''
                    except KeyError:
                        MY_LOGGER.debug('config_validation exception handler - value missing: %s %s %s',
                                        sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
                        cv_value = ''
                        cv_found = 'no'
                        cv_error += 'Field missing from file. '
                    if cv_value == '' and \
                            cv_files_info[cv_filename]['field validation'][cv_row]['required'] == 'yes':
                        cv_error += 'Required value is missing. '
                    cv_valid_values = cv_files_info[cv_filename]['field validation'][cv_row]['valid values']
                    if cv_valid_values == '-pattern-':
                        cv_valid_values = 'The value must follow the pattern, as shown in the description'
                    if cv_valid_values == '-number-':
                        cv_valid_values = 'The value must be a number, as shown in the description'
                        if not is_number(cv_value):
                            cv_error += 'Value is not a number. '
                    if cv_valid_values == '-any-':
                        cv_valid_values = 'The value can have any value, as shown in the description'
                    if '|' in cv_valid_values:
                        # FIX: compare against the exact tokens of the
                        # pipe-separated list; the previous substring test
                        # ('a' in 'abc|def') accepted partial matches.
                        # Empty values are handled by the required check above.
                        if cv_value != '' and cv_value not in cv_valid_values.split('|'):
                            cv_error += 'Value is not a valid value'
                        cv_valid_values = 'Valid values, separated by a \'|\' are: ' + cv_valid_values
                    if cv_error == '':
                        cv_error = '(none)'
                        cv_results += '<tr>'
                    else:
                        cv_results += '<tr class=\"row-highlight\">'
                        cv_errors_found = True
                    if cv_files_info[cv_filename]['field validation'][cv_row]['hidden'] == 'yes':
                        # mask sensitive values in the report
                        cv_value = '*hidden*'
                    cv_results += '<td>' + cv_name + ' - ' + cv_row + '</td>'
                    cv_results += '<td>' + cv_value + '</td>'
                    cv_results += '<td>' + cv_files_info[cv_filename]['field validation'][cv_row]['description'] + '</td>'
                    cv_results += '<td>' + cv_files_info[cv_filename]['field validation'][cv_row]['required'] + '</td>'
                    cv_results += '<td>' + cv_found + '</td>'
                    cv_results += '<td>' + cv_valid_values + '</td>'
                    cv_results += '<td>' + cv_error + '</td></tr>'
        else:
            # flat files - validate each configured field once
            for cv_row in cv_files_info[cv_filename]['field validation']:
                MY_LOGGER.debug('%s %s %s',
                                cv_files_info[cv_filename]['field validation'][cv_row]['required'],
                                cv_files_info[cv_filename]['field validation'][cv_row]['valid values'],
                                cv_files_info[cv_filename]['field validation'][cv_row]['description'])
                cv_error = ''
                try:
                    MY_LOGGER.debug('testing field %s', cv_row)
                    cv_value = str(cv_test_file[cv_row])
                    cv_found = 'yes'
                    cv_error = ''
                except KeyError:
                    MY_LOGGER.debug('config_validation exception handler - value missing: %s %s %s',
                                    sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
                    cv_value = ''
                    cv_found = 'no'
                    cv_error += 'Field missing from file. '
                if cv_value == '' and \
                        cv_files_info[cv_filename]['field validation'][cv_row]['required'] == 'yes':
                    cv_error += 'Required value is missing. '
                cv_valid_values = cv_files_info[cv_filename]['field validation'][cv_row]['valid values']
                if cv_valid_values == '-pattern-':
                    cv_valid_values = 'The value must follow the pattern, as shown in the description'
                if cv_valid_values == '-number-':
                    cv_valid_values = 'The value must be a number, as shown in the description'
                    if not is_number(cv_value):
                        cv_error += 'Value is not a number. '
                if cv_valid_values == '-any-':
                    cv_valid_values = 'The value can have any value, as shown in the description'
                if '|' in cv_valid_values:
                    # FIX: exact-token membership (see note above)
                    if cv_value != '' and cv_value not in cv_valid_values.split('|'):
                        cv_error += 'Value is not a valid value'
                    cv_valid_values = 'Valid values, separated by a \'|\' are: ' + cv_valid_values
                if cv_error == '':
                    cv_error = '(none)'
                    cv_results += '<tr>'
                else:
                    cv_results += '<tr class=\"row-highlight\">'
                    cv_errors_found = True
                if cv_files_info[cv_filename]['field validation'][cv_row]['hidden'] == 'yes':
                    cv_value = '*hidden*'
                cv_results += '<td>' + cv_row + '</td>'
                cv_results += '<td>' + cv_value + '</td>'
                cv_results += '<td>' + cv_files_info[cv_filename]['field validation'][cv_row]['description'] + '</td>'
                cv_results += '<td>' + cv_files_info[cv_filename]['field validation'][cv_row]['required'] + '</td>'
                cv_results += '<td>' + cv_found + '</td>'
                cv_results += '<td>' + cv_valid_values + '</td>'
                cv_results += '<td>' + cv_error + '</td></tr>'
        cv_results += '</table>'

    if cv_errors_found:
        MY_LOGGER.debug('Config errors found')
    else:
        MY_LOGGER.debug('No config errors found')
    return cv_errors_found, cv_results
MY_LOGGER.critical( 'Exception whilst parsing command line parameters: %s %s %s', sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) # re-throw it as this is fatal raise MY_LOGGER.debug('satellite = %s', SATELLITE) MY_LOGGER.debug('START_EPOCH = %s', str(START_EPOCH)) MY_LOGGER.debug('DURATION = %s', str(DURATION)) MY_LOGGER.debug('MAX_ELEVATION = %s', str(MAX_ELEVATION)) MY_LOGGER.debug('REPROCESS = %s', REPROCESS) # load config CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'config.json') # load satellites SATELLITE_INFO = wxcutils.load_json(CONFIG_PATH, 'satellites.json') # load image options IMAGE_OPTIONS = wxcutils.load_json(CONFIG_PATH, 'config-SSTV.json') # get local time zone LOCAL_TIME_ZONE = subprocess.check_output("date"). \ decode('utf-8').split(' ')[-2] # create filename base FILENAME_BASE = wxcutils.epoch_to_utc(START_EPOCH, '%Y-%m-%d-%H-%M-%S') + \ '-' + SATELLITE.replace(' ', '_').replace('(', '').replace(')', '') MY_LOGGER.debug('FILENAME_BASE = %s', FILENAME_BASE)
def fix_file(ff_path, ff_filename):
    """Upgrade a captured-pass HTML page in place.

    Backs up the original page (or restores an existing backup and re-runs),
    then rewrites it: injects stylesheet/script tags for lightbox, strips the
    legacy table wrapper, and repoints image/audio links at the configured
    link base. The result overwrites the original file.

    Args:
        ff_path: directory containing the page (trailing slash expected,
                 concatenated directly with the filename).
        ff_filename: page filename; substrings like 'ISS'/'SSTV'/'SAUDISAT'/
                 'FOX' select satellite-specific rewriting.
    """
    def update_page(up_page, up_search, up_replace):
        """Replace the first occurrence of up_search in up_page with
        up_replace and return the new page text."""
        MY_LOGGER.debug('%s %s', up_search, up_replace)
        if up_search in up_page:
            MY_LOGGER.debug('%s found', up_search)
            ff_location = up_page.find(up_search)
            # NOTE(review): `> 0` treats a match at index 0 as not found even
            # though the `in` test above succeeded — likely should be `>= 0`;
            # confirm no search string can legitimately start the page.
            if ff_location > 0:
                MY_LOGGER.debug('string found at %d', ff_location)
                # up_page.replace(up_search, up_replace, 1)
                left = up_page[0:ff_location]
                skip = ff_location + len(up_search)
                right = up_page[skip:]
                up_page = left + up_replace + right
            else:
                MY_LOGGER.debug('string NOT found')
        else:
            MY_LOGGER.debug('%s NOT found', up_search)
        # MY_LOGGER.debug('%s', up_page)
        return up_page

    def fix_img(fi_page, fi_path, fi_filename):
        """Rewrite <a href="images/..."><img src="images/..."></a> pairs as
        lightbox-enabled links pointing at the configured link base."""
        MY_LOGGER.debug('starting fix_img %s %s', fi_path, fi_filename)
        start_tag = '<a href=\"images/'
        mid_tag = '\"><img src=\"images/'
        end_tag = '\"></a>'
        parse_pos = 0
        new_page = ''
        # default image path derived from the local /wxcapture/ path
        img_pos = fi_path.find('/wxcapture/')
        img_path = fi_path[img_pos:] + 'images/'
        MY_LOGGER.debug('img_path = %s', img_path)
        # scan the page, copying text and rewriting each image link in turn
        # NOTE(review): `find(...) > 0` misses a tag at index 0 — confirm a
        # page can never start with the tag.
        while parse_pos >= 0:
            if fi_page.find(start_tag, parse_pos) > 0:
                first_pos_left = fi_page.find(start_tag, parse_pos) + len(start_tag)
                first_pos_right = fi_page.find(mid_tag, first_pos_left)
                second_pos_left = first_pos_right + len(mid_tag)
                second_pos_right = fi_page.find(end_tag, second_pos_left)
                main_img = fi_page[first_pos_left:first_pos_right]
                thumb_img = fi_page[second_pos_left:(second_pos_right)]
                # filename starts YYYY-MM-DD, giving the dated link base path
                bits = main_img[:10].split('-')
                img_path = CONFIG_INFO['Link Base'] + bits[0] + '/' + \
                    bits[1] + '/' + bits[2] + '/images/'
                MY_LOGGER.debug('%d %s %s %s', parse_pos, main_img, thumb_img, img_path)
                # text before the tag, then the rewritten lightbox link
                new_bit_1 = fi_page[parse_pos:(first_pos_left - len(start_tag))]
                new_bit_2 = '<a class=\"example-image-link\" href=\"' + img_path + \
                    main_img + \
                    '\" data-lightbox=\"' + img_path + main_img + \
                    '\"><img class=\"example-image\" src=\"' + img_path + \
                    thumb_img + '"></a>'
                MY_LOGGER.debug('-start-----------------')
                MY_LOGGER.debug(new_page)
                MY_LOGGER.debug('-new_bit_1-------------')
                MY_LOGGER.debug(new_bit_1)
                MY_LOGGER.debug('-new_bit_2-------------')
                MY_LOGGER.debug(new_bit_2)
                MY_LOGGER.debug('-end-------------------')
                new_page += new_bit_1 + new_bit_2
                parse_pos = second_pos_right + len(end_tag)
            else:
                # get the rest of the page
                new_page += fi_page[parse_pos:]
                parse_pos = -1
        MY_LOGGER.debug('completed fix_img')
        # fix plot reference
        # (uses the last dated img_path computed in the loop above)
        MY_LOGGER.debug('fix plot reference')
        new_page = update_page(new_page, '<img src=\"images/',
                               '<img src=\"' + img_path)
        return new_page

    def fix_img_iss(fi_page, fi_path, fi_filename):
        """Rewrite bare <img src="images/..."> tags to the configured link
        base; no lightbox wrapping (used for ISS / SSTV pages)."""
        MY_LOGGER.debug('starting fix_img %s %s', fi_path, fi_filename)
        start_tag = '<img src=\"images/'
        end_tag = '\">'
        parse_pos = 0
        new_page = ''
        # default image path derived from the local /wxcapture/ path
        img_pos = fi_path.find('/wxcapture/')
        img_path = fi_path[img_pos:] + 'images/'
        MY_LOGGER.debug('img_path = %s', img_path)
        # NOTE(review): same `find(...) > 0` index-0 caveat as fix_img.
        while parse_pos >= 0:
            if fi_page.find(start_tag, parse_pos) > 0:
                MY_LOGGER.debug('img tag found')
                pos_left = fi_page.find(start_tag, parse_pos) + len(start_tag)
                pos_right = fi_page.find(end_tag, pos_left)
                main_img = fi_page[pos_left:pos_right]
                # filename starts YYYY-MM-DD, giving the dated link base path
                bits = main_img[:10].split('-')
                img_path = CONFIG_INFO['Link Base'] + bits[0] + '/' + \
                    bits[1] + '/' + bits[2] + '/images/'
                MY_LOGGER.debug('%d %s %s', parse_pos, main_img, img_path)
                new_bit_1 = fi_page[parse_pos:(pos_left - len(start_tag))]
                new_bit_2 = '<img src=\"' + img_path + main_img + '\">'
                MY_LOGGER.debug('-start-----------------')
                MY_LOGGER.debug(new_page)
                MY_LOGGER.debug('-new_bit_1-------------')
                MY_LOGGER.debug(new_bit_1)
                MY_LOGGER.debug('-new_bit_2-------------')
                MY_LOGGER.debug(new_bit_2)
                MY_LOGGER.debug('-end-------------------')
                new_page += new_bit_1 + new_bit_2
                parse_pos = pos_right + len(end_tag)
            else:
                # get the rest of the page
                new_page += fi_page[parse_pos:]
                parse_pos = -1
        MY_LOGGER.debug('completed fix_img')
        # # fix plot reference
        # MY_LOGGER.debug('fix plot reference')
        # new_page = update_page(new_page, '<img src=\"images/', '<img src=\"' + img_path)
        return new_page

    def fix_audio(fi_page, fi_path, fi_filename):
        """Rewrite <a href="audio/..."> links to the configured link base
        (used for amateur-satellite pages with audio captures)."""
        MY_LOGGER.debug('starting fix_audio %s %s', fi_path, fi_filename)
        start_tag = '<a href=\"audio/'
        end_tag = '\">'
        parse_pos = 0
        new_page = ''
        # default audio path derived from the local /wxcapture/ path
        audio_pos = fi_path.find('/wxcapture/')
        audio_path = fi_path[audio_pos:] + 'audio/'
        MY_LOGGER.debug('audio_path = %s', audio_path)
        # NOTE(review): same `find(...) > 0` index-0 caveat as fix_img.
        while parse_pos >= 0:
            if fi_page.find(start_tag, parse_pos) > 0:
                MY_LOGGER.debug('audio tag found')
                pos_left = fi_page.find(start_tag, parse_pos) + len(start_tag)
                pos_right = fi_page.find(end_tag, pos_left)
                main_audio = fi_page[pos_left:pos_right]
                # filename starts YYYY-MM-DD, giving the dated link base path
                bits = main_audio[:10].split('-')
                audio_path = CONFIG_INFO['Link Base'] + bits[0] + '/' + \
                    bits[1] + '/' + bits[2] + '/audio/'
                MY_LOGGER.debug('%d %s %s', parse_pos, main_audio, audio_path)
                new_bit_1 = fi_page[parse_pos:(pos_left - len(start_tag))]
                new_bit_2 = '<a href=\"' + audio_path + main_audio + '\">'
                MY_LOGGER.debug('-start-----------------')
                MY_LOGGER.debug(new_page)
                MY_LOGGER.debug('-new_bit_1-------------')
                MY_LOGGER.debug(new_bit_1)
                MY_LOGGER.debug('-new_bit_2-------------')
                MY_LOGGER.debug(new_bit_2)
                MY_LOGGER.debug('-end-------------------')
                new_page += new_bit_1 + new_bit_2
                parse_pos = pos_right + len(end_tag)
            else:
                # get the rest of the page
                new_page += fi_page[parse_pos:]
                parse_pos = -1
        MY_LOGGER.debug('completed fix_audio')
        return new_page

    MY_LOGGER.debug('fix_file %s %s', ff_path, ff_filename)
    # load config (closed over by the nested helpers above)
    CONFIG_INFO = wxcutils.load_json(CONFIG_PATH, 'config.json')
    # create page backup file
    # only if there isn't an existing .backup file (i.e. our backup of the original)
    if not os.path.isfile(ff_path + ff_filename + '.backup'):
        MY_LOGGER.debug('no existing backup, so creating page backup file')
        wxcutils.copy_file(ff_path + ff_filename, ff_path + ff_filename + '.backup')
    else:
        MY_LOGGER.debug('File backup exists, so retaining original backup')
        # restore the backup and re-fix it (makes this function idempotent)
        wxcutils.copy_file(ff_path + ff_filename + '.backup', ff_path + ff_filename)
    # load file
    MY_LOGGER.debug('load file')
    ff_page = wxcutils.load_file(ff_path, ff_filename)
    # add stylesheets
    MY_LOGGER.debug('add stylesheets')
    ff_page = update_page(ff_page, '</head>', '<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\"><meta name=\"description\" content=\"Satellite pass capture page for NOAA / Meteor / International Space Station (ISS) SSTV / Amsat (Amateur Satellites)\"><meta name=\"keywords\" content=\"wxcapture, weather, satellite, NOAA, Meteor, images, ISS, Zarya, SSTV, Amsat, orbit, APT, LRPT, SDR, Mike, KiwiinNZ, Predictions, Auckland, New Zealand, storm, cyclone, hurricane, front, rain, wind, cloud\"><meta name=\"author\" content=\"WxCapture\"><link rel=\"stylesheet\" href=\"/css/styles.css\"><link rel=\"stylesheet\" href=\"/lightbox/css/lightbox.min.css\"></head>')
    # add script code
    MY_LOGGER.debug('add script code')
    ff_page = update_page(ff_page, '</body>', '<script src=\"/lightbox/js/lightbox-plus-jquery.min.js\"></script></body>')
    # remove table start
    MY_LOGGER.debug('remove table start')
    ff_page = update_page(ff_page, '</h2><table><tr><td>', '</h2>')
    # remove table end
    MY_LOGGER.debug('remove table end')
    ff_page = update_page(ff_page, '</td><td></table>', '<br>')
    # remove table border
    MY_LOGGER.debug('remove table border')
    ff_page = update_page(ff_page, '<table border = 1>', '<table>')
    # fix audio link - amsat only
    if 'ISS' in ff_filename or 'SSTV' in ff_filename or \
            'SAUDISAT' in ff_filename or 'FOX' in ff_filename:
        ff_page = fix_audio(ff_page, ff_path, ff_filename)
    # update img tags to use lightbox
    if 'ISS' in ff_filename or 'SSTV' in ff_filename:
        ff_page = fix_img_iss(ff_page, ff_path, ff_filename)
    else:
        ff_page = fix_img(ff_page, ff_path, ff_filename)
    # MY_LOGGER.debug('%s', ff_page)
    # save file
    wxcutils.save_file(ff_path, ff_filename, ff_page)