Example #1
def sat_validation():
    """validate satellite info"""

    sat_servers = wxcutils.load_json(CONFIG_PATH, 'sat-servers.json')

    result = []
    for sat_server in sat_servers:
        MY_LOGGER.debug('Processing %s', sat_server)
        address = 'tcp://' + sat_server['ip'] + ':' + sat_server['port']

        sub0 = Sub0(dial=address, recv_timeout=100, topics="")

        # make sure everyone is connected
        time.sleep(0.1)

        retry_counter = 1
        op = 'unset'
        while retry_counter <= 10:
            try:
                op = json.loads(sub0.recv().decode("utf-8"))
                result.append({
                    'timestamp': op['timestamp'],
                    'skipped_symbols': op['skipped_symbols'],
                    'viterbi_errors': op['viterbi_errors'],
                    'reed_solomon_errors': op['reed_solomon_errors'],
                    'ok': 'Locked' if op['ok'] else 'Unlocked',
                    'label': sat_server['label'],
                    'when': str(time.time())
                })
                break
            except:
                MY_LOGGER.debug('Attempt %d', retry_counter)
                MY_LOGGER.debug(
                    'Unexpected error connecting to %s : 0 %s 1 %s 2 %s',
                    address,
                    sys.exc_info()[0],
                    sys.exc_info()[1],
                    sys.exc_info()[2])
                retry_counter += 1
                MY_LOGGER.debug('Sleeping 2 seconds...')
                time.sleep(2)

        MY_LOGGER.debug('op %s', op)

        # close the socket now we've finished with this server
        sub0.close()

    MY_LOGGER.debug('result = %s', result)

    wxcutils.save_json(OUTPUT_PATH, 'satellite-receivers.json', result)
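These snippets lean on a small wxcutils helper module that is not shown on this page. A minimal sketch of the two JSON helpers used above, assuming they simply wrap the standard json module around a path-plus-filename convention (the real wxcapture implementation may differ):

import json


def load_json(path, filename):
    """load a json file from path + filename (sketch of the assumed wxcutils helper)"""
    with open(path + filename, 'r') as json_file:
        return json.load(json_file)


def save_json(path, filename, data):
    """save data as json to path + filename (sketch of the assumed wxcutils helper)"""
    with open(path + filename, 'w') as json_file:
        json.dump(data, json_file)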
Example #2
def build_pass_json():
    """build json file for all passes"""
    MY_LOGGER.debug('building pass json')
    json_data = []
    for filename in find_files(TARGET, '*.html'):
        if (filename.split(TARGET)[1][:2] == '20'
                and 'captures' not in filename
                and 'meteor' not in filename
                and 'noaa' not in filename):
            # MY_LOGGER.debug('found pass page - filename = %s', filename)
            bpj_file_path, html_file = os.path.split(filename)
            base_filename, base_extension = os.path.splitext(html_file)
            filename_root = filename[:len(filename) - len(base_extension)]
            # look for all the image files and add to the list
            # to avoid the json file getting too large, extract the enhancement part only
            image_files = glob.glob(bpj_file_path + '/images/' +
                                    base_filename + '*.jpg')
            image_enhancements = []
            for entry in image_files:
                if entry[len(entry) - 7:] != '-tn.jpg':
                    result = (entry.replace('.jpg', '')
                              .replace(bpj_file_path + '/images/', '')
                              .replace(base_filename, ''))
                    image_enhancements.append(result[1:])

            json_data.append({
                'path': filename_root.replace(TARGET, ''),
                'enhancement': image_enhancements
            })
            # build data for captures pages
            # MY_LOGGER.debug('filename_root = %s', filename_root.replace(TARGET, '')[11:30])
            local_sort = wxcutils.epoch_to_local(
                wxcutils.utc_to_epoch(
                    filename_root.replace(TARGET, '')[11:30],
                    '%Y-%m-%d-%H-%M-%S'), '%Y-%m-%d-%H-%M-%S')
            # MY_LOGGER.debug('local = %s', local)
            ALL_PASSES.append({
                'path': filename_root.replace(TARGET, ''),
                'local sort': local_sort,
                'local year': local_sort[:4],
                'local month': local_sort[5:7],
                'local day': local_sort[8:10],
                'local time': local_sort[11:19]
            })
    MY_LOGGER.debug('saving passes.json')
    wxcutils.save_json(TARGET, 'passes.json', json_data)
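build_pass_json() relies on a find_files() helper defined elsewhere in the project. A plausible minimal sketch, assuming it walks a directory tree and matches filenames against a shell-style pattern (the name comes from the snippet; the body is an assumption):

import fnmatch
import os


def find_files(directory, pattern):
    """walk directory recursively, yielding full paths whose basename matches pattern (sketch)"""
    for root, _dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                yield os.path.join(root, basename)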
Example #3
                # create thumbnail
                if extenstion != '.txt':
                    create_thumbnail(directory, extenstion)

                # create file with date time info
                if directory != 'ANT':
                    wxcutils.save_file(OUTPUT_PATH, directory + '.txt',
                                       date_time)
                else:
                    wxcutils.save_file(OUTPUT_PATH, 'ANT.txt.txt', date_time)
        else:
            MY_LOGGER.debug('File unchanged')

    # save latest times data
    wxcutils.save_json(OUTPUT_PATH, 'gk2a_info.json', latest_timestamps)

    # rsync files to servers
    wxcutils.run_cmd('rsync -rtPv ' + OUTPUT_PATH +
                     ' [email protected]:/home/mike/wxcapture/gk-2a')
    wxcutils.run_cmd(
        'rsync -rtPv ' + base_dir +
        ' --exclude *_sanchez* --exclude *web* [email protected]:/home/pi/goes/gk-2a'
    )

else:
    MY_LOGGER.debug('Another instance of find_files.py is already running')
    MY_LOGGER.debug(
        'Skip running this instance to allow the existing one to complete')

# except:
Example #4
FILE_BASE = '/home/pi/goes/'
MY_LOGGER.debug('FILE_BASE = %s', FILE_BASE)

# test for network connectivity
for key, value in NETCONFIG.items():
    if key == 'addresses':
        for nc in NETCONFIG[key]:
            if nc['Active'] == 'yes':
                MY_LOGGER.debug('-' * 20)
                MY_LOGGER.debug(nc)
                # need to fix updating the NETCONFIG part!
                nc['status'] = test_connection(nc, NETCONFIG['attempt'],
                                               NETCONFIG['timeout'])
                nc['when'] = time.time()
wxcutils.save_json(OUTPUT_PATH, 'network.json', NETCONFIG)

# test if goesproc is running or processing
if not is_running('goesproc') or not is_processing('goesproc', 10):
    # need to kick off the code
    MY_LOGGER.debug('Kicking it off')
    wxcutils.run_cmd(
        'goesproc -c /usr/share/goestools/goesproc-goesr.conf -m packet ' +
        '--subscribe tcp://203.86.195.49:5004 --out /home/pi/goes &')
    if is_running('goesproc'):
        MY_LOGGER.debug('goesproc is now running')
    else:
        MY_LOGGER.critical(
            'goesproc is NOT running and could not be restarted')
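is_running() and is_processing() are project helpers outside this excerpt. A hypothetical sketch of the simpler check, assuming it only tests whether a process with that exact name exists via pgrep (the project's actual implementation may differ):

import subprocess


def is_running(process_name):
    """return True if a process with this exact name is currently running (sketch)"""
    result = subprocess.run(['pgrep', '-x', process_name],
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL,
                            check=False)
    return result.returncode == 0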

# log drive space free to file
Example #5
MY_LOGGER.debug('station = %s', STATION)

# load current master
MASTER = wxcutils.load_json(WORKING_PATH, 'master.json')

# load new set
NEW = wxcutils.load_json(WORKING_PATH, STATION + '-filefound.json')

# find what is in the master but not in the new one
# DELTA = [x for x in MASTER + NEW if x not in MASTER or x not in NEW]
DELTA = [_dict for _dict in NEW if _dict not in MASTER]
NUM_DIFFERENCES = len(DELTA)
MY_LOGGER.debug('Number of differences = %d', NUM_DIFFERENCES)

# save out request from station list
wxcutils.save_json(WORKING_PATH, STATION + '-filerequest.json', DELTA)

if NUM_DIFFERENCES > 0:
    KEYS = DELTA[0].keys()
    with open(WORKING_PATH + STATION + '-filerequest.csv', 'w', newline='') as output_file:
        DICT_WRITER = csv.DictWriter(output_file, KEYS)
        DICT_WRITER.writeheader()
        DICT_WRITER.writerows(DELTA)
else:
    MY_LOGGER.debug('No differences to write to a csv file, writing empty file')
    wxcutils.save_file(WORKING_PATH, STATION + '-filerequest.csv', '')

# create zip command
CMD = 'zip ' + STATION + '-GK-2a.zip '
for line in DELTA:
    CMD += ' ' + BASE_DIR + line['date'] + '/' + line['type'] + '/' + \
Example #6
TYPES = []

# find all date directories
MY_LOGGER.debug('Finding date directories')
DATES = find_directories(base_dir)

# iterate through all the dates
MY_LOGGER.debug('Iterate through all files')
iterate_dirctories()

# find the max file counter for an image type
MY_LOGGER.debug('Find max counter per image type')
counter_max = dict()
find_max_values()

MISSING = []
MY_LOGGER.debug('Find missing')
# iterate through days / types and look for missing files
# need to go through all days in date range to catch full missing days
find_missing()

# save out data
MY_LOGGER.debug('Save out results')
wxcutils.save_json(WORKING_PATH, station + '-filefound.json', FILES)
wxcutils.save_json(WORKING_PATH, station + '-filemissing.json', MISSING)
save_csv(WORKING_PATH, station + '-filefound.csv', FILES)
save_csv(WORKING_PATH, station + '-filemissing.csv', MISSING)

MY_LOGGER.debug('Execution end')
MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+')
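save_csv() is another helper defined outside this excerpt; example #5 above writes a CSV inline with csv.DictWriter, so a reasonable sketch (assumed, not the project's verbatim code) follows the same pattern:

import csv


def save_csv(path, filename, data):
    """write a list of dicts out as a csv file, header row first (sketch)"""
    if data:
        with open(path + filename, 'w', newline='') as output_file:
            writer = csv.DictWriter(output_file, data[0].keys())
            writer.writeheader()
            writer.writerows(data)
    else:
        # nothing to write - create an empty file
        open(path + filename, 'w').close()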
Example #7
File: web.py Project: wxcapture/wxcapture
    MY_LOGGER.debug('Loading config')
    SATELLITE_INFO = wxcutils.load_json(CONFIG_PATH, 'web.json')

    # loop through active satellites
    for key, value in SATELLITE_INFO.items():
        MY_LOGGER.debug('key = %s, value = %s', key, value)
        for si in SATELLITE_INFO[key]:
            if si['Active'] == 'yes':
                MY_LOGGER.debug('-' * 20)
                MY_LOGGER.debug(si)
                try:
                    proccess_satellite(si)
                except:
                    MY_LOGGER.debug('Exception whilst processing satellite %s',
                                    si['Name'])
                    MY_LOGGER.error('Loop exception handler: %s %s %s',
                                    sys.exc_info()[0],
                                    sys.exc_info()[1],
                                    sys.exc_info()[2])

    # save updated config
    wxcutils.save_json(CONFIG_PATH, 'web.json', SATELLITE_INFO)

else:
    MY_LOGGER.debug('Another instance of web.py is already running')
    MY_LOGGER.debug(
        'Skip running this instance to allow the existing one to complete')

MY_LOGGER.debug('Execution end')
MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+')
Example #8
                if nc['status'] == 'OK':
                    EMAIL_TEXT3 += 'OK - network connectivity is good' + ' - '
                    EMAIL_HTML3 += '<td>Good connectivity</td><td>' + \
                        wxcutils.epoch_to_local(nc['when'], '%m/%d/%Y %H:%M') + '</td></tr>' + NEWLINE
                else:
                    EMAIL_TEXT3 = 'Error - network connectivity issue - ' + nc['status']
                    EMAIL_HTML3 += '<td>' + nc['status'] + '</td><td>' + \
                        wxcutils.epoch_to_local(nc['when'], '%m/%d/%Y %H:%M') + '</td></tr>' + NEWLINE

MY_LOGGER.debug('HTML = ' + EMAIL_HTML3)
MY_LOGGER.debug('txt = ' + EMAIL_TEXT3)

# save last
wxcutils.save_json(CONFIG_PATH, 'network.json', LATESTNETWORK)

# validate satellite status
MY_LOGGER.debug('-' * 20)
LATESTSATSTATUS = wxcutils.load_json(WEB_PATH + 'gk-2a/',
                                     'satellite-receivers.json')
PREVIOUSSATSTATUS = wxcutils.load_json(CONFIG_PATH, 'satellite-receivers.json')

EMAIL_TEXT4 = ''
EMAIL_HTML4 = ''
PREVIOUS = ''

for si1 in LATESTSATSTATUS:
    MY_LOGGER.debug('-' * 20)
    MY_LOGGER.debug('Processing - %s', si1['label'])
    MY_LOGGER.debug('si1 %s', si1)
Example #9
    process_overlaps()

    # sort data
    MY_LOGGER.debug('Sort passes')
    SAT_DATA = sorted(SAT_DATA, key=lambda k: k['time'])

    # schedule
    MY_LOGGER.debug('AT scheduling')
    for elem in SAT_DATA:
        try:
            if elem['scheduler'] != '':
                if time.time() < elem['time']:
                    MY_LOGGER.debug('>>>>>>>> %s', elem['scheduler'])
                    wxcutils.run_cmd(elem['scheduler'])
                    elem['timezone'] = LOCAL_TIME_ZONE
                    wxcutils.save_json(OUTPUT_PATH,
                                       elem['filename_base'] + '.json', elem)
                    MY_LOGGER.debug('pass json = %s', elem)
                else:
                    MY_LOGGER.debug(
                        '%s - not scheduled due to being in the past',
                        elem['satellite'])
            else:
                MY_LOGGER.debug('%s removed - active but not being recorded',
                                elem['satellite'])
        except ValueError:
            MY_LOGGER.debug(
                'when must be at a time in the future, never in the past - can ignore'
            )
    wxcutils.save_json(WORKING_PATH, 'passes_today.json', SAT_DATA)
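    # The 'scheduler' field holds a shell command that run_cmd() executes; the
    # 'AT scheduling' log message and the 'must be at a time in the future'
    # error text suggest the Linux at utility. A purely hypothetical
    # illustration of such an entry (paths, script names and times invented):
    #
    # elem = {
    #     'satellite': 'NOAA 19',
    #     'time': 1600000000.0,
    #     'filename_base': 'NOAA-19-20200913-120000',
    #     'scheduler': 'echo "/home/pi/wxcapture/receive.sh NOAA-19-20200913-120000" | '
    #                  'at 12:00 2020-09-13'
    # }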

    # find satellite pass for next few days
Example #10
                        # end of month, go up a directory
                        ftp.cwd('..')
                        MY_LOGGER.debug('current directory = %s', ftp.pwd())
                else:
                    MY_LOGGER.debug('invalid month = %s', month)

            # end of year, go up a directory
            ftp.cwd('..')
            MY_LOGGER.debug('current directory = %s', ftp.pwd())

    # close connection
    ftp.quit()

    # update config file with latest processed info
    # only if files were processed
    if last_year:
        CONFIG_INFO['Last Year'] = last_year
        CONFIG_INFO['Last Month'] = last_month
        CONFIG_INFO['Last Day'] = last_day
        CONFIG_INFO['Last Time'] = last_time
        wxcutils.save_json(CONFIG_PATH, 'electro.json', CONFIG_INFO)

else:
    MY_LOGGER.debug('Another instance of electro-l-2.py is already running')
    MY_LOGGER.debug(
        'Skip running this instance to allow the existing one to complete')

MY_LOGGER.debug('Execution end')
MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+')
Example #11
def save_last_backup_data():
    """dave last backup data"""
    wxcutils.save_json(CONFIG_PATH, 'last_backup.json', LAST_BACKUP_DATA)
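A hedged illustration of how this helper might be used, assuming LAST_BACKUP_DATA is a module-level dict loaded from the same file at startup (the field name below is invented):

# hypothetical usage - the 'last backup time' field is illustrative only
LAST_BACKUP_DATA = wxcutils.load_json(CONFIG_PATH, 'last_backup.json')
LAST_BACKUP_DATA['last backup time'] = time.time()
save_last_backup_data()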
Example #12
File: sync.py Project: wxcapture/wxcapture
def process_file(pf_file_name):
    """process a file, moving files to remote server
    using rynch and performing locking"""


    def do_sync(ds_source, ds_destination):
        """synch the file over"""
        MY_LOGGER.debug('rsync %s %s %s', '-avz', ds_source, ds_destination)
        pf_cmd = Popen(['rsync', '-avz', ds_source, ds_destination], stdout=PIPE, stderr=PIPE)
        pf_stdout, pf_stderr = pf_cmd.communicate()
        pf_stdout = pf_stdout.decode('utf-8')
        pf_stderr = pf_stderr.decode('utf-8')
        MY_LOGGER.debug('stdout:%s', pf_stdout)
        if pf_stderr == '':
            MY_LOGGER.debug('rsync successful')
            return True
        MY_LOGGER.debug('rsync error = %s', pf_stderr)
        return False


    # load the queue file
    pf_file_data = wxcutils.load_json(QUEUE_PATH, pf_file_name)

    # recover the lock_id
    pf_lock_id = pf_file_data['lock']
    MY_LOGGER.debug('pf_lock_id = %s', pf_lock_id)

    # iterate through the files
    for pf_file in pf_file_data['files']:
        if pf_file['copied'] == 'no':
            MY_LOGGER.debug('To copy - %s %s %s %s', pf_file['source path'],
                            pf_file['source file'], pf_file['destination path'],
                            pf_file['copied'])
            pf_result = do_sync(pf_file['source path'] + '/' + pf_file['source file'],
                                RSYNC_CONFIG['remote user'] + '@' +
                                RSYNC_CONFIG['remote host'] + ':' +
                                RSYNC_CONFIG['remote directory'] + '/' +
                                pf_file['destination path'] + '/' +
                                pf_file['source file'] + '.LOCK.' + pf_lock_id)
            if pf_result:
                pf_file['copied'] = 'yes'

    # check if any files left to be copied
    pf_files_to_copy = False
    for pf_file in pf_file_data['files']:
        if pf_file['copied'] == 'no':
            pf_files_to_copy = True
            break
    if pf_files_to_copy:
        MY_LOGGER.debug('Files still to copy')
        MY_LOGGER.debug('Work still to be done, save file for future processing')
        wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
    else:
        MY_LOGGER.debug('All files copied over, copy the unlock over')
        pf_unlock_file = pf_lock_id + '.UNLOCK'
        wxcutils.run_cmd('touch ' + QUEUE_PATH + pf_unlock_file)
        pf_result = do_sync(QUEUE_PATH + pf_unlock_file,
                            RSYNC_CONFIG['remote user'] + '@' +
                            RSYNC_CONFIG['remote host'] + ':' +
                            RSYNC_CONFIG['remote directory'] + '/' + pf_unlock_file)
        if pf_result:
            MY_LOGGER.debug('unlock file copied over successfully')
            wxcutils.run_cmd('rm ' + QUEUE_PATH + pf_unlock_file)
            wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
            wxcutils.run_cmd('rm ' + QUEUE_PATH + pf_file_name)
        else:
            MY_LOGGER.debug('Work still to be done, save file for future processing')
            wxcutils.save_json(QUEUE_PATH, pf_file_name, pf_file_data)
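From the keys referenced in process_file(), a queue file holds a lock id plus a list of files, each flagged with whether it has been copied yet. A sketch of that structure with illustrative values (paths and names are made up):

# illustrative queue file content consumed by process_file() - values are invented
example_queue_entry = {
    'lock': '1600000000.123456',
    'files': [
        {
            'source path': '/home/pi/goes/gk-2a/fd/ir105',
            'source file': 'image1.jpg',
            'destination path': 'gk-2a/fd/ir105',
            'copied': 'no'
        }
    ]
}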
Example #13
                    # can now delete the original image to save space
                    wxcutils.run_cmd('rm ' + file_location + filename)

                    # save a thumbnail of a channel 1 image to send to webserver
                    if filename[4] == '1':
                        cmd = ('vips resize ' + file_location +
                               filename.replace('.png', '.jpg') + ' ' +
                               OUTPUT_PATH + 'ews-g1-1.jpg' + ' 0.1843')
                        MY_LOGGER.debug('cmd %s', cmd)
                        wxcutils.run_cmd(cmd)

                        # create file with date time info
                        MY_LOGGER.debug('Writing out last generated date file')
                        wxcutils.save_file(
                            OUTPUT_PATH, 'ews-g1-1.txt',
                            get_last_generated_text(
                                filename.replace('.png', '.jpg')))
                else:
                    MY_LOGGER.debug('File already exists')

        last_read = directory_datetime

# update config file with latest directory
# only if directories were processed
if last_read:
    CONFIG_INFO['Last Directory'] = last_read
    wxcutils.save_json(CONFIG_PATH, 'ews-g1.json', CONFIG_INFO)

MY_LOGGER.debug('Execution end')
MY_LOGGER.debug('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+')