def merge_vcfs(output_dirpath, sampleID, raw_vcf_fpaths, ref_fpath):
    """Concatenate one sample's per-chunk GVCFs with GATK CatVariants.

    output_dirpath -- directory receiving the merged GVCF and log file
    sampleID       -- sample name used to build output file names
    raw_vcf_fpaths -- list of chunk GVCF paths, assumed already sorted
    ref_fpath      -- reference FASTA path

    Returns the path of the merged <sampleID>.g.vcf file.
    GATK stderr is appended to <sampleID>.log.
    """
    merge_vcf_fpath = os.path.join(output_dirpath, sampleID + '.g.vcf')
    log_fpath = os.path.join(output_dirpath, sampleID + '.log')
    # Build the '-V <path>' argument pairs directly as a list.  The old code
    # joined the paths into one string and re-split on whitespace, which
    # silently corrupted any file path containing a space.
    variants = []
    for vcf_fpath in raw_vcf_fpaths:
        variants += ['-V', vcf_fpath]
    cmd = ['java', '-cp', gatk_fpath, 'org.broadinstitute.gatk.tools.CatVariants',
           '-R', ref_fpath, '-assumeSorted', '-out', merge_vcf_fpath]
    utils.call_subprocess(cmd + variants, stderr=open(log_fpath, 'a'))
    return merge_vcf_fpath
def process_single_chunk(ref_fpath, sampleID, bam_fpath, scratch_dirpath, log_fpath, chr, part, start, end):
    """Call variants on one chromosome interval with GATK HaplotypeCaller.

    Emits a per-chunk GVCF named <sampleID><chr>.<part>.g.vcf in
    scratch_dirpath and returns its path.  When the full workflow is
    enabled, BQSR recalibration and the dbSNP resource are applied.
    GATK stderr is appended to log_fpath.
    """
    mem_gb = config.max_single_gatk_mem
    raw_g_vcf_fpath = os.path.join(
        scratch_dirpath, sampleID + chr + '.' + str(part) + '.g.vcf')
    # Interval string in GATK's chrom:start-end form.
    chr_chunk = '%s:%s-%s' % (chr, start, end)
    cmd = [
        'java', '-Xmx%sg' % str(mem_gb), '-jar', gatk_fpath,
        '-T', 'HaplotypeCaller',
        '-R', ref_fpath,
        '-L', chr_chunk,
        '-I', bam_fpath,
        '-ERC', 'GVCF',
        '-variant_index_type', 'LINEAR',
        '-variant_index_parameter', '128000',
        '-o', raw_g_vcf_fpath,
    ]
    if not reduced_workflow:
        # Full workflow: apply the previously computed recalibration table.
        recaltable_fpath = os.path.join(scratch_dirpath, sampleID + '.table')
        cmd += ['-BQSR', recaltable_fpath, '--dbsnp', dbsnp_fpath]
    utils.call_subprocess(cmd, stderr=open(log_fpath, 'a'))
    return raw_g_vcf_fpath
def _mount_drive(self):
    """If not already mounted, mount the USB flash drive.
    Return False if failed.
    """
    # If the /mnt/usbflash/data dir does not exist, try to mount sda1,
    # sdb1 or sdc1 in turn.  The flash drive could be on any of those
    # devices.  The three attempts were copy-pasted in the original;
    # folded into one loop so the candidate list is easy to extend.
    if utils.path_exists(usb_mgr_config.usb_flash_data_dir):
        return True
    mount_pt = usb_mgr_config.usb_flash_dir
    # Clear any stale mount before retrying.
    utils.call_subprocess('umount %s' % mount_pt)
    candidate_devices = (usb_mgr_config.usb_flash_device_a,
                         usb_mgr_config.usb_flash_device_b,
                         usb_mgr_config.usb_flash_device_c)
    for dev in candidate_devices:
        utils.call_subprocess('mount %s %s' % (dev, mount_pt))
        if utils.path_exists(usb_mgr_config.usb_flash_data_dir):
            self._log.info('Mounted USB device %s' % dev)
            return True
    return False
def _shutdown_super(controllers, log):
    """Save selected logs to flash (debugging aid), then stop every
    periodic controller owned by super.
    """
    utils.call_subprocess('mkdir -p /aal-pip/field/reboot_logs')
    # Only a subset of the process logs is preserved; the remaining
    # manager logs (cases_mgr, fg_mgr, gps_mgr, modem_svr, sc_mgr,
    # svr_proxy, usb_mgr) are deliberately skipped.
    for log_name in ('hw_mgr', 'super', 'cpu_load_wd'):
        utils.call_subprocess(
            'cp /var/log/%s.log /aal-pip/field/reboot_logs' % log_name, log)
    # Stop the controllers in the same order as before.
    c = controllers
    controllers_to_stop = (
        c.store_hskp,
        c.control_ethernet_power,
        c.refresh_status,
        c.control_cases_power,
        c.control_gps_power,
        c.control_temp,
        c.control_fgsc_power,
        c.control_hf_power,
        c.control_modem_power,
        c.monitor_rudics_comm,
    )
    for controller in controllers_to_stop:
        controller.stop()
def refresh_sys_time_status(log):
    """Update the system time status.

    Reads /proc/gps_pps and fills status['sync_age'],
    status['sys_time_error'], status['lat'] and status['long'].
    Defaults all four to zero when the gps_pps driver is not running.
    """
    global status
    global subprocess_lock
    # Hold the lock only around the subprocess call; release it in a
    # finally block so an exception cannot leave the lock held forever.
    subprocess_lock.acquire()
    try:
        [time_stat, stderr] = utils.call_subprocess('cat /proc/gps_pps')
    finally:
        subprocess_lock.release()
    # time_stat looks like:
    #   Sync Age,Sys Time Error,Lat,Long
    #   1284036102,0.0,0.0,0.0
    # or:
    #   Sync Age,Sys Time Error,Lat,Long
    #   60,-0.996319,4217.6544,-08342.6943
    status['sync_age'] = 0
    status['sys_time_error'] = 0.0
    status['lat'] = 0.0
    status['long'] = 0.0
    lines = time_stat.split('\n')
    if len(lines) < 2:
        # gps_pps driver not running
        return
    if lines[0].split(',')[0] != 'Sync Age':
        return
    fields = lines[1].split(',')
    status['sync_age'] = int(fields[0])
    status['sys_time_error'] = float(fields[1])
    # Lat/long arrive in Garmin ddmm.mmmm form; convert to degrees.
    status['lat'] = garmin_to_degs(float(fields[2]))
    status['long'] = garmin_to_degs(float(fields[3]))
    # Iridium Time Fix JAN2018: a huge sync age means the GPS fix is
    # stale, so fall back to Iridium network time.
    if status['sync_age'] > 10000:
        status['sync_age'] = utils.get_iridium_time()
def _init_adc_params(log):
    """Get the ADC offset and gain for this TS-7260 board.

    Looks up the CPU serial number (from /proc/cpuinfo) in
    hw_mgr_config.adc_params; falls back to offset 0.0 / gain 1.0
    (and logs an error) when the board is unknown.
    """
    global adc_offset
    global adc_gain
    global subprocess_lock
    cpu_sn = '?'
    # Release the lock in a finally block so an exception in the
    # subprocess call cannot leave it held forever.
    subprocess_lock.acquire()
    try:
        [s, stderr] = utils.call_subprocess('cat /proc/cpuinfo')
    finally:
        subprocess_lock.release()
    # Find the line 'Serial : <number>' and take its third field.
    lines = s.split('\n')
    for line in lines:
        if line.find('Serial') != -1:
            fields = line.split()
            cpu_sn = fields[2]
            break
    if cpu_sn in hw_mgr_config.adc_params:
        adc_offset = hw_mgr_config.adc_params[cpu_sn][0]
        adc_gain = hw_mgr_config.adc_params[cpu_sn][1]
    else:
        log.error('This CPU serial number is not in hw_mgr_config.py')
        adc_offset = 0.0
        adc_gain = 1.0
    log.info('cpu_sn = %s' % repr(cpu_sn))
    log.info('adc_offset = %s' % str(adc_offset))
    log.info('adc_gain = %s' % str(adc_gain))
def refresh_uptime_status(log):
    """Update uptime, mem usage and CPU load status.

    Stores the raw 'uptime' command output in status['uptime'].
    """
    global status
    global subprocess_lock
    # try/finally guarantees the lock is released even if the
    # subprocess call raises.
    subprocess_lock.acquire()
    try:
        [uptime, stderr] = utils.call_subprocess('uptime')
    finally:
        subprocess_lock.release()
    status['uptime'] = uptime
def _initModem(self):
    """Send configuration commands to the modem.

    Return True if all commands were successful.  Also captures the SIM
    ICCID and, best-effort, publishes the Iridium network time to
    /tmp/iridium_time.
    """
    if not self._serial_port_is_open():
        return False
    # Wake the modem and flush any stale bytes before configuring it.
    self._port.write('\r')
    utils.wait(1)
    self.flushBoth()
    # 'AT&F0=0'        = restore factory defaults
    # 'ATE0'           = turn command echo off
    # 'AT+CBST=71,0,1' = NAL recommended setup cmd
    # 'AT&S0=1'        = auto answer after one ring
    # 'AT&K3'          = enable RTS, CTS
    # 'ATS12=150'      = set the +++ guard time to 150 msec
    # 'AT&D2'          = set DTR mode to 2 (see manual)
    # 'AT+CSQF'        = get the RF signal quality
    # 'AT+CICCID'      = get the ICCID from the SIM
    ret_val = self._sendCmd('AT') and \
              self._sendCmd('AT&F0=0') and \
              self._sendCmd('ATE0') and \
              self._sendCmd('AT+CBST=71,0,1') and \
              self._sendCmd('AT&S0=1') and \
              self._sendCmd('AT&K3') and \
              self._sendCmd('ATS12=150') and \
              self._sendCmd('AT&D2') and \
              self._sendCmd('AT+CSQF') and \
              self._sendCmd('AT+CICCID')
    # Pull the SIM ICCID out of the most recent response from the modem.
    self._iccid = self._strip_out_iccid(self._raw_response)
    # Best-effort: query the Iridium system time and publish it for
    # other processes.  Failure here must not fail modem init.
    try:
        self._sendCmd('AT-MSSTM')
        msstm = self._raw_response.strip()
        self._log.info('AT%s' % msstm)
        utils.call_subprocess('echo %s > /tmp/iridium_time' % msstm[8:16])
    except Exception:
        # Was a bare 'except:', which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception, still best-effort.
        pass
    return ret_val
def verify_trust_policy_signature(self, tagent_location, policy_location):
    """Verify the signature on a trust policy file using the trust agent.

    tagent_location -- path to the trust agent executable
    policy_location -- path to the trust policy XML file

    Returns True when the agent exits with code 0, False otherwise.
    Any exception is logged and re-raised.
    """
    try:
        self.log_obj.info('Verifiying trust policy signature ...')
        poutput = utils.create_subprocess([
            tagent_location, ProcessTrustpolicyXML.VERIFY_TRUST_SIG,
            policy_location
        ])
        utils.call_subprocess(poutput)
        if poutput.returncode == 0:
            self.log_obj.debug("Trust policy signature verified.")
            return True
        else:
            self.log_obj.error(
                "Trust policy signature verification failed.")
            return False
    except Exception:
        self.log_obj.exception(
            "Failed while doing verification of trust policy signature!")
        # Bare 'raise' re-raises with the original traceback intact;
        # the previous 'raise e' discarded it.
        raise
def _get_cpu_serial_num(self):
    """Return the CPU serial number parsed from /proc/cpuinfo,
    or '?' if no 'Serial' line is found.
    """
    [s, stderr] = utils.call_subprocess('cat /proc/cpuinfo')
    # Scan for the first line containing 'Serial'; its third
    # whitespace-separated field is the serial number.
    for line in s.split('\n'):
        if 'Serial' in line:
            return line.split()[2]
    return '?'
def _get_5_min_cpu_load(self):
    """Return the 5-minute load average parsed from 'uptime' output.

    The 5-minute figure is the second-to-last field of the line.
    Returns 0.0 (and logs an error) when the output is too short.
    """
    (output, error) = utils.call_subprocess('uptime')
    fields = output.split()
    if len(fields) < 2:
        # Not enough fields to hold a load average triple.
        self._log.error('uptime parsing error, not enough fields')
        return 0.0
    # Strip the trailing comma from e.g. '0.12,'.
    load_str = fields[len(fields) - 2].replace(",", "")
    return float(load_str)
def sbcctl_cmd(args, log):
    """Execute any sbcctl command.  Does not return sbcctl console output.

    args is a string of command arguments appended to the sbcctl
    executable path.  Errors are logged, not raised.
    """
    executable = ''.join([global_config.field_bin_dir, '/sbcctl'])
    exe_and_args = ' '.join([executable, args])
    # Release the lock in a finally block so an exception in the
    # subprocess call cannot leave it held forever.
    subprocess_lock.acquire()
    try:
        (con_out, con_err) = utils.call_subprocess(exe_and_args)
    finally:
        subprocess_lock.release()
    # (None, None) is call_subprocess's failure signature.
    if (con_out is None) and (con_err is None):
        log.error('utils.call_subprocess failed trying to execute: %s' %
                  exe_and_args)
        return
    if con_err:
        log.error('Error from %s command: %s' % (exe_and_args, con_err))
def refresh_sbcctl_status(log):
    """Parse the status info returned by sbcctl and store it
    in global status dict.
    """
    global status
    executable = ''.join([global_config.field_bin_dir, '/sbcctl'])
    # Hold the lock only around the subprocess call; the finally block
    # guarantees release even if call_subprocess raises.
    subprocess_lock.acquire()
    try:
        (con_out, con_err) = utils.call_subprocess([executable, 'status'])
    finally:
        subprocess_lock.release()
    if (con_out is None) and (con_err is None):
        log.error('utils.subprocess failed')
        return
    if con_err:
        log.error('Error from sbcctl status command: %s' % con_err)
    # The line numbers below are fixed positions in sbcctl's console
    # output -- assumes a specific sbcctl version; TODO confirm against
    # the deployed binary if parsing ever drifts.
    lines = con_out.split('\n')
    status['sys_date'] = get_str(lines[2], 1)
    status['sys_time'] = get_str(lines[2], 2)
    status['irid_pwr'] = get_int(lines[6], 2)
    status['fg_pwr'] = get_int(lines[7], 2)
    status['sc_pwr'] = get_int(lines[8], 2)
    status['cases_pwr'] = get_int(lines[9], 2)
    status['hf_pwr'] = get_int(lines[10], 2)
    status['htr_pwr'] = get_int(lines[11], 2)
    status['gps_pwr'] = get_int(lines[12], 2)
    status['ethernet_pwr'] = get_int(lines[48], 1)
    status['usb_pwr'] = get_int(lines[49], 1)
    status['pc104_pwr'] = get_int(lines[50], 1)
    status['rs232_pwr'] = get_int(lines[51], 1)
    status['cpu_temp'] = get_float(lines[31], 1)
    status['router_temp'] = get_router_temp(lines[45], log)
    status['batt_1_temp'] = get_batt_temp(lines[39], log)
    status['batt_1_temp_raw_v'] = get_float(lines[39], 1)
    status['batt_2_temp'] = get_batt_temp(lines[40], log)
    status['batt_3_temp'] = get_batt_temp(lines[41], log)
    status['batt_1_volt'] = get_batt_v(lines[42])
    status['batt_2_volt'] = get_batt_v(lines[43])
    status['batt_3_volt'] = get_batt_v(lines[44])
    status['in_current'] = get_input_current(lines[46])
    status['in_current_adc'] = get_float(lines[46], 1)
    status['ovr_cur_status'] = get_int(lines[20], 2)
    status['ovr_cur_reset'] = get_int(lines[19], 2)
    status['jumper_2'] = get_int(lines[25], 1)
    status['jumper_3'] = get_int(lines[26], 1)
    status['jumper_4'] = get_int(lines[27], 1)
    status['jumper_5'] = get_int(lines[28], 1)
    status['jumper_6'] = get_int(lines[29], 1)
    # Calculate input power: input current times the highest of the
    # three battery voltages.
    V1 = status['batt_1_volt']
    V2 = status['batt_2_volt']
    V3 = status['batt_3_volt']
    I = status['in_current']
    V = V1
    if V2 > V:
        V = V2
    if V3 > V:
        V = V3
    status['in_power'] = I * V
def process_single_sample(ref_fpath, sample_id, bam_fpath, scratch_dirpath, output_dirpath, num_threads):
    """Prepare one sample's BAM for variant calling.

    Pipeline: validate the BAM with Picard (repairing sort order and read
    groups if corrupted), realign indels with GATK, optionally recalibrate
    base quality scores (skipped in the reduced workflow), and build a
    BAM index.  Returns the path of the realigned BAM.  All tool stderr
    is appended to <sample_id>.log in output_dirpath.
    """
    log_fpath = os.path.join(output_dirpath, sample_id + '.log')
    final_bam_fpath = os.path.join(scratch_dirpath, sample_id + '.bam')
    replace_rg_fpath = os.path.join(scratch_dirpath, sample_id + '.temp.bam')
    bqsr_fpath = os.path.join(scratch_dirpath, sample_id + '.bqsr.bam')
    targetintervals_fpath = os.path.join(scratch_dirpath, sample_id + '_realignment_targets.list')
    validate_log_fpath = os.path.join(scratch_dirpath, sample_id + '.validate.txt')
    mem_gb = str(config.max_memory)
    # ignored_errors_in_bam is a module-level list of Picard error codes
    # to suppress during validation.
    ignored_errors = ['IGNORE=%s' % error for error in ignored_errors_in_bam]
    cmd = ['java', '-Xmx%sg' % mem_gb, '-jar', config.picard_fpath, 'ValidateSamFile',
           'INPUT=%s' % bam_fpath, 'OUTPUT=%s' % validate_log_fpath,
           'IGNORE_WARNINGS=true', 'MAX_OUTPUT=1'] + ignored_errors
    # A non-zero exit status from ValidateSamFile marks the BAM as corrupted.
    is_corrupted_file = utils.call_subprocess(cmd, stderr=open(log_fpath, 'a'))
    if is_corrupted_file:
        # Repair path: coordinate-sort the BAM, then rewrite read groups,
        # and continue the pipeline from the repaired file.
        new_bam_fpath = os.path.join(scratch_dirpath, sample_id + '_sort.bam')
        utils.call_subprocess(['java', '-Xmx%sg' % mem_gb, '-jar', config.picard_fpath, 'SortSam',
                               'INPUT=%s' % bam_fpath, 'OUTPUT=%s' % new_bam_fpath,
                               'TMP_DIR=%s' % config.picard_tmp_dirpath, 'SORT_ORDER=coordinate',
                               'VALIDATION_STRINGENCY=LENIENT', 'CREATE_INDEX=true'],
                              stderr=open(log_fpath, 'a'))
        utils.call_subprocess(['java', '-Xmx%sg' % mem_gb, '-jar', config.picard_fpath, 'AddOrReplaceReadGroups',
                               'INPUT=%s' % new_bam_fpath, 'OUTPUT=%s' % replace_rg_fpath,
                               'TMP_DIR=%s' % config.picard_tmp_dirpath, 'RGPL=illumina',
                               'RGSM=%s' % sample_id, 'RGLB=lib', 'RGPU=adapter',
                               'VALIDATION_STRINGENCY=LENIENT', 'CREATE_INDEX=true'],
                              stderr=open(log_fpath, 'a'))
        bam_fpath = replace_rg_fpath
    print 'Realigning indels...'
    cmd = ['java', '-Xmx%sg' % mem_gb, '-jar', config.gatk_fpath, '-T', 'RealignerTargetCreator',
           '-R', ref_fpath, '-nt', num_threads, '-I', bam_fpath, '-o', targetintervals_fpath]
    if not config.reduced_workflow:
        # Known-indel resources improve realignment target selection.
        cmd += ['-known', config.gold_indels_fpath, '-known', config.tg_indels_fpath]
    utils.call_subprocess(cmd, stderr=open(log_fpath, 'a'))
    cmd = ['java', '-Xmx%sg' % mem_gb, '-jar', config.gatk_fpath, '-T', 'IndelRealigner',
           '-R', ref_fpath, '-I', bam_fpath, '-targetIntervals', targetintervals_fpath,
           '-o', final_bam_fpath]
    if not config.reduced_workflow:
        cmd += ['-known', config.gold_indels_fpath, '-known', config.tg_indels_fpath]
    utils.call_subprocess(cmd, stderr=open(log_fpath, 'a'))
    if not config.reduced_workflow:
        print 'Recalibrating bases...'
        # The recalibration table is also consumed later by
        # process_single_chunk via '-BQSR'.
        recaltable_fpath = os.path.join(scratch_dirpath, sample_id + '.table')
        # NOTE(review): '0.10 ' has a trailing space -- GATK may tolerate
        # it, but confirm it is not a typo.
        utils.call_subprocess(['java', '-Xmx%sg' % mem_gb, '-jar', config.gatk_fpath, '-T', 'BaseRecalibrator',
                               '-R', ref_fpath, '-nct', num_threads, '-I', final_bam_fpath,
                               '-knownSites', config.dbsnp_fpath, '-dt', 'ALL_READS',
                               '-dfrac', '0.10 ', '-o', recaltable_fpath],
                              stderr=open(log_fpath, 'a'))
        utils.call_subprocess(['java', '-Xmx%sg' % mem_gb, '-jar', config.gatk_fpath, '-T', 'PrintReads',
                               '-R', ref_fpath, '-nct', num_threads, '-I', final_bam_fpath,
                               '-BQSR', recaltable_fpath, '-o', bqsr_fpath],
                              stderr=open(log_fpath, 'a'))
    print 'Building BAM index...'
    utils.call_subprocess(['java', '-Xmx%sg' % mem_gb, '-jar', config.picard_fpath, 'BuildBamIndex',
                           'INPUT=%s' % final_bam_fpath, 'OUTPUT=%s' % final_bam_fpath + '.bai',
                           'TMP_DIR=%s' % config.picard_tmp_dirpath,
                           'VALIDATION_STRINGENCY=LENIENT'],
                          stderr=open(log_fpath, 'a'))
    return final_bam_fpath
import utils
import global_config

if __name__ == '__main__':
    """ Start all the aal-pip processes"""
    # Process scripts to launch.  They are started in reverse list
    # order, so hw_mgr comes up first and cpu_load_wd last.
    processes = [
        "cpu_load_wd.py",
        "file_svr.py",
        "super.py",
        "svr_proxy.py",
        "modem_svr.py",
        "hf_mgr.py",
        "sc_mgr.py",
        "fg_mgr.py",
        "cases_mgr.py",
        "usb_mgr.py",
        # NOTE(review): every other entry ends in '.py' -- confirm that
        # 'gps_mgr' (no extension) is intentional and not a typo.
        "gps_mgr",
        "hw_mgr.py"
    ]
    print('AAL-PIP software version %s' % global_config.sw_version_number)
    for process in reversed(processes):
        if utils.process_is_running(process):
            print '%s is already running' % process
            continue
        print 'Starting %s' % process
        utils.start_process(process)
        # Stagger the launches so each process can initialize.
        utils.wait(2.0)
    # Try up to 3 times to mount the USB flash drive on /mnt/usbflash
    # and ensure the data directory exists.
    for i in range(3):
        if utils.path_exists('/dev/sda1'):
            if not utils.path_exists('/mnt/usbflash'):
                utils.call_subprocess('mkdir -p /mnt/usbflash')
            utils.call_subprocess('mount /dev/sda1 /mnt/usbflash')
            if not utils.path_exists('/mnt/usbflash/data'):
                utils.call_subprocess('mkdir -p /mnt/usbflash/data')
            break
        utils.wait(2)
def process_files(ref_fpath, sample_ids, bam_fpaths, scratch_dirpath, output_dirpath, project_id, sample_files, sample_names):
    """Run joint variant calling across all samples of a project.

    Pipeline: per-sample variant calling, per-sample GVCF merging (in
    parallel), joint genotyping with GenotypeGVCFs, then -- unless the
    reduced workflow is active -- SNP and INDEL VQSR filtering.  Finally
    produces VariantEval reports and bgzip/tabix-compressed VCFs.
    Returns the project VCF path (uncompressed name; a .gz is created).

    NOTE(review): the original source was whitespace-collapsed; the
    block nesting below (filtering inside 'if not reduced_workflow',
    reporting at function level) is reconstructed -- verify against VCS
    history if behavior looks off.
    """
    log_fpath = os.path.join(output_dirpath, project_id + '.log')
    num_threads = str(config.max_threads)
    print 'Calling variants...'
    raw_vcf_fpaths = [process_single_file(ref_fpath, sample_ids[i], bam_fpaths[i], output_dirpath, scratch_dirpath)
                      for i in range(len(bam_fpaths))]
    # Merge each sample's chunk GVCFs in parallel (joblib).
    n_jobs = min(len(raw_vcf_fpaths), config.max_threads)
    g_vcf_fpaths = Parallel(n_jobs=n_jobs)(
        delayed(merge_vcfs)(output_dirpath, sample_ids[i], raw_vcf_fpaths[i], ref_fpath)
        for i in range(len(raw_vcf_fpaths)))
    raw_vcf_fpath = os.path.join(scratch_dirpath, project_id + '.raw.vcf')
    vcf_fpath = os.path.join(output_dirpath, project_id + '.vcf')
    if reduced_workflow:
        # Reduced workflow skips VQSR, so genotype straight into the
        # final VCF path.
        raw_vcf_fpath = vcf_fpath
    print 'Joint genotyping...'
    variants = ['-V %s' % g_vcf_fpaths[i] for i in range(len(g_vcf_fpaths))]
    variants = (' '.join(variants)).split()
    # Calling confidence thresholds come from config; low_emit selects
    # the permissive pair.
    cmd = ['java', '-jar', gatk_fpath, '-T', 'GenotypeGVCFs', '-R', ref_fpath,
           '-nt', num_threads, '-o', raw_vcf_fpath,
           '-stand_call_conf', config.low_call_conf if config.low_emit else config.stand_call_conf,
           '-stand_emit_conf', config.low_emit_conf if config.low_emit else config.stand_emit_conf]
    utils.call_subprocess(cmd + variants, stderr=open(log_fpath, 'a'))
    if not reduced_workflow:
        print 'Filtering variants...'
        mem_gb = str(config.max_memory)
        recal_fpath = os.path.join(scratch_dirpath, project_id + '_SNP.recal')
        tranches_fpath = os.path.join(scratch_dirpath, project_id + '_SNP.tranches')
        raw_indels_vcf_fpath = os.path.join(scratch_dirpath, project_id + '_raw_indels.vcf')
        recal_indel_fpath = os.path.join(scratch_dirpath, project_id + '_INDEL.recal')
        tranches_indel_fpath = os.path.join(scratch_dirpath, project_id + '_INDEL.tranches')
        # Variant filtering, stage 1: train the SNP recalibration model.
        return_code = utils.call_subprocess(
            ['java', '-Xmx%sg' % mem_gb, '-jar', gatk_fpath, '-T', 'VariantRecalibrator',
             '-R', ref_fpath, '-input', raw_vcf_fpath,
             '-resource:hapmap,known=false,training=true,truth=true,prior=15.0', hapmap_fpath,
             '-resource:omni,known=false,training=true,truth=true,prior=12.0', omni_fpath,
             '-resource:1000G,known=false,training=true,truth=false,prior=10.0', tg_indels_fpath,
             '-resource:dbsnp,known=true,training=false,truth=false,prior=2.0', dbsnp_fpath,
             '-an', 'DP', '-an', 'QD', '-an', 'FS', '-an', 'MQRankSum', '-an', 'ReadPosRankSum',
             '-mode', 'SNP', '-recalFile', recal_fpath, '-tranchesFile', tranches_fpath],
            stderr=open(log_fpath, 'a'))
        if return_code != 0:
            # VQSR can fail on small callsets; fall back with a warning.
            print_variant_filtering_warning(raw_vcf_fpath, vcf_fpath)
        else:
            # Stage 2: apply the SNP model, then repeat for INDELs.
            utils.call_subprocess(
                ['java', '-Xmx%sg' % mem_gb, '-jar', gatk_fpath, '-T', 'ApplyRecalibration',
                 '-R', ref_fpath, '-input', raw_vcf_fpath, '-mode', 'SNP',
                 '--ts_filter_level', '99.5', '-recalFile', recal_fpath,
                 '-tranchesFile', tranches_fpath, '-o', raw_indels_vcf_fpath],
                stderr=open(log_fpath, 'a'))
            return_code = utils.call_subprocess(
                ['java', '-Xmx%sg' % mem_gb, '-jar', gatk_fpath, '-T', 'VariantRecalibrator',
                 '-R', ref_fpath, '-input', raw_indels_vcf_fpath,
                 '-resource:mills,known=true,training=true,truth=true,prior=12.0', mills_fpath,
                 '-resource:dbsnp,known=true,training=false,truth=false,prior=2.0', dbsnp_fpath,
                 '-an', 'DP', '-an', 'QD', '-an', 'FS', '-an', 'MQRankSum', '-an', 'ReadPosRankSum',
                 '-mode', 'INDEL', '--maxGaussians', '4', '-recalFile', recal_indel_fpath,
                 '-tranchesFile', tranches_indel_fpath],
                stderr=open(log_fpath, 'a'))
            if return_code != 0:
                print_variant_filtering_warning(raw_vcf_fpath, vcf_fpath)
            else:
                utils.call_subprocess(
                    ['java', '-Xmx%sg' % mem_gb, '-jar', gatk_fpath, '-T', 'ApplyRecalibration',
                     '-R', ref_fpath, '-input', raw_indels_vcf_fpath, '-mode', 'INDEL',
                     '--ts_filter_level', '99.0', '-recalFile', recal_indel_fpath,
                     '-tranchesFile', tranches_indel_fpath, '-o', vcf_fpath],
                    stderr=open(log_fpath, 'a'))
    # Per-sample variant-count and Ts/Tv reports.
    report_vars_fpath = os.path.join(scratch_dirpath, project_id + '.var.txt')
    utils.call_subprocess(['java', '-jar', gatk_fpath, '-T', 'VariantEval', '-R', ref_fpath,
                           '-eval', vcf_fpath, '-noST', '-noEV', '-EV', 'CountVariants',
                           '-ST', 'Sample', '-o', report_vars_fpath],
                          stderr=open(log_fpath, 'a'))
    report_tstv_fpath = os.path.join(scratch_dirpath, project_id + '.tv.txt')
    utils.call_subprocess(['java', '-jar', gatk_fpath, '-T', 'VariantEval', '-R', ref_fpath,
                           '-eval', vcf_fpath, '-noST', '-noEV', '-EV', 'TiTvVariantEvaluator',
                           '-ST', 'Sample', '-o', report_tstv_fpath],
                          stderr=open(log_fpath, 'a'))
    printReport(report_vars_fpath, report_tstv_fpath, sample_names, sample_ids, sample_files, output_dirpath)
    # Compress and index all outputs for distribution.
    for g_vcf_fpath in g_vcf_fpaths:
        utils.call_subprocess(['bgzip', '-f', g_vcf_fpath], stderr=open(log_fpath, 'a'))
        utils.call_subprocess(['tabix', '-p', 'vcf', g_vcf_fpath + '.gz'], stderr=open(log_fpath, 'a'))
    utils.call_subprocess(['bgzip', '-f', vcf_fpath], stderr=open(log_fpath, 'a'))
    utils.call_subprocess(['tabix', '-p', 'vcf', vcf_fpath + '.gz'], stderr=open(log_fpath, 'a'))
    return vcf_fpath
class StoreHskp: """Store housekeeping info in a data file""" def __init__(self, log, exit_callback=None): self._log = log self._server_proxy = None self._data_file = None self._data_file_path = None self._data_file_state = 1 self._data_file_hdr_row = 'Year,Month,Day,Hour,Minute,Second,Modem_on,FG_on,SC_on,CASES_on,HF_On,Htr_On,Garmin_GPS_on,Overcurrent_status_on,T_batt_1,T_batt_2,T_batt_3,T_FG_electronics,T_FG_sensor,T_router,V_batt_1,V_batt_2,V_batt_3,I_input,P_input,lat,long,sys_time_error_secs,UTC_sync_age_secs,Uptime_secs,CPU_load_1_min,CPU_load_5_min,CPU_load_15_min\n' utils.make_dirs(super_config.hskp_temp_dir, self._log) def run(self, time_stamp): self._store_hskp_record(time_stamp) def stop(self): self._close_data_file() if self._data_file_path is not None: self._close_data_file() save_file_thread = SaveFileThread(self._data_file_path, True, self._log) save_file_thread.join() def _store_hskp_record(self, time_stamp): data_row = self._get_data_row(time_stamp) if data_row is None: return if self._data_file_state == 1: # Open new data file and write the header row to it self._data_file_path = "".join( (super_config.hskp_temp_dir, 'hskp_', utils.time_stamp_str(time_stamp), '.dat.csv')) if not self._open_data_file(): return self._write_to_data_file(self._data_file_hdr_row) self._write_to_data_file(data_row) self._data_file_state = 2 return if self._data_file_state is 2: self._write_to_data_file(data_row) end_of_hour = (time_stamp.minute == 59) and (time_stamp.second == 45) if end_of_hour: self._data_file.close() # Spin off a thread to execute the XMLRPC command. # If it's a big file, it will take a while for the USB mgr # to copy the file to temp storage. 
compress = True save_file_thread = SaveFileThread(self._data_file_path, compress, self._log) # save_file_thread deletes data file after storage self._data_file_path = None self._data_file_state = 1 return self._log.error('StoreHskp._store_hskp: unknown state value') def _open_data_file(self): """Open self._data_file. Return True if successful""" try: self._data_file = open(self._data_file_path, 'wb') except IOError: self._log.error('Could not open %s' % self._data_file_path) self._data_file = None return False return True def _close_data_file(self): """Close self._data_file""" if self._data_file: try: self._data_file.close() except IOError: self._log.error('Could not close %s' % self._data_file_path) self._data_file = None def _write_to_data_file(self, s): """Write a string to self._data_file""" if self._data_file: try: self._data_file.write(s) except IOError: self._log.error('Could not write to file %s', self._data_file) def _get_data_row(self, time_stamp): """Return a CSV string containing all the hskp values Return None if status data was not available """ dummy_lock = utils.Lock(self._log) [hw_status, self._server_proxy ] = utils.get_full_hw_status(self._server_proxy, dummy_lock, self._log) if hw_status is None: self._log.error( 'StoreHskp._get_data_row: Could not get full status from hw_mgr' ) return None parts = [] # Build up a row of CSV data try: parts.append(','.join([ \ '%d' % time_stamp.year, '%d' % time_stamp.month, '%d' % time_stamp.day, '%d' % time_stamp.hour, '%d' % time_stamp.minute, '%d' % time_stamp.second, str(hw_status['irid_pwr']), str(hw_status['fg_pwr']), str(hw_status['sc_pwr']), str(hw_status['cases_pwr']), str(hw_status['hf_pwr']), str(hw_status['htr_pwr']), str(hw_status['gps_pwr']), str(hw_status['ovr_cur_status']), '%.2f' % hw_status['batt_1_temp'], '%.2f' % hw_status['batt_2_temp'], '%.2f' % hw_status['batt_3_temp'], '%.2f' % hw_status['fg_elec_temp'], '%.2f' % hw_status['fg_sens_temp'], '%.2f' % hw_status['router_temp'], '%.2f' % 
hw_status['batt_1_volt'], '%.2f' % hw_status['batt_2_volt'], '%.2f' % hw_status['batt_3_volt'], '%.3f' % hw_status['in_current'], '%.3f' % hw_status['in_power'], '%.6f' % hw_status['lat'], '%.6f' % hw_status['long'], '%.6f' % hw_status['sys_time_error'], '%d,' % hw_status['sync_age']])) except Exception, e: self._log.error('StoreHskp._get_data_row exception: %s' % e) return None # get uptime_secs and idle_secs [ut, std_error] = utils.call_subprocess('cat /proc/uptime') fields = ut.split() parts.append('%d,' % int(float(fields[0]))) ut = hw_status['uptime'].replace(',', '') fields = ut.split() last = len(fields) - 1 parts.append(','.join([ \ '%.2f' % float(fields[last-2]), '%.2f' % float(fields[last-1]), '%.2f\n' % float(fields[last])])) data_row = ''.join(parts) #self._log.info(data_row) return data_row