def filaBPE(self):
    """Wait until every other Bernese ('bern*') worker thread finishes.

    Enumerates all live threads, collects those whose name starts with
    'bern' (excluding this run's own thread, self.bpeName) and joins
    them one by one, oldest first — thread names embed their creation
    datetime, so sorting by name orders them chronologically. This
    serializes BPE runs: one processing job at a time.
    """
    # BUG FIX: the previous version stored an index counted only over
    # the *matching* threads and then used it to subscript the full
    # threading.enumerate() list, so it could join/poll an unrelated
    # thread. Keep a reference to the Thread object itself instead.
    bern_threads = [
        {'thread': th, 'name': th.name}
        for th in threading.enumerate()
        if th.name[:4] == 'bern' and th.name != self.bpeName
    ]
    # Sort by name, which is tied to the datetime the thread was created.
    bern_threads.sort(key=lambda k: k['name'])
    if bern_threads:
        # One processing job at a time.
        log('Waiting free threads')
        for entry in bern_threads:
            # join() takes a float timeout in seconds.
            entry['thread'].join(MAX_PROCESSING_TIME * 60)
            if entry['thread'].is_alive():
                # TODO: kill docker and remove dir
                log('Thread time out')
def getBlq(self):
    """Copy the user-supplied BLQ file(s) to the server (DATAPOOL/REF52).

    Every non-empty entry of self.pathBlqTempFiles is written to
    DATAPOOL/REF52/SYSTEM.BLQ.

    NOTE(review): each file overwrites the same SYSTEM.BLQ, so only the
    last entry survives — confirm multiple BLQ inputs are intended.

    Returns:
        bool: True on success, False if any copy failed (errors logged).
    """
    try:
        # (removed an unused loop counter from the original version)
        for blqFile in self.pathBlqTempFiles:
            if not blqFile:
                continue
            destination_path = path.join(self.DATAPOOL_DIR, 'REF52',
                                         'SYSTEM.BLQ')
            with open(blqFile, 'r') as tmpFile, \
                    open(destination_path, 'w') as destination:
                destination.write(tmpFile.read())
        return True
    except Exception as e:
        log('Erro ao copiar arquivo BLQ para o Datapool')
        log(str(e))
        erroMsg = sys.exc_info()
        log(str(erroMsg[0]))
        log(str(erroMsg[1]))
        return False
def newend_f(**kwargs):
    """Run the end-of-processing callback, then remove the work dir.

    Calls end_f with the given keyword arguments and, unless DEBUG is
    set, deletes the working directory kwargs['BASE_DIR']. Any failure
    is logged and swallowed.
    """
    try:
        end_f(**kwargs)
        if not DEBUG:
            # Best-effort cleanup of the per-run working directory.
            rmtree(kwargs['BASE_DIR'], ignore_errors=True)
    except Exception as exc:
        log('Erro ao rodar endFunction')
        log(str(exc))
        exc_type, exc_value = sys.exc_info()[:2]
        log(str(exc_type))
        log(str(exc_value))
def finishing_process(**kwargs):
    """Mark a processing request as finished and e-mail its result.

    Expected kwargs:
        status: success flag (read but not used here; kept so a missing
            key still fails fast, preserving the caller contract).
        id: primary key of the Proc_Request to finish.
        msg: message body for the result e-mail.
        result: path of the result attachment (or None).
        filename: optional attachment name; defaults to None.
    """
    status = kwargs['status']  # noqa: F841 - see docstring
    proc_id = kwargs['id']  # renamed locally to avoid shadowing id()
    msg = kwargs['msg']
    result = kwargs['result']
    # dict.get replaces the previous bare "except:", which could mask
    # unrelated errors while defaulting filename.
    filename = kwargs.get('filename')

    log('finishing process: {}'.format(proc_id))
    # Fetch the process by id and mark it finished in the database.
    proc = Proc_Request.objects.get(id=proc_id)
    proc.finish_process()
    # Notify the requester by e-mail.
    send_result_email(proc.email, msg, result, filename)
def check_line():
    """Check whether the queue is free and start the next job in line.

    If nothing is running and the server is online, the next waiting
    Proc_Request is started. Otherwise, any running process that has
    exceeded MAX_PROCESSING_TIME minutes is finished with an error.
    """
    log('Checking line')
    # Processes currently running
    procs_running = Proc_Request.objects.filter(proc_status='running')
    if not procs_running:
        # Processes waiting to be run
        proc_waiting = Proc_Request.objects.filter(proc_status='waiting')
        # Only start when there is something waiting and the server is online.
        if proc_waiting and is_connected():
            # BUG FIX: ".pk" was read from the QuerySet itself, which has
            # no such attribute; take the first waiting request instead.
            run_next(proc_waiting.first().pk)
    else:
        if len(procs_running) > 1:
            # WARNING: dangerous situation — one process started before
            # the previous one finished.
            log('WARNING!!! check_line(): Mais de um processo sendo execultado'
                )
            # raise something
        t_now = timezone.localtime(timezone.now())
        for proc in procs_running:
            dtime = (t_now - proc.started_at)
            # Finish (with error) processes past the allowed wall time.
            if dtime.total_seconds() / 60 > MAX_PROCESSING_TIME:
                msg = 'Tempo máximo de processamento excedido'
                log('ERROR!!! check_line(): ' + msg)
                args = {
                    'status': 'Erro',
                    'id': proc.id,
                    'msg': msg,
                    'result': None,
                }
                finishing_process(**args)
def send_mail_template(subject,
                       template_name,
                       context,
                       recipient_list,
                       pathFile,
                       from_email=DEFAULT_FROM_EMAIL,
                       fail_silently=False,
                       pathFileName=None):
    """Render a template and send it as a multipart (text+HTML) e-mail.

    The rendered HTML is attached as an alternative to its stripped-tags
    plain-text version. When pathFile is given (and DEBUG is off) the
    file is attached, named pathFileName or, failing that, the open
    file's name.

    Returns:
        bool: True when the e-mail was sent, False when sending raised.
    """
    html_body = render_to_string(template_name, context)
    plain_body = striptags(html_body)
    message = EmailMultiAlternatives(subject=subject,
                                     body=plain_body,
                                     from_email=from_email,
                                     to=recipient_list)
    message.attach_alternative(html_body, "text/html")

    # Attachments are skipped entirely in DEBUG mode.
    if pathFile and not DEBUG:
        with open(pathFile, 'rb') as attachment:
            attachment_name = pathFileName if pathFileName else attachment.name
            message.attach(attachment_name, attachment.read())

    try:
        message.send(fail_silently=fail_silently)
    except Exception:
        log('Erro no envio do email.')
        exc_type, exc_value = sys.exc_info()[:2]
        log(str(exc_type))
        log(str(exc_value))
        return False
    return True
def getResult(self):
    """Collect the Bernese output files for this run and zip them.

    Builds the list of expected output files (.PRC report, .SNX SINEX
    solution, .OUT, plus a kinematic .SUM for PPP) according to
    self.prcType and self.dateFile, writes the ones that exist into
    <RESULTS_DIR>/<bpeName>.zip, and extracts the estimated coordinates
    from the SINEX file into self.coord_result.

    Returns:
        str: path of the result zip when at least one file was packed.
        bool: False when prcType is unknown or no output file exists.
    """
    if self.prcType in ['relativo', 'rede']:
        # RLTyyddd0.PRC — processing report
        prcFile = 'RLT' + str(self.dateFile.year)[-2:]
        prcFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.PRC'
        prcPathFile = path.join(self.CAMPAIGN_DIR, 'OUT', prcFile)
        # F1_yyddd0.SNX — SINEX solution
        snxFile = 'F1_' + str(self.dateFile.year)[-2:]
        snxFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.SNX'
        snxFilePath = path.join(self.CAMPAIGN_DIR, 'SOL', snxFile)
        # F1_yyddd0.OUT — program output
        outFile = 'F1_' + str(self.dateFile.year)[-2:]
        outFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.OUT'
        outFilePath = path.join(self.CAMPAIGN_DIR, 'OUT', outFile)
        resultListFiles = [prcPathFile, snxFilePath, outFilePath]
    elif self.prcType == 'rapido':
        prcFile = 'RLT' + str(self.dateFile.year)[-2:]
        prcFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.PRC'
        prcPathFile = path.join(self.CAMPAIGN_DIR, 'OUT', prcFile)
        # Two solutions for the rapid mode: final (F1_) and preliminary (P1_).
        snxFile = 'F1_' + str(self.dateFile.year)[-2:]
        snxFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.SNX'
        snxFilePath = path.join(self.CAMPAIGN_DIR, 'SOL', snxFile)
        snxFile2 = 'P1_' + str(self.dateFile.year)[-2:]
        snxFile2 += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.SNX'
        snxFilePath2 = path.join(self.CAMPAIGN_DIR, 'SOL', snxFile2)
        outFile = 'F1_' + str(self.dateFile.year)[-2:]
        outFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.OUT'
        outFilePath = path.join(self.CAMPAIGN_DIR, 'OUT', outFile)
        outFile2 = 'P1_' + str(self.dateFile.year)[-2:]
        outFile2 += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.OUT'
        outFilePath2 = path.join(self.CAMPAIGN_DIR, 'OUT', outFile2)
        # NOTE(review): prcPathFile is built but not packed in this
        # branch — confirm the report is meant to be excluded here.
        resultListFiles = [
            snxFilePath, snxFilePath2, outFilePath, outFilePath2
        ]
    elif self.prcType == 'ppp':
        prcFile = 'PPP' + str(self.dateFile.year)[-2:]
        prcFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.PRC'
        prcPathFile = path.join(self.CAMPAIGN_DIR, 'OUT', prcFile)
        snxFile = 'RED' + str(self.dateFile.year)[-2:]
        snxFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.SNX'
        snxFilePath = path.join(self.CAMPAIGN_DIR, 'SOL', snxFile)
        outFile = 'RED' + str(self.dateFile.year)[-2:]
        outFile += '{:03d}'.format(date2yearDay(self.dateFile)) + '0.OUT'
        outFilePath = path.join(self.CAMPAIGN_DIR, 'OUT', outFile)
        # KINdddd<MARK>.SUM — kinematic summary named after the marker.
        kinFile = 'KIN' + '{:03d}'.format(date2yearDay(
            self.dateFile)) + '0'
        kinFile += self.headers[0]['MARKER NAME'][:4] + '.SUM'
        kinFilePath = path.join(self.CAMPAIGN_DIR, 'OUT', kinFile)
        resultListFiles = [
            prcPathFile, snxFilePath, outFilePath, kinFilePath
        ]
    else:
        # Unknown processing type — nothing to collect.
        return False
    resultZipFile = path.join(self.RESULTS_DIR, self.bpeName + '.zip')
    # 'x' mode: fail if the zip already exists for this bpeName.
    with ZipFile(resultZipFile, 'x', ZIP_DEFLATED) as rZipFile:
        for file in resultListFiles:
            if path.isfile(file):
                rZipFile.write(file, path.basename(file))
            else:
                log('Arquivo ' + path.basename(file) + ' não encontrado.')
        # Check whether the archive ended up empty.
        if rZipFile.namelist():
            # Extract the estimated coordinates from whichever SINEX exists.
            # NOTE(review): snxFilePath2 is only defined in the 'rapido'
            # branch — this fallback raises NameError for other types
            # when the primary SINEX is missing. Confirm.
            if path.isfile(snxFilePath):
                self.coord_result = self.get_coord_result(snxFilePath)
            else:
                self.coord_result = self.get_coord_result(snxFilePath2)
            return resultZipFile
        else:
            return False
def runBPE(self):
    """Run the full Bernese processing pipeline for this request.

    Steps: wait for the BPE thread queue (filaBPE), stage input files
    (RBMC base stations for 'rede', RINEX, BLQ), download precise
    ephemerides, generate station files, run the Bernese container via
    docker, then package the result and report it through
    self.endFunction (success or failure e-mail).

    Returns:
        list: [True, msg] on success, [False, msg] on failure.
    """
    # TODO: rework this method; using finally to send the e-mail should improve it.
    run_stdout, run_stderr = [False, False]
    try:
        # Check that the BPE threading queue is free.
        self.filaBPE()
        # Download the RBMC base-station files (network mode only).
        if self.prcType == 'rede':
            self.getRBMC()
        # Stage the RINEX file into DATAPOOL.
        if not self.getRinex():
            msg = 'Erro ao salvar o arquivo RINEX no servidor'
            log(msg)
            raise Exception(msg)
        # Stage the BLQ file into DATAPOOL.
        if not self.getBlq():
            msg = 'Erro ao salvar o arquivo BLQ no servidor'
            log(msg)
            raise Exception(msg)
        # Download the precise ephemerides; abort the run on failure.
        if not self.getEphem():
            if len(self.headers) > 1:
                msg = 'Erro no processamento dos arquivos: '
            else:
                msg = 'Erro no processamento do arquivo: '
            for rnxHeader in self.headers:
                msg += path.basename(rnxHeader['RAW_NAME']) + ' '
            msg += '. \n'
            msg += 'Falha no download das efemérides precisas do CODE.'
            log(msg)
            self.endFunction(status=False,
                             id=self.proc_id,
                             msg=msg,
                             result=None,
                             BASE_DIR=self.BASE_DIR)
            return [
                False,
                '{} Falha no download das efemérides precisas'.format(
                    self.bpeName)
            ]
        # Generate the Bernese station files from the RINEX headers.
        self.setSTAfiles()
        # On Linux the docker daemon is reached via HOST; elsewhere via
        # the Docker Desktop alias.
        if self.osname == 'LINUX':
            host = HOST
        else:
            host = 'host.docker.internal'
        # Build the docker command that runs the Bernese container.
        arg = 'docker --host={} run --network=internal --rm --name {} '.format(
            host, self.bpeName)
        arg += '-v gnssufv_temp:/home/TEMP -v gnssufv_log:/home/LOG '
        arg += '-e BPENAME={} '.format(self.bpeName)
        arg += 'gnssufv/bernese:latest bash -c '
        # Quoting differs per platform (single vs double quotes).
        if self.osname == 'LINUX':
            arg += "'source /home/BERN52/GPS/EXE/LOADGPS.setvar "
        else:
            arg += '"source /home/BERN52/GPS/EXE/LOADGPS.setvar '
        # Select the Bernese PCS script for the processing type.
        if self.prcType == 'ppp':
            arg += '&& perl ${U}/SCRIPT/pppdemo_pcs.pl '
        elif self.prcType in ['relativo', 'rede']:
            arg += '&& perl ${U}/SCRIPT/rltufv_pcs.pl '
        elif self.prcType == 'rapido':
            arg += '&& perl ${U}/SCRIPT/rapsta_pcs.pl '
        else:
            raise Exception('prcType not defined in ApiBernese')
        # Session arguments: year + day-of-year + session char '0'.
        arg += str(self.dateFile.year) + ' '
        arg += '{:03d}'.format(date2yearDay(self.dateFile)) + '0'
        if not hasattr(self, 'datum'):
            # Fall back to IGS14 when no datum was read from the ephemerides.
            log('Datum não definido')
            self.datum = 'IGS14'
        arg += ' V_REFINF ' + self.datum
        # arg += ' V_PCV I' + self.datum[-2:]
        if self.hoi_correction:
            # Higher-order ionosphere correction file variable.
            arg += ' V_HOIFIL HOI\$YSS+0'
        # TODO: add other arguments here
        if self.osname == 'LINUX':
            arg += "'"
        else:
            arg += '"'
        logMsg = 'Rodando BPE: ' + self.bpeName
        if len(self.headers) > 1:
            logMsg += ' - Arquivos: '
        else:
            logMsg += ' - Arquivo: '
        for rnxHeader in self.headers:
            logMsg += path.basename(rnxHeader['RAW_NAME']) + ' '
        log(logMsg)
        # Run the container and capture its output.
        with Popen(arg,
                   stdout=PIPE,
                   stderr=PIPE,
                   stdin=DEVNULL,
                   cwd=self.BASE_DIR,
                   shell=True) as pRun:
            run_stdout, run_stderr = pRun.communicate()
    except Exception as e:
        log('Erro ao rodar BPE: ' + self.bpeName)
        log(str(e))
        erroMsg = sys.exc_info()
        log(str(erroMsg[0]))
        log(str(erroMsg[1]))
        # NOTE(review): traceback.format_exc() expects an int limit;
        # passing a traceback object here (and iterating the resulting
        # string character by character) looks wrong — confirm.
        for tb in traceback.format_exc(erroMsg[2]):
            log(tb)
    log('BPE: ' + self.bpeName + ' finalizado')
    # Anything on stderr is treated as a failed run.
    if run_stderr:
        result = False
    else:
        try:
            result = self.getResult()
        except Exception as e:
            result = False
            log('Erro ao pegar o resultado da BPE: ' + self.bpeName)
            log(str(e))
            erroMsg = sys.exc_info()
            log(str(erroMsg[0]))
            log(str(erroMsg[1]))
    if result:
        # Success: build the result e-mail body and attachment name.
        if len(self.headers) > 1:
            msg = 'Arquivos '
        else:
            msg = 'Arquivo '
        resultFileName = ''
        for rnxHeader in self.headers:
            msg += path.basename(rnxHeader['RAW_NAME']) + ' '
            resultFileName += path.basename(
                rnxHeader['RAW_NAME'])[:-4] + '_'
        resultFileName += '.zip'
        if len(self.headers) > 1:
            msg += 'processados com sucesso.\n'
        else:
            msg += 'processado com sucesso.\n'
        msg += 'Em anexo o resultado do processamento.\n\n'
        if self.coord_result:
            # Append the estimated coordinates (decimal comma format).
            msg += 'Estação: ' + self.coord_result[0] + '\n'
            msg += 'Coordenadas Estimadas (X Y Z) (m):'
            msg += '\t{:.4f}\t{:.4f}\t{:.4f}\n'.format(
                *self.coord_result[1:4]).replace('.', ',')
            msg += 'Desvios (X Y Z) (m): '
            msg += '\t{:.4f}\t{:.4f}\t{:.4f}\n\n'.format(
                *self.coord_result[4:7]).replace('.', ',')
            msg += 'Sistema de Referência e Época: '
            if self.prcType == 'ppp':
                msg += self.datum + ' '
                msg += '{:d}'.format(self.dateFile.year) + ','
                msg += '{:4.2f}'.format(
                    date2yearDay(self.dateFile) / 365.0)[2:]
            elif self.prcType in ['relativo', 'rapido']:
                msg += 'O mesmo da coordenada de referência definida na submissão'
            elif self.prcType == 'rede':
                msg += 'SIRGAS2000, 2000.4'
        self.endFunction(status=True,
                         id=self.proc_id,
                         msg=msg,
                         result=result,
                         filename=resultFileName,
                         BASE_DIR=self.BASE_DIR)
        # Successful end of runBPE
        return [True, 'Finalizado com sucesso']
    else:
        # Failure: fall back to zipping the whole campaign folder.
        log('Arquivo com resultado do processamento não encontrado. Pegando pasta completa como resultado. '
            )
        try:
            result = self.get_full_result()
        except Exception as e:
            result = None
            log('Erro pegar a pasta como resultado da BPE: ' +
                self.bpeName)
            log(str(e))
            erroMsg = sys.exc_info()
            log(str(erroMsg[0]))
            log(str(erroMsg[1]))
        # NOTE(review): if the run failed before Popen, run_stdout /
        # run_stderr are still False (bool) and .decode() raises — confirm.
        log('RUNBPE STDOUT: {}\n STDERR: {}'.format(
            run_stdout.decode(errors='replace'),
            run_stderr.decode(errors='replace')))
        if len(self.headers) > 1:
            msg = 'Erro no processamento dos arquivos '
        else:
            msg = 'Erro no processamento do arquivo '
        for rnxHeader in self.headers:
            msg += path.basename(rnxHeader['RAW_NAME']) + ' '
        # RUNBPE.pm modified at line 881 (writes this error file).
        bernErrorFile = path.join(self.CAMPAIGN_DIR, 'RAW',
                                  'BERN_MSG_ERROR.txt')
        if path.isfile(bernErrorFile):
            msg += '. \nDetalhes sobre o erro no arquivo em anexo.'
            # send_result_email(self.email,msg, bernErrorFile)
            self.endFunction(status=False,
                             id=self.proc_id,
                             msg=msg,
                             result=bernErrorFile,
                             filename='ERRO_BERN.txt',
                             BASE_DIR=self.BASE_DIR)
        else:
            msg += '. \nErro desconhecido.'
            # send_result_email(self.email,msg)
            self.endFunction(status=False,
                             id=self.proc_id,
                             msg=msg,
                             result=result,
                             filename='campaign.zip',
                             BASE_DIR=self.BASE_DIR)
        return [
            False,
            'Erro no processamento do {}. Resultado: {}'.format(
                self.bpeName, str(result))
        ]
def getRBMC(self):
    """Download RBMC base-station RINEX files from the IBGE FTP server.

    For each station listed in self.bases_rbmc (minus the rover's own
    marker) the day's zip is fetched, the contained RINEX observation
    file (plain .o or Hatanaka .d, which is converted via CRX2RNX) is
    extracted into DATAPOOL/RINEX, and its header is appended to
    self.headers flagged as a base ('B').

    Raises:
        Exception: when no base file at all could be obtained.

    Returns:
        bool: True (only reached when at least one base succeeded).
    """
    rnxDate = self.dateFile
    ano = str(self.dateFile.year)
    dia = '{:03d}'.format(date2yearDay(self.dateFile))
    # Two-digit year used inside RINEX file extensions (.yyo / .yyd).
    if rnxDate.year > 1999:
        anoRed = rnxDate.year - 2000
    else:
        anoRed = rnxDate.year - 1900
    ftp_link = 'ftp://geoftp.ibge.gov.br/informacoes_sobre_posicionamento_geodesico/rbmc/dados/{}/{}/'.format(
        ano, dia)
    bases = self.bases_rbmc.split()
    # Do not download the rover's own station as a base.
    if self.headers[0]['MARKER NAME'] in bases:
        bases.remove(self.headers[0]['MARKER NAME'])
    rinex_datapool_dir = path.join(self.DATAPOOL_DIR, 'RINEX')
    # Copy of the base list; entries are removed below as the loop runs.
    bases_ok = [i for i in bases]
    i = 1
    for base in bases:
        erro = False
        file_name = base.lower() + dia + '1.zip'
        file_link = (ftp_link + file_name)
        zfile_target = path.join(rinex_datapool_dir, file_name)
        try:
            # Download the day's zip for this station.
            with urllib.request.urlopen(file_link) as response, open(
                    zfile_target, 'wb') as outFile:
                data = response.read()
                if data:
                    outFile.write(data)
                else:
                    erro = True
        except:
            # NOTE(review): bare except silently treats any failure
            # (including programming errors) as a download error.
            erro = True
        if not erro:
            # Expected member names: plain RINEX (.yyo) or Hatanaka (.yyd).
            rnxO_file_name = file_name[:-3] + '{:02d}o'.format(anoRed)
            rnxD_file_name = file_name[:-3] + '{:02d}d'.format(anoRed)
            with ZipFile(zfile_target) as zfile:
                if rnxO_file_name in zfile.namelist():
                    extract_file = zfile.extract(rnxO_file_name,
                                                 path=rinex_datapool_dir)
                    target_file = path.join(
                        rinex_datapool_dir,
                        file_name[:-5].upper() + '0.{:02d}O'.format(anoRed))
                    rename(extract_file, target_file)
                elif rnxD_file_name in zfile.namelist():
                    extract_file = zfile.extract(rnxD_file_name,
                                                 path=rinex_datapool_dir)
                    target_file = path.join(
                        rinex_datapool_dir,
                        file_name[:-5].upper() + '0.{:02d}D'.format(anoRed))
                    rename(extract_file, target_file)
                    # Decompress Hatanaka; os.system returns 0 on success.
                    status = system('CRX2RNX {}'.format(target_file))
                    if not status:
                        remove(target_file)
                    else:
                        erro = True
                else:
                    erro = True
        # NOTE(review): the branch below is preserved exactly as in the
        # original but its logic looks inverted — on *error* it builds
        # and appends a header (and rnxO_file_name / target_file may be
        # unbound when the download itself failed), while on success it
        # removes the base from bases_ok, so `if not bases_ok` raises
        # after an all-success run. Confirm against upstream history.
        if erro:
            log('Erro no download do arquivo de base da estação {}'.
                format(rnxO_file_name))
            remove(zfile_target)
            header = self.getHeader(target_file[:-1] + 'O')
            i += 1
            header['ID'] = i
            header['FLAG'] = 'B'
            header['PLATE'] = self.tectonic_plate_base
            self.headers.append(header)
        else:
            bases_ok.remove(base)
    if not bases_ok:
        raise Exception('Nenhum arquivo de base encontrado')
    return True
def getEphem(self):
    """Download the CODE precise ephemerides and related products.

    Fetches clock (.CLK), orbit (.EPH), ionosphere (.ION), earth
    rotation (.ERP, falling back to the weekly file when the daily one
    is missing) and DCB files for self.dateFile from the AIUB server
    into the global datapool, mirrors them to the local datapool, reads
    the reference datum from the .EPH header when not already set, and
    copies the matching reference CRD/VEL/FIX files.

    Returns:
        bool: True on success (or when DOWNLOAD_EPHEM is disabled),
        False on any failure (errors are logged).
    """
    # TODO: if the CODE ephemerides are unavailable, fall back to IGS.
    if not DOWNLOAD_EPHEM:
        return True
    rnxDate = self.dateFile
    try:
        weekDay = date2gpsWeek(rnxDate)
        # Two-digit year used in the DCB file names.
        if rnxDate.year > 1999:
            anoRed = rnxDate.year - 2000
        else:
            anoRed = rnxDate.year - 1900
        sClkFile = 'COD{}.CLK.Z'.format(weekDay)
        sEphFile = 'COD{}.EPH.Z'.format(weekDay)
        sIonFile = 'COD{}.ION.Z'.format(weekDay)
        sErpFile = 'COD{}.ERP.Z'.format(weekDay)
        # Weekly ERP file (session 7) used as fallback for the daily one.
        sErpWFile = 'COD{}7.ERP.Z'.format(weekDay[:4])
        sP1C1File = 'P1C1{:02d}{:02d}.DCB.Z'.format(anoRed, rnxDate.month)
        sP1P2File = 'P1P2{:02d}{:02d}.DCB.Z'.format(anoRed, rnxDate.month)
        cod_datapool_dir_global = path.join(self.GLOBAL_DIR, 'GPSDATA',
                                            'DATAPOOL', 'COD')
        cod_datapool_dir_local = path.join(self.DATAPOOL_DIR, 'COD')
        bsw52_datapool_dir_global = path.join(self.GLOBAL_DIR, 'GPSDATA',
                                              'DATAPOOL', 'BSW52')
        bsw52_datapool_dir_local = path.join(self.DATAPOOL_DIR, 'BSW52')
        # Check whether the daily ERP exists; otherwise use the weekly one.
        try:
            with urllib.request.urlopen(
                    'http://ftp.aiub.unibe.ch/CODE/{:04d}/{}'.format(
                        rnxDate.year, sErpFile)) as f:
                pass
        except Exception as e:
            log(str(e))
            log('EPH do dia não encontrado, utilizando EPH da semana')
            sErpFile = sErpWFile
        sfileList = [
            sClkFile, sEphFile, sIonFile, sErpFile, sP1C1File, sP1P2File
        ]
        for sfile in sfileList:
            codURL = ('http://ftp.aiub.unibe.ch/CODE/{:04d}/{}'.format(
                rnxDate.year, sfile))
            # ION and DCB products live under BSW52; the rest under COD.
            if sfile in [sIonFile, sP1C1File, sP1P2File]:
                target_dir_global = bsw52_datapool_dir_global
                target_dir_local = bsw52_datapool_dir_local
            else:
                target_dir_global = cod_datapool_dir_global
                target_dir_local = cod_datapool_dir_local
            pathFile_global = path.join(target_dir_global, sfile)
            pathFile_local = path.join(target_dir_local, sfile)
            # Skip the download when the file is already on the server.
            if not self.hasFile(pathFile_global):
                with urllib.request.urlopen(codURL) as response, open(
                        pathFile_global, 'wb') as outFile:
                    data = response.read()
                    if not data:
                        # BUG FIX: the original raised a plain string
                        # ("raise ('...')"), which is a TypeError in
                        # Python 3; wrap it in Exception.
                        raise Exception('Erro no download de: ' + sfile)
                    outFile.write(data)
            copyfile(pathFile_global, pathFile_local)
        # Read the reference system (datum) from the ephemerides header.
        if not hasattr(self, 'datum'):
            command = '7z x {} -o{} -y'.format(
                str(path.join(cod_datapool_dir_local, sEphFile)),
                cod_datapool_dir_local)
            status = run(command, stdout=PIPE, stderr=PIPE, shell=True)
            if not status.returncode:
                with open(path.join(cod_datapool_dir_local, sEphFile[:-2]),
                          'r') as f:
                    # 9th field of the first .EPH line holds the datum.
                    self.datum = f.readline().split()[8]
                log('Datum lido das efemérides: ' + self.datum)
            else:
                log('Erro ao ler o datum do arquivo .EPH')
                return False
        # Copy the reference coordinate/velocity/fix files for the datum.
        ref52_dir_global = path.join(self.GLOBAL_DIR, 'GPSDATA', 'DATAPOOL',
                                     'REF52')
        ref52_dir_local = path.join(self.DATAPOOL_DIR, 'REF52')
        copyfile(
            path.join(ref52_dir_global, '{}_R.CRD'.format(self.datum)),
            path.join(ref52_dir_local, '{}_R.CRD'.format(self.datum)))
        copyfile(
            path.join(ref52_dir_global, '{}_R.VEL'.format(self.datum)),
            path.join(ref52_dir_local, '{}_R.VEL'.format(self.datum)))
        copyfile(path.join(ref52_dir_global, '{}.FIX'.format(self.datum)),
                 path.join(ref52_dir_local, '{}.FIX'.format(self.datum)))
        return True
    except Exception as e:
        log('Erro no download das efemérides precisas')
        log(str(e))
        erroMsg = sys.exc_info()
        log(str(erroMsg[0]))
        log(str(erroMsg[1]))
        # for tb in traceback.format_exc(erroMsg[2]): log(tb)
        return False
def getRinex(self):
    # Save the RINEX file(s) on the server (DATAPOOL\RINEX or RINEX3),
    # renamed to the Bernese convention (setRnxName), and decompress
    # Hatanaka-compressed files with CRX2RNX.
    # Returns True on success, False on failure (errors are logged).
    try:
        i = 0
        for rnxFile in self.pathRnxTempFiles:
            # Pick the datapool subdirectory from the RINEX version.
            verRinex = self.headers[i]['version']
            if verRinex == 2:
                rinex_dir = 'RINEX'
            elif verRinex == 3:
                rinex_dir = 'RINEX3'
            else:
                raise Exception('Rinex version ' + str(verRinex))
            new_rinex_path_name = path.join(self.DATAPOOL_DIR, rinex_dir,
                                            setRnxName(self.headers[i]))
            # Text-mode copy of the temp file into the datapool.
            with open(rnxFile, 'r') as tmpFile, open(new_rinex_path_name,
                                                     'w') as destination:
                aux = tmpFile.read()
                destination.write(aux)
            # Hatanaka-compressed input: decompress with CRX2RNX.
            if 'CRINEX VERS / TYPE' in self.headers[i]:
                cmd = 'CRX2RNX -f {}'.format(new_rinex_path_name)
                status = run(cmd, stdout=PIPE, stderr=PIPE, shell=True)
                # NOTE(review): success is "returncode != 1" here, while
                # getRBMC treats 0 as success — confirm CRX2RNX really
                # signals failure only with exit code 1.
                if status.returncode != 1:
                    # Decompression OK: drop the compressed copy.
                    remove(new_rinex_path_name)
                else:
                    log('Erro em ApiBernese -> crx2rnx')
                    # Remove a possibly partial .O output before failing.
                    if path.isfile(new_rinex_path_name[:-1] + 'O'):
                        remove(new_rinex_path_name[:-1] + 'O')
                        log(new_rinex_path_name[:-1] + 'O removido')
                    raise Exception('Erro ao descompactar hatanaka. ' +
                                    str(status.stdout) + str(status.stderr))
            i += 1
        return True
    except Exception as e:
        log('Erro ao copiar Rinex para o Datapool. ' + str(e))
        log(str(e))
        erroMsg = sys.exc_info()
        log(str(erroMsg[0]))
        log(str(erroMsg[1]))
        return False
def readRinexObs(rnxFile):
    '''
    Parse the header of a RINEX observation file.

    rnxFile > must be a binary file object, i.e. the file has to be
    opened in binary mode before this function is called.
    PS: files coming from the upload form are already open.

    return > boolean, string, dictionary
        boolean    - whether the header was read successfully
        string     - error message ('' on success)
        dictionary - the parsed RINEX header
    '''
    try:
        header = {}
        erroMsg = ''
        # rnxTempName=''
        # Capture header info: fixed-width lines, label in cols 60-79,
        # value in cols 0-59; repeated labels are concatenated.
        for i, bl in enumerate(rnxFile):
            l = bl.decode()
            if "END OF HEADER" in l:
                i += 1  # skip to data
                break
            if l[60:80].strip() not in header:  #Header label
                header[l[60:80].strip(
                )] = l[:60]  # don't strip for fixed-width parsers
                # string with info
            else:
                header[l[60:80].strip()] += " " + l[:60]  #concatenate to the existing string
        verRinex = float(header['RINEX VERSION / TYPE'][:9])  # %9.2f
        header['version'] = floor(verRinex)
        # Only RINEX major versions 2 and 3 are supported.
        if floor(verRinex) not in [2, 3]:
            erroMsg = 'Sem suporte para a versão Rinex ' + str(verRinex)
            return False, erroMsg, header  # error exit of readRinexObs()
        # list with x,y,z cartesian coordinates
        if 'APPROX POSITION XYZ' in header and len(
                header['APPROX POSITION XYZ'].split()) == 3:
            header['APPROX POSITION XYZ'] = [
                float(i) for i in header['APPROX POSITION XYZ'].split()
            ]
        else:
            # TODO: use rtklib to estimate an approximate coordinate
            raise Exception('Erro em APPROX POSITION XYZ')
        # Antenna eccentricities; default to zeros when absent.
        if 'ANTENNA: DELTA H/E/N' in header and len(
                header['ANTENNA: DELTA H/E/N'].split()) == 3:
            header['ANTENNA DELTA H/E/N'] = [
                float(i) for i in header['ANTENNA: DELTA H/E/N'].split()
            ]
        else:
            header['ANTENNA DELTA H/E/N'] = [0.0, 0.0, 0.0]
        # Receiver number / type / version split at fixed columns.
        if 'REC # / TYPE / VERS' in header:
            header['REC # / TYPE / VERS'] = [
                header['REC # / TYPE / VERS'][:19],
                header['REC # / TYPE / VERS'][20:39],
                header['REC # / TYPE / VERS'][40:]
            ]
        else:
            raise Exception('Favor inserir o os dados sobre o receptor. ')
        # Antenna number / type split at fixed columns.
        if 'ANT # / TYPE' in header:
            header['ANT # / TYPE'] = [
                header['ANT # / TYPE'][:19], header['ANT # / TYPE'][20:40]
            ]
        else:
            raise Exception('Favor inserir os dados sobre a antena. ')
        # 4-character marker name; 'NULL' when missing.
        if 'MARKER NAME' in header:
            header['MARKER NAME'] = header['MARKER NAME'][:4].strip().upper()
        else:
            header['MARKER NAME'] = 'NULL'
        if 'MARKER NUMBER' in header:
            header['MARKER NUMBER'] = header['MARKER NUMBER'][:9].strip(
            ).upper()
        else:
            header['MARKER NUMBER'] = '00000M001'
        header['RAW_NAME'] = rnxFile.name
        #observation types
        # v2.xx
        # header['# / TYPES OF OBSERV'] = header['# / TYPES OF OBSERV'].split()
        # header['# / TYPES OF OBSERV'][0] = int(header['# / TYPES OF OBSERV'][0])
        # v3.xx
        # header['SYS / # / OBS TYPES'] =
        #turn into int number of observations
        # header['INTERVAL'] = float(header['INTERVAL'][:10])
        # TODO: read the observation interval
        # the first observation is easy, but what about the last one???
        return True, erroMsg, header  # successful exit of readRinexObs()
    except Exception as e:
        erroMsg = 'Erro ao ler o cabecalho do arquivo Rinex: ' + rnxFile.name + '. '
        erroMsg += str(e)
        log(erroMsg)
        sysErroMsg = sys.exc_info()
        log(str(sysErroMsg[0]))
        log(str(sysErroMsg[1]))
        # traceback.print_tb(erroMsg[2])
        return False, erroMsg, header
def run_next(proc_pk=False):
    '''
    Run the next process waiting in the queue.

    proc_pk > primary key of the waiting Proc_Request; when falsy, the
    call is logged and nothing is started.

    return > runBPE()'s [bool, msg] on success, (False, str(e)) when
    starting the processing raised, or None when proc_pk is falsy.
    '''
    if not proc_pk:
        log('run_next() execultado sem próximo na fila')
    else:
        proc_waiting = Proc_Request.objects.get(pk=proc_pk)
        # Arguments common to every processing type.
        context = {
            'proc_id': proc_waiting.id,
            'email': proc_waiting.email,
            'proc_method': proc_waiting.proc_method,
            'endFunction': finishing_process,
            'hoi_correction': proc_waiting.hoi_correction,
        }
        proc_details = proc_waiting.get_proc_details()
        file_root = RINEX_UPLOAD_TEMP_DIR
        if proc_details.blq_file:
            context['blq_file'] = os.path.join(file_root,
                                               proc_details.blq_file.name)
        else:
            context['blq_file'] = ''
        # Method-specific arguments and the file names used in the
        # failure e-mail.
        if proc_waiting.proc_method == 'ppp':
            context['rinex_file'] = os.path.join(file_root,
                                                 proc_details.rinex_file.name)
            context['tectonic_plate'] = proc_details.tectonic_plate
            rinex_files_names = proc_details.rinex_file.name
        elif proc_waiting.proc_method in ['relativo', 'rapido']:
            context['rinex_base_file'] = os.path.join(
                file_root, proc_details.rinex_base_file.name)
            context['rinex_rover_file'] = os.path.join(
                file_root, proc_details.rinex_rover_file.name)
            rinex_files_names = proc_details.rinex_base_file.name + ', ' + proc_details.rinex_rover_file.name
            context['tectonic_plate_base'] = proc_details.tectonic_plate_base
            context['tectonic_plate_rover'] = proc_details.tectonic_plate_rover
            context['coord_ref'] = [
                proc_details.coord_ref.X, proc_details.coord_ref.Y,
                proc_details.coord_ref.Z
            ]
            context['datum'] = proc_details.coord_ref.datum
        elif proc_waiting.proc_method == 'rede':
            context['rinex_rover_file'] = os.path.join(
                file_root, proc_details.rinex_rover_file.name)
            rinex_files_names = proc_details.rinex_rover_file.name
            context['tectonic_plate_base'] = proc_details.tectonic_plate_base
            context['tectonic_plate_rover'] = proc_details.tectonic_plate_rover
            context['bases_rbmc'] = proc_details.bases_rbmc
        else:
            log('Em check_line(): proc_method não definido no processo n° ' +
                str(proc_waiting.id))
            raise Exception('Em check_line(): proc_method não definido no '
                            'processo n° ' + str(proc_waiting.id))
        try:
            # Record the processing start in the database.
            proc_waiting.start_process()
            # New Bernese processing instance.
            newBPE = ApiBernese(**context)
            # Open a new thread to start processing
            # threading.Thread(name=newBPE.bpeName,target=newBPE.runBPE).start()
            log(str(proc_waiting) + ' started')
            # Start processing (runs synchronously in this thread).
            return newBPE.runBPE()
        except Exception as e:
            log('Erro em check_line ao solicitar processamento: ' +
                str(proc_waiting))
            log(str(e))
            e_Msg = sys.exc_info()
            log(str(e_Msg[0]))
            log(str(e_Msg[1]))
            # finish process
            finishing_process(
                status=False,
                id=proc_waiting.id,
                msg='Erro no processamento de ' + rinex_files_names,
                result=None,
            )
            return False, str(e)