def do(): """ This does the "work" of the daemon """ logger = logging.getLogger('daemon_log') logger.setLevel(logging.INFO) fh = logging.handlers.TimedRotatingFileHandler(conf.Daemon.LogFile, when="midnight", backupCount=4) fh.setLevel(logging.INFO) formatstr = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' formatter = logging.Formatter(formatstr) fh.setFormatter(formatter) logger.addHandler(fh) while True: try: importlib.reload(checker) c = checker.Checker(logger) c.check() time.sleep(1) except Exception as e: logger.error(sys.exc_info())
def __init__(self, master):
    """Remember the Tk master, build the widgets, and route both Tk
    teardown entry points (quit/destroy) through our exit handler."""
    self.master = master
    self.checker = checker.Checker()
    self.createWidgets()
    for hook in ('quit', 'destroy'):
        setattr(self, hook, self.exit)
def startingAnalisys(self): """[summary] """ check = checker.Checker(self) stage1 = check.stage1() if stage1 == True or self.force == True: self.dayKline = scalperConf.client.get_historical_klines( self.pair, scalperConf.Client.KLINE_INTERVAL_1HOUR, "1 day ago UTC") act = Decimal( scalperConf.client.get_symbol_ticker( symbol=self.pair)["price"]) self.qtys["evalPrice"] = act stage2 = check.stage2() if stage2 == True or self.force == True: if check.checkRules() == True: self.openTrade() self.monitor = True mesARR = [ f"{datetime.now()}: {self.pair} MONITOR", f"--DAY min/med/max: {check.minDay:{self.data['precision']}} / {check.medDay:{self.data['precision']}} / {check.maxDay:{self.data['precision']}}", f"--Day grow: {check.growDay} %", f"--Entrada: {act:{self.data['precision']}}", f"--Limit: {((act/100)*scalperConf.percentLimit):{self.data['precision']}}", f"--Stop: {((act/100)*scalperConf.percentStop):{self.data['precision']}}" ] logger.log(mesARR, printFLAG=True) else: logger.log([ f"{datetime.now()}: {self.pair} NO SE CUMPLEN LAS REGLAS DE TRADING" ], printFLAG=True) self.monitor = False
def main(): """ Calls into the common chain checking logic to check chain integrity. """ node_ = sys.argv[2] owner_ = sys.argv[3] if owner_ == 'undefined': owner_ = None if node_ == 'undefined': node_ = None args = Args(bc_uri=sys.argv[1], ev_uri=sys.argv[1], bc_type='local', ev_type='local', bc_col='blockchain', ev_col='events', ev_db=None, bc_db=None, owner=owner_, node=node_) chcheck = ch.Checker(args, False) chcheck.do_checking() sys.stdout.write(str(chcheck))
def OnStartClick(self, e):
    """Start/stop toggle for the ticket-checking worker thread.

    If a check is already running, stop it; otherwise collect the form
    inputs, optionally persist them, and start a new checker thread.
    """
    if self.checking:
        # Already running: this click means "stop".
        self.workerThread.stop()
        self.checking = False
        return
    print self.mailInput.GetValue()
    # Trip endpoints: origin first, destination second.
    cities = []
    cities.append(self.fromCityInput.GetValue())
    cities.append(self.toCityInput.GetValue())
    time_limit = [
        self.timeInputFrom.getValue(),
        self.timeInputTo.getValue()
    ]
    email = self.mailInput.GetValue()
    ticket_t = self.typeCheckBoxGroup.getValue()
    train_c = self.classCheckBoxGroup.getValue()
    print train_c
    print self.startDateCombo.getValue().isoformat(
    ), self.endDateCombo.getValue().isoformat()
    startDay = self.startDateCombo.getValue()
    endDay = self.endDateCombo.getValue()
    # Bundle everything the worker needs into one search-info record.
    s_info = info(
        [startDay.isoformat(), endDay.isoformat()],
        time_limit, cities, ticket_t, train_c, email)
    if self.rememberCheckbox.IsChecked():
        # Persist the query so it can be restored next launch.
        s_info.save()
    self.workerThread = checker.Checker(s_info, self)
    self.workerThread.start()
    self.checking = True
def solve2_swap(self, T=100, r=0.99):
    """Gate assignment by pairwise queue swaps with annealing escapes.

    Builds an initial plane->gate arrangement from each category's queues,
    then repeatedly tries swapping the gates of queue pairs (q1, q2) within
    a category.  An improving swap is always taken; a worsening swap is
    accepted with probability exp(dE/T) where dE < 0.  T is multiplied by
    r as the search proceeds (cooling).  Loops restart from the top after
    every accepted change; terminates when a full pass yields no change.

    T: initial annealing temperature.
    r: cooling factor.
    Returns the arrangement dict (plane index -> gate).
    """
    from math import exp
    from random import random
    arrangement = {}
    c = checker.Checker()
    # initial arrangement: queue j starts on its own gate j.
    queue2gate = {gate_cato: {} for gate_cato in range(len(self.gate_cato_list))}
    temp = self.mini_temp()
    for idx in temp:
        arrangement[idx] = 0
    for i in range(len(self.gate_cato_list)):
        gate_cato = self.gate_cato_list[i]
        for j in range(len(gate_cato['plane_queue'])):
            queue2gate[i][j] = j
            for plane_idx in gate_cato['plane_queue'][j]:
                arrangement[plane_idx] = gate_cato['gate_list'][j]
    tar_proc_time = c.get_proc_time(arrangement)
    old_proc_time = None
    while old_proc_time is None or old_proc_time > tar_proc_time:
        old_proc_time = tar_proc_time
        for i in range(len(self.gate_cato_list)):
            queue = self.gate_cato_list[i]['plane_queue']
            gate = self.gate_cato_list[i]['gate_list']
            for q1 in range(len(queue)):
                for q2 in range(q1 + 1, len(queue)):
                    g1 = queue2gate[i][q1]
                    g2 = queue2gate[i][q2]
                    # Candidate: copy the arrangement, then
                    # swap g1 and g2
                    new_arrangement = dict(arrangement)
                    for plane_idx in queue[q1]:
                        new_arrangement[plane_idx] = gate[g2]
                    for plane_idx in queue[q2]:
                        new_arrangement[plane_idx] = gate[g1]
                    new_proc_time = c.get_proc_time(new_arrangement)
                    if new_proc_time < tar_proc_time:
                        # Strict improvement: accept and restart scanning.
                        print('proc time: {} -> {}'.format(tar_proc_time, new_proc_time))
                        tar_proc_time = new_proc_time
                        arrangement = dict(new_arrangement)
                        queue2gate[i][q1] = g2
                        queue2gate[i][q2] = g1
                        break
                    elif new_proc_time > tar_proc_time:
                        # Worse: accept with annealing probability exp(dE/T),
                        # dE < 0, to escape local optima.
                        dE = tar_proc_time - new_proc_time
                        if exp(dE / T) > random():
                            print('{} cooling: proc time: {} -> {}'.format(exp(dE / T),tar_proc_time, new_proc_time))
                            tar_proc_time = new_proc_time
                            arrangement = dict(new_arrangement)
                            queue2gate[i][q1] = g2
                            queue2gate[i][q2] = g1
                            break
                # Cool down, then unwind to the while-loop after any change.
                T *= r
                if old_proc_time > tar_proc_time:
                    break
            if old_proc_time > tar_proc_time:
                break
    return arrangement
def test_init(self):
    """A freshly created checker knows its color and board, but sits on
    no square yet (empty position tuple)."""
    board = cb.Checkerboard()
    piece = ch.Checker('black', board)
    with self.subTest('Testing checker __init__'):
        self.assertEqual(piece.color, 'black')
        self.assertEqual(piece.checkerboard, board)
        self.assertEqual(piece.position, ())
def check():
    """Flask view: on POST, analyse the submitted code for duplicate
    symbols and characters and render the results; otherwise fall
    through (returns None, as before)."""
    if request.method != 'POST':
        return
    submitted = request.form['code']
    # print(code)
    analysis = checker.Checker(submitted)
    analysis.fillSymbols()
    return render_template("index.html",
                           symbols=analysis.getSymbolDuplicates(),
                           characters=analysis.getCharachterDuplicates())
def solve3_cooling(self):
    """Gate assignment by pairwise queue swaps, minimising total time.

    Same search structure as solve2_swap, but the objective is
    c.get_all_time(...) and the annealing schedule is fixed (T starts at 1,
    multiplied by r=0.01 each pass — NOTE(review): this cools extremely
    fast; confirm the schedule is intentional).

    Returns the arrangement dict (plane index -> gate).
    """
    from math import exp
    from random import random
    arrangement = {}
    c = checker.Checker()
    # initial arrangement: queue j starts on its own gate j.
    queue2gate = {gate_cato: {} for gate_cato in range(len(self.gate_cato_list))}
    temp = self.mini_temp()
    for idx in temp:
        arrangement[idx] = 0
    for i in range(len(self.gate_cato_list)):
        gate_cato = self.gate_cato_list[i]
        for j in range(len(gate_cato['plane_queue'])):
            queue2gate[i][j] = j
            for plane_idx in gate_cato['plane_queue'][j]:
                arrangement[plane_idx] = gate_cato['gate_list'][j]
    tar_tense = c.get_all_time(arrangement)
    old_tense = None
    T = 1
    r = 0.01
    while old_tense is None or old_tense > tar_tense:
        old_tense = tar_tense
        for i in range(len(self.gate_cato_list)):
            queue = self.gate_cato_list[i]['plane_queue']
            gate = self.gate_cato_list[i]['gate_list']
            for q1 in range(len(queue)):
                for q2 in range(q1 + 1, len(queue)):
                    g1 = queue2gate[i][q1]
                    g2 = queue2gate[i][q2]
                    # Candidate: copy the arrangement, then
                    # swap g1 and g2
                    new_arrangement = dict(arrangement)
                    for plane_idx in queue[q1]:
                        new_arrangement[plane_idx] = gate[g2]
                    for plane_idx in queue[q2]:
                        new_arrangement[plane_idx] = gate[g1]
                    new_tense = c.get_all_time(new_arrangement)
                    if new_tense < tar_tense:
                        # Strict improvement: accept the swap.
                        print('tense: {} -> {}'.format(tar_tense, new_tense))
                        tar_tense = new_tense
                        arrangement = dict(new_arrangement)
                        queue2gate[i][q1] = g2
                        queue2gate[i][q2] = g1
                        break
                    elif new_tense > tar_tense:
                        # Worse: accept with annealing probability exp(dE/T).
                        dE = tar_tense - new_tense
                        if T == 0:
                            # Fully cooled: never accept worse moves.
                            continue
                        if exp(dE / T) > random():
                            print('{} cooling: tense: {} -> {}'.format(exp(dE / T), tar_tense, new_tense))
                            tar_tense = new_tense
                            arrangement = dict(new_arrangement)
                            queue2gate[i][q1] = g2
                            queue2gate[i][q2] = g1
                            break
                T *= r
    # Fix: the original fell off the end and returned None, discarding the
    # computed result; return the arrangement like solve2_swap does.
    return arrangement
def test_place_checker(self):
    """Placing a checker stores it in the board square and updates the
    piece's own position/board back-references."""
    board = cb.Checkerboard()
    piece = ch.Checker('black', board)
    board.place_checker((5, 0), piece)
    with self.subTest('Testing place checker'):
        occupant = board.squares[5][0]
        self.assertTrue(occupant)
        self.assertEqual(piece, occupant)
        self.assertTrue(piece.position == (5, 0))
        self.assertTrue(piece.checkerboard == board)
def crawl(url, link_score, search_terms):
    """Fetch one URL, and if it passes all filters, parse it for search
    terms, save the page, and enqueue its outgoing links.

    url: the URL to crawl.
    link_score: score inherited from the linking page.
    search_terms: terms to score the page against.
    """
    chk = checker.Checker()
    q = urlqueue.URLQueue()
    file_mgr = filemgr.FileManager()
    # Check validity before connecting to the URL
    if (chk.check_scheme(url) and chk.check_robot(url)
            and chk.check_cgi(url)) is False:
        return
    # Omit index.html .htm .shtml if it is on the root
    url = chk.omit_index(url)
    real_url = ''
    handler = None
    try:
        # Escape some of the illegal char in URL
        quoted_url = urllib.quote(url, retrieve.quote_chars)
        handler = urllib2.urlopen(quoted_url)
        real_url = handler.geturl()
        maintype = handler.info().getmaintype()
        subtype = handler.info().getsubtype()
    except urllib2.HTTPError as e:
        debug.print_info()
        if e.code == 401:
            print("%s: Authentication Required!!" % quoted_url)
        elif e.code == 404:
            print("%s: Page Not Found!!" % quoted_url)
            # Track dead links for reporting.
            file_mgr.found_404()
        print e
    except exceptions.KeyboardInterrupt:
        print 'Force Exit!!'
        sys.exit(1)
    except:
        # NOTE(review): bare except silently swallows all other fetch
        # errors (deliberate best-effort crawl, kept as-is).
        pass
        #debug.print_info()
        #print quoted_url
        #print sys.exc_info()[0]
    else:
        # Fetch succeeded: filter by suffix/MIME before parsing.
        if (chk.check_suffix(real_url) and chk.check_mime(maintype, subtype)):
            # Download the whole page from URL
            time_page = read_page(handler)
            # Check whether a page has been visited from different URLs.
            if time_page:
                page = time_page['page']
                crawl_time = time_page['time']
                if chk.check_page(page):
                    # Retrieve all the outgoing URL from the page
                    stats = parser.parse(real_url, page, search_terms)
                    if stats:
                        file_mgr.save_file(handler, page, crawl_time,
                                           link_score, stats['score'],
                                           len(stats['terms']))
                        q.add(stats['urls'], stats['score'])
                        #q.print_info()
        handler.close()
def manualChecker(request):
    """Django view: run a manual check for the authenticated user.

    Returns 200 on success, 500 with the exception on failure, and 403
    for anonymous requests.
    """
    if not request.user.is_authenticated:
        return http.HttpResponseForbidden(content="Forbidden; Please Login")
    try:
        c = checker.Checker()
        c.runCheck(request.user.username)
        c.clean()
    except Exception as e:
        return http.HttpResponse(status=500, content=e)
    return http.HttpResponse(
        status=200,
        content=
        "OK! Check done. You may press the back button to go back. ")
def setup_new_board(self):
    """
    Setup a new board with 12 checkers on each side in starting positions
    """
    logger.info('setup_new_board()')
    self.squares = [[None] * 8 for _ in range(8)]
    self.black_checkers = [ch.Checker('black', self) for _ in range(12)]
    self.white_checkers = [ch.Checker('white', self) for _ in range(12)]

    def place_side(rows, pieces):
        # Fill every dark square in the given rows with the next piece.
        nxt = 0
        for row in rows:
            for column in range(8):
                if self.dark_square((row, column)):
                    self.place_checker((row, column), pieces[nxt])
                    nxt += 1

    # White occupies rows 0-2, black rows 5-7 (white placed first,
    # exactly as before).
    place_side(range(3), self.white_checkers)
    place_side(range(5, 8), self.black_checkers)
def main():
    """Entry point: refuse to run under 32-bit Python, self-elevate when
    not admin, then hand the checker to the GUI main loop."""
    if platform.architecture()[0].startswith('32'):
        # https://docs.microsoft.com/en-us/windows/desktop/api/psapi/nf-psapi-enumprocessmodules
        # If this function is called from a 32-bit application running on WOW64, it can only enumerate
        # the modules of a 32-bit process. If the process is a 64-bit process, this function fails and
        # the last error code is ERROR_PARTIAL_COPY (299).
        print('refusing to run with python 32 bits', file=sys.stderr)
        return 1
    if not interop.is_admin():
        # Relaunch ourselves elevated; the current process bows out.
        interop.elevate(__file__)
        return -1
    with checker.Checker('poe.key') as active_checker:
        Application(active_checker).mainloop()
def verify_pacs(bi): """Take a list of rpm filenames and verify their signatures. In case of failure, exit. """ pac_list = [i.fullfilename for i in bi.deps] if not conf.config['builtin_signature_check']: return verify_pacs_old(pac_list) if not pac_list: return if not bi.keys: raise oscerr.APIError("can't verify packages due to lack of GPG keys") print "using keys from", ', '.join(bi.prjkeys) import checker failed = False checker = checker.Checker() try: checker.readkeys(bi.keys) for pkg in pac_list: try: checker.check(pkg) except Exception, e: failed = True print pkg, ':', e except: checker.cleanup() raise if failed: checker.cleanup() sys.exit(1) checker.cleanup()
def run():
    '''
    Start a console version of this application.

    Parses the command line, applies preference globals on the download
    module, then dispatches to exactly one mode: jigdo/metalink
    conversion, HTML scan, check, download, or RSS mode.
    '''
    # Command line parser options.
    usage = "usage: %prog [-c|-d|-j|--convert|--rconvert] [options] arg1 arg2 ..."
    parser = optparse.OptionParser(version=checker.ABOUT, usage=usage)
    parser.add_option("--download", "-d", action="store_true", dest="download",
                      help=_("Actually download the file(s) in the metalink"))
    parser.add_option("--check", "-c", action="store_true", dest="check",
                      help=_("Check the metalink file URLs"))
    #parser.add_option("--file", "-f", dest="filevar", metavar="FILE", help=_("Metalink file to check or file to download"))
    parser.add_option("--timeout", "-t", dest="timeout", metavar="TIMEOUT",
                      help=_("Set timeout in seconds to wait for response (default=10)"))
    parser.add_option("--os", "-o", dest="os", metavar="OS",
                      help=_("Operating System preference"))
    parser.add_option("--no-segmented", "-s", action="store_true", dest="nosegmented",
                      help=_("Do not use the segmented download method"))
    parser.add_option("--lang", "-l", dest="language", metavar="LANG",
                      help=_("Language preference (ISO-639/3166)"))
    parser.add_option("--country", dest="country", metavar="LOC",
                      help=_("Two letter country preference (ISO 3166-1 alpha-2)"))
    parser.add_option("--pgp-keys", "-k", dest="pgpdir", metavar="DIR",
                      help=_("Directory with the PGP keys that you trust (default: working directory)"))
    parser.add_option("--pgp-store", "-p", dest="pgpstore", metavar="FILE",
                      help=_("File with the PGP keys that you trust (default: ~/.gnupg/pubring.gpg)"))
    parser.add_option("--gpg-binary", "-g", dest="gpg",
                      help=_("(optional) Location of gpg binary path if not in the default search path"))
    parser.add_option("--convert-jigdo", "-j", action="store_true", dest="jigdo",
                      help=_("Convert Jigdo format file to Metalink"))
    parser.add_option("--port", dest="port",
                      help=_("Streaming server port to use (default: No streaming server)"))
    parser.add_option("--html", dest="html",
                      help=_("Extract links from HTML webpage"))
    parser.add_option("--convert", dest="convert", action="store_true",
                      help="Conversion from 3 to 4 (IETF RFC)")
    parser.add_option("--rconvert", dest="rev", action="store_true",
                      help="Reverses conversion from 4 (IETF RFC) to 3")
    parser.add_option("--output", dest="output", metavar="OUTFILE",
                      help=_("Output conversion result to this file instead of screen"))
    parser.add_option("--rss", "-r", action="store_true", dest="rss",
                      help=_("RSS/Atom Feed Mode, implies -d"))
    parser.add_option("--testable", action="store_true", dest="only_testable",
                      help=_("Limit tests to only the URL types we can test (HTTP/HTTPS/FTP)"))
    parser.add_option("-w", dest="writedir", default=os.getcwd(),
                      help=_("Directory to write output files to (default: current directory)"))
    (options, args) = parser.parse_args()
    #if options.filevar != None:
    #    args.append(options.filevar)
    if len(args) == 0:
        # Nothing to act on: show usage and bail.
        parser.print_help()
        return
    socket.setdefaulttimeout(10)
    proxy.set_proxies()
    # Push CLI preferences into the download module's globals.
    if options.os != None:
        download.OS = options.os
    if options.language != None:
        # NOTE(review): [].extend(...) returns None, so download.LANG is
        # always set to None here — looks like a bug; verify intent.
        download.LANG = [].extend(options.language.lower().split(","))
    if options.country != None:
        download.COUNTRY = options.country
    if options.pgpdir != None:
        download.PGP_KEY_DIR = options.pgpdir
    if options.pgpstore != None:
        download.PGP_KEY_STORE = options.pgpstore
    if options.port != None:
        download.PORT = int(options.port)
    if options.gpg != None:
        GPG.DEFAULT_PATH.insert(0, options.gpg)
    if options.timeout != None:
        socket.setdefaulttimeout(int(options.timeout))
    if options.country != None and len(options.country) != 2:
        print _("Invalid country length, must be 2 letter code")
        return
    # --- mode dispatch: each mode returns/exits when done ---
    if options.jigdo:
        print download.convert_jigdo(args[0])
        return
    if options.convert:
        # Metalink v3 -> v4 (IETF RFC) conversion.
        text = download.parse_metalink(args[0], ver=4).generate()
        if options.output:
            handle = open(options.output, "w")
            handle.write(text)
            handle.close()
        else:
            print text
        return
    if options.rev:
        # Metalink v4 -> v3 conversion.
        text = download.parse_metalink(args[0], ver=3).generate()
        if options.output:
            handle = open(options.output, "w")
            handle.write(text)
            handle.close()
        else:
            print text
        return
    if options.html:
        # Scan a webpage and check every .metalink link found on it.
        handle = download.urlopen(options.html)
        text = handle.read()
        handle.close()
        page = checker.Webpage()
        page.set_url(options.html)
        page.feed(text)
        for item in page.urls:
            if item.endswith(".metalink"):
                print "=" * 79
                print item
                mcheck = checker.Checker()
                mcheck.check_metalink(item)
                results = mcheck.get_results()
                print_totals(results)
        return
    if options.check:
        # Exit status is 1 if any file had a failing URL.
        failure = False
        for item in args:
            print "=" * 79
            print item
            mcheck = checker.Checker(options.only_testable)
            mcheck.check_metalink(item)
            results = mcheck.get_results()
            result = print_totals(results)
            failure |= result
        sys.exit(int(failure))
    if options.download:
        for item in args:
            progress = ProgressBar()
            result = download.get(item, options.writedir,
                                  handlers={"status": progress.download_update,
                                            "bitrate": progress.set_bitrate,
                                            "time": progress.set_time},
                                  segmented = not options.nosegmented)
            progress.download_end()
            if not result:
                sys.exit(-1)
    if options.rss:
        for feed in args:
            progress = ProgressBar()
            result = download.download_rss(feed, options.writedir,
                                           handlers={"status": progress.download_update,
                                                     "bitrate": progress.set_bitrate,
                                                     "time": progress.set_time},
                                           segmented = not options.nosegmented)
            progress.download_end()
            if not result:
                sys.exit(-1)
    sys.exit(0)
def main(): """ Parses arguments, setting appropriate defaults, and then invokes checking. """ parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter, description=""" SPARKL Chain Checking Tool. Examples: For local blockchain and local events, do: python -m checker For local blockchain and mongo events, do: python -m checker --evtype=mongodb For mongo blockchain and mongo events, do: python -m checker --evtype=mongodb --bctype=mongodb where: for local, defaults are: evuri = bcuri = {local_uri} for mongo, defaults are: evuri = bcuri = {mongo_uri} evdb = bcdb = {mongo_db} evcol = {mongo_evcol} bccol = {mongo_bccol}. """.format(local_uri=LOCAL_URI, mongo_uri=MONGO_URI, mongo_db=MONGO_DB, mongo_evcol=MONGO_EVCOL, mongo_bccol=MONGO_BCCOL)) parser.add_argument("--evuri", action="store", dest="ev_uri", help="events db uri, for non-local") parser.add_argument("--evdb", action="store", dest="ev_db", default=MONGO_DB, help="events db, for non-local") parser.add_argument("--evcol", action="store", dest="ev_col", default=MONGO_EVCOL, help="events db col, for non-local") parser.add_argument("--evtype", action="store", dest="ev_type", default=ch.LOCAL_DBTYPE, help="events db type") parser.add_argument("--bcuri", action="store", dest="bc_uri", help="blockchain db uri, for non-local") parser.add_argument("--bcdb", action="store", dest="bc_db", default=MONGO_DB, help="blockchain db, for non-local") parser.add_argument("--bccol", action="store", dest="bc_col", default=MONGO_BCCOL, help="blockchain db col, for non-local") parser.add_argument("--bctype", action="store", dest="bc_type", default=ch.LOCAL_DBTYPE, help="blockchain db type") parser.add_argument("--user", action="store", dest="owner", help="user filter (defaults to all users)") parser.add_argument("--node", action="store", dest="node", help="node filter (defaults to all nodes)") args = parser.parse_args() if not args.bc_uri: if args.bc_type == ch.LOCAL_DBTYPE: args.bc_uri = LOCAL_URI elif 
args.bc_type == ch.MONGO_DBTYPE: args.bc_uri = MONGO_URI if not args.ev_uri: if args.ev_type == ch.LOCAL_DBTYPE: args.ev_uri = LOCAL_URI elif args.ev_type == ch.MONGO_DBTYPE: args.ev_uri = MONGO_URI chcheck = ch.Checker(args) chcheck.do_checking() if chcheck.counts.result: print("Blocks Checked: " + str(chcheck.counts.get_checked())) print("Blocks Bubbled: " + str(chcheck.counts.get_bubbled())) print("Blocks Skipped: " + str(chcheck.counts.get_skipped())) print("Blocks Referenced: " + str(chcheck.counts.get_refd())) sys.exit(0) else: sys.exit(1)
def setUp(self) -> None:
    """Give every test a fresh checker and BPMN parser."""
    self.checker = chk.Checker()
    self.parser = prs.BPMNParser()
import checker as cK
import createFile as cF
import os

# Seconds the compiled program is allowed to run before being judged
# too slow.
timeLimit = 2.0
# timeLimit = float(input('Set time limit: ')) #set time

# Build the executable and hand it to the checker with the time limit.
cK.Checker(cF.createExe(), timeLimit)
def get_checker(self, project):
    """Factory hook: return a fresh Checker bound to *project*."""
    project_checker = checker.Checker(project)
    return project_checker
def create_new_network_checker(network_subnet):
    """Build a Checker for *network_subnet* and enumerate its devices."""
    logging.info("Creating new checker instance to scan network")
    checker.Checker(network_subnet).devices_in_subnet()
import printer import sys import os import symbol import checker import executor import debug # following files generated each time. need to delete. to_clean = ['parser.out', 'parsetab.py'] for file_to_clean in to_clean: if os.path.exists(file_to_clean): os.remove(file_to_clean) # writer = printer.WriteVisitor() type_checker = checker.Checker() runner = executor.Executor( table=symbol.table_instance(), print_engine=printer.PrinterFactory().return_printer()) reserved = { 'set': 'SET', 'read_graph': 'READ_GRAPH', 'read_picture': 'READ_PICTURE', 'end': 'END', 'fit': 'FIT', 'scatter': 'SCATTER', 'read_molecule': 'READ_MOLECULE', 'write_molecule': 'WRITE_MOLECULE', 'mark': 'MARK', 'polygon': 'POLYGON',
import time from dotenv import load_dotenv import os import flask import telebot import meta import checker import data_base app = flask.Flask(__name__) load_dotenv() bot = telebot.TeleBot(os.getenv('API_TOKEN')) db = data_base.DataBase() check = checker.Checker() @app.route(os.getenv("WEBHOOK_URL_PATH"), methods=['POST']) def webhook(): if flask.request.headers.get('content-type') == 'application/json': json_string = flask.request.get_data().decode('utf-8') update = telebot.types.Update.de_json(json_string) bot.process_new_updates([update]) return '' else: flask.abort(403) @bot.message_handler(commands=['start']) def send_welcome(message):
def work_flow(): """The work flow of blending several TC OSW. """ load_configs.setup_logging() logger = logging.getLogger(__name__) # CONFIG try: CONFIG = load_configs.load_config() except Exception as msg: logger.exception(f'Exception occurred when loading confi: {msg}') os.makedirs(CONFIG['logging']['dir'], exist_ok=True) # read commandline arguments, first full_cmd_arguments = sys.argv # - further arguments argument_list = full_cmd_arguments[1:] try: arguments, values = getopt.getopt(argument_list, '', gnuOptions) except getopt.error as err: # output error, and return with an error code print(str(err)) sys.exit(2) input_custom_period = False input_custom_region = False specify_basin = False basin = None do_match_smap = False do_regression = False reg_instructions = None smogn_target = None interval = None do_simulate = False do_classify = False classify_instruction = None tag = None do_compare = False draw_sfmr = False max_windspd = None force_align_smap = False do_sfmr = False sfmr_instructions = None do_ibtracs = False ibtracs_instructions = None do_validation = False do_check = False do_sta_ibtracs = False do_sta_era5_smap = False do_smart_compare = False do_merra2 = False do_match_sfmr = False do_combine = False # evaluate given options for current_argument, current_value in arguments: if current_argument in ('-p', '--period'): input_custom_period = True period_parts = current_value.split(',') if len(period_parts) != 2: logger.error((f"""Inputted period is wrong: """ f"""need 2 parameters""")) elif current_argument in ('-r', '--region'): input_custom_region = True region_parts = current_value.split(',') if len(region_parts) != 4: logger.error((f"""Inputted region is wrong: """ f"""need 4 parameters""")) elif current_argument in ('-b', '--basin'): specify_basin = True basin_parts = current_value.split(',') if len(basin_parts) != 1: logger.error((f"""Inputted basin is wrong: """ f"""must 1 parameters""")) basin = basin_parts[0] elif current_argument in ('-e', 
'--match_smap'): do_match_smap = True elif current_argument in ('-g', '--reg'): do_regression = True reg_instructions = current_value.split(',') elif current_argument in ('--smogn_target'): smogn_target = current_value.split(',')[0] elif current_argument in ('--interval'): interval = current_value.split(',')[:2] elif current_argument in ('--simulate'): do_simulate = True simulate_instructions = current_value.split(',') elif current_argument in ('--classify'): do_classify = True classify_instructions = current_value.split(',') elif current_argument in ('--tag'): tag = current_value.split(',')[0] elif current_argument in ('-c', '--compare'): do_compare = True compare_instructions = current_value.split(',') elif current_argument in ('--draw_sfmr'): head = current_value.split(',')[0] if head == 'True': draw_sfmr = True elif head == 'False': draw_sfmr = False else: logger.error('draw_sfmr must be "True" or "False"') sys.exit(1) elif current_argument in ('--max_windspd'): head = current_value.split(',')[0] max_windspd = float(head) elif current_argument in ('--force_align_smap'): head = current_value.split(',')[0] if head == 'True': force_align_smap = True elif head == 'False': force_align_smap = False else: logger.error('force_align_smap must be "True" or "False"') sys.exit(1) elif current_argument in ('-s', '--sfmr'): do_sfmr = True elif current_argument in ('-i', '--ibtracs'): do_ibtracs = True elif current_argument in ('-v', '--validate'): do_validation = True validate_instructions = current_value elif current_argument in ('-k', '--check'): do_check = True elif current_argument in ('--sta_ibtracs'): do_sta_ibtracs = True elif current_argument in ('--sta_era5_smap'): do_sta_era5_smap = True sources = current_value.split(',') elif current_argument in ('--smart_compare'): do_smart_compare = True elif current_argument in ('--merra2'): do_merra2 = True elif current_argument in ('--match_sfmr'): do_match_sfmr = True elif current_argument in ('--combine'): do_combine = True 
if not specify_basin: logger.error('Must specify basin') exit() if input_custom_period: # Period parts # yyyy-mm-dd-HH-MM-SS period = [ datetime.datetime.strptime(period_parts[0], '%Y-%m-%d-%H-%M-%S'), datetime.datetime.strptime(period_parts[1], '%Y-%m-%d-%H-%M-%S') ] else: period = [ datetime.datetime(2015, 4, 1, 0, 0, 0), datetime.datetime.now() ] train_test_split_dt = datetime.datetime(2019, 1, 1, 0, 0, 0) if input_custom_region: # Area parts custom_region = [] for part in region_parts: custom_region.append(float(part)) else: region = [-90, 90, 0, 360] # Period logger.info(f'Period: {period}') # Region logger.info(f'Region: {region}') # MySQL Server root password passwd = '399710' # Download and read try: if do_combine: combine_table.TableCombiner(CONFIG, period, region, basin, passwd) if do_match_sfmr: match_era5_sfmr.matchManager(CONFIG, period, region, basin, passwd, False) if do_merra2: merra2.MERRA2Manager(CONFIG, period, False) if do_smart_compare: smart_compare.SmartComparer(CONFIG, period, basin, passwd) if do_sta_era5_smap: sta_era5_smap.Statisticer(CONFIG, period, basin, sources, passwd) if do_sta_ibtracs: sta_ibtracs.Statisticer(CONFIG, period, basin, passwd) if do_check: checker.Checker(CONFIG) if do_validation: validate.ValidationManager(CONFIG, period, basin, validate_instructions) if do_match_smap: match_era5_smap.matchManager(CONFIG, period, region, basin, passwd, False, work=True) if do_classify: classify.Classifier(CONFIG, period, train_test_split_dt, region, basin, passwd, False, classify_instructions, smogn_target) if do_simulate: simulate.TCSimulator(CONFIG, period, region, basin, passwd, False, simulate_instructions) if do_regression: # if tag is None: # logger.error('No model tag') # exit() regression.Regression(CONFIG, period, train_test_split_dt, region, basin, passwd, False, reg_instructions, smogn_target, tag) if do_compare: # if ('smap_prediction' in compare_instructions # and tag is None): # logger.error('No model tag') # exit() 
compare_tc.TCComparer(CONFIG, period, region, basin, passwd, False, compare_instructions, draw_sfmr, max_windspd, force_align_smap) # tag) # ccmp_ = ccmp.CCMPManager(CONFIG, period, region, passwd, # work_mode='fetch_and_compare') # era5_ = era5.ERA5Manager(CONFIG, period, region, passwd, # work=True, save_disk=False, 'scs', # 'surface_all_vars') # isd_ = isd.ISDManager(CONFIG, period, region, passwd, # work_mode='fetch_and_read') # grid_ = grid.GridManager(CONFIG, region, passwd, run=True) # satel_scs_ = satel_scs.SCSSatelManager(CONFIG, period, # region, passwd, # save_disk=False, # work=True) # coverage_ = coverage.CoverageManager(CONFIG, period, # region, passwd) if do_ibtracs: ibtracs_ = ibtracs.IBTrACSManager(CONFIG, period, region, basin, passwd) # cwind_ = cwind.CwindManager(CONFIG, period, region, passwd) # stdmet_ = stdmet.StdmetManager(CONFIG, period, region, passwd) if do_sfmr: sfmr_ = sfmr.SfmrManager(CONFIG, period, region, passwd) # satel_ = satel.SatelManager(CONFIG, period, region, passwd, # save_disk=False) # compare_ = compare_offshore.CompareCCMPWithInStu( # CONFIG, period, region, passwd) pass except Exception as msg: logger.exception('Exception occured when downloading and reading') try: # new_reg = reg_scs.NewReg(CONFIG, period, test_period, # region, passwd, save_disk=True) # ibtracs_ = ibtracs.IBTrACSManager(CONFIG, test_period, # region, passwd) # hwind_ = hwind.HWindManager(CONFIG, test_period, region, passwd) # era5_ = era5.ERA5Manager(CONFIG, test_period, region, passwd, # work=True, save_disk=False) pass except Exception as msg: logger.exception('Exception occured when downloading and reading') logger.info('SWFusion complete.')