def main():
    global global_fail_count
    global global_success_count
    global global_file_list

    log_config = config.log_config
    task = config.task
    clean_sql_file = task['clean_sql_file']
    common.init_log(log_config['filename'])

    start_time, stop_time, _ = parse_args(['default'])

    # write create table sql
    table_name = task['table_name']
    tmp_sql_file_dir = task['tmp_sql_file_dir']
    tmp_sql_file = common.gen_tmp_file_name(tmp_sql_file_dir)

    s3 = S3Helper(**config.amazon_s3_config)

    p = start_time
    while p <= stop_time:
        main_loop(s3, p, tmp_sql_file, table_name)
        p += datetime.timedelta(hours=1)

    # Execute SQL
    mysql_config = config.mysql_config
    common.execute_mysql_sql(mysql_config["user"], mysql_config["passwd"],
                             mysql_config["dbname"], tmp_sql_file)

    # END
    if os.path.exists(tmp_sql_file):
        if clean_sql_file:
            try:
                os.remove(tmp_sql_file)
            except Exception as e:
                logging.error("main():Delete the tmp sql file: %s:[%s]" %
                              (tmp_sql_file, e))
        else:
            logging.info("keep the tmp sql file @ %s" % tmp_sql_file)
    else:
        logging.warning("There is No tmp sql file:%s" % tmp_sql_file)

    if not task['keep_s3_file']:
        for _file in global_file_list:
            try:
                os.remove(_file)
            except Exception as e:
                logging.error("main():Delete local Tmp S3 File: %s:[%s]" %
                              (_file, e))
    return
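
# The snippets in this file call common.init_log() with several different
# signatures (filename only, filename plus a debug flag, a log_file keyword).
# A minimal sketch of what such a helper might look like -- an assumption for
# illustration, not the actual common module:
import logging

def init_log(filename='log.txt', debug=False, log_file=None):
    # Accept either the positional `filename` or the `log_file` keyword
    # (hypothetical shim to cover both calling styles seen in this file).
    path = log_file or filename
    level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(filename=path, level=level,
                        format='%(asctime)s %(levelname)s %(message)s')
    return logging.getLogger()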
def setLog(self, log_file):
    if log_file is None:
        log_file = "log.txt"
    self._log = common.init_log(log_file=log_file)
import common
from common import logger
from flask import Flask, render_template, request, send_from_directory

app = Flask(__name__)

@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
    logger.info("request /hello")
    return render_template('hello.html', name=name)

@app.route('/course/videos')
def videos():
    logger.info('/course/videos')
    mark = request.args.get('mark', '')
    logger.info('mark: ' + mark)
    # request.args values are strings, so compare against '0'/'1';
    # the original compared against the integers 0/1 and could never match
    if mark == '0':
        logger.info("0: TODO...")
    elif mark == '1':
        logger.info("1: TODO...")
    return "mark=" + mark

@app.route('/download/<filename>')
def download_file(filename):
    directory = r"C:\Users\007\PycharmProjects\test\static"
    return send_from_directory(directory, filename, as_attachment=True)

if __name__ == '__main__':
    common.init_log()
    app.run(debug=False)
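
# A quick way to exercise the routes without starting a server is Flask's
# built-in test client; a minimal sketch (assumes templates/hello.html exists
# for the /hello route):
client = app.test_client()
resp = client.get('/course/videos?mark=1')
print resp.data  # expected body: mark=1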
#**********************************************************************************
#----------------------------------------------------------------------------------
#-- start.py entry (don't run this script directly; please run start.cmd, thanks!)
#----------------------------------------------------------------------------------
#**********************************************************************************
lpypara = ''  # default when no command-line arg is given (init_log(lpypara) below needs it)
if len(sys.argv) > 1:
    lpypara = sys.argv[1]
    if helps(lpypara):
        exit()
try:
    # 1. init: read ini config parameters, init dirs and log, set log level from the passed arg
    refresh_ini()
    init_dirs()
    init_log(lpypara)
    # 2. refresh parameters from the db
    refresh_paras(gparaslst)
    # 3. prevent the script from running twice
    if isrunning():
        exit()
    # 4. create the mail analyzing & downloading thread
    logaq('--starting--', 'i')
    logaq('create thread Thread_mailsa', 'i')
    t_mailsa = threading.Thread(target=maila_main,
                                args=(gmailslst, gparaslst, gmutex))
    t_mailsa.setName('Thread_mailsa1')
    t_mailsa.setDaemon(False)
    t_mailsa.start()
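
# isrunning() is referenced above but not shown; a common single-instance
# guard is a pid file. A minimal sketch (hypothetical helper; the pid file
# path is an assumption, and a robust version would also verify that the
# recorded pid still belongs to a live process):
import os

def isrunning(pidfile='start.pid'):
    # If the pid file exists, assume another instance is active;
    # otherwise record our own pid and let this instance continue.
    if os.path.exists(pidfile):
        return True
    with open(pidfile, 'w') as f:
        f.write(str(os.getpid()))
    return False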
def publishfromconfig(configFiles, globalLoginInfo, combinedApp=None,
                      log_file=None, dateTimeFormat=None):
    publishTools = None
    log = None
    webmaps = None
    cred_info = None
    loginInfo = None
    config = None
    resultFS = None
    resultMaps = None
    resultApps = None
    combinedResults = None
    if log_file is None:
        log_file = "log.txt"
    if dateTimeFormat is None:
        dateTimeFormat = '%Y-%m-%d %H:%M'
    env.overwriteOutput = True
    log = common.init_log(log_file=log_file)
    scriptStartTime = datetime.datetime.now()
    try:
        webmaps = []
        if log is None:
            print "Log file could not be created"
        print "********************Script Started********************"
        print "Script started at %s" % scriptStartTime.strftime(dateTimeFormat)
        print "-----Portal Credentials-----"
        cred_info = None
        if globalLoginInfo is not None and os.path.isfile(globalLoginInfo):
            loginInfo = common.init_config_json(config_file=globalLoginInfo)
            if 'Credentials' in loginInfo:
                cred_info = loginInfo['Credentials']
                print "Credentials loaded"
        if cred_info is None:
            print "Credentials not found"
            cred_info = {}
            cred_info['Username'] = ''
            cred_info['Password'] = ''
            cred_info['Orgurl'] = 'http://www.arcgis.com'
        print "-----Portal Credentials complete-----"

        # Start report processing (moved out from under the ArcREST logic;
        # no AGO credentials are needed to run reports).
        for configFile in configFiles:
            config = common.init_config_json(config_file=configFile)
            if config is not None:
                if 'ReportDetails' in config:
                    if not reportToolsInstalled:
                        print "Report section is included in the config file but the solutionreporttools cannot be located"
                    else:
                        reportConfig = config['ReportDetails']
                        # This checks whether the data should be exported from SDE to a
                        # local GDB (set in the config file); running locally can be a
                        # performance gain. If chosen, both the report config and the
                        # in-memory data prep config are modified to point at the local
                        # temp location.
                        if 'RunReport' in reportConfig and str(reportConfig['RunReport']).upper() in ("TRUE", "YES"):
                            reportConfig = ReportTools.reportDataPrep(reportConfig)
                            print "-----Report Section Starting-----"
                            startTime = datetime.datetime.now()
                            print "Processing reports in config %s, starting at: %s" % (configFile, startTime.strftime(dateTimeFormat))
                            ReportTools.create_report_layers_using_config(config=reportConfig)
                            print "Reports in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime))
                            print "-----Report Section Complete-----"
                if 'PublishingDetails' in config:
                    publishingConfig = config['PublishingDetails']
                    if 'PublishData' in publishingConfig:
                        publishData = publishingConfig['PublishData']
                    else:
                        print "PublishingDetails is missing the PublishData parameter: type string, values True or False"
                        publishData = 'TRUE'
                    if str(publishData).upper() in ("TRUE", "YES"):
                        print " "
                        print "-----Publishing Section Starting-----"
                        startTime = datetime.datetime.now()
                        print "Processing publishing in config %s, starting at: %s" % (configFile, startTime.strftime(dateTimeFormat))
                        publishTools = publishingtools.publishingtools(
                            username=cred_info['Username'],
                            password=cred_info['Password'],
                            org_url=cred_info['Orgurl'],
                            token_url=None, proxy_url=None, proxy_port=None)
                        if not publishTools.valid:
                            print "Error creating publishing tools: %s" % publishTools.message
                        else:
                            print "Publishing tools created: %s" % publishTools.message
                            if 'FeatureServices' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultFS = publishTools.publishFsFromMXD(fs_config=publishingConfig['FeatureServices'])
                                print "Feature Services published, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'ExistingServices' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Updating Existing Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultES = publishTools.updateFeatureService(efs_config=publishingConfig['ExistingServices'])
                                print "Updating Existing Feature Services completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'MapDetails' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating maps: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultMaps = publishTools.publishMap(maps_info=publishingConfig['MapDetails'], fsInfo=resultFS)
                                for maps in resultMaps:
                                    if 'MapInfo' in maps and 'Results' in maps['MapInfo'] and 'id' in maps['MapInfo']['Results']:
                                        webmaps.append(maps['MapInfo']['Results']['id'])
                                print "Creating maps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'AppDetails' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating apps: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=resultMaps)
                                print "Creating apps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            print "Publishing in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime))
                            print "-----Publishing Section Complete-----"
            else:
                print "Config %s not found" % configFile

        if combinedApp:
            if os.path.exists(combinedApp):
                print " "
                startSectTime = datetime.datetime.now()
                print "Creating combined result: %s" % str(startSectTime.strftime(dateTimeFormat))
                config = common.init_config_json(config_file=combinedApp)
                combinedResults = publishTools.publishCombinedWebMap(
                    maps_info=config['PublishingDetails']['MapDetails'], webmaps=webmaps)
                if 'PublishingDetails' in config:
                    publishingConfig = config['PublishingDetails']
                    if 'PublishData' in publishingConfig:
                        publishData = publishingConfig['PublishData']
                    else:
                        print "PublishingDetails is missing the PublishData parameter: type string, values True or False"
                        publishData = 'TRUE'
                    if str(publishData).upper() in ("TRUE", "YES"):
                        if 'AppDetails' in publishingConfig:
                            resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=combinedResults)
                print "Creating combined result completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
    except (TypeError, ValueError, AttributeError) as e:
        print e
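
# For reference, a minimal shape of the JSON config that publishfromconfig()
# walks, reconstructed from the key lookups above -- the field values and
# empty lists are assumptions for illustration:
sample_config = {
    "ReportDetails": {
        "RunReport": "True"
        # ... report / data-prep settings consumed by ReportTools
    },
    "PublishingDetails": {
        "PublishData": "True",
        "FeatureServices": [],   # fs_config entries for publishFsFromMXD
        "ExistingServices": [],  # efs_config entries for updateFeatureService
        "MapDetails": [],        # maps_info entries for publishMap
        "AppDetails": []         # app_info entries for publishApp
    }
}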
def main():
    duration_list = ['hour', 'day']

    # parse args
    task_name_list = config.task_list.keys()
    start_time, stop_time, task_name, duration = common.parse_args(
        task_name_list, duration_list)
    task = config.task_list[task_name]

    # init log
    common.init_log(task['log']['filename'], task['log']['debug'])
    logging.info('======\n%s\n' %
                 datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S %Z'))
    logging.info(" TASK [%s] START ..." % task_name)

    # init t_timedelta
    summary_suffix_format = '_%Y%m%d'
    if duration == 'hour':
        t_timedelta = datetime.timedelta(hours=1)
        summary_suffix_format = '_hourly_%Y%m%d'
    elif duration == 'day':
        t_timedelta = datetime.timedelta(days=1)
        summary_suffix_format = '_daily_%Y%m'
    else:
        logging.info('main():bad param `duration`:%s' % duration)
        return

    # make tmp sql file
    _prefix = task['tmp_summary_sql_file_name']
    _suffix = task['tmp_summary_sql_file_name_suffix']
    global_tmp_sql_file = common.make_file_name(_prefix, _suffix)
    logging.info('Global Tmp Sql File:%s' % global_tmp_sql_file)
    # delete any old file with the same name
    common.delete_files(global_tmp_sql_file)

    summary_dimension_1_name_list = task['summary_dimension_1_name_list']
    summary_dimension_2_name_list = task['summary_dimension_2_name_list']
    summary_metric_name_list = task['summary_metric_name_list']

    # reference to the current module
    this_module = sys.modules[__name__]

    # create dimension table
    create_table.create_table_dimension_enum(global_tmp_sql_file,
                                             task['dimension_table_name'])

    # summary functions
    create_summary_table_function = getattr(
        create_table, task['create_summary_table_function'])
    compute_summary_function = getattr(this_module,
                                       task['compute_summary_function'])

    # main loop: collect table names for the dump, then walk the time range
    dump_table_name_list = [task['dimension_table_name']]
    p = start_time
    sql_list = []
    while p < stop_time:
        # prepare origin_table_name
        _prefix = task['raw_data_table_name']
        _suffix = task['raw_data_table_name_suffix']
        _format_suffix = p.strftime(_suffix)
        origin_table_name = '%s%s' % (_prefix, _format_suffix)
        print 'origin_table_name', origin_table_name

        # prepare summary_table_name
        _prefix = task['summary_data_table_name']
        _format_suffix = p.strftime(summary_suffix_format)
        summary_table_name = '%s%s' % (_prefix, _format_suffix)
        # save summary_table_name
        dump_table_name_list.append(summary_table_name)

        # create summary table
        tmp_sql = create_summary_table_function(None, summary_table_name)
        sql_list.append(tmp_sql)

        # summary compute sql
        tmp_sql_list = compute_summary_function(p, duration,
                                                origin_table_name,
                                                summary_table_name,
                                                summary_dimension_1_name_list,
                                                summary_dimension_2_name_list,
                                                summary_metric_name_list)
        sql_list += tmp_sql_list

        # compute ctr
        tmp_sql = update_summary_ctr(summary_table_name)
        sql_list.append(tmp_sql)

        # extract dimension_enum
        category = task['category']
        dimension_name_list = list(
            set(summary_dimension_1_name_list + summary_dimension_2_name_list))
        tmp_sql_list = compute_dimension_enum(origin_table_name,
                                              task['dimension_table_name'],
                                              dimension_name_list, category)
        sql_list += tmp_sql_list

        # next
        p += t_timedelta
    # End While

    # filter out duplicate or None sql
    good_sql_list = []
    for sql in sql_list:
        if not sql or not isinstance(sql, (unicode, str)) \
                or sql in good_sql_list:
            continue
        good_sql_list.append(sql)

    # write sql to file
    with open(global_tmp_sql_file, 'a') as f:
        big_sql = '\n'.join(good_sql_list)
        f.write(big_sql)
        f.write('\n')

    # Execute SQL
    local_mysql_auth = config.local_mysql_auth
    common.execute_mysql_sql(local_mysql_auth['host'],
                             local_mysql_auth['port'],
                             local_mysql_auth['user'],
                             local_mysql_auth['passwd'],
                             local_mysql_auth['dbname'],
                             global_tmp_sql_file)

    # clean some files
    if os.path.exists(global_tmp_sql_file):
        if task['keep_summary_sql_file']:
            logging.info("keep the tmp sql file @ %s" % global_tmp_sql_file)
        else:
            try:
                os.remove(global_tmp_sql_file)
            except Exception as e:
                logging.error("main():Delete the tmp sql file: %s:[%s]" %
                              (global_tmp_sql_file, e))
    else:
        logging.warning("There is No tmp sql file:%s" % global_tmp_sql_file)

    # dump the summary data to the remote server
    _prefix = task['mysql_dump_file_name']
    _suffix_fmt = task['mysql_dump_file_name_suffix']
    dump_file_name = common.make_file_name(_prefix, _suffix_fmt)
    common.delete_files(dump_file_name)
    local_mysql_auth = config.local_mysql_auth
    dump_table_name_list = list(set(dump_table_name_list))
    # dump tables into the dump file
    for dump_table_name in dump_table_name_list:
        common.execute_mysql_dump(local_mysql_auth['host'],
                                  local_mysql_auth['port'],
                                  local_mysql_auth['user'],
                                  local_mysql_auth['passwd'],
                                  local_mysql_auth['dbname'],
                                  dump_table_name, dump_file_name)

    # Execute the dump SQL on the remote server
    remote_mysql_auth = config.remote_mysql_auth
    common.execute_mysql_sql(remote_mysql_auth['host'],
                             remote_mysql_auth['port'],
                             remote_mysql_auth['user'],
                             remote_mysql_auth['passwd'],
                             remote_mysql_auth['dbname'],
                             dump_file_name)
    if task['keep_mysql_dump_file']:
        logging.info("keep the Dump sql file @ %s" % dump_file_name)
    else:
        common.delete_files(dump_file_name)
    # End
    return
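
# The duplicate filter above is O(n^2) because `sql in good_sql_list` scans a
# list on every iteration. An order-preserving, set-based variant with the
# same behavior (a sketch of the design alternative):
def dedup_sql(sql_list):
    seen = set()
    good_sql_list = []
    for sql in sql_list:
        if not sql or not isinstance(sql, (unicode, str)) or sql in seen:
            continue
        seen.add(sql)
        good_sql_list.append(sql)
    return good_sql_list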
def publishfromconfig(configFiles, globalLoginInfo, combinedApp=None,
                      log_file=None, dateTimeFormat=None):
    publishTools = None
    log = None
    webmaps = None
    cred_info = None
    loginInfo = None
    config = None
    resultFS = None
    resultMaps = None
    resultApps = None
    combinedResults = None
    if log_file is None:
        log_file = "log.txt"
    if dateTimeFormat is None:
        dateTimeFormat = '%Y-%m-%d %H:%M'
    env.overwriteOutput = True
    log = common.init_log(log_file=log_file)
    try:
        webmaps = []
        if log is None:
            print "Log file could not be created"
        print "********************Script Started********************"
        scriptStartTime = datetime.datetime.now()
        print "Script started at %s" % scriptStartTime.strftime(dateTimeFormat)
        print "-----Portal Credentials-----"
        cred_info = None
        # guard against a None path before os.path.isfile, which would raise
        if globalLoginInfo is not None and os.path.isfile(globalLoginInfo):
            loginInfo = common.init_config_json(config_file=globalLoginInfo)
            if 'Credentials' in loginInfo:
                cred_info = loginInfo['Credentials']
                print "Credentials loaded"
        if cred_info is None:
            print "Credentials not found"
            cred_info = {}
            cred_info['Username'] = ''
            cred_info['Password'] = ''
            cred_info['Orgurl'] = 'http://www.arcgis.com'
        print "-----Portal Credentials complete-----"

        # Start report processing (moved out from under the ArcREST logic;
        # no AGO credentials are needed to run reports).
        for configFile in configFiles:
            config = common.init_config_json(config_file=configFile)
            if config is not None:
                if 'ReportDetails' in config:
                    if not reportToolsInstalled:
                        print "Report section is included in the config file but the solutionreporttools cannot be located"
                    else:
                        reportConfig = config['ReportDetails']
                        # This checks whether the data should be exported from SDE to a
                        # local GDB (set in the config file); running locally can be a
                        # performance gain. If chosen, both the report config and the
                        # in-memory data prep config are modified to point at the local
                        # temp location.
                        if 'RunReport' in reportConfig and str(reportConfig['RunReport']).upper() in ("TRUE", "YES"):
                            reportConfig = ReportTools.reportDataPrep(reportConfig)
                            print "-----Report Section Starting-----"
                            startTime = datetime.datetime.now()
                            print "Processing reports in config %s, starting at: %s" % (configFile, startTime.strftime(dateTimeFormat))
                            ReportTools.create_report_layers_using_config(config=reportConfig)
                            print "Reports in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime))
                            print "-----Report Section Complete-----"
                if 'PublishingDetails' in config:
                    publishingConfig = config['PublishingDetails']
                    if 'PublishData' in publishingConfig:
                        publishData = publishingConfig['PublishData']
                    else:
                        print "PublishingDetails is missing the PublishData parameter: type string, values True or False"
                        publishData = 'TRUE'
                    if str(publishData).upper() in ("TRUE", "YES"):
                        print " "
                        print "-----Publishing Section Starting-----"
                        startTime = datetime.datetime.now()
                        print "Processing publishing in config %s, starting at: %s" % (configFile, startTime.strftime(dateTimeFormat))
                        publishTools = publishingtools.publishingtools(
                            username=cred_info['Username'],
                            password=cred_info['Password'],
                            org_url=cred_info['Orgurl'],
                            token_url=None, proxy_url=None, proxy_port=None)
                        if not publishTools.valid:
                            print "Error creating publishing tools: %s" % publishTools.message
                        else:
                            print "Publishing tools created: %s" % publishTools.message
                            if 'FeatureServices' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultFS = publishTools.publishFsFromMXD(fs_config=publishingConfig['FeatureServices'])
                                print "Feature Services published, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'ExistingServices' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Updating Existing Feature Services: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultES = publishTools.updateFeatureService(efs_config=publishingConfig['ExistingServices'])
                                print "Updating Existing Feature Services completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'MapDetails' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating maps: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultMaps = publishTools.publishMap(maps_info=publishingConfig['MapDetails'], fsInfo=resultFS)
                                for maps in resultMaps:
                                    if 'MapInfo' in maps and 'Results' in maps['MapInfo'] and 'id' in maps['MapInfo']['Results']:
                                        webmaps.append(maps['MapInfo']['Results']['id'])
                                print "Creating maps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            if 'AppDetails' in publishingConfig:
                                startSectTime = datetime.datetime.now()
                                print " "
                                print "Creating apps: %s" % str(startSectTime.strftime(dateTimeFormat))
                                resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=resultMaps)
                                print "Creating apps completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
                            print "Publishing in config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime))
                            print "-----Publishing Section Complete-----"
            else:
                print "Config %s not found" % configFile

        if combinedApp:
            if os.path.exists(combinedApp):
                print " "
                startSectTime = datetime.datetime.now()
                print "Creating combined result: %s" % str(startSectTime.strftime(dateTimeFormat))
                config = common.init_config_json(config_file=combinedApp)
                combinedResults = publishTools.publishCombinedWebMap(
                    maps_info=config['PublishingDetails']['MapDetails'], webmaps=webmaps)
                if 'PublishingDetails' in config:
                    publishingConfig = config['PublishingDetails']
                    if 'PublishData' in publishingConfig:
                        publishData = publishingConfig['PublishData']
                    else:
                        print "PublishingDetails is missing the PublishData parameter: type string, values True or False"
                        publishData = 'TRUE'
                    if str(publishData).upper() in ("TRUE", "YES"):
                        if 'AppDetails' in publishingConfig:
                            resultApps = publishTools.publishApp(app_info=publishingConfig['AppDetails'], map_info=combinedResults)
                print "Creating combined result completed, time to complete: %s" % str(datetime.datetime.now() - startSectTime)
    except (TypeError, ValueError, AttributeError) as e:
        print e
def main():
    global global_fail_count
    global global_success_count
    global global_file_list

    # parse args
    task_name_list = config.task_list.keys()
    start_time, stop_time, task_name, _ = common.parse_args(task_name_list)
    task = config.task_list[task_name]
    # init task app local data
    task['app'] = {}

    # init log
    common.init_log(task['log']['filename'], task['log']['debug'])
    logging.info('======\n%s\n' %
                 datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S %Z'))
    logging.info(" TASK [%s] START ..." % task_name)

    # init parser json function
    task['app']['parser_json'] = getattr(parser, task['parser_function_name'])

    # make tmp sql file
    _prefix = task['tmp_sql_file_name']
    _suffix = task['tmp_sql_file_name_suffix']
    global_tmp_sql_file = common.make_file_name(_prefix, _suffix)
    logging.info('Global Tmp Sql File:%s' % global_tmp_sql_file)
    # save global_tmp_sql_file to task
    task['app']['global_tmp_sql_file'] = global_tmp_sql_file
    # delete any old file with the same name
    common.delete_files(global_tmp_sql_file)

    # init s3 helper; s3 files are downloaded to local disk below
    amazon_s3_auth = task['amazon_s3_auth']
    s3 = S3Helper(**amazon_s3_auth)
    aws_appid_list = task['aws_appid_list']
    tmp_local_file_dir = task['tmp_local_file_dir']
    keep_s3_file = task['keep_s3_file']

    # main loop: foreach time range
    p = start_time
    while p < stop_time:
        # prepare raw data table name
        _prefix = task['raw_data_table_name']
        _suffix = task['raw_data_table_name_suffix']
        _format_suffix = p.strftime(_suffix)
        table_name = '%s%s' % (_prefix, _format_suffix)
        # save table_name to task
        task['app']['table_name'] = table_name

        # create table if it does not exist
        create_raw_data_table_function = task['create_raw_data_table_function']
        create_table_function = getattr(create_table,
                                        create_raw_data_table_function)
        create_table_function(global_tmp_sql_file, table_name)

        # foreach app id
        for aws_appid in aws_appid_list:
            key_path = 'awsma/events/%s/' % aws_appid
            local_s3_file_list = common.download_from_s3(
                s3, key_path, p, tmp_local_file_dir)
            main_loop(task, local_s3_file_list)
            # delete local s3 files
            if not keep_s3_file:
                common.delete_files(local_s3_file_list)
        p += datetime.timedelta(hours=1)

    # Execute SQL
    local_mysql_auth = config.local_mysql_auth
    common.execute_mysql_sql(local_mysql_auth['host'],
                             local_mysql_auth['port'],
                             local_mysql_auth['user'],
                             local_mysql_auth['passwd'],
                             local_mysql_auth['dbname'],
                             global_tmp_sql_file)

    # END
    if os.path.exists(global_tmp_sql_file):
        if task['keep_sql_file']:
            logging.info("keep the tmp sql file @ %s" % global_tmp_sql_file)
        else:
            try:
                os.remove(global_tmp_sql_file)
            except Exception as e:
                logging.error("main():Delete the tmp sql file: %s:[%s]" %
                              (global_tmp_sql_file, e))
    else:
        logging.warning("There is No tmp sql file:%s" % global_tmp_sql_file)
    return
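
# common.download_from_s3() is not shown in this file; a sketch of what it
# might do using boto3. The bucket argument, key layout, and hour-partitioned
# filename filtering are assumptions for illustration:
import os
import boto3

def download_from_s3(bucket, key_path, hour, local_dir):
    """Download every object under key_path whose key embeds the given hour."""
    s3 = boto3.client('s3')
    stamp = hour.strftime('%Y-%m-%d-%H')  # assumed hour-partition format
    local_files = []
    resp = s3.list_objects_v2(Bucket=bucket, Prefix=key_path)
    for obj in resp.get('Contents', []):
        key = obj['Key']
        if stamp not in key:
            continue
        local_path = os.path.join(local_dir, os.path.basename(key))
        s3.download_file(bucket, key, local_path)
        local_files.append(local_path)
    return local_files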
print '''
#Brief: %s is used for getting specific info from the shitu server
#Author: [email protected]
#usage: python %s cfg_file
''' % (name, name)

if __name__ == '__main__':
    filename = 'database.cfg'
    conf = MyConf(filename)
    if 0 != conf.parse():
        print "MyConf.parse error from file:%s" % filename
        exit(1)
    common.init_log(conf.log_file, conf.log_level)
    job = JobFactory.create(conf.language)
    if not job:
        common.logger.error("job create failed for language:%s" % conf.language)
        exit(1)
    common.logger.info("job create success for language:%s" % conf.language)
    if job.init(conf) != 0:
        common.logger.warn("job runner init failed")
        exit(1)
    common.logger.info("job runner init success")
    ret = job.run()
    common.logger.info("job runner run over, ret:%d" % ret)
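
# MyConf and JobFactory are defined elsewhere; minimal stand-ins that satisfy
# the calls above. Signatures and fields are inferred from usage, so these are
# assumptions, not the real classes:
class MyConf(object):
    def __init__(self, filename):
        self.filename = filename
        self.log_file = 'log.txt'
        self.log_level = 'info'
        self.language = 'en'

    def parse(self):
        # would read self.filename and populate the fields above; 0 means success
        return 0

class JobFactory(object):
    @staticmethod
    def create(language):
        # would return a job exposing init(conf) -> int and run() -> int,
        # or None when the language is unsupported
        return None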