def __init__(self):
    """Load TidyHQ/Pushover/logging settings from the config file and build helpers."""
    # Config
    self.config = ConfigParser.RawConfigParser()
    # NOTE(review): config_filename comes from enclosing scope — confirm it is defined there.
    self.config.read(config_filename)
    self.db_reload_seconds = 0  # default; unconditionally overwritten below
    tidy_client_id = self.config.get('TidyHQ', 'client_id')
    tidy_client_secret = self.config.get('TidyHQ', 'client_secret')
    tidy_member_group = self.config.get('TidyHQ', 'group_id')
    tidy_domain_prefix = self.config.get('TidyHQ', 'domain_prefix')
    self.tidy_username = self.config.get('TidyHQ', 'username')
    self.tidy_password = self.config.get('TidyHQ', 'password')
    self.db_reload_seconds = self.config.getint('TidyHQ', 'update_interval_sec')
    # Pushover notification credentials
    self.push_apikey = self.config.get('Pushover', 'access_token')
    self.push_userkey = self.config.get('Pushover', 'user_key')
    # Rotating-log settings (size/backup count are read as strings here)
    log_filename = self.config.get('Logging', 'sync_filename')
    log_filesize = self.config.get('Logging', 'size_bytes')
    log_backup_count = self.config.get('Logging', 'backup_count')
    #print "Pushover token: " + self.push_apikey
    self.log = Logger(log_filename, log_filesize, log_backup_count,
                      self.push_apikey, self.push_userkey, "HHSAccess")
    self.tidyhq = TidyHQController(tidy_client_id, tidy_client_secret,
                                   tidy_member_group, tidy_domain_prefix)
def __init__(self):
    """Set up the report logger, the SQLite wrapper and the capture helper."""
    report_log = Config.logdir + '/Report.log'
    self.logger = Logger(log_path=report_log, log_level='debug', log_name='Report')
    self.logger.debug("logger test ok")
    self.sqlite = Sqlite()
    self.cap = Capture()
    self.logger.debug("init over")
def main():
    """Entry point: load config and sessions, then run the connection loop until stopped."""
    global loop
    global log
    global port
    log = Logger(Log.DEBUG)
    try:
        load_config()
    except (ConfigPathError, ConfigError) as e:
        log.Error("Error in Loading configuration {0} ".format(e))
        exit(255)
    except Exception as e:
        # Anything unexpected during config loading is also fatal.
        log.Error("Unknown Error in Loading configuration {0} ".format(e))
        exit(255)
    try:
        load_sessions()
    except (ConfigPathError, ConfigError, SessionCreateError) as e:
        log.Error("Error in connecting to nodes {0} ".format(e))
        exit(255)
    except Exception as e:
        log.Error("Unknown Error in connecting to nodes {0} ".format(e))
        exit(255)
    # Install signal handlers — presumably these clear `loop`; confirm.
    register_signals()
    connection = Connection(port=int(port))
    connection.start_listener()
    # Serve until `loop` is cleared or the handler raises.
    while loop:
        try:
            connection.connection_handler()
        except Exception as e:
            log.Error(e)
            break
    connection.stop_listener()
    del connection
def create_folder(self, folder_location) :
    """Create Results/words and Results/logs under the base folder; return a timestamp.

    folder_location: base directory; falls back to the current working directory.
    """
    # Save results in the current location when no folder is given
    self.folder = Path.cwd() if folder_location is None else Path(folder_location).resolve()
    if not self.folder.exists() :
        # NOTE(review): self.logger is only (re)assigned at the end of this method —
        # this call relies on a logger set elsewhere; confirm it exists at this point.
        self.logger.FileNotExistsError(sys._getframe().f_lineno, self.folder)
    result_folder = self.folder / "Results"
    self.words_folder = result_folder / "words"
    self.logs_folder = result_folder / "logs"
    for folder in [result_folder, self.words_folder, self.logs_folder] :
        if not folder.exists() :
            folder.mkdir()
    current_time = korea_time("%Y%m%d_%H%M%S")
    log_file = self.logs_folder / "logs - {}.txt".format(current_time)
    # Fresh logger bound to this run's log file
    self.logger = Logger("Model.py", log_file)
    return current_time
def __init__(self):
    """Initialise the report helper: logger, sqlite store and capture client."""
    self.logger = Logger(
        log_path=Config.logdir + '/Report.log',
        log_level='debug',
        log_name='Report',
    )
    self.logger.debug("logger test ok")
    self.sqlite = Sqlite()  # domain-name store
    self.cap = Capture()    # data-capture helper
    self.logger.debug("init over")
class Base:
    """Skeleton for plugin classes: lifecycle hooks plus tagged logging helpers."""

    def __init__(self, master):
        self.name = None    # plugin display name, e.g. "name"
        self.commands = []  # accepted commands, e.g. ["\command", ...]
        self.master = master
        self.LOG = Logger('./log', 'plugins')

    def initialize(self):
        """Hook called after construction; the default does nothing."""
        return

    def __call__(self, cmd: str, arg: Optional[str] = None) -> bool:
        """Handle a command; the base implementation handles nothing."""
        return False

    def on_stop(self):
        """Hook called on shutdown; the default does nothing."""
        return

    def log_info(self, msg):
        """Log an info message tagged with this plugin's name."""
        self.LOG.info("[%s]: %s" % (self.name, msg))

    def log_warn(self, msg):
        """Log a warning tagged with this plugin's name."""
        self.LOG.warn("[%s]: %s" % (self.name, msg))

    def log_error(self, msg):
        """Log an error tagged with this plugin's name."""
        self.LOG.error("[%s]: %s" % (self.name, msg))
def task2(self, s_url, begin, end):
    """Scrape the top-10 stock holdings for each fund page in s_url[begin:end].

    Builds and prints one DataFrame per fund: fund code, stock name, stock
    weight, and the top-10 total weight. (Python 2 code — uses xrange.)
    """
    logger = Logger()
    path = '//div[@class="bd"]/ul/li[@class="position_shares"]/div/'
    driver = webdriver.Chrome()
    for url in s_url[begin:end]:
        logger.info(url)
        driver.get(url)
        # Random pause between requests to look less like a bot
        temp = random.randint(9, 20)
        time.sleep(temp)
        # Fund code is embedded in the URL; repeated for each of the 10 rows.
        # NOTE(review): slice [26:32] assumes a fixed URL layout — confirm.
        list_fund_code = [url[26:32] for i in xrange(10)]
        try:
            list_stock_name = self.get_text(
                driver.find_elements_by_xpath(
                    path + 'table/tbody/tr/td[1]')[0:10])
            list_stock_per = self.get_text(
                driver.find_elements_by_xpath(
                    path + 'table/tbody/tr/td[2]')[0:10])
            # NOTE(review): `sum` shadows the builtin within this scope.
            sum = self.get_text(
                driver.find_elements_by_xpath(path + 'p/span[2]'))
            list_per_sum = [sum[0] for i in xrange(10)]
        except Exception as e:
            # Skip funds whose page does not expose the holdings table
            logger.error(e)
            continue
        dict_df = {
            u'基金代码': pd.Series(list_fund_code),
            u'股票名称': pd.Series(list_stock_name),
            u'股票占比': pd.Series(list_stock_per),
            u'前十持仓占比合计': pd.Series(list_per_sum)
        }
        df = pd.DataFrame(dict_df,
                          columns=[u'基金代码', u'股票名称', u'股票占比',
                                   u'前十持仓占比合计'])
        print(df)
        # df.to_excel('./Data3/fund_position_' + str(url[26:32]) + '.xlsx', index=False)
    driver.close()
    pass
def __init__(self):
    """Read config, wire up logging/Pushover, the door controller and the member DB."""
    # De-bounce state for repeated scans of the same tag
    self.last_tag_scanned = 0
    self.tag_scan_count = 0
    self.tag_scan_repeat_message = 3  # notify after this many repeat scans
    self.config = ConfigParser.RawConfigParser()
    self.config.read(self.config_filename)
    # Pushover notification credentials
    self.push_apikey = self.config.get('Pushover', 'access_token')
    self.push_userkey = self.config.get('Pushover', 'user_key')
    log_filename = self.config.get('Logging', 'tag_filename')
    log_filesize = self.config.get('Logging', 'size_bytes')
    log_backup_count = self.config.get('Logging', 'backup_count')
    self.log = Logger(log_filename, log_filesize, log_backup_count,
                      self.push_apikey, self.push_userkey, "HHSAccess")
    # Hardware access can be stubbed out via [Debug] nopigpio
    debug_nopigpio = self.config.getboolean('Debug', 'nopigpio')
    self.dc = dc(nopigpio=debug_nopigpio)
    self.dc.set_tag_scanned_callback(self.tag_scanned)
    self.dc.set_alarm_sounding_callback(self.alarm_sounding)
    self.dc.set_alarm_armed_callback(self.alarm_armed)
    self.db = sdb("members.db")
    self.log.log_and_notify("Startup completed", "System startup")
class Report:
    """Report helper backed by the shared SQLite store and capture client."""

    def __init__(self):
        # get the logger
        self.logger = Logger(log_path=Config.logdir + '/Report.log',
                             log_level='debug', log_name='Report')
        self.logger.debug("logger test ok")
        # init sqlite
        self.sqlite = Sqlite()
        self.cap = Capture()
        self.logger.debug("init over")

    def get_available_name(self):
        """Return the available domain-name rows, or [] on any database error."""
        try:
            result = self.sqlite.get_available_yuming_infos()
            return result
        except Exception as ex:  # fixed Python-2-only `except Exception,ex` syntax
            self.logger.error("get_available_name error:%s" % (ex,))
            return []
class Report:
    """Report generator: shared logger, SQLite store and capture helper."""

    def __init__(self):
        # get the logger
        self.logger = Logger(log_path=Config.logdir + '/Report.log',
                             log_level='debug', log_name='Report')
        self.logger.debug("logger test ok")
        # init sqlite
        self.sqlite = Sqlite()
        self.cap = Capture()
        self.logger.debug("init over")

    def get_available_name(self):
        """Fetch available domain names; log and swallow DB errors, returning []."""
        try:
            result = self.sqlite.get_available_yuming_infos()
            return result
        except Exception as ex:  # fixed Python-2-only `except Exception, ex` syntax
            self.logger.error("get_available_name error:%s" % (ex,))
            return []
import json import time import uuid from datetime import datetime from Log import Logger from Config import Config from Sqlite import Sqlite from capture import Capture import flask from werkzeug import secure_filename # set the logger logger = Logger(log_path=Config.logdir+'/server.log', log_level=Config.loglevel, log_name='Server') #sqlite = Sqlite('yuming.db') sqlite = Sqlite() class Server: def __init__(self): # get the logger self.logger = logger self.logger.debug("logger test ok") # init sqlite self.sqlite = sqlite self.cap = Capture() self.logger.debug("init over") def get_name_info_all(self):
from sys import path
path.insert(0, "/root/Git/PowerAPI-Redfish/lib/python")
import Util
from Log import Logger, Log
import json

# Error-level logger for this smoke-test script
log = Logger(Log.ERROR)
path.append("/root/Git/PowerAPI-Redfish/scripts")

# Load the device configuration and grab the ilo4 chassis AvgPower attribute.
cp = Util.get_config_path()
Util.LoadConfiguration(cp)
co = Util.getConfigObj()
# print(co)
dev = co['ilo4']['chassis']
# print(dev)
att = dev['AvgPower']
att.load(dev)
# obj = att.getmodobj()
# print(obj)
# URL = att.getURL()

# Exercise URL/parameter lookups for both get and set operations.
# print statements parenthesized for Python 3 compatibility (same output for one arg).
URL1 = Util.getURL('ilo4', 'chassis#1', 'AvgPower', 'get')
print(URL1)
URL2 = Util.getURL('ilo4', 'chassis#1', 'AvgPower', 'set')
print(URL2)
Param1 = Util.getParam('ilo4', 'chassis#1', 'AvgPower', 'get')
print(Param1)
Param2 = Util.getParam('ilo4', 'chassis#1', 'AvgPower', 'set')
print(Param2)
#!/usr/bin/python2.7 from inputargs import Parser from api import API from Log import Logger logger = Logger() parser = Parser() args = parser.argparser.parse_args() api = API(args) def main(): if args.debug: logger.setlevel('DEBUG') if args.verbose: logger.setlevel('INFO') if args.quiet: logger.setlevel('ERROR') if hasattr(args, 'mngr_subparser'): if args.mngr_subparser == 'get': api.manager_get() if hasattr(args, 'mngr_subparser'): if args.mngr_subparser == 'set': api.manager_set() if hasattr(args, 'ini_subparser'): if args.ini_subparser == 'get':
# -*- coding: utf-8 -*- """ @Project : show_project @File : check_frame_number_timestamp.py @Author : 王白熊 @Data : 2021/3/25 14:45 """ import pandas as pd from Log import Logger import os logger = Logger('check_frame_number_timstamp').getlog() def check_path(file_path): if not os.path.exists(str(file_path)): os.makedirs(str(file_path)) logger.info('创建目录:{}'.format(file_path)) return str(file_path) def check_frame_number_timstamp(file_name, mode=0): if file_name.endswith('csv'): df = pd.read_csv(file_name) elif file_name.endswith('xlsx'): df = pd.read_excel(file_name) else: logger.warning('不支持的格式') return track_ids = df.groupby('id') df.sort_values(by='id')
from __future__ import print_function import cexapi import re import time import json import sys ## just place till P3 import urllib2 version = "0.9.2" ## Get Loggin obect from Log import Logger log = Logger() settings = [] class CexControl: def __init__(self): ## Initialize class Trading = "GUI" class Coin: def __init__(self, Name, Threshold, Reserve): self.Name = Name self.Threshold = Threshold
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,WHETHER IN # CONTRACT,STRICT LIABILITY,OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY # WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import sys import os from generic import generic import Util import re from ExceptionCollection import ParamInResponseGetError Proc_str = "" ProcDimm = "" alist = [] from Log import Logger, Log log = Logger(Log.DEBUG) import string import json class Memory_Temperature(generic): def get(self, session=None, entity=None, obj=None, attribute=None): dev = obj.replace("#", "") dev = dev.split(".") r = re.compile("([a-zA-Z]+)([0-9]+)") d = dict([r.match(string).groups() for string in dev]) Proc_Str = "P{0}".format(d["cpu"]) ProcDimm = Proc_Str + " DIMM"
if __name__ == '__main__':
    # Fix all RNG seeds for reproducibility (python, numpy, torch, CUDA).
    seed = 12345
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    n_gpu = torch.cuda.device_count()
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    if n_gpu > 0:
        torch.cuda.manual_seed(seed)
    silence_step = 0  # step from which evaluation may start
    skip_step = 20    # evaluate every this many steps
    config = DicToObj(**config_model)
    formatter = '%(asctime)s %(levelname)s %(message)s'
    config = generate_name_path(config, path_parser)
    # File logger for training plus a TensorBoard logger
    train_logger = Logger(filename=config.saved_path + '/train.log',
                          fmt=formatter).logger
    tb_logger = Tensorboard_Logger(config.saved_path)
    exe = Executor(model_name="stocknet", config=config,
                   silence_step=silence_step, skip_step=skip_step,
                   train_logger=train_logger, tb_logger=tb_logger)
    exe.apply(xavier_init)  # Xavier weight initialisation
    exe.train_and_dev(do_continue=False)
    exe.restore_and_test()
from Player import Player
import argparse
import gym

parser = argparse.ArgumentParser()
# NOTE(review): argparse `type=bool` treats ANY non-empty string as True
# (bool("False") is True) — consider a str2bool converter.
parser.add_argument('-train', default=False, type=bool,
                    help='Flag indicating whether to train or not')
parser.add_argument('-test', default=False, type=bool,
                    help='Flag indicating whether to train or not')
# NOTE(review): 'PongDeterminisitic-v0' looks misspelled
# ('PongDeterministic-v0') — confirm against the gym registry.
parser.add_argument('-game', default='PongDeterminisitic-v0', type=str,
                    help='Flag indicating which game the agent should play')
parser.add_argument('-learning_rate', default=0.00025, type=float,
                    help="Learning rate to be used when applying gradients")
parser.add_argument('-decay', default=0.99, type=float,
                    help="Decay rate to be used by the RMSProp optimizer")
args = parser.parse_args()

gameType = args.game
info = gym.spec(gameType).make()
actionSpace = info.action_space.n
# NOTE(review): `Logger` and `tf` are not imported in this view — confirm imports.
log = Logger(tf, "tensorBoard/" + gameType)
learningRate = args.learning_rate  # removed stray "<= delete" editing artifact
decay = args.decay                 # removed stray "<= delete" editing artifact
imageDims = 84
numFrames = 4
savePath = "savedModels/" + args.game + ".ckpt"


def loadWeights(path, sess, saver, train=True):
    """Restore model weights from `path`; on failure, initialise fresh variables
    when training is enabled."""
    try:
        saver.restore(sess, path)
        print("Successfully Restored Model!!")  # fixed missing closing parenthesis
    except Exception:  # narrowed from a bare `except:`
        if train:
            sess.run(tf.global_variables_initializer())
            print("No model available for restoration")
@File : analysis_drsu.py @Author : 王白熊 @Data : 2020/11/10 16:36 """ import os import time import math import pandas as pd from pandas import DataFrame import numpy as np from glob import glob from Log import Logger from constant import const from analysis_drsu_single import TrackDrsu logger = Logger('DrsuScene').getlog() # drsu 场景,由多个track_id组成 class DrsuScene(object): def __init__(self, file_path, ort=True, use_time=False): """ :param file_path: drsu路径,到obs_data_trackid这一层 :param ort:摄像机朝向是否为x方向 """ if not os.path.exists(file_path): raise FileNotFoundError('drsu数据文件夹:%s不存在' % file_path) self.data_path = file_path self.df = DataFrame() self.bk_df = DataFrame() # self.match_type = const.MATCH_TYPE_NOT
class TagReader():
    """RFID door controller: scans tags, checks membership, drives the door lock.

    Python 2 code (print statements, ConfigParser).
    """

    config_filename = "config.cfg"

    def __init__(self):
        # De-bounce state for repeated scans of the same (rejected) tag
        self.last_tag_scanned = 0
        self.tag_scan_count = 0
        self.tag_scan_repeat_message = 3  # notify after this many repeat scans
        self.config = ConfigParser.RawConfigParser()
        self.config.read(self.config_filename)
        # Pushover notification credentials
        self.push_apikey = self.config.get('Pushover', 'access_token')
        self.push_userkey = self.config.get('Pushover', 'user_key')
        log_filename = self.config.get('Logging', 'tag_filename')
        log_filesize = self.config.get('Logging', 'size_bytes')
        log_backup_count = self.config.get('Logging', 'backup_count')
        self.log = Logger(log_filename, log_filesize, log_backup_count,
                          self.push_apikey, self.push_userkey, "HHSAccess")
        # Hardware access can be stubbed out via [Debug] nopigpio
        debug_nopigpio = self.config.getboolean('Debug', 'nopigpio')
        self.dc = dc(nopigpio=debug_nopigpio)
        self.dc.set_tag_scanned_callback(self.tag_scanned)
        self.dc.set_alarm_sounding_callback(self.alarm_sounding)
        self.dc.set_alarm_armed_callback(self.alarm_armed)
        self.db = sdb("members.db")
        self.log.log_and_notify("Startup completed", "System startup")
        #member, is_allowed = self.db.is_allowed(39160494)
        #self.tag_scanned(0, 99412070)
        #print str((member, is_allowed))

    def unlock_door(self, contact_name):
        # Reset the repeat counter and open for an authorised contact
        self.tag_scan_count = 0
        self.dc.unlock_door()
        self.log.new_occupant(contact_name)
        pass

    def open_door(self, contact_name):
        # Same behaviour as unlock_door (both reset the counter and log entry)
        self.tag_scan_count = 0
        self.dc.unlock_door()
        self.log.new_occupant(contact_name)

    def tag_scanned(self, bits, rfid):
        """Callback from the door controller when a tag is read."""
        self.log.info("Tag scanned: " + str(rfid))
        contact, is_allowed = self.db.is_allowed(rfid)
        print contact, is_allowed
        contact_name = "Unknown"
        if contact is not None:
            contact_name = str(contact[1]) + " " + str(contact[2])
            info_str = "Contact found: " + contact_name
            if is_allowed is True:
                info_str += " - allowed."
            else:
                info_str += " - not allowed."
                self.log.notify(
                    contact_name + " tried to enter but not allowed.",
                    "not allowed in")
            self.log.info(info_str)
            if is_allowed:
                self.open_door(contact_name)
        else:
            self.log.info("Unknown ID.")
        if not is_allowed:
            #self.log.invalid_tag_retries(rfid, contact_name)
            # Check for repeat scans
            if (rfid == self.last_tag_scanned):
                self.tag_scan_count += 1
                if (self.tag_scan_count >= self.tag_scan_repeat_message):
                    self.log.invalid_tag_retries(rfid, contact_name)
            else:
                self.tag_scan_count = 0
                self.last_tag_scanned = rfid
        pass

    def alarm_sounding(self):
        self.log.alarm_sounding()
        pass

    def alarm_armed(self):
        self.log.alarm_armed()
        pass

    def reload_db(self):
        # Placeholder: member DB reload not implemented yet
        pass

    def run(self):
        """Block forever, logging a heartbeat periodically."""
        while (True):
            # NOTE(review): 30 * 60 * 60 seconds is 30 HOURS, not 30 minutes — confirm intent.
            time.sleep(30 * 60 * 60)
            self.log.info("Heartbeat")
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,WHETHER IN # CONTRACT,STRICT LIABILITY,OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY # WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #This code is to read the configuration for all the objects and attributes #!/usr/bin/env python import sys from os import environ as env, path import yaml from Log import Logger from ExceptionCollection import ConfigPathError, ConfigFileError log = Logger() class ConfigParser(object): def __init__(self): self.config = dict() def __iter__(self): return iter(self.config) def __getitem__(self, key): return self.config[key] def __setitem__(self, key, value): self.config[key] = value
def do_redirect(self):
    """Send a 301 redirect cycling through the 9 report pages (Python 2 handler)."""
    print "##-DO_redirect",
    self.send_response(301)
    self.send_header(
        'Location',
        'http://localhost:%s/%d.html' % (PORT_NUMBER,
                                         (myReporter.cur_index + 1) % 9))
    self.end_headers()


if __name__ == "__main__":
    # NOTE: `global` at module level is a no-op; these names are module globals anyway.
    global Config, myReporter, myLogger, myServer, PORT_NUMBER
    # Optional config path as the first CLI argument
    myConfig = Config(sys.argv[1] if (len(sys.argv) > 1) else None)
    PORT_NUMBER = myConfig.get("port")
    global gl_template_list
    #gl_template_list = myConfig.get("template")
    gl_template_list = myConfig.get("debug")
    global myCounter
    myCounter = CounterPool()
    try:
        myParser = Parser(PORT_NUMBER, False, myConfig)
        myParser.load_templates(gl_template_list)
        myReporter = Reporter(PORT_NUMBER)
        myLogger = Logger(PORT_NUMBER)
        myServer = HTTPServer(('', PORT_NUMBER), myHandler)
        print 'Started httpserver on port ', PORT_NUMBER
        # Serve until interrupted with Ctrl-C
        myServer.serve_forever()
    except KeyboardInterrupt:
        print '^C received, shutting down the web server'
        myServer.socket.close()
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from datetime import datetime from datetime import timedelta import time from Log import Logger from Config import Config from Sqlite import Sqlite from capture import Capture from Email import Email logger = Logger(log_path=Config.logdir + '/sync.log', log_level=Config.loglevel, log_name='sync') class Sync: def __init__(self): self.logger = logger self.sqlite = Sqlite() self.cap = Capture() self.report_available_yuming = [] self.logger.debug('init over') '''
# *_*coding:utf-8 *_* from Log import Logger account = '1001524403' log = Logger('log/all.log', level='info') product_list = ["YMH9", "HSIH9", "SSIH9", ] mouth_mark = {1:'F', 2: 'G', 3: 'H', 4: 'J', 5: 'K', 6: 'M', 7: 'N', 8: 'Q', 9: 'U', 10: 'V', 11: 'X', 12: 'Z' } em_user = '******' pwd = 'qxkrwbmoxosdbhfi' address = ['*****@*****.**', ] smtp_server = 'smtp.qq.com' CommandLine = 'python D:\haitao\get_futuers_SP_data_\get_data_futures.py' restart_process_path = 'D:\haitao\get_futuers_SP_data_\get_data_futures.py'
import sqlite3 from Log import Logger LOG = Logger('./log', 'database') # Strings str_query_tables = "SELECT name FROM sqlite_master WHERE type='table'" # Database instance class DatabaseUtils: def __init__(self, db='./app.db'): LOG.info('Connecting to database: {}'.format(db)) self.con = self.connect_db(db) self.cur = self.con.cursor() LOG.info('Database connected.') return def table_exists(self, table_name): self.cur.execute(str_query_tables) tables = self.cur.fetchall() if (table_name, ) in tables: return 1 else: return 0 def execute_cmd(self, cmd, args=None, fetch_res=False): flag = 0 try: self.cur.execute(cmd, args) self.con.commit()
#!/usr/bin/env python # -*- coding: utf-8 -*- import ConfigParser import time import json import urllib2 import xml.dom.minidom import sqlite3 from Log import Logger from Config import Config from Sqlite import Sqlite logger = Logger(log_path=Config.logdir + '/capture.log', log_level=Config.loglevel, log_name='Capture') class Capture: def __init__(self): # get the logger self.logger = logger self.logger.debug("logger test ok") # init sqlite self.sqlite = Sqlite() self.now_cap_type = None
''' Created on Nov 6, 2015 @author: turbinyan ''' import re from Log import Logger log = Logger(__name__) # self.page = htmlStream def debugDumpTree(doc_tree): for element in doc_tree.iter(): log.debug('element = %s attribute= %s text=%s' % (element.tag, element.attrib, repr(element.text))) def debugDumpDiv(div_list): if len(div_list) == 0: log.debug("empty div list %s" % div_list.__name__) return for div in div_list: log.debug('div = %s attribute= %s' % (div.tag, div.attrib)) debugDumpTree(div) def debugDumpTree2File(doc_tree, file_name): with open(name=file_name, mode='w+') as fp: for element in doc_tree.iter(): fp.write('element = %s attribute= %s text=%s' %
import sys
from time import time
import os
import signal
import telepot
from telepot.loop import MessageLoop
from Database import DatabaseUtils
from Modules import Currency
from Log import Logger

# Bot-wide logger
LOG = Logger('./log', 'bot')
# Feature modules the bot dispatches commands to
MODULES = [Currency]

# Schema for the User table (one row per Telegram user)
str_create_user_table = """ CREATE TABLE User ( id integer PRIMARY KEY, username text, firstname text, lastname text, language text NOT NULL, timestamp text NOT NULL)"""

# Example of an incoming telepot message dict, kept for reference:
""" {'message_id': 5, 'from': {'id': 592685641, 'is_bot': False, 'first_name': 'Shan', 'last_name': 'Wu', 'username': '******', 'language_code': 'en'}, 'chat': {'id': 592685641, 'first_name': 'Shan', 'last_name': 'Wu', 'username': '******', 'type': 'private'}, 'date': 1597950931, 'text': '/currency 1 w e', 'entities': [{'offset': 0, 'length': 9, 'type': 'bot_command'}]} """
import json
import time
import uuid
from datetime import datetime
from Log import Logger
from Config import Config
from Sqlite import Sqlite
from capture import Capture
import flask
from werkzeug import secure_filename

# set the logger (module-wide, shared by all Server instances)
logger = Logger(log_path=Config.logdir + '/server.log',
                log_level=Config.loglevel, log_name='Server')
#sqlite = Sqlite('yuming.db')
# Shared SQLite wrapper instance
sqlite = Sqlite()


class Server:
    """HTTP-facing service bundling the shared logger, DB and capture helper."""

    def __init__(self):
        # get the logger
        self.logger = logger
        self.logger.debug("logger test ok")
        # init sqlite
        self.sqlite = sqlite
        self.cap = Capture()
        self.logger.debug("init over")
#!/usr/bin/env python # -*- coding: utf-8 -*- import ConfigParser import time import json import urllib2 import xml.dom.minidom import sqlite3 from Log import Logger from Config import Config from Sqlite import Sqlite logger = Logger(log_path=Config.logdir+'/capture.log', log_level=Config.loglevel, log_name='Capture') class Capture: def __init__(self): # get the logger self.logger = logger self.logger.debug("logger test ok") # init sqlite self.sqlite = Sqlite() self.now_cap_type = None self.logger.debug("init over")
# Formats supported:
#
# 'xls': 'Excel xls'
# 'xlsb': 'Excel 2007 xlsb file',
# 'xlsx': 'Excel xlsx file',
# 'ods': 'Openoffice.org ODS file',
# 'zip': 'Unknown ZIP file',
# None: 'Unknown file type',
import os  # fix: os.path is used below but os was never imported
import sys

import argsv

# create logger
from Log import Logger

log = Logger(3)

# confirm args
try:
    _args = argsv.Confirm(sys.argv, log).all
except ValueError as e:
    # Only propagate input-related errors ("in-XX"); other ValueErrors are
    # swallowed, leaving _args unbound below (hence the noinspection marker).
    if e.args[0].split("-")[0] == "in":
        raise e

# check if input file exist
# noinspection PyUnboundLocalVariable
if not os.path.isfile(_args[argsv.Args.input.name]):
    raise FileNotFoundError("in-03", "Can't found file")

# define output dir
# Pick the place to model and load its CSV time series (second column only).
placename = placelist[3]
filename = 'data' + os.sep + placename + '.csv'
dataframe = read_csv(filename, usecols=[1], engine='python', skipfooter=0)
dataset = dataframe.values
dataset = dataset.astype('float32')
# Fit a trend curve; keep the fitted function and figures for later use.
dataset, fittingPlot, func, fittingFig = dataHandle(dataset)
fittingPredictPlot, fittingPredictFig = Fitting.predict(func, dataset)
#create file
if os.path.exists('result') == False:
    os.mkdir('result')
path = 'result' + os.sep + placename
if os.path.exists(path):
    shutil.rmtree(path)  # start with a clean per-place result folder
os.mkdir(path)
#save log
# Redirect stdout into a per-place log file via the Logger tee object.
sys.stdout = Logger(path + os.sep + placename + '.txt')
# normalize the dataset
scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
# set train and test
look_back = 40
train_size = int(len(dataset) * 1)  # train on the full series
train = dataset[0:train_size, :]
test_size = look_back
test = dataset[0:test_size, :]
future_size = look_back
future = dataset[len(dataset) - future_size:len(dataset), :]
# reshape into X=t and Y=t+1
trainX, trainY = create_dataset(train, look_back)
testX = numpy.array([[test[:, 0]]])
futureX = numpy.array([[future[:, 0]]])
#!/usr/bin/env python # -*- coding: utf-8 -*- from Log import Logger from Config import Config import sqlite3 logger = Logger(log_path=Config.logdir + '/sqlite.log', log_level=Config.loglevel, log_name='Sqlite') class Sqlite: def __init__(self, sqlite_path=Config.sqlite_path): # get the logger self.logger = logger self.logger.debug("logger test ok") # init sqlite3 self.sqlite_path = sqlite_path self.logger.debug("sqlite_path:%s" % (self.sqlite_path)) self.sqlite = sqlite3.connect(self.sqlite_path, check_same_thread=False) # create must tables if not exist table_yuming = '''CREATE TABLE IF NOT EXISTS yuming ( name char(16) PRIMARY KEY NOT NULL, state char(16) DEFAULT 'unknown', registration_date datetime DEFAULT NULL,
import pandas as pd
import numpy as np
from pandas import Series
from scipy import optimize
from Log import Logger
from constant import const
import math
import random
import glob
import os
import matplotlib.pyplot as plt

plt.rcParams['font.sans-serif'] = ['SimHei']  # render Chinese labels correctly
plt.rcParams['axes.unicode_minus'] = False  # render minus signs correctly

logger = Logger('TrackDrsu').getlog()

# Tuning constants for track analysis
FRAME_LOSS = 10
TRACK_ID_MIN = 11126
DISTANCE_MAX = 220


def target_func(x, A, B):
    """Linear model y = A*x + B used as the curve-fitting target."""
    return A * x + B


class TrackDrsu(object):
    """One DRSU track, loaded from a per-track CSV file."""

    def __init__(self, file_drsu, ort=True):
        # Track data rounded to 1 decimal place
        self.df = round(pd.read_csv(file_drsu), 1)
        # Track id parsed from a filename like '..._<id>.csv'
        # NOTE(review): str.strip('.csv') removes a character SET, not a suffix — confirm ids are purely numeric.
        self.track_id = int(os.path.basename(file_drsu).split('_')[1].strip('.csv'))
        # ort: whether the camera faces the x direction
        self.ort = ort
        self.frame_num = self.df.shape[0]