def build_auth_url(authority=None, scopes: list = None, state=None):
    """Build the Microsoft identity-platform authorization-request URL.

    :param authority: MSAL authority URL; None uses the app's default.
    :param scopes: OAuth scopes to request (None is treated as an empty list).
    :param state: CSRF state token; a random UUID string is generated when omitted.
    :return: the authorization-request URL string from MSAL.
    """
    redirect_uri = os.environ.get('AZURE_REDIRECT_PATH')
    # Fix: the original logged the debug-residue message "B %s"; log a
    # meaningful message, with lazy %-args so formatting is skipped when
    # INFO is disabled.
    get_logger("pmtredir.admin").info("build_auth_url redirect_uri: %s", redirect_uri)
    return MicrosoftGraph.build_msal_app(authority=authority).get_authorization_request_url(
        scopes or [],
        state=state or str(uuid.uuid4()),
        redirect_uri=redirect_uri
    )
def get_json(session, url):
    """GET *url* through *session* and return the decoded JSON body.

    Sends a fixed, browser-like header set (Host pinned to api.douban.com)
    and sleeps 5 seconds first as crude rate limiting.

    :param session: requests.Session-like object used for the GET
    :param url: URL to fetch
    :return: parsed JSON object on HTTP 200, otherwise None (error logged)
    """
    headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "accept-encoding": "gzip, deflate, sdch, br",
        "accept-language": "zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4",
        "Cache-Control": "max-age=0",
        "connection": "keep-alive",
        "host": 'api.douban.com',
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/58.0.3029.110 Safari/537.36"
    }
    time.sleep(5)  # throttle scraping
    res = session.get(url, headers=headers)
    if res.status_code == requests.codes.ok:
        return json.loads(res.text)
    # Fix: pass lazy %-args to the logger instead of pre-formatting with '%'.
    logger = get_logger()
    logger.error('[Error] url: %s status_code: %s', url, res.status_code)
    return None
def get_bs(session, url, referer):
    """GET *url* through *session* and return it parsed as BeautifulSoup.

    The Host header is derived from the URL itself, the given *referer* is
    forwarded, and the call sleeps 5 seconds first as crude rate limiting.

    :param session: requests.Session-like object used for the GET
    :param url: URL to fetch (must contain a "//host/" portion)
    :param referer: value sent as the Referer header
    :return: BeautifulSoup (html.parser) on HTTP 200, otherwise None (error logged)
    """
    # NOTE(review): raises AttributeError when the URL lacks a "//host/" part.
    host = re.match(r'.*//(.*?)/.*', url).group(1)
    headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "accept-encoding": "gzip, deflate, sdch, br",
        "accept-language": "zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4",
        "connection": "keep-alive",
        "host": host,
        "referer": referer,
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/58.0.3029.110 Safari/537.36"
    }
    # (Removed the dead, commented-out proxy configuration block.)
    time.sleep(5)  # throttle scraping
    res = session.get(url, headers=headers)
    if res.status_code == requests.codes.ok:
        return BeautifulSoup(res.text, "html.parser")
    # Fix: lazy %-args instead of eager '%' interpolation.
    logger = get_logger()
    logger.error('[Error] url: %s status_code: %s', url, res.status_code)
    return None
def __init__(self): """ Futu Trading Engine Constructor """ self.config = configparser.ConfigParser() self.config.read("config.ini") self.quote_ctx = OpenQuoteContext( host=self.config['FutuOpenD.Config'].get('Host'), port=self.config['FutuOpenD.Config'].getint('Port')) self.trade_ctx = OpenHKTradeContext( host=self.config['FutuOpenD.Config'].get('Host'), port=self.config['FutuOpenD.Config'].getint('Port')) self.username = self.config['FutuOpenD.Credential'].get('Username') # self.password = self.config['FutuOpenD.Credential'].get('Password') self.password_md5 = self.config['FutuOpenD.Credential'].get( 'Password_md5') self.futu_data = data_engine.DatabaseInterface( database_path=self.config['Database'].get('Database_path')) self.default_logger = logger.get_logger("futu_trade") self.trd_env = TrdEnv.REAL if self.config.get( 'FutuOpenD.Config', 'TrdEnv') == 'REAL' else TrdEnv.SIMULATE # Futu-Specific Variables self.market_list = [ Market.HK, Market.US, Market.SH, Market.SZ, Market.HK_FUTURE, Market.SG, Market.JP ] self.security_type_list = [ SecurityType.BOND, SecurityType.BWRT, SecurityType.STOCK, SecurityType.WARRANT, SecurityType.IDX, SecurityType.ETF, SecurityType.FUTURE, SecurityType.PLATE, SecurityType.PLATESET ] self.reference_type_list = [ SecurityReferenceType.WARRANT, SecurityReferenceType.FUTURE ]
def __init__(self, input_data: dict, rsi_1=6, rsi_2=12, rsi_3=24,
             lower_rsi=30, upper_rsi=70, observation=100):
    """Set up an RSI-Threshold strategy instance.

    :param input_data: raw input data handed to the strategy base class
    :param rsi_1: RSI period 1 (default 6)
    :param rsi_2: RSI period 2 (default 12)
    :param rsi_3: RSI period 3 (default 24)
    :param lower_rsi: lower RSI threshold (default 30)
    :param upper_rsi: upper RSI threshold (default 70)
    :param observation: observation period in dataframe (default 100)
    """
    self.default_logger = logger.get_logger("rsi_threshold")
    self.RSI_1, self.RSI_2, self.RSI_3 = rsi_1, rsi_2, rsi_3
    self.LOWER_RSI, self.UPPER_RSI = lower_rsi, upper_rsi
    self.OBSERVATION = observation
    # Base-class init must run before parsing the data.
    super().__init__(input_data)
    self.parse_data()
def __init__(self, input_data: dict, fast_k=9, slow_k=3, slow_d=3,
             over_buy=80, over_sell=20, observation=100):
    """Set up a KDJ-Cross strategy instance.

    :param input_data: raw input data handed to the strategy base class
    :param fast_k: fast K-period (default 9)
    :param slow_k: slow K-period (default 3)
    :param slow_d: slow D-period (default 3)
    :param over_buy: over-buy threshold (default 80)
    :param over_sell: over-sell threshold (default 20)
    :param observation: observation period in dataframe (default 100)
    """
    self.default_logger = logger.get_logger("kdj_cross")
    self.FAST_K, self.SLOW_K, self.SLOW_D = fast_k, slow_k, slow_d
    self.OVER_BUY, self.OVER_SELL = over_buy, over_sell
    self.OBSERVATION = observation
    # Base-class init must run before parsing the data.
    super().__init__(input_data)
    self.parse_data()
def read_embeddings_file(word_alphabet, filename):
    """Read a text-format word-embedding file into a token->vector dict.

    Lines with fewer than 3 tokens (e.g. a leading "<count> <dim>" header)
    are skipped; all remaining lines must share one dimensionality.

    :param word_alphabet: kept for interface compatibility; the original
        read its contents into locals that were never used, so it is no
        longer consulted here.
    :param filename: path to the embedding file (UTF-8, one word per line
        followed by its vector components)
    :return: (embeddings dict mapping word -> list of component strings,
              embedding dimensionality)
    """
    logger = get_logger('Embedding')
    logger.info("Reading embedding from %s" % filename)
    embeddings = dict()
    embeddings_size = 0
    # Fix: use a context manager so the file handle is closed even if the
    # dimensionality assert (or any other error) fires mid-read; the
    # original opened the file manually and only closed on full success.
    with open(filename, encoding="utf8") as file:
        for line in file:
            tokens = line.strip().split()
            # ignore vector size and dimension at the beginning lines if exist
            if len(tokens) < 3:
                continue
            # get embedding dimension from the first real line
            if embeddings_size == 0:
                embeddings_size = len(tokens) - 1
            # all the lines must have same dimensionality
            assert embeddings_size == len(tokens) - 1
            embeddings[tokens[0]] = tokens[1:]
    logger.info("Embeddings vector read from file.")
    logger.info("Number of words in embeddings: %d" % len(embeddings))
    logger.info("Dimension of embedding vectors : %d" % embeddings_size)
    return embeddings, embeddings_size
def __init__(self, stock_list: list, start_date: date, end_date: date, observation: int = 100):
    """Set up a backtesting session over *stock_list* between the two dates.

    :param stock_list: stock codes to simulate
    :param start_date: first day of the backtest
    :param end_date: day after the last simulated day (exclusive)
    :param observation: observation window length (default 100)
    """
    # Program-Related
    self.config = configparser.ConfigParser()
    self.config.read("config.ini")
    self.default_logger = logger.get_logger("backtesting")
    # Backtesting-Related
    self.INITIAL_CAPITAL = 10 ** 6
    self.capital = self.INITIAL_CAPITAL
    self.stock_list = stock_list
    self.strategy = None
    self.start_date = start_date
    self.end_date = end_date
    # Daily date labels, excluding end_date itself.
    self.date_range = pd.date_range(
        self.start_date, self.end_date - timedelta(days=1),
        freq='d').strftime("%Y-%m-%d").tolist()
    self.observation = observation
    # Transactions-Related
    self.input_data = None
    self.positions = {}
    self.transactions = pd.DataFrame(
        columns=['time_key', 'code', 'price', 'quantity', 'trd_side'])
    self.board_lot_mapping = HKEXInterface.get_board_lot_full()
    # Zero-initialised returns matrix, coerced to numeric dtypes.
    self.returns_df = pd.DataFrame(
        0, columns=self.stock_list, index=self.date_range).apply(pd.to_numeric)
    commission_cfg = self.config['Backtesting.Commission.HK']
    self.fixed_charge = commission_cfg.getfloat('Fixed_Charge')
    self.perc_charge = commission_cfg.getfloat('Perc_Charge')
def save(self, doc: dict) -> dict:
    """Save a notes record, upserted by its entry number.

    :param doc: record dict; 'Entry.Number' becomes the upsert key, and any
        '$etag'/'$version' fields are moved to '_$'-prefixed names (Mongo
        rejects '$'-prefixed field names).
    :return: dict with 'success' and 'message'; on success also 'matched',
        'modified' and 'upsert_id'.
    """
    logger = get_logger('db_intake')
    try:
        # Indexed by Entry number
        doc['entry_number'] = doc.get('Entry', {}).get('Number', 0)
        filter_ = {'entry_number': doc['entry_number']}
        # Remove '$'-prefixed fields, preserving their values.
        # Fix: the original did doc.get('$etag', None) followed by
        # del doc['$etag'], which raised KeyError (and so failed the save)
        # whenever the key was absent despite the defaulted get(); pop()
        # tolerates missing keys.
        doc['_$etag'] = doc.pop('$etag', None)
        doc['_$version'] = doc.pop('$version', None)
        update_result = self.dbconn[COLLECTION_NAME].update_one(
            filter_, {'$set': doc}, upsert=True)
    except Exception as e:
        logger.exception(e)
        return {'success': False, 'message': str(e)}
    matched = update_result.matched_count
    modified = update_result.modified_count
    up_id = update_result.upserted_id
    return {
        'success': True,
        'message': f"Matched: {matched} Modified: {modified} ID (if inserted): {up_id}",
        'matched': matched,
        'modified': modified,
        'upsert_id': up_id
    }
def get_music_collection(tracer):
    """Scrape the user's full Douban music collection, then fetch the
    individual subject pages via get_music_page().

    Walks the three collection states (collect/do/wish), pages through each
    list 15 items at a time, stores every item with insert_collection(), and
    records the ids in tracer.music_collection.
    """
    user_url = get_user_url('music', tracer.user_id)
    logger = get_logger()
    logger.info('[Start] Scraping music collection')
    # One URL per collection state; the trailing path segment is the state name.
    urls = [user_url + "collect", user_url + "do", user_url + "wish"]
    for url in urls:
        status = url.split("/")[-1]
        bs = get_bs(tracer.session, url, user_url)
        if bs is not None:
            # The page heading contains the total item count in parentheses.
            num = bs.find(id="wrapper").find("div", {"class": "info"}).h1.get_text()
            num = re.match(r'.*\((.*)\)', num).group(1)
            # 15 items per listing page.
            for i in range(int(math.ceil(int(num) / 15))):
                url_para = url
                url_referer = user_url
                if i != 0:
                    # Subsequent pages: request page i with page i-1 as referer.
                    url_para = url + "?start=%s&sort=time&rating=all&filter=all&mode=grid" % str(i * 15)
                    url_referer = url + "?start=%s&sort=time&rating=all&filter=all&mode=grid" % str((i - 1) * 15)
                bs = get_bs(tracer.session, url_para, url_referer)
                if bs is not None:
                    logger.info('[Parse] url: %s' % url_para)
                    items = bs.find(id="content").find("div", {"class": "grid-view"}).findAll("div", {"class": "item"})
                    for item in items:
                        my_music = parse_my_music(item, status)
                        insert_collection(tracer.user_id, my_music, DATA_MY_MUSIC)
                        # my_music[0] is treated as the subject id downstream.
                        tracer.music_collection.add(my_music[0])
    get_music_page(tracer, user_url)
def enrich_url(base_url, client_doc) -> str:
    """ Add payment form API parameters for this client.

    Builds a query string from the client's name, address, amount due,
    email and reference, percent-encodes it (keeping '?', '&', '='), and
    appends it to *base_url*.

    :param base_url: payment form base URL
    :param client_doc: client record dict; missing fields fall back to
        blank/default values via .get()
    :return: the enriched URL string
    """
    url = base_url
    params = ''
    cl_name = make_client_name(client_doc, include_title=False)
    cl_street = client_doc.get('address', {}).get('street', ' ')
    cl_city = client_doc.get('address', {}).get('city', ' ')
    cl_state = client_doc.get('address', {}).get('state', ' ')
    cl_zip = client_doc.get('address', {}).get('postal_code', ' ')
    cl_amount = client_doc.get('payment_due', client_doc.get('target_retainer', '0.00'))
    cl_email = client_doc.get('email', ' ')
    cl_ref = client_doc.get('reference', client_doc.get('billing_id', ' '))
    try:
        params = f'{params}?reference={cl_ref}'
        params = f'{params}&name={cl_name}'
        params = f'{params}&address1={cl_street}'
        params = f'{params}&city={cl_city}'
        params = f'{params}&state={cl_state}'
        params = f'{params}&postal_code={cl_zip}'
        params = f'{params}&amount={cl_amount}'
        params = f'{params}&email={cl_email}'
    except KeyError as e:
        logger = get_logger('payment_routes')
        # Fix: Logger.warn() is deprecated, and the exception was passed as
        # a stray positional arg with no '%s' placeholder, which triggers a
        # logging formatting error instead of logging the exception.
        logger.warning("Error creating redirect url: %s", e)
    # NOTE(review): safe='?&=' also leaves '&'/'=' unescaped inside field
    # values, so values containing those characters corrupt the query string.
    encoded_params = urllib.parse.quote(params, safe='?&=')
    url = f'{base_url}{encoded_params}'
    return url
def save(self, fields: dict, user_email: str = None) -> dict:
    """ Save a client record, if the user is permitted to do so.

    Inserts a new record when '_id' is absent or '0'; otherwise updates the
    existing record in place.

    :param fields: raw (possibly multidict) form fields for the client
    :param user_email: acting user's email; on insert it is appended to the
        record's admin_users list (record is expected to carry that list)
    :return: dict with 'success' (bool) and a human-readable 'message'
    """
    try:
        doc = multidict2dict(fields)
        cleanup(doc)
        # Determine client name for status message
        # client_name = doc.get('name', {}).get('salutation', 'Client')
        client_name = make_client_name(doc)
        # Insert new client record
        if doc.get('_id', '0') == '0':
            if '_id' in doc:
                del doc['_id']
            doc['active_flag'] = 'Y'
            # Grant the acting user admin rights on the new record.
            if user_email is not None and user_email.lower(
            ) not in doc['admin_users']:
                doc['admin_users'].append(user_email.lower())
            # Create a reference field
            doc['reference'] = f"Client ID {doc['billing_id']}"
            result = self.dbconn[COLLECTION_NAME].insert_one(doc)
            if result.inserted_id:
                message = f"Client record added for {client_name}"
                return {'success': True, 'message': message}
            message = "Failed to add new client record"
            return {'success': False, 'message': message}
        # Update existing client record
        filter_ = {'_id': ObjectId(doc['_id'])}
        del doc['_id']
        # Keep the reference field in sync with billing_id on update.
        if 'billing_id' in doc:
            doc['reference'] = f"Client ID {doc['billing_id']}"
        result = self.dbconn[COLLECTION_NAME].update_one(
            filter_, {'$set': doc})
        if result.modified_count == 1:
            message = f"{client_name}'s record updated"
            return {'success': True, 'message': message}
        # NOTE: zero modifications is still reported as success.
        message = f"No updates applied to {client_name}'s record({result.modified_count})"
        return {'success': True, 'message': message}
    except Exception as e:
        get_logger('db_clients').exception(e)
        return {'success': False, 'message': str(e)}
def __init__(self, quote_ctx: OpenQuoteContext, trade_ctx: OpenHKTradeContext,
             trd_env: TrdEnv = TrdEnv.SIMULATE):
    """Trading utility wrapping the Futu quote and trade contexts.

    :param quote_ctx: open quote context for market queries
    :param trade_ctx: open HK trade context for order operations
    :param trd_env: trading environment (default SIMULATE)
    """
    self.default_logger = logger.get_logger('trading_util')
    self.toaster = ToastNotifier()
    self.quote_ctx = quote_ctx
    self.trade_ctx = trade_ctx
    self.trd_env = trd_env
    # Order states considered still in flight / partially executed.
    self.status_filter_list = [
        OrderStatus.WAITING_SUBMIT,
        OrderStatus.SUBMITTING,
        OrderStatus.SUBMITTED,
        OrderStatus.FILLED_PART,
    ]
def __init__(self, input_data: dict, ema_fast=5, ema_slow=8, ema_supp=13, observation=100):
    """Set up an EMA-Ribbon strategy instance.

    :param input_data: raw input data handed to the strategy base class
    :param ema_fast: fast EMA period (default 5)
    :param ema_slow: slow EMA period (default 8)
    :param ema_supp: supporting EMA period (default 13)
    :param observation: observation period in dataframe (default 100)
    """
    self.default_logger = logger.get_logger("ema_ribbon")
    self.EMA_FAST, self.EMA_SLOW, self.EMA_SUPP = ema_fast, ema_slow, ema_supp
    self.OBSERVATION = observation
    # Base-class init must run before parsing the data.
    super().__init__(input_data)
    self.parse_data()
def __init__(self, input_data: dict, fast_period=12, slow_period=26, signal_period=9, observation=100):
    """Set up a MACD-Cross strategy instance.

    :param input_data: raw input data handed to the strategy base class
    :param fast_period: MACD fast period (default 12)
    :param slow_period: MACD slow period (default 26)
    :param signal_period: MACD signal period (default 9)
    :param observation: observation period in dataframe (default 100)
    """
    self.default_logger = logger.get_logger("macd_cross")
    self.MACD_FAST, self.MACD_SLOW = fast_period, slow_period
    self.MACD_SIGNAL = signal_period
    self.OBSERVATION = observation
    # Base-class init must run before parsing the data.
    super().__init__(input_data)
    self.parse_data()
def _send_email(from_email: str, clients: list, template: dict):
    """Queue an evergreen-letter email to each eligible client via AWS SES.

    A client is skipped when their trust balance was never updated, or when
    an evergreen letter was already sent after the latest balance update.
    Each attempt's outcome is notated back onto the client record via
    DATABASE.save().

    :param from_email: sender address; also BCC'd on every message
    :param clients: client record dicts (must carry '_id', 'email',
        'active_flag'; optionally 'trust_balance_update' and
        'evergreen_sent_date')
    :param template: SES template passed straight through to
        send_templated_email (annotated dict here — confirm SES accepts it,
        the API normally takes a template name)
    """
    # SES credentials/region come from the environment.
    boto_client = boto3.client(
        'ses',
        region_name=os.environ.get('AWS_REGION'),
        aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY'),
    )
    log = get_logger('email_sender')
    for client in clients:
        # Don't send if trust balance has never been updated
        if 'trust_balance_update' not in client:
            continue
        # Don't send if trust balance has not been updated since
        # the last time we sent an evergreen letter.
        if 'evergreen_sent_date' in client:
            if client['evergreen_sent_date'] > client['trust_balance_update']:
                continue
        destination = {
            'ToAddresses': [client['email']],
            'BccAddresses': [from_email]
        }
        # Convert client dict to json-string for use by SES
        template_data = _template_data(client)
        try:
            # Queue the email message for transmission.
            boto_client.send_templated_email(Source=from_email,
                                             Template=template,
                                             Destination=destination,
                                             TemplateData=template_data)
        except Exception as e:
            # Log an error to the client's record and go to the next client.
            message = f"Email failed {str(e)}"
            log.error(message)
            update = {
                '_id': client['_id'],
                'email_date': datetime.now(),
                'email_status': message,
                'active_flag': client['active_flag']
            }
            DATABASE.save(update, from_email)
            continue
        # On success, notate a successful queueing to the client record
        update = {
            '_id': client['_id'],
            'evergreen_sent_date': datetime.now(),
            'active_flag': client['active_flag'],
            'email_date': datetime.now(),
            'email_status': 'OK'
        }
        DATABASE.save(update, from_email)
def __init__(self, s3_path: str = None, local_path: str = None):
    """Resolve the S3 and local docx paths, preferring explicit arguments
    over the DOCX_S3_PATH / DOCX_PATH environment variables.

    :param s3_path: S3 path override; falsy values fall back to the env var
    :param local_path: local path override; falsy values fall back to the env var
    """
    self.logger = get_logger('fcm')
    # Falsy arguments (None or '') defer to the environment, matching the
    # original `if not ...` checks; missing env vars raise KeyError.
    self.s3_path = s3_path if s3_path else os.environ['DOCX_S3_PATH']
    self.local_path = local_path if local_path else os.environ['DOCX_PATH']
def __init__(self, db_name: str = DB_NAME):
    """ Instance initializer.

    Lazily creates the single class-wide logger, reuses any cached
    connection for *db_name*, then connects.
    """
    # Class-wide logger, created on first instantiation only.
    if Database.logger is None:
        Database.logger = get_logger('database')
    self.logger = Database.logger
    self.client = None
    self.last_inserted_id = None
    self.db_name = db_name
    # Reuse a previously cached connection for this database name, if any.
    self.dbconn = Database.database_connections.get(db_name)
    self.connect()
def __init__(self, quote_ctx: OpenQuoteContext, trade_ctx: OpenHKTradeContext,
             input_data: dict = None, strategy: Strategies = MACDCross,
             trd_env: TrdEnv = TrdEnv.SIMULATE):
    """Stock-quote handler holding the contexts, data and strategy needed
    to process quote updates.

    :param quote_ctx: open quote context
    :param trade_ctx: open HK trade context
    :param input_data: raw input data for the strategy (may be None)
    :param strategy: strategy class/instance to apply (default MACDCross)
    :param trd_env: trading environment (default SIMULATE)
    """
    self.config = configparser.ConfigParser()
    self.config.read("config.ini")
    self.default_logger = logger.get_logger('stock_quote')
    self.quote_ctx = quote_ctx
    self.trade_ctx = trade_ctx
    self.input_data = input_data
    self.strategy = strategy
    self.trd_env = trd_env
    # Shared order/trade helper bound to the same contexts and environment.
    self.trading_util = TradingUtil(self.quote_ctx, self.trade_ctx, self.trd_env)
    super().__init__()
def __init__(self, data: pd.DataFrame, feature_columns: List[str], initial_balance: float = 10000,
             commission: float = 0.0025, reward_function: str = 'sortino', returns_lookback: int = 100,
             trade_on_open: bool = True) -> None:
    """
    :param data: pandas DataFrame, containing data for the simulation, 'open', 'high', 'low', 'close',
        'volume' columns should be present
    :param feature_columns: columns of *data* exposed to the agent as observation features
    :param initial_balance: initial trading balance
    :param commission: commission to be applied on trading
    :param reward_function: type of reward function, calmar, sortino and omega allowed
    :param returns_lookback: last values in portfolio to be used when computing the reward
    :param trade_on_open: Use next entry open price as price to open/close positions
    """
    super(TradingEnv, self).__init__()
    self._logger = get_logger(self.__class__.__name__)
    self._feature_cols = feature_columns
    # Validate required columns before any further processing.
    self._check_initial_data(data, cols=self._feature_cols)
    self._data = data
    # Scaled copy produced by log_and_difference — presumably a
    # log-return-style transform; confirm in that helper.
    self._scaled_data = log_and_difference(data, feature_columns)
    self._initial_balance = initial_balance
    self._commission = commission
    self._reward_function = reward_function
    self._returns_lookback = returns_lookback
    self._trade_on_open = trade_on_open
    # state and action spaces
    self._obs_shape = (1, len(self._feature_cols)
                       )  # todo: add account info here
    self.observation_space = spaces.Box(low=0, high=1, shape=self._obs_shape, dtype=np.float32)
    # Three discrete actions — presumably buy/sell/hold; confirm in step().
    self.action_space = spaces.Discrete(n=3)
    # placeholders
    self.current_step = 0
    self.cash = self._initial_balance
    self.position = None
    self.position_history = [None]
    self.portfolio = []
    self.viewer = None
def __init__(self, quote_ctx: OpenQuoteContext, trade_ctx: OpenHKTradeContext,
             input_data: dict = None, strategy_map: dict = None,
             trd_env: TrdEnv = TrdEnv.SIMULATE):
    """Current-K-line handler holding the contexts, data and per-stock
    strategy map needed to process K-line updates.

    :param quote_ctx: open quote context
    :param trade_ctx: open HK trade context
    :param input_data: raw input data for the strategies (may be None)
    :param strategy_map: mapping to strategy instances; None yields a fresh
        empty dict (avoids a shared mutable default)
    :param trd_env: trading environment (default SIMULATE)
    """
    self.config = configparser.ConfigParser()
    self.config.read("config.ini")
    self.default_logger = logger.get_logger('cur_kline')
    self.quote_ctx = quote_ctx
    self.trade_ctx = trade_ctx
    self.input_data = input_data
    self.strategy_map = {} if strategy_map is None else strategy_map
    self.trd_env = trd_env
    # Shared order/trade helper bound to the same contexts and environment.
    self.trading_util = TradingUtil(self.quote_ctx, self.trade_ctx, self.trd_env)
    super().__init__()
class Entry(object):
    """One row of the court directory, mapped positionally onto attributes.

    Rows from the data source are sometimes mal-formed or short; missing
    trailing columns simply leave the corresponding attributes as None.
    """
    # Use the project logger when available; fall back to the stdlib
    # logging module so the class still works in isolation.
    if 'get_logger' in globals():
        logger = get_logger('court_directory')
    else:
        logger = logging

    # Positional layout of a source row. 'state' is not read from the row —
    # it is hard-coded to "TX" for every entry.
    _FIELD_NAMES = ('court_type', 'court', 'county', 'prefix', 'first_name',
                    'middle_name', 'last_name', 'suffix', 'title', 'address',
                    'city', 'postal_code', 'telephone', 'email')

    def __init__(self, fields: list) -> None:
        """Initialize from a positional *fields* row.

        Every attribute is first set to None so each instance defines the
        full field set; zip() then stops at the shorter sequence, which
        reproduces the original try/except-IndexError behavior for short
        rows without the 14 duplicated assignments. Fix: __init__ was
        annotated '-> dict', but an initializer returns None.
        """
        for name in Entry._FIELD_NAMES:
            setattr(self, name, None)
        self.state = "TX"
        for name, value in zip(Entry._FIELD_NAMES, fields):
            setattr(self, name, value)
def get_music_info(tracer, music_id, user_url):
    """Scrape one Douban music subject page and persist it.

    :param tracer: scraping session holder (provides .session)
    :param music_id: numeric subject id
    :param user_url: referer URL for the request
    :return: True when the page was fetched and stored, False when the page
        could not be fetched. On a parsing/storage error the traceback is
        printed and the process exits.
    """
    logger = get_logger()
    url = 'https://music.douban.com/subject/%s/' % music_id
    bs = get_bs(tracer.session, url, user_url)
    if bs is None:
        return False
    try:
        music = parse_music_page(bs, url)
        insert_entry(music, DATA_MUSIC)
        logger.info('[Get] url: %s' % url)
        return True
    except Exception as e:
        logger.warning('[Error] url: %s error: %s' % (url, e))
        traceback.print_exc()
        # Fix: the original called sys.exit(0), reporting success to the
        # shell on a fatal error; exit with a non-zero status instead.
        sys.exit(1)
def get_music_page(tracer, user_url):
    """Fetch detail pages for every collected music id not yet in the DB,
    retrying failures once after reconnecting."""
    logger = get_logger()
    stored_ids = {row[0] for row in get_collection_list(DATA_MUSIC)}
    pending = tracer.music_collection - stored_ids
    # First pass: collect ids whose fetch failed.
    failed_page = {music_id for music_id in pending
                   if not get_music_info(tracer, music_id, user_url)}
    # Second pass: one retry after re-establishing the session.
    if failed_page:
        logger.warning('[Rescrape] Rescraping music collection')
        tracer.reconnect()
        for music_id in failed_page:
            get_music_info(tracer, music_id, user_url)
def get_book_page(tracer, user_url):
    """Fetch detail pages for every collected book id not yet in the DB,
    retrying failures once after reconnecting."""
    logger = get_logger()
    stored_ids = {row[0] for row in get_collection_list(DATA_BOOK)}
    pending = tracer.book_collection - stored_ids
    # First pass: collect ids whose fetch failed.
    failed_page = {book_id for book_id in pending
                   if not get_book_info(tracer, book_id, user_url)}
    # Second pass: one retry after re-establishing the session.
    if failed_page:
        logger.warning('[Rescrape] Rescraping book collection')
        tracer.reconnect()
        for book_id in failed_page:
            get_book_info(tracer, book_id, user_url)
def __init__(self): """ Email Engine Constructor """ self.config = configparser.ConfigParser() self.config.read("config.ini") self.port = self.config['Email'].get('Port') self.smtp_server = self.config['Email'].get('SmtpServer') self.sender = self.config['Email'].get('Sender') self.login = self.config['Email'].get('Login') self.password = self.config['Email'].get('Password') # Create a secure SSL context self.context = ssl.create_default_context() self.default_logger = logger.get_logger("email")
def get_movie_page(tracer, user_url):
    """Fetch detail pages for every collected movie not yet in the DB,
    retrying failures once after reconnecting."""
    logger = get_logger()
    stored_ids = {row[0] for row in get_collection_list(DATA_MOVIE)}
    pending = {(movie_id, title)
               for movie_id, title in tracer.movie_collection
               if movie_id not in stored_ids}
    # First pass: collect (id, title) pairs whose fetch failed.
    failed_page = {(movie_id, title) for movie_id, title in pending
                   if not get_movie_info(tracer, movie_id, user_url, title)}
    # Second pass: one retry after re-establishing the session.
    if failed_page:
        logger.warning('[Rescrape] Rescraping movie collection')
        tracer.reconnect()
        for movie_id, title in failed_page:
            get_movie_info(tracer, movie_id, user_url, title)
def read_data_files(train_file_name, dev_file_name=None, test_file_name=None):
    """Read the train (and optional dev/test) datasets and build alphabets.

    All splits are read through the same Alphabet instances so indices are
    shared; the alphabets are closed afterwards (presumably freezing them —
    confirm in Alphabet.close).

    :param train_file_name: path to the training data file
    :param dev_file_name: optional path to the dev/validation data file
    :param test_file_name: optional path to the test data file
    :return: ((word, char, tag, morph) alphabets,
              (train, dev, test) labels,
              (train, dev, test) sentences,
              (train, dev, test) morphemes); dev/test entries are empty
              lists when the corresponding file is not given
    """
    logger = get_logger("Read Datasets")
    word_alphabet = Alphabet('word', default_value=True, singleton=True)
    char_alphabet = Alphabet('character', default_value=True)
    tag_alphabet = Alphabet('tag')
    morph_alphabet = Alphabet('morpheme')
    logger.info('Word, Char, Tag and Morpheme Alphabets will be created')
    # Reserve padding symbols in every alphabet before reading any data.
    word_alphabet.add('_PAD')
    char_alphabet.add('_PAD_CHAR')
    tag_alphabet.add('_PAD_TAG')
    morph_alphabet.add('_PAD_MORPH')
    train_sentences, train_labels, train_morphemes = \
        read_file(train_file_name, word_alphabet, char_alphabet, tag_alphabet, morph_alphabet)
    logger.info("Training data was read. Number of sentences: %d" % len(train_sentences))
    dev_sentences, dev_labels, dev_morphemes, test_sentences, test_labels, test_morphemes = [], [], [], [], [], []
    if dev_file_name is not None:
        dev_sentences, dev_labels, dev_morphemes = \
            read_file(dev_file_name, word_alphabet, char_alphabet, tag_alphabet, morph_alphabet)
        logger.info(
            "Cross validation (dev) data was read. Number of sentences: %d" % len(dev_sentences))
    if test_file_name is not None:
        test_sentences, test_labels, test_morphemes = \
            read_file(test_file_name, word_alphabet, char_alphabet, tag_alphabet, morph_alphabet)
        logger.info("Test data was read. Number of sentences: %d" % len(test_sentences))
    # Close the alphabets once all splits have been read.
    word_alphabet.close()
    char_alphabet.close()
    tag_alphabet.close()
    morph_alphabet.close()
    logger.info("Word Alphabet Size: %d" % word_alphabet.size())
    logger.info("Character Alphabet Size: %d" % char_alphabet.size())
    logger.info("Tag Alphabet Size: %d" % tag_alphabet.size())
    logger.info("Morpheme Alphabet Size: %d" % morph_alphabet.size())
    return (word_alphabet, char_alphabet, tag_alphabet, morph_alphabet), (train_labels, dev_labels, test_labels), \
        (train_sentences, dev_sentences, test_sentences), (train_morphemes, dev_morphemes, test_morphemes)
def __init__(self): """ Futu Trading Engine Constructor """ self.config = configparser.ConfigParser() self.config.read("config.ini") self.quote_ctx = OpenQuoteContext( host=self.config['FutuOpenD.Config'].get('Host'), port=self.config['FutuOpenD.Config'].getint('Port')) self.trade_ctx = OpenHKTradeContext( host=self.config['FutuOpenD.Config'].get('Host'), port=self.config['FutuOpenD.Config'].getint('Port')) self.username = self.config['FutuOpenD.Credential'].get('Username') # self.password = self.config['FutuOpenD.Credential'].get('Password') self.password_md5 = self.config['FutuOpenD.Credential'].get( 'Password_md5') self.futu_data = data_engine.DatabaseInterface( database_path=self.config['Database'].get('Database_path')) self.default_logger = logger.get_logger("futu_trade") self.trd_env = TrdEnv.REAL if self.config.get( 'FutuOpenD.Config', 'TrdEnv') == 'REAL' else TrdEnv.SIMULATE
def test_get_logger():
    """get_logger() must hand back the shared 'job_123' logging instance."""
    expected = logging.getLogger('job_123')
    assert get_logger() == expected, "logger instance is retrieved"
from openpyxl import load_workbook
from excel_reader import get_excel_data
from openpyxl.styles import Border, Side
import os
from booking.models import Activity
from decimal import Decimal
from util.logger import get_logger

# Module-level logger for the Excel-writer utilities.
log = get_logger('excel_writer')


def write_booking_from(detail, activities):
    """Fill the KateTravel booking-form workbook with customer details and
    the selected activities.

    NOTE(review): the name looks like a typo for 'write_booking_form';
    renaming would break callers, so it is left as-is.
    NOTE(review): as visible here, `ac_list` is unused, the loop never
    increments `i`, and the workbook is never saved — this chunk may be
    truncated; confirm against the full source.

    :param detail: booking header dict with 'title', 'last_name',
        'first_name', 'phone' (numeric string) and 'email'
    :param activities: iterable of dicts with 'time' and 'id'; 'id' must
        match a booking.models.Activity primary key
    """
    ac_list = get_excel_data('util/tmp.xlsx')
    wb = load_workbook('util/KateTravelBookingForm.xlsx')
    sheet = wb['BookingPage']
    # Customer header fields live on row 5 of the template.
    sheet['A5'].value = detail['title']
    sheet['B5'].value = detail['last_name']
    sheet['C5'].value = detail['first_name']
    sheet['G5'].value = int(detail['phone'])
    sheet['H5'].value = detail['email']
    # Activity rows start at row 10.
    i = 10
    for act in activities:
        sheet.cell(row=i, column=1).value = act['time']
        act_detail = Activity.objects.get(id=act['id'])
        sheet.cell(row=i, column=4).value = '%s (%s)' % (act_detail.name, act_detail.during_time)
# -*- coding: utf8 -*- from django.shortcuts import render from booking.models import Location, Activity, Company, TimeTable from django.http import HttpResponse import json from django.core import serializers from util.excel_writer import write_booking_from from django.conf import settings from threading import Thread import uniout from util.logger import get_logger log = get_logger('KateTravel') # Create your views here. def activity_booking_page(request): locations = Location.objects.all() return render(request, 'booking/activity.html', {'locations': locations}) def get_activities(request): activities = list() location_id = request.GET.get('location_id') tmp_list = Activity.objects.filter(location_id=location_id) for act in tmp_list: act_dict = dict() act_dict['id'] = act.id