Example No. 1
    def split_samples(cls, all_sample_records):
        ks_shards = []
        # calculate sample records count
        sample_size = len(all_sample_records)

        # default shard number is 10
        shard_num = 10
        if KS_SHARD_NUM is not None:
            shard_num = KS_SHARD_NUM

        # calculate the per-shard page size (use the resolved shard_num, which
        # falls back to 10 when KS_SHARD_NUM is not configured)
        shard_page_size = sample_size // shard_num

        index = 0
        score_start = None
        score_end = None
        while index < shard_num:
            current_split_point = index * shard_page_size + shard_page_size
            if index == shard_num - 1:  # the last shard
                score_end = None
                shard = KSShardRule("", score_start, score_end)
                ks_shards.append(shard)
                score_start = score_end
            else:
                # sample record format: ("batno", score, result)
                sample = all_sample_records[current_split_point]
                score_end = sample[1]
                shard = KSShardRule("", score_start, score_end)
                ks_shards.append(shard)
                score_start = score_end
            index = index + 1

        Logger.debug(to_json(ks_shards))

        return ks_shards
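For reference, the equal-frequency split in Example No. 1 can be exercised on a small, score-sorted list. The sketch below is a self-contained approximation: KSShardRule here is a hypothetical stand-in with the same (name, score_start, score_end) shape as the project class, and the records are synthetic ("batno", score, result) tuples.

# Minimal sketch of the equal-frequency sharding shown above (assumptions noted).
from dataclasses import dataclass
from typing import Optional


@dataclass
class KSShardRule:  # hypothetical stand-in for the project's KSShardRule
    shard_name: str
    score_start: Optional[float]  # inclusive lower bound, None for the first shard
    score_end: Optional[float]    # exclusive upper bound, None for the last shard


def split_samples_sketch(records, shard_num=10):
    # records are assumed sorted ascending by score: ("batno", score, result)
    shard_page_size = len(records) // shard_num
    shards, score_start = [], None
    for index in range(shard_num):
        if index == shard_num - 1:
            score_end = None  # the last shard is open-ended
        else:
            score_end = records[(index + 1) * shard_page_size][1]
        shards.append(KSShardRule("", score_start, score_end))
        score_start = score_end
    return shards


# 20 synthetic records split into 4 shards of 5 records each.
records = [("b%02d" % i, i * 5.0, "0" if i % 3 else "1") for i in range(20)]
print(split_samples_sketch(records, shard_num=4))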
Example No. 2
    def save(self):
        ids = []

        variant_count = 0

        for product in self.data:
            id = dbProduct.upsert(product, self.shop_id)
            ids.append(id)
            pv = ProductVariant(id)
            variant_count = variant_count + len(pv.save(product['variants']))
        Logger.status_message(Logger.RUN_ID, Logger.SHOP_ID,
                              "Synced %s Product Variants" % (variant_count))
        return ids
Example No. 3
    def save(self):
        ids = []
        addresses_count = 0
        for customer in self.data:
            id = dbCustomer.upsert(customer, self.shop_id)
            ids.append(id)
            ca = CustomerAddress(id)
            addresses_count = addresses_count + len(
                ca.save(customer['addresses']))

        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Customer Addresses" % (addresses_count))
        return ids
Example No. 4
    def save(self):
        ids = []
        fulfillment_count = 0
        line_items_count = 0
        shipping_addresses_count = 0
        shipping_lines_count = 0
        refunds_count = 0
        for order in self.data:

            id = dbOrder.upsert(order, self.shop_id)
            ids.append(id)

            of = OrderFulfillment(id)
            fulfillment_count = fulfillment_count + len(
                of.save(order['fulfillments']))

            oli = OrderLineItems(id)
            line_items_count = line_items_count + len(
                oli.save(order['line_items']))

            refund = OrderRefund()
            refunds_count = refunds_count + len(refund.save(order['refunds']))

            if 'shipping_address' in order:
                shipping_address = OrderShippingAddress(id)
                shipping_address.save(order['shipping_address'])
                shipping_addresses_count = shipping_addresses_count + 1

            shipping_lines = OrderShippingLines(id)
            shipping_lines_count = shipping_lines_count + len(
                shipping_lines.save(order['shipping_lines']))

        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Fulfillments" % (fulfillment_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Line Items" % (line_items_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Shipping Addresses" % (shipping_addresses_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Shipping Lines" % (shipping_lines_count))
        Logger.status_message(Logger.RUN_ID, Logger.SHOP_ID,
                              "Synced %s Order Refunds" % (refunds_count))
        return ids
Example No. 5
    def __init__(self):
        self.config: ConfigHelper = ConfigHelper('config.conf')
        self.logger: Logger = Logger(self.config, datetime, 'app-loging')
        self.db = DatabaseFactory('MySQL', self.config, self.logger)
        self.list_keyword_provinces = config.LIST_KEYWORD_PROVINCES
        self.list_keyword_cities = config.LIST_KEYWORD_CITIES
        self.list_keyword_districts = config.LIST_KEYWORD_DISTRICTS

        self.cleansing = Cleansing()
Example No. 6
    def cal_ks_by_strategy(cls, v_sample_id, v_model_id, v_strategy_id):
        # fetch all sample records from the database, page by page
        current_page = 1
        all_sample_records = []
        while True:
            sample_page_list = ModelEstimateMysqlRepository.query_sample_model_strategy_records_paginate(
                                v_sample_id, v_model_id, v_strategy_id, GLOBAL_PAGE_SIZE, current_page, True)
            if sample_page_list is not None and len(sample_page_list) > 0:
                for item in sample_page_list:
                    all_sample_records.append(item)
                current_page = current_page + 1
            else:
                break

        Logger.debug(to_json(all_sample_records))

        # get sharding rules
        ks_shards = KSCalculateService.split_samples(all_sample_records)

        # calculate
        return KSCalculateService.cal_ks_by_default(v_sample_id, ks_shards, all_sample_records)
Example No. 7
    def cal_ks_by_model(cls, v_sample_id, v_model_id):
        current_page = 1
        all_sample_records = []
        while True:
            sample_page_list = ModelEstimateRepository\
                .query_sample_model_records_paginate(v_sample_id, v_model_id, GLOBAL_PAGE_SIZE, current_page, True)
            #  True means that sorting type is ascending
            if sample_page_list is not None and len(sample_page_list) > 0:
                for item in sample_page_list:
                    all_sample_records.append(item)
                current_page = current_page + 1
            else:
                break

        Logger.debug(to_json(all_sample_records))

        # get sharding rules
        ks_shards = KSCalculateService.split_samples(all_sample_records)

        # calculate
        return KSCalculateService.cal_ks_by_default(v_sample_id, ks_shards,
                                                    all_sample_records)
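Examples No. 6 and No. 7 share the same paginate-and-accumulate loop. A generic sketch of that pattern is shown below; fetch_page is a hypothetical callable standing in for the repository's paginated query methods, and GLOBAL_PAGE_SIZE is assumed to be the same constant used above.

# Generic paginate-and-accumulate sketch; fetch_page(page_size, page_no, ascending)
# is a hypothetical stand-in for the repository's *_paginate query methods.
def fetch_all_pages(fetch_page, page_size):
    all_records = []
    current_page = 1
    while True:
        page = fetch_page(page_size, current_page, True)  # True: ascending sort
        if not page:  # a None or empty page ends the loop
            break
        all_records.extend(page)
        current_page += 1
    return all_records


# Usage sketch (hypothetical binding of the repository call used in Example No. 7):
# all_sample_records = fetch_all_pages(
#     lambda size, page, asc: ModelEstimateRepository.query_sample_model_records_paginate(
#         v_sample_id, v_model_id, size, page, asc),
#     GLOBAL_PAGE_SIZE)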
Example No. 8
    def save(self, data):
        ids = []
        refund_lis_count = 0
        refund_trans_count = 0
        for refund in data:
            id = dbOrderRefund.upsert(refund)
            ids.append(id)

            refund_li = OrderRefundLineItem(id)
            refund_lis_count = refund_lis_count + len(
                refund_li.save(refund['refund_line_items']))

            refund_trans = OrderRefundTransaction(id)
            refund_trans_count = refund_trans_count + len(
                refund_trans.save(refund['transactions']))

        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Refund Line Items" % (refund_lis_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Order Refund Transactions" % (refund_lis_count))

        return ids
Example No. 9
    def save(self):
        ids = []
        weight_based_rates_count = 0
        price_based_rates_count = 0
        carrier_rates_count = 0
        for zone in self.data:
            id = dbShippingZone.upsert(zone, self.shop_id)
            ids.append(id)

            szc = ShippingZoneCountries(id)
            szc.save(zone['countries'])

            szwbsr = ShippingZoneWeightBasedShippingRates()
            weight_based_rates_count = weight_based_rates_count + len(
                szwbsr.save(zone['weight_based_shipping_rates']))
            szpbsr = ShippingZonePriceBasedShippingRates()
            price_based_rates_count = price_based_rates_count + len(
                szpbsr.save(zone['price_based_shipping_rates']))
            szcsrp = ShippingZoneCarrierShippingRateProviders()
            carrier_rates_count = carrier_rates_count + len(
                szcsrp.save(zone['carrier_shipping_rate_providers']))

        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Shipping Zone Weight Based Rates" %
            (weight_based_rates_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Shipping Zone Price Based Rates" %
            (price_based_rates_count))
        Logger.status_message(
            Logger.RUN_ID, Logger.SHOP_ID,
            "Synced %s Shipping Zone Carrier Shipping Rates" %
            (carrier_rates_count))

        return ids
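Examples No. 2, 3, 4, 8 and 9 all follow the same shape: upsert a parent record, save its child collections through small saver classes, and report the accumulated totals. The condensed sketch below captures that pattern with hypothetical names (sync_parents, parent_repo, child_savers); it is an illustration of the structure, not a drop-in replacement for the project classes.

# Condensed parent/children sync sketch; the function and parameter names here
# are hypothetical, and print() stands in for Logger.status_message.
def sync_parents(parents, shop_id, parent_repo, child_savers):
    ids = []
    child_counts = {name: 0 for name in child_savers}
    for parent in parents:
        parent_id = parent_repo.upsert(parent, shop_id)
        ids.append(parent_id)
        for name, (saver_cls, key) in child_savers.items():
            # each saver is constructed with the parent id and saves one child list
            child_counts[name] += len(saver_cls(parent_id).save(parent[key]))
    for name, count in child_counts.items():
        print("Synced %s %s" % (count, name))
    return ids


# Usage sketch mirroring Example No. 2 (names taken from the examples above):
# sync_parents(self.data, self.shop_id, dbProduct,
#              {"Product Variants": (ProductVariant, "variants")})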
Example No. 10
    def cal_ks_by_default(cls, v_sample_id, v_ks_shards, v_all_sample_records):
        # calculate
        if v_all_sample_records is None or len(v_all_sample_records) <= 0:
            return None

        ks_sharps_results = []

        # total sample count
        total_sample_size = len(v_all_sample_records)

        # calculate good and bad sample count
        v_good_total_count = 0
        v_bad_total_count = 0
        for sample in v_all_sample_records:
            # 0 : good, 1: bad
            if sample[2] == "0":
                v_good_total_count = v_good_total_count + 1
            else:
                v_bad_total_count = v_bad_total_count + 1

        # get sample detail information
        sample_info = SampleRepository.query_sample_by_id(v_sample_id)

        index = 0
        v_good_count = 0
        v_bad_count = 0
        v_ks = 0.0

        for ks_sharp in v_ks_shards:
            v_good_count_per_shard = 0
            v_bad_count_per_shard = 0
            v_total_count_per_shard = 0
            while index < total_sample_size:
                sample = v_all_sample_records[index]
                # invalid shard: both start and end are None
                if ks_sharp.score_start is None and ks_sharp.score_end is None:
                    Logger.error("Error: both start and end are None!")
                    break
                # check whether the current sample belongs to the current shard
                if (ks_sharp.score_start is None and sample.score < ks_sharp.score_end) \
                        or (ks_sharp.score_end is None and sample.score >= ks_sharp.score_start) \
                        or (ks_sharp.score_start is not None
                            and ks_sharp.score_end is not None
                            and ks_sharp.score_start <= sample.score < ks_sharp.score_end):
                    # identify good and bad sample
                    if sample[2] == "0":
                        v_good_count = v_good_count + 1
                        v_good_count_per_shard = v_good_count_per_shard + 1
                    else:
                        v_bad_count = v_bad_count + 1
                        v_bad_count_per_shard = v_bad_count_per_shard + 1
                else:
                    break

                v_total_count_per_shard += 1
                index = index + 1
                # end while

            # cumulative good/bad ratios up to and including this shard
            # (the KS statistic is the largest gap between the two)
            v_ratio_good = round(v_good_count / v_good_total_count, 4)
            v_ratio_bad = round(v_bad_count / v_bad_total_count, 4)

            # calculate KS
            ks_per_shard = abs(v_ratio_good - v_ratio_bad)
            if ks_per_shard > v_ks:
                v_ks = round(ks_per_shard, 4)
            v_shard_name = ks_sharp.shard_name
            if v_shard_name is None or len(v_shard_name) <= 0:
                if ks_sharp.score_start is None:
                    v_shard_name = "Low - <" + str(ks_sharp.score_end)
                elif ks_sharp.score_end is None:
                    v_shard_name = str(ks_sharp.score_start) + " - High"
                else:
                    v_shard_name = str(ks_sharp.score_start) + " - <" + str(
                        ks_sharp.score_end)

            ks_shard_result = KSShardResult(v_shard_name,
                                            v_bad_count_per_shard, v_ratio_bad,
                                            v_good_count_per_shard,
                                            v_ratio_good,
                                            v_total_count_per_shard,
                                            round(ks_per_shard, 4))
            ks_sharps_results.append(ks_shard_result)
            # end for

        # print
        Logger.debug(to_json(ks_sharps_results))

        ks_result = KSResult(sample_info.id, sample_info.name,
                             ks_sharps_results, v_ks, total_sample_size,
                             v_good_count, v_bad_count)
        # return
        return ks_result
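The KS value computed in Example No. 10 is the largest gap between the cumulative good-rate and the cumulative bad-rate across the score shards. The toy sketch below reproduces that calculation on synthetic ("batno", score, result) tuples, using plain lists instead of the repository rows; guards for empty good/bad classes are omitted for brevity.

# Toy KS calculation on synthetic records sorted ascending by score; result "0"
# marks a good sample and anything else a bad one, as in Example No. 10.
def ks_statistic(records, boundaries):
    good_total = sum(1 for r in records if r[2] == "0")
    bad_total = len(records) - good_total
    good_cum = bad_cum = 0
    ks = 0.0
    index = 0
    for upper in list(boundaries) + [None]:  # None marks the open-ended last shard
        while index < len(records) and (upper is None or records[index][1] < upper):
            if records[index][2] == "0":
                good_cum += 1
            else:
                bad_cum += 1
            index += 1
        gap = abs(good_cum / good_total - bad_cum / bad_total)
        ks = max(ks, round(gap, 4))
    return ks


# Synthetic example: 10 records, shard boundaries at scores 40 and 70.
records = [("b0", 10, "1"), ("b1", 20, "1"), ("b2", 30, "0"), ("b3", 35, "1"),
           ("b4", 50, "0"), ("b5", 55, "0"), ("b6", 60, "1"), ("b7", 75, "0"),
           ("b8", 80, "0"), ("b9", 90, "0")]
print(ks_statistic(records, [40, 70]))  # 0.5833 for this data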
Example No. 11
#!/usr/bin/env python
import sys
from app.models import UserShop
import subprocess
from app.utils.logger import Logger
import simplejson as json
import datetime

if __name__ == "__main__":
    run_date = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")

    shops = UserShop.get_all()
    for shop in shops:
        run_id = "%s-%s" % (str(shop.id), run_date)
        path = sys.path[0]
        proc = subprocess.run(
            [''.join([path, '/venv/bin/python3']),
             ''.join([path, '/importer.py']),
             str(shop.id), run_id],
            stdout=subprocess.PIPE, env={'PATH': path})
        print(proc)
        if proc.returncode != 0:
            resp = proc.stdout.decode('utf-8')
            print(resp)
            try:
                respObj = json.loads(resp)
                Logger.error_message(run_id, str(shop.id), respObj['message'])
            except Exception:  # output was not the expected JSON payload
                Logger.error_message(run_id, str(shop.id), resp)

    
Example No. 12
# -*- coding: utf-8 -*-
import uuid

import responder

from app.models.wordcloud import text_parser
from app.models.wordcloud import new_wordcloud_client
from app.utils.errors import InvalidKeyError
from app.utils.logger import Logger

# Settings
api = responder.API(cors=True, allowed_hosts=["*"])
LOGGER = Logger.get_logger(__file__)


@api.route("/api/v1/word")
class WordsResource(object):
    async def on_post(self, req, resp):
        @api.background.task
        def wordcloud_task(uid, words):
            text = text_parser(words)
            wcc = new_wordcloud_client(text)
            wcc.to_file(f"/app/storage/{uid}.png")

        uuid4 = str(uuid.uuid4())

        data = await req.media()
        if "text" not in data.keys():
            msg = "must to set 'text' in json key"
            resp.status_code = api.status_codes.HTTP_500
            resp.text = msg
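Example No. 12 cuts off before the background task is dispatched. For context, the sketch below is a self-contained, minimal illustration of responder's background-task pattern (a generic handler written for this note, not the original wordcloud module): calling a function decorated with api.background.task schedules it off the request path, so the handler can respond immediately.

# Minimal, self-contained illustration of responder's background-task pattern;
# this handler is illustrative and is not part of the original module.
import responder

api = responder.API()


@api.route("/ping")
class PingResource(object):
    async def on_post(self, req, resp):
        @api.background.task
        def slow_work(payload):
            # long-running work happens off the request/response path
            print("processing", payload)

        data = await req.media()
        slow_work(data)  # schedules the task and returns immediately
        resp.media = {"accepted": True}


if __name__ == "__main__":
    api.run()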
Example No. 13
    def cal_roc_auc_by_all_sample(cls, v_sample_id, all_sample_records):

        if all_sample_records is None or len(all_sample_records) <= 0:
            return None

        # split the sample results and the predicted scores into separate lists
        y_true_result = []
        y_scores = []
        for item in all_sample_records:
            # skip invalid samples that are missing a score or a result label
            if item[1] is not None and item[2] is not None:
                y_scores.append(item[1])
                y_true_result.append(int(item[2]))

        # call sklearn to calculate ROC and AUC, then wrap the result in a
        # RocAucResult and serialise it as JSON
        Logger.debug(to_json(y_true_result))
        Logger.debug(to_json(y_scores))
        fpr, tpr, _ = roc_curve(np.array(y_true_result), np.array(y_scores))
        Logger.debug(tpr)
        Logger.debug(fpr)

        auc = roc_auc_score(np.array(y_true_result), np.array(y_scores))
        Logger.debug(to_json(auc))

        # encapsulate fpr, tpr and auc to RocAucResult
        roc_points = []
        index = 0
        fpr_size = len(fpr)
        while index < fpr_size:
            # fpr is the x axis, tpr is the y axis of the ROC curve
            point = XYPoints(round(fpr[index], 2), round(tpr[index], 2))
            roc_points.append(
                json.loads(
                    json.dumps(point,
                               default=lambda o: o.__dict__,
                               sort_keys=True,
                               indent=4)))
            index = index + 1

        roc_auc = RocAucResult(None, None, roc_points, round(auc, 2))

        roc_auc.sample_id = v_sample_id
        sample_model = SampleMysqlRepository.query_sample_by_id(v_sample_id)
        roc_auc.sample_name = sample_model.name
        Logger.debug(
            to_json(
                json.loads(
                    json.dumps(roc_auc,
                               default=lambda o: o.__dict__,
                               sort_keys=True,
                               indent=4))))
        # return
        return roc_auc
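As a quick sanity check for the ROC/AUC step in Example No. 13, scikit-learn's roc_curve and roc_auc_score can be exercised directly on a small synthetic label/score pair; plain dicts below stand in for the project's XYPoints and RocAucResult classes.

# Toy ROC/AUC check with scikit-learn; the labels and scores are synthetic and
# plain dicts replace the project's XYPoints/RocAucResult wrappers.
import numpy as np
from sklearn.metrics import roc_auc_score, roc_curve

y_true = np.array([0, 0, 1, 1, 0, 1, 0, 1])  # 1 is the positive class
y_scores = np.array([0.1, 0.4, 0.35, 0.8, 0.2, 0.7, 0.5, 0.9])

fpr, tpr, _ = roc_curve(y_true, y_scores)
auc = roc_auc_score(y_true, y_scores)

# fpr is the x axis and tpr the y axis of the ROC curve
roc_points = [{"x": round(float(x), 2), "y": round(float(y), 2)}
              for x, y in zip(fpr, tpr)]
print(roc_points)
print("AUC:", round(float(auc), 2))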
Example No. 14
__author__ = 'erik'

from flask import Blueprint, render_template, redirect, url_for, request, g
from flask_login import login_required, current_user
from app.utils.logger import Logger
from app.forms.user_edit_form import UserDataForm
from pycountry import countries
from datetime import datetime, timedelta

log = Logger()
logger = log.get_logger()
user_page = Blueprint('user', __name__)

@user_page.route('/user/req_confirm/<_id>')
@login_required
def confirm(_id):
    if g.user.id == int(_id):
        from app.models.email import EmailConfirmation
        from app.sessions import Sessions
        email_conf_pre = EmailConfirmation.query.filter_by(user_id=_id).first()
        session = Sessions()
        current_time = datetime.now()
        expiration_date = current_time + timedelta(days=7)
        hash_c = session.make_hash_c(g.user.email)
        email_conf = EmailConfirmation(user_id=_id, requested_time=current_time,
                                       hash_c=hash_c, expiration_date=expiration_date)
        if email_conf_pre:
            session.update_email_conf(email_conf_pre, email_conf)
            from app.utils.mailer import send_confirm_email
            send_confirm_email(email_conf, g.user)