Example #1
from datetime import datetime
from pyspark.sql import SparkSession, Window
import pyspark.sql.functions as func
from pyspark.sql.types import StringType, TimestampType, FloatType, IntegerType, StructType, StructField, LongType
import os
import platform
import numpy as np
import socket
# Initialize logging
import loginit
import logging
loginit.setup_logging('./logconfig.yml')


# EMA translated from a stored procedure; it differs from the conventional
# EMA (see compute_mkt_adx_v2): Wilder-style smoothing with alpha = 1/period
def get_ema(v, period):
    ema_value = v[0]  # seed with the first observation
    for x in v[1:]:
        ema_value = 1 / period * x + (period - 1) / period * ema_value
    return float(ema_value)
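
# For contrast, a minimal sketch of the conventional EMA, which uses
# alpha = 2 / (period + 1) instead of the Wilder-style 1 / period above
# (the function name is illustrative, not part of the original):
def get_ema_conventional(v, period):
    alpha = 2.0 / (period + 1)  # conventional smoothing factor
    ema_value = v[0]            # seed with the first observation
    for x in v[1:]:
        ema_value = alpha * x + (1 - alpha) * ema_value
    return float(ema_value)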


def get_dmi_realtime_signal(spark_df, cal_trend_ratio):
    dmi_period = 30  # period is 30, not the conventional 14
    # x, y, z are null for the last row, so guard before taking the max
    udf_max = func.udf(
        lambda x, y, z: float(
            max(abs(x), abs(y), abs(z))
            if x is not None and y is not None and z is not None else np.nan),
        FloatType())  # return type assumed; the original snippet is truncated here
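    # A hedged usage sketch (the column names below are illustrative, not from
    # the original): the UDF takes three difference columns and returns the
    # largest absolute value, a true-range-style quantity used in DMI, e.g.
    # spark_df = spark_df.withColumn(
    #     'tr', udf_max(func.col('hl_diff'), func.col('hc_diff'), func.col('lc_diff')))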
Example #2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 21 00:37:59 2016

@author: lifujing
"""

import unittest
import numpy as np
import numpy.testing as npt
import logging as log
from econ import ls
import loginit

loginit.setup_logging()
logger = log.getLogger(__name__)
# test case: the individual unit of testing
# test suite: a collection of test cases, test suites, or both
# test runner: the component that orchestrates test execution and reports the results
class TestLsModel(unittest.TestCase):
    """
    assertEqual, assertTrue, assertFalse,
    assertRaises (verify that a specific exception gets raised)
    setUp, tearDown: define instructions executed before/after each test method
    """
    def test_ls(self):
        # y = 0.5 + 2x; the data are constructed so the exact least-squares
        # fit is intercept 0.5, slope 2.0
        x = np.array([[1], [2], [3]])
        y = np.array([[2.502], [4.496], [6.502]])
        model = ls.LSModel(x, y)
        # check the regression coefficients; beta's layout (intercept first)
        # is an assumption, as LSModel's internals are not shown in this snippet
        beta = model.beta
        npt.assert_allclose(np.ravel(beta), [0.5, 2.0], atol=1e-2)
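        # A hedged cross-check of the same coefficients with NumPy's own least
        # squares (a sketch that does not depend on LSModel's internals):
        A = np.hstack([np.ones_like(x, dtype=float), x])  # design matrix with intercept column
        coef, *_ = np.linalg.lstsq(A, y, rcond=None)
        npt.assert_allclose(coef.ravel(), [0.5, 2.0], atol=1e-2)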