def analyze_key_change(midi):

    # the first bar has no previous bars to compare against, and the second
    # bar only has one, so both are handled separately from the general case
    # that compares against the previous two bars

    global logger
    if not logger:
        logger = log.get_logger(__name__)
    track = midi.tracks[0]
    noteList = [noteEvent for noteEvent in track.eventList if noteEvent.type == "note"]
    keys = []
    keyChange = [0. for i in range(6)]

    # process measure by measure
    for i in range(0, 6 * measure_duration, measure_duration):
        # first measure: nothing to compare against yet
        if i == 0:
            continue
#            relevantNotes = notes_in_measure(noteList, i, i+960)
#            keys.append(determine_key(relevantNotes))
        # second measure: compare against the first measure only
        elif i == measure_duration:
            referenceNotes = notes_in_measure(noteList, i - measure_duration, i)
            referenceKey, referenceCorr = determine_key(referenceNotes, i)
            relevantNotes = notes_in_measure(noteList, i, i + measure_duration)
            newCorr = correlate(key_vectors[referenceKey], durations_to_array(get_durations(relevantNotes, i + measure_duration)))
            # integer division keeps the index an int under Python 3
            keyChange[i // measure_duration] = newCorr - referenceCorr
        # general case: compare against the previous two measures
        else:
            referenceNotes = notes_in_measure(noteList, i - 2 * measure_duration, i)
            referenceKey, referenceCorr = determine_key(referenceNotes, i)
            relevantNotes = notes_in_measure(noteList, i, i + measure_duration)
            newCorr = correlate(key_vectors[referenceKey], durations_to_array(get_durations(relevantNotes, i + measure_duration)))
            keyChange[i // measure_duration] = newCorr - referenceCorr
    logger.info("Key change: %s" % keyChange)
    return keyChange
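
analyze_key_change relies on helpers that do not appear in this listing (correlate, durations_to_array, and the key_vectors table). A minimal sketch of what they might look like, assuming a Krumhansl-Schmuckler-style approach where key_vectors maps each key to a 12-element pitch-class profile; the names and shapes here are assumptions, not the original implementation:

import numpy as np

def correlate(profile, durations):
    # Pearson correlation between a key profile and the per-pitch-class
    # duration totals of a measure
    return float(np.corrcoef(profile, durations)[0, 1])

def durations_to_array(durations):
    # durations is assumed to map MIDI pitch -> accumulated duration;
    # fold it into a 12-element array indexed by pitch class
    arr = np.zeros(12)
    for pitch, duration in durations.items():
        arr[pitch % 12] += duration
    return arr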
def analyze_melodic_accent(midi):
    global logger
    if not logger:
        logger = log.get_logger(__name__)
    track = midi.tracks[0]
    pitchList = [noteEvent.pitch for noteEvent in track.eventList if noteEvent.type == "note"]
    accentProbabilities = [1. for p in pitchList]
    for i in range(len(pitchList)):
        pitch3 = pitchList[i]
        pitch1 = pitch2 = None
        if i != 0:
            pitch2 = pitchList[i - 1]
            if i != 1:
                pitch1 = pitchList[i - 2]
        p2, p3 = __analyze_note_triplet(pitch1, pitch2, pitch3)

        accentProbabilities[i] *= p3
        if i != 0:
            accentProbabilities[i - 1] *= p2
    return accentProbabilities
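
__analyze_note_triplet is not shown in this listing. It slides a three-note window over the melody and returns accent multipliers for the middle and newest note. A toy stand-in, assuming a contour-based rule in the spirit of Thomassen's melodic accent model; the weights are placeholders, not the original values:

def __analyze_note_triplet(pitch1, pitch2, pitch3):
    # default: no accent evidence for either note
    p2 = p3 = 1.0
    if pitch1 is None or pitch2 is None:
        return p2, p3
    first_step = pitch2 - pitch1
    second_step = pitch3 - pitch2
    # a contour turn (up-then-down or down-then-up) accents the middle note
    if first_step * second_step < 0:
        p2 = 1.5  # placeholder weight
    # a large closing leap accents the newest note
    if abs(second_step) > 4:
        p3 = 1.5  # placeholder weight
    return p2, p3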
Example #4
def __lbdm_ioi(midi):
    global logger
    if not logger:
        logger = log.get_logger(__name__)
    track = midi.tracks[0]
    noteList = [noteEvent for noteEvent in track.eventList if noteEvent.type == "note"]
    boundaryStrengths = []
    intervals = []
    # NOTE: despite the name, this holds only the first note's duration and
    # is never updated with the true maximum inter-onset interval
    max_ioi = noteList[0].duration

    for i in range(len(noteList) - 1):
        first_note = noteList[i]
        second_note = noteList[i + 1]
        # offset by 1 so that simultaneous onsets cannot produce a zero
        # interval, which would cause division by zero in __strength
        interval = second_note.time - first_note.time + 1
        intervals.append(interval)
        logger.debug("first: p:%d t:%f d:%f v:%d" % (first_note.pitch, first_note.time, first_note.duration, first_note.volume))
        logger.debug("second: p:%d t:%f d:%f v:%d" % (second_note.pitch, second_note.time, second_note.duration, second_note.volume))
        logger.debug("Interval: %f" % interval)
    # first strength is calculated separately since it has no preceding interval
    s = __strength(intervals[0], intervals[1], max_ioi) * intervals[0]
    boundaryStrengths.append(s)
    for i in range(len(intervals) - 2):
        firstInterval = intervals[i]
        secondInterval = intervals[i+1]
        thirdInterval = intervals[i+2]
        boundaryStrength = secondInterval * \
            (__strength(firstInterval, secondInterval, max_ioi) + \
             __strength(secondInterval, thirdInterval, max_ioi))
        boundaryStrengths.append(boundaryStrength)
        logger.debug("Interval: %f, Prev: %f, Next: %f, Strength: %f" % (secondInterval, firstInterval, thirdInterval, boundaryStrength))
    # last strength also calculated separately
    s = __strength(intervals[-2], intervals[-1], max_ioi) * intervals[-1] 
    boundaryStrengths.append(s)
    
    # scale to 0-1 (use a list comprehension; Python 3's map returns a lazy iterator)
    boundaryStrengths = [s / max_ioi for s in boundaryStrengths]
        
    return boundaryStrengths
Example #5
def __lbdm_pitch(midi):
    global logger
    if not logger:
        logger = log.get_logger(__name__)
    track = midi.tracks[0]
    noteList = [noteEvent for noteEvent in track.eventList if noteEvent.type == "note"]
    boundaryStrengths = []
    intervals = []
    for i in range(len(noteList) - 1):
        first_note = noteList[i]
        second_note = noteList[i + 1]
        
        # add 12 to intervals so that we do not have 0 values
        # which may cause div by 0 errors
        interval = first_note.pitch - second_note.pitch + 12
        intervals.append(interval)
        logger.debug("first: p:%d t:%d d:%d v:%d" % (first_note.pitch, first_note.time, first_note.duration, first_note.volume))
        logger.debug("second: p:%d t:%d d:%d v:%d" % (second_note.pitch, second_note.time, second_note.duration, second_note.volume))
        logger.debug("Interval: %f" % interval)
    
    # first strength is calculated separately since it has no preceding interval
    s = __strength(intervals[0], intervals[1], 12) * intervals[0]
    boundaryStrengths.append(s)
    for i in range(len(intervals) - 2):
        firstInterval = intervals[i]
        secondInterval = intervals[i+1]
        thirdInterval = intervals[i+2]
        boundaryStrength = secondInterval * \
            (__strength(firstInterval, secondInterval, 12) + \
             __strength(secondInterval, thirdInterval, 12))
        boundaryStrengths.append(boundaryStrength)
        logger.debug("Interval: %f, Prev: %f, Next: %f, Strength: %f" % (secondInterval, firstInterval, thirdInterval, boundaryStrength))
    # last strength also calculated separately
    s = __strength(intervals[-2], intervals[-1], 12) * intervals[-1]
    boundaryStrengths.append(s)
    # scale to 0-1; take max() before mapping, since a lazy Python 3 map
    # assigned back to the same name would try to consume itself
    maxStrength = max(boundaryStrengths)
    boundaryStrengths = [s / maxStrength for s in boundaryStrengths]
    return boundaryStrengths
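
Both LBDM passes above call a private __strength helper that this listing omits. In Cambouropoulos' Local Boundary Detection Model the core quantity is the degree of change between consecutive intervals, r(x1, x2) = |x1 - x2| / (x1 + x2). A minimal sketch along those lines; treating the third argument as a cap on the interval values is an assumption:

def __strength(first_interval, second_interval, maximum):
    # clamp both intervals so one outlier cannot dominate the ratio
    x1 = min(abs(first_interval), maximum)
    x2 = min(abs(second_interval), maximum)
    if x1 + x2 == 0:
        return 0.0
    # LBDM degree of change between two consecutive intervals
    return abs(x1 - x2) / float(x1 + x2)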
Example #7
import requests

from tools import log


def main():
    logs = log.get_logger()
    logs.info("-================Start.================-")
    result = requests.get(url="https://api.github.com/")
    logs.debug(result.text)
    logs.info("-================End.================-")
    def __init__(self):
        self.midi = mid.prepare_initial_midi("../../res/midi_text.txt", "../../res/sample.midi", self.defaultTempo)
        self.__jobServer = pp.Server()
        self.__logger = log.get_logger(__name__)
Example #9
import os
import sys

cur_dir = os.path.dirname(os.path.abspath(__file__))
dst_dir = os.path.dirname(cur_dir)
sys.path.append(cur_dir)
sys.path.append(dst_dir)

import baostock as bs
import pandas as pd
import multiprocessing
import threading
import datetime
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

from tools import log
logger = log.get_logger()  # keep the imported 'log' module from being shadowed


def all_stock_list():
    #### Log in ####
    lg = bs.login()
    # print the login response
    print('login respond error_code:' + lg.error_code)
    print('login respond  error_msg:' + lg.error_msg)

    #### Fetch the full security list ####
    date = datetime.datetime.now()
    print(date)
    start_date = (date + datetime.timedelta(days=-5)).strftime('%Y-%m-%d')

    rs = bs.query_all_stock(day='2020-07-16')
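    # --- sketch continuation: the original snippet is truncated here. The
    # usual baostock pattern drains the ResultData into a pandas DataFrame:
    data_list = []
    while (rs.error_code == '0') and rs.next():
        data_list.append(rs.get_row_data())
    result = pd.DataFrame(data_list, columns=rs.fields)
    bs.logout()
    return result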
Example #11
import datetime
import re

import requests
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler

from tools import log
import db  # assumed local module providing MysqlTools


class APS():
    executors = {
        'default': ThreadPoolExecutor(10),
        'processpool': ProcessPoolExecutor(1),
    }
    jobstores = {
        'default':
        SQLAlchemyJobStore(url='mysql+pymysql://root:123456@localhost/iam')
    }
    job_defaults = {
        'coalesce': True,
        'max_instances': 10,
    }
    scheduler = BackgroundScheduler(jobstores=jobstores,
                                    executors=executors,
                                    job_defaults=job_defaults)

    log.setup_logging()
    logger = log.get_logger()

    # @classmethod
    # def write_error_logs(cls, file, errors):
    #     logger = logging.getLogger(__name__)
    #     logger.setLevel(level=logging.INFO)
    #     file_handler = logging.FileHandler(filename=file, mode='a', encoding='utf-8')
    #     file_handler.setLevel(logging.INFO)
    #     formatter = logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
    #     file_handler.setFormatter(formatter)
    #     logger.addHandler(file_handler)
    #     logger.info(errors)
    #     logger.removeHandler(file_handler)

    @classmethod
    def ajob(cls):
        print("11111:%s" % datetime.datetime.now())
        cls.logger.info(datetime.datetime.now())

    @classmethod
    def get_api_case_coverage(cls):
        curl = r'http://192.168.120.173:8000/getcommentdetail'
        connection = db.MysqlTools().connection
        cursor = connection.cursor()
        sql = 'select api_path,project_id from api_api;'
        cursor.execute(sql)
        api_list = cursor.fetchall()
        for api in api_list:
            try:
                comment = re.search(r'^.*?([^/\s]+?)/?$', api['api_path'])
                data = {
                    'changename': r'application/controllers/api',
                    'object_id': api['project_id'],
                    'comment': comment.group(1)
                }
                commentdetail = requests.get(url=curl, params=data).json()
                if 'if_cn' in commentdetail.keys():
                    count = (commentdetail['if_cn'] + commentdetail['for_cn'] +
                             commentdetail['switch_cn'] +
                             commentdetail['while_cn']) * 2
                    update_sql = "update api_api set api_case_coverage = %d where api_path = '%s';" % (
                        count, api['api_path'])
                    cursor.execute(update_sql)
                    connection.commit()
                else:
                    update_sql = "update api_api set api_case_coverage = 1 where api_path = '%s' ;" % (
                        api['api_path'])
                    cursor.execute(update_sql)
                    connection.commit()
            except Exception as e:
                print(e)
                cls.logger.error(e)
                continue
        connection.close()

    @classmethod
    def add_acc_tasks(cls, id):
        cls.scheduler.add_job(cls.get_api_case_coverage,
                              'interval',
                              seconds=3600,
                              id=id)

    @classmethod
    def start_aps(cls):
        cls.scheduler.start()
        cls.logger.info("Task started!!!")

    @classmethod
    def stop_aps(cls):
        cls.scheduler.shutdown(wait=False)
        cls.logger.info("Task ended!!!")
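
A minimal way to drive the scheduler class above; the job id string is arbitrary and made up:

import time

if __name__ == "__main__":
    APS.add_acc_tasks("api_case_coverage_hourly")  # hypothetical job id
    APS.start_aps()
    try:
        # BackgroundScheduler runs in a daemon thread, so keep the process alive
        while True:
            time.sleep(60)
    except (KeyboardInterrupt, SystemExit):
        APS.stop_aps()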
Example #12
    def __init__(self, task_id, project_id):
        threading.Thread.__init__(self)
        self.task_id = task_id
        self.pid = project_id
        log.setup_logging()
        self.logger = log.get_logger()
Example #13
#!/usr/bin/env python3

from tools.log import get_logger
from tools.utils import get_time, calculate_time
from tools.utils import DefaultSerializer

from predict import predict
from preprocess import preprocess
from training import training

serializer = DefaultSerializer()
logger = get_logger('batch')

def run_preprocess():
    initial_time = get_time()
    logger.info('Preprocess start.')
    data = preprocess()
    serializer.dump(data, 'data')
    logger.info('Preprocess end: ' + str(calculate_time(initial_time)) + ' seconds.')

def run_training():
    initial_time = get_time()
    logger.info('Training start.')
    data = serializer.load('data')
    model = training(data)
    serializer.dump(model, 'model')
    logger.info('Training end: ' + str(calculate_time(initial_time)) + ' seconds.')

if __name__ == '__main__':
    run_preprocess()
    run_training()
Example #14
#!/usr/bin/python3
# -*- coding:utf8 -*-

from tools import log
import setproctitle
import os
import tempfile

logger = log.get_logger()
DEPLOY_TEMP_DIR = ""


def deploy_initialize():
    global DEPLOY_TEMP_DIR
    setproctitle.setproctitle("SCMT-DEPLOY")
    log.conf_logger("scmt-deploy")
    # the original line was truncated ("os.tem..."); creating a temporary
    # directory is assumed to be the intent
    DEPLOY_TEMP_DIR = tempfile.mkdtemp()


def deploy_zookeeper(template_path, config_file=None):
    print("template_path:", template_path)
    print("config_file:", config_file)


if __name__ == "__main__":
    deploy_initialize()
    deploy_zookeeper("zookeeper-bin")
Example #15
import numpy as np
import json

from tools.utils import get_time, calculate_time
from tools.log import get_logger

logger = get_logger('predict')

mocked_predict = {
    "prediction": "mocked_prediction"
}

#########################################
#   string to 2D numpy array formatter  #
#########################################

def format_input(message):
    initial_time = get_time()
    logger.info('Formatting input...')
    # np.fromstring's text mode is deprecated; parse the comma-separated
    # string explicitly instead
    np_array = np.array([float(x) for x in message.split(',')])
    np_array = np_array.reshape(1, -1)
    logger.info('Formatting input end: ' + str(calculate_time(initial_time)) + ' seconds.')
    return np_array 

def predict(model, message):
    return mocked_predict
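
A quick sanity check of format_input; the sample string is made up:

if __name__ == '__main__':
    arr = format_input("1.0,2.5,3.0")
    print(arr.shape)  # (1, 3): one row of features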
Example #16
# -*- coding: utf-8 -*-
# author:liucong

import smtplib, os, inspect
import time
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
from tools import Config
from tools.log import get_logger

configPath = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))),
    "config.ini")

logger = get_logger('Email')


class Email(object):
    def __init__(self):
        self.smtp_server = Config.getEmail(configPath, "mail_host")
        self.username = Config.getEmail(configPath, "mail_user")
        self.password = Config.getEmail(configPath, "mail_pass")
        self.sender = Config.getEmail(configPath, "sender")
        # receivers is a list
        self.receivers = Config.getEmail(configPath, "receivers").split(',')
        self.addr_from = Config.getEmail(configPath, "from")
        self.addr_to = Config.getEmail(configPath, "to")

    # compose the message body
    def set_content(self):
        send_time = time.strftime('%Y-%m-%d %H:%M:%S')
#!/usr/bin/env python3

from flask import Flask, json, request
import numpy as np

from tools.log import get_logger
from tools.utils import get_time, calculate_time
from tools.utils import DefaultSerializer

from predict import predict

logger = get_logger('online')

serializer = DefaultSerializer()

app = Flask(__name__)

MODEL = serializer.load('model')  

@app.route('/predict', methods=['POST'])
def post_predict():
    message_dict = request.json
    initial_time = get_time()
    logger.info('Prediction start.')
    prediction = predict(MODEL, message_dict['message'])
    logger.info('Prediction end: ' + str(calculate_time(initial_time)) + ' seconds.')
    return json.dumps(prediction), 201

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)
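
A hedged client-side check of the endpoint above, assuming the service is running locally on port 5000 and that message carries the comma-separated feature string format_input expects:

import requests

resp = requests.post('http://localhost:5000/predict',
                     json={'message': '1.0,2.5,3.0'})  # made-up sample payload
print(resp.status_code)  # 201 on success
print(resp.json())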