Code Example #1
File: apps.py Project: jiangnianshun/django-mdict
    def ready(self):
        init_mdict_list()

        # This function runs twice in apps.py under Django's autoreloader; pass --noreload to disable the extra run.
        run_once = os.environ.get('CMDLINERUNNER_RUN_ONCE')
        if run_once is None:
            os.environ['CMDLINERUNNER_RUN_ONCE'] = 'True'
            print_sys_info()
            check_readlib()
            if check_system() == 1:
                init_ws_server()
            if not check_apache():
                init_wd_server()
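A note on the guard above: under `manage.py runserver`, Django's autoreloader runs the project in a parent and a child process, so ready() can execute once in each; because the child inherits the parent's environment, a variable set on the first pass suppresses the second. A minimal self-contained sketch of the same pattern (the GuardedConfig class and MYAPP_RUN_ONCE key are illustrative, not from the project):

import os

from django.apps import AppConfig


class GuardedConfig(AppConfig):
    # Illustrative AppConfig showing the run-once guard used above.
    name = 'myapp'

    def ready(self):
        # The autoreloader's child process inherits the parent's
        # environment, so the flag set on the first pass
        # short-circuits the second one.
        if os.environ.get('MYAPP_RUN_ONCE') is not None:
            return
        os.environ['MYAPP_RUN_ONCE'] = 'True'
        # one-time startup work (system checks, background servers, ...)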
Code Example #2
File: apps.py Project: jiangnianshun/django-mdict
def init_wd_server():
    # check_system() == 0 means Linux: run the script through the shell
    # with python3; on Windows, Popen takes the command string directly.
    if check_system() == 0:
        cmd = ['python3', 'wd_server.py']
        shell = True
    else:
        cmd = ['python', 'wd_server.py']
        shell = False
    command = ' '.join(cmd)
    print_log_info(['running watch dog server...'])
    try:
        subprocess.Popen(command, shell=shell, cwd=script_path)
    except Exception as e:
        print(e)
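Joining the argv list into one string matters here: with shell=True on POSIX the string is handed to /bin/sh, while on Windows Popen accepts a plain command string directly. If paths might contain spaces, quoting is the next concern; a defensive variant using only the standard library (launch_script is illustrative, not a project function):

import shlex
import subprocess
import sys


def launch_script(script, cwd='.'):
    # Illustrative launcher: shlex.join() (Python 3.8+) quotes each
    # argument safely for the POSIX shell; Windows takes the raw string.
    cmd = [sys.executable, script]
    if sys.platform == 'win32':
        return subprocess.Popen(' '.join(cmd), cwd=cwd)
    return subprocess.Popen(shlex.join(cmd), shell=True, cwd=cwd)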
Code Example #3
def search_mdx_sug(dic_pk, sug_list, group, flag):
    global prpool, thpool
    cnum = get_cpu_num()
    sug = []
    if check_system() == 0 and dic_pk == -1:
        q_list = ((i, sug_list, group, False) for i in range(cnum))
        record_list = prpool.starmap(multiprocess_search_mdx, q_list)
        for r in record_list:
            sug.extend(r)
    elif check_system() == 1 and dic_pk == -1:
        try:
            sug.extend(ws_search(sug_list, group, 'sug'))
        except Exception as e:
            print(e)
            # if thpool is None:
            #     thpool = create_thread_pool()
            # q_list = ((i, sug_list, group) for i in range(cnum))
            # record_list = thpool.starmap(multithread_search_sug, q_list)
            # for r in record_list:
            #     sug.extend(r)
    else:  # search suggestions for a single dictionary
        sug.extend(loop_search_sug(dic_pk, sug_list, flag, group))

    return sug
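On Linux the function fans the query out over cnum worker processes, each scanning its own slice of the dictionary list, then flattens the per-worker result lists. The starmap fan-out in isolation (scan_slice and the argument values are illustrative):

from multiprocessing import Pool


def scan_slice(slice_id, query, group, strict):
    # Illustrative worker: each process would search its own slice
    # of the dictionaries and return a list of hits.
    return ['%s: hit from slice %d' % (query, slice_id)]


if __name__ == '__main__':
    with Pool(4) as pool:
        q_list = ((i, 'variation', 0, False) for i in range(4))
        sug = []
        for r in pool.starmap(scan_slice, q_list):
            sug.extend(r)
        print(sug)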
Code Example #4
def init_obj(proc_flag):
    global init_vars, k_list

    # init_vars.mdict_odict = read_pickle_file(pickle_file_path, mdict_root_path)
    init_vars = load_cache(mdict_root_path)
    init_vars.mdict_odict, init_vars.indicator = sort_mdict_list(
        init_vars.mdict_odict)
    init_vars.mtime = os.path.getmtime(pickle_file_path)

    temp_list = []
    k_list = []
    if check_system() == 0:
        k_list = init_vars.indicator[proc_flag]
    else:
        for k in init_vars.indicator[proc_flag]:
            k_list.append(k)
            temp_list.append(init_vars.mdict_odict[k])

        init_vars.mdict_odict = temp_list
        gc.collect()
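init_obj stamps the loaded cache with the pickle file's mtime; Example #8 later compares that stamp against the file on disk and reloads when the cache has been rewritten. The invalidation scheme in miniature (CachedBlob is illustrative):

import os
import pickle


class CachedBlob:
    # Illustrative mtime-guarded loader for a pickled cache file.
    def __init__(self, path):
        self.path = path
        self.mtime = None
        self.data = None

    def get(self):
        now_mtime = os.path.getmtime(self.path)
        if self.mtime is None or self.mtime < now_mtime:
            # Cache file was rewritten since the last load: reload it.
            with open(self.path, 'rb') as f:
                self.data = pickle.load(f)
            self.mtime = now_mtime
        return self.data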
Code Example #5
File: admin.py Project: jiangnianshun/django-mdict
def createAllIndex(modeladmin, request, queryset):
    try:
        cmd = ['mdict_es.py']
        if check_system() == 0:
            cmd.insert(0, 'python3')
        else:
            cmd.insert(0, 'python')

        all_dics = MdictDic.objects.all()
        if len(queryset) < len(all_dics):
            cmd.append('-c')
            for dic in queryset:
                cmd.append(str(dic.pk))
        else:
            cmd.append('-ca')

        command = ' '.join(cmd)
        # Passing the cmd list directly may fail to run the script on Ubuntu
        # (it can open the Python interpreter instead), so the list is joined
        # into a single string first.

        print('running script:', command)
        subprocess.Popen(command, shell=True, cwd=script_path)
    except Exception as e:
        print(e)
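createAllIndex follows Django's admin-action signature, (modeladmin, request, queryset): the primary keys of the rows selected in the change list become the '-c' arguments for mdict_es.py. A skeletal registration of such an action (create_index and the message text are illustrative; the @admin.action decorator requires Django 3.2+):

from django.contrib import admin


@admin.action(description='Create full-text index for selected dictionaries')
def create_index(modeladmin, request, queryset):
    # Illustrative action: gather the selected primary keys the same
    # way createAllIndex does before handing them to mdict_es.py.
    pks = [str(dic.pk) for dic in queryset]
    modeladmin.message_user(request, 'would index pks: ' + ', '.join(pks))


class DicAdmin(admin.ModelAdmin):
    actions = [create_index]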
Code Example #6
def search_mdx_dic(query_list, record_list, group):
    global prpool, thpool
    # search the mdx dictionaries
    cnum = get_cpu_num()
    if check_system() == 0:
        # prpool = check_pool_recreate(pool)
        q_list = ((i, query_list, group) for i in range(cnum))
        a_list = prpool.starmap(multiprocess_search_mdx, q_list)
        for a in a_list:
            record_list.extend(a)

    else:
        try:
            record_list.extend(ws_search(query_list, group, 'dic'))
        except Exception as e:
            print('ws server connection failed', e)
            # if thpool is None:
            #     thpool = create_thread_pool()
            # q_list = ((i, query_list, group) for i in range(cnum))
            # a_list = thpool.starmap(multithread_search_mdx, q_list)
            # for a in a_list:
            #     record_list.extend(a)

    return record_list
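Like Example #3, the Windows branch first asks the long-running ws server and only logs when the connection fails; the commented-out block is an older thread-pool fallback. The try-remote, fall-back-local shape in general form (all names illustrative):

def search_with_fallback(query, remote_search, local_search):
    # Illustrative wrapper: prefer the shared search server, degrade
    # to an in-process search when it is unreachable.
    try:
        return remote_search(query)
    except Exception as e:
        print('ws server connection failed', e)
        return local_search(query)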
Code Example #7
import os
import sys
import time
import pickle
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root_path)

from base.base_utils import ROOT_DIR
from base.base_sys import check_system
from mdict.mdict_utils.mdict_utils import set_mdict_path
from mdict.mdict_utils.init_utils import rewrite_cache

if check_system() == 0:
    cache_name = '.Linux.dat'
elif check_system() == 1:
    cache_name = '.Windows.dat'
else:
    cache_name = ''

cache_path = os.path.join(ROOT_DIR, '.cache', cache_name)
dir_status = False


class DirWatch:
    def __init__(self, target_dir=''):
        self.observer = Observer()
        self.target_dir = target_dir
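The excerpt stops before DirWatch is wired to the watchdog API; the remaining steps are scheduling a FileSystemEventHandler on the target directory and starting the observer thread. A minimal sketch of that wiring (RescanHandler and its body are illustrative, not the project's handler):

import time

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler


class RescanHandler(FileSystemEventHandler):
    # Illustrative handler: react to any change under the watched tree.
    def on_any_event(self, event):
        print('change detected:', event.src_path)


if __name__ == '__main__':
    observer = Observer()
    observer.schedule(RescanHandler(), path='.', recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()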
Code Example #8
def multi_search_mdx(n, query_list, group_pk, is_mdx=True):
    global init_vars, k_list, all_dics
    r_list = []

    if check_system() == 0:
        k_list = init_vars.indicator[n]
    if init_vars is None or init_vars.mtime is None:
        init_obj(n)
    elif not check_apache():
        # Reload when the on-disk cache is newer and non-empty.
        now_mtime = os.path.getmtime(pickle_file_path)
        cache_size = os.path.getsize(pickle_file_path)
        if init_vars.mtime < now_mtime and cache_size > 0:
            init_obj(n)

    count = 0
    for k in k_list:
        if check_system() == 0:
            temp_object = init_vars.mdict_odict[k]
        else:
            temp_object = init_vars.mdict_odict[count]
            count += 1

        mdx = temp_object.mdx
        mdd_list = temp_object.mdd_list
        g_id = temp_object.g_id
        dict_file = mdx.get_fname()

        if isinstance(all_dics, dict):
            if k not in all_dics:
                dic = get_or_create_dic(dict_file)
                all_dics = get_all_dics()
            else:
                dic_tuple = all_dics[k]
                dic = dicObject(*dic_tuple)
        else:
            dic_list = all_dics.filter(mdict_file=dict_file)
            if len(dic_list) == 0:
                dic = get_or_create_dic(dict_file)
            else:
                dic = dic_list[0]

        if dic is not None and dic.mdict_enable:
            params = (mdx, mdd_list, get_dic_attrs(dic), copy.copy(query_list))
            # query_list needs a shallow copy
            if group_pk == 0:  # default: search all dictionaries
                if is_mdx:
                    r_list.extend(
                        SearchObject(*params, g_id=g_id).search_entry_list())
                else:
                    r_list.extend(SearchObject(*params).search_sug_list(3))
            else:  # search only the dictionaries in the given group
                if check_dic_in_group(group_pk, dic.pk):
                    if is_mdx:
                        r_list.extend(
                            SearchObject(*params,
                                         g_id=g_id).search_entry_list())
                    else:
                        r_list.extend(SearchObject(*params).search_sug_list(3))

    if is_mdx:
        r_list = merge_record(r_list)
    return r_list
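The comment above notes that query_list needs a shallow copy, presumably so each SearchObject can mutate its own list without touching the caller's. What copy.copy gives for a list, in two lines:

import copy

base = ['variation']
clone = copy.copy(base)   # new list object, same element objects
clone.append('variant')
print(base, clone)        # ['variation'] ['variation', 'variant']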
Code Example #9
import os

from base.base_utils import ROOT_DIR
from base.base_sys import check_system

from .data_utils import get_or_create_dic, get_all_dics, check_dic_in_group
from .init_utils import initVars, load_cache
from .search_object import SearchObject
from .mdict_utils import get_dic_attrs, mdict_root_path
from .mdict_utils2 import sort_mdict_list
from .dic_object import dicObject

from .entry_object import entryObject

try:
    from mdict.models import MdictDicGroup
except Exception:
    pass

all_dics = get_all_dics()
if check_system() == 0:
    pickle_file_path = os.path.join(ROOT_DIR, '.cache', '.Linux.cache')
    from .init_utils import init_vars
else:
    pickle_file_path = os.path.join(ROOT_DIR, '.cache', '.Windows.cache')
    init_vars = initVars()
k_list = []


def merge_record(record_list):
    # Merge entries that occur more than once; e.g. the National Academy for
    # Educational Research bilingual vocabulary dictionary returns 27 entries
    # for "variation".
    # Merge only while the length stays under 500: merging all the "a" entries
    # of English/Japanese/Chinese dictionaries produces content so long that a
    # desktop stalls for a long time before the expanded iframe can render.
    merge_entry_max_length = get_config_con('merge_entry_max_length')

    if merge_entry_max_length == 0:
        return record_list
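The excerpt ends after the early return; the rest of merge_record groups records that share a headword and concatenates them only while the combined length stays under merge_entry_max_length. A plausible standalone sketch of that policy (merge_by_headword and the dict record shape are assumptions, not the project's actual code):

def merge_by_headword(records, max_length):
    # Illustrative merger over {'entry': ..., 'content': ...} dicts:
    # concatenate duplicates of a headword unless the merged content
    # would exceed max_length, in which case start a new record.
    merged = {}
    out = []
    for rec in records:
        target = merged.get(rec['entry'])
        if (target is not None and
                len(target['content']) + len(rec['content']) <= max_length):
            target['content'] += rec['content']
        else:
            entry = dict(rec)
            merged[rec['entry']] = entry
            out.append(entry)
    return out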