Example #1
0
class Tool:
    """Command-line helper: parses arguments and reports JDK presence.

    Note that ``self.exist`` is hard-coded to True at construction time;
    ``jdk_exist`` only reports that flag — it does not probe the system
    for an actual JDK installation.
    """

    def __init__(self):
        # MyLogger is a project-local logging wrapper — TODO confirm its API.
        self.logger = MyLogger().getLogger()
        self.logger.info('Tool class initial ')
        self.exist = True
        self.args = self.arg_parse()

    def arg_parse(self):
        """Parse the command-line options.

        :rtype: argparse.Namespace
        """
        parser = argparse.ArgumentParser()
        parser.add_argument('--host', dest='hostname')
        parser.add_argument('-u', '--user', dest='user')
        parser.add_argument('-c', '--command', dest='command')
        parser.add_argument('-k', '--key', dest='key')
        return parser.parse_args()

    def jdk_exist(self):
        """Print whether the JDK is considered present (based on self.exist)."""
        # Fixed typo in both messages: 'jdk exits' -> 'jdk exists' (and the
        # stray trailing space in the negative branch).
        if self.exist:
            print('jdk exists')
        else:
            print('jdk not exists')
Example #2
0
    def __init__(self, host, comms, key, user, ):
        """Set up logging, a Tool helper, and spawn the ssh child process.

        :param host: remote hostname to ssh into.
        :param comms: command(s) intended to be executed remotely.
        :param key: optional path to an ssh identity file.
        :param user: remote user name.
        """
        self.logger = MyLogger().getLogger()
        self.logger.info('AutoD initial beginning~~~')
        self.tool = Tool()
        self.host = host
        self.comms = comms
        # With a key file we pass `-i`; otherwise a plain user@host login.
        if key:
            self.child = run('ssh -i {0} {1}@{2}'.format(key, user, host))
        else:
            self.child = run('ssh {0}@{1}'.format(user, host))
Example #3
0
class AutoD:
    """Open an ssh session to *host* (optionally with a key file) and run commands."""

    def __init__(self, host, comms, key, user, ):
        """
        :param host: remote hostname to ssh into.
        :param comms: command(s) intended to be executed remotely.
        :param key: optional path to an ssh identity file; falsy means plain ssh.
        :param user: remote user name.
        """
        self.logger = MyLogger().getLogger()
        self.logger.info('AutoD initial beginning~~~')
        self.tool = Tool()

        self.host = host
        self.comms = comms
        if not key:
            self.child = run('ssh {0}@{1}'.format(user, host))
        else:
            self.child = run('ssh -i {0} {1}@{2}'.format(key, user, host))

    def destroyed(self):
        """Log and force-close the ssh session."""
        # Fixed typo in the log message: 'Destoryed' -> 'Destroyed'.
        self.logger.info('Destroyed the ssh login ~~~ ')
        self.child.close(force=True)

    def exec_command(self):
        """Log the call, check for the JDK, and echo the configured command."""
        self.logger.info('AutoD.exec_command ')
        self.tool.jdk_exist()
        print('command is {}'.format(self.comms))
Example #4
0
 def startLogger(self):
     """Create this object's logger and store it on ``self.l``."""
     # MyLogger is built from the instance's base path and exposes the actual
     # logger object via its ``logger`` attribute — TODO confirm MyLogger API.
     self.l = MyLogger(self.basePath).logger
Example #5
0
# ===================================================================================
import sys
sys.path.append("../MyLogger/")
sys.path.append("../MyDataBase/")
from MyLogger import MyLogger
MyLogger = MyLogger.GetInstance()
from MyDataBase import MyDataBase
from MyTkRoot import MyTkRoot
from WidgetFactory import WidgetFactory
# ===================================================================================
import tkinter as tk


# ===================================================================================
## @brief メモを表示するフレーム
class TaskFrame(tk.Frame):
    """Frame that displays memos/tasks (original brief: a memo-display frame)."""

    @MyLogger.deco
    def __init__(self, master, **kw):
        """Build the frame: obtain both workbooks and create the widgets.

        :param master: parent Tk widget.
        :param kw: extra options forwarded to tk.Frame.
        """
        super().__init__(master, **kw)
        # Database wrapper over task.xlsx; appends project/task/status columns.
        self.taskdata = MyDataBase.GetInstance("task.xlsx")
        self.taskdata.DBAppendColumn(
            ['data/project', 'data/task', 'data/status'])
        # Database wrapper over memo.xlsx; appends project/task/memo columns.
        self.memodata = MyDataBase.GetInstance("memo.xlsx")
        self.memodata.DBAppendColumn(
            ['data/project', 'data/task', 'data/memo'])
        # Widget registries, presumably filled by the Initialize*Widget calls
        # below — TODO confirm against those methods (not visible here).
        self.inputfield = {}
        self.filterfield = {}
        self.viewerfield = {}
        self.memofield = {}
        self.InitializeStaticWidget()
        self.InitializeDynamicWidget()
Example #6
0
# from config import config
import pyodbc
import json
import base64
import os

import timeit

from Queue import Queue
import threading

from config import config
from config import global_vars
from MyLogger import MyLogger

logger = MyLogger()

### set to a larger number if flask is running on multithreading
sql_max_connect_num = 35
sql_live_connect_num = 25


class SQLConnManager:
    @staticmethod
    def Connect():
        server = config["database"]["hostname"]
        database = "DLWSCluster-%s" % config["clusterId"]
        username = config["database"]["username"]
        password = config["database"]["password"]
        # self.driver = '/usr/lib/x86_64-linux-gnu/libodbc.so'
        driver = '{ODBC Driver 13 for SQL Server}'
Example #7
0
    #  , 'db_pass'   : '<mysql-password>'
    #  , 'db_host'   : 'localhost'
    #  , 'local_dir' : 'mysqldump+ssh-test'
    #  },
    #  { 'title'     : 'Direct MySQL dump with exotic MySQL server port'
    #  , 'type'      : 'mysqldump'
    #  , 'db_user'   : 'root'
    #  , 'db_pass'   : '<mysql-password>'
    #  , 'db_host'   : 'sql.machine.com'
    #  , 'db_port'   : 9999
    #  , 'local_dir' : 'mysqldump-test'
    #  },
]

if __name__ == "__main__":
    logger = MyLogger()
    logger.doLog("starting!!")
    backupUtils = BackupUtils(logger)
    backupUtils.checkPythonVersion()
    backupUtils.checkOperativeSystem()
    backupUtils.checkRootDirectory(root_backup_dir)
    backupUtils.checkCommands(["rdiff-backup", "rm", "tar", "bzip2"])
    backupObjectsList = []
    # Doing this right now is nicer to the user: thanks to this he doesn't need to wait the end of the (X)th backup to get the error about the (X+1)th
    for backup in backup_list:
        # Check datas and requirement for each backup
        backup = backupUtils.parseBackupDefinition(backup)
        backup_folders = backupUtils.returnDirectoryList(root_backup_dir, backup["local_dir"])
        backupUtils.checkDirectories(root_backup_dir, backup["local_dir"])
        if backup["type"] == "":
            continue
Example #8
0
#!/usr/bin/env python3
#
# (c) 2019 Yoichi Tanibayashi
#
'''
複数のサーボモーターを同期をとりながら制御する(個数は任意)
'''
__author__ = 'Yoichi Tanibayashi'
__date__   = '2019'

import pigpio
import time

#####
from MyLogger import MyLogger
my_logger = MyLogger(__file__)

#####
# Servo pulse-width constants — presumably microseconds, the unit used by
# pigpio's set_servo_pulsewidth (TODO confirm). 0 turns the servo off.
PULSE_OFF  = 0
PULSE_MIN  = 500
PULSE_MAX  = 2500
PULSE_HOME = 1500

# Step size per pulse update, and the factor used to derive wait intervals.
PULSE_STEP      = 10
INTERVAL_FACTOR = 0.40

# Per-servo defaults: GPIO pin numbers and home/min/max pulse widths.
DEF_PIN = [17, 27, 22, 23]
DEF_PULSE_HOME = [1500, 1500, 1500, 1500]
DEF_PULSE_MIN  = [ 500,  500,  500,  500]
DEF_PULSE_MAX  = [2500, 2500, 2500, 2500]
Example #9
0
import datetime
import requests
import threading
import pytz

from datasources.models import DataSource
from importsessions.models import UpdateSession

from DatetimeUtils import DatetimeUtils
from MyLogger import MyLogger

logger = MyLogger('reimport_script.log', MyLogger.INFO)


class UpdateSessionJobsManager:
    """Queues and runs re-import jobs (only the head of the class is visible here)."""

    LOG_TAG = 'UpdateSessionJobsManager'
    # Cap on how many times a given date may be attempted per day.
    MAX_ATTEMPTS_PER_DAY = 3
    # NOTE(review): the mutable containers below are class-level, i.e. shared
    # across all instances — confirm only one manager is ever created.
    _dates_attempts = {}
    _queue = []
    _pool_size = 2
    _jobs_running = 0
    total_imports = 0
    _finished_relaunch = None

    def __init__(self, pool_size=2):
        # Shadows the class-level default with a per-instance pool size.
        self._pool_size = pool_size

    def enqueue_job(self, date_from, date_to):
        logger.debug(msg='enqueue_job date_from=' + str(date_from) +
                     ' date_to=' + str(date_to),
                     tag=self.LOG_TAG)
Example #10
0
def _pop_normalized(path_str):
    """Split off the last '/'-separated component of *path_str* and normalize it.

    The component is converted to half-width digits/ASCII and full-width kana
    via jaconv, matching the original per-component stanzas.

    :param path_str: a '/'-separated path string.
    :return: tuple ``(normalized_component, remainder_of_path)``.
    """
    component = path_str[path_str.rfind("/") + 1:]
    component = jaconv.z2h(component, kana=False, digit=True, ascii=True)
    component = jaconv.h2z(component, kana=True, digit=False, ascii=False)
    return component, path_str[:path_str.rfind("/")]


def handleM4A(path):
    """Rewrite the tags of an .m4a file from its directory components.

    The rightmost path components are popped in order: song, album, artist,
    genre, then a category folder. NOTE(review): the original header comment
    claimed a ``./GENRE/Compilations/ARTIST/ALBUM/SONG.m4a`` layout, which
    does not match the pop order below — the code's order is preserved here;
    confirm the intended layout.

    :param path: filesystem path of the .m4a file (may use backslashes).
    """
    temp = path.replace("\\", "/")
    # Pop components right-to-left; each is width-normalized by the helper.
    song, temp = _pop_normalized(temp)
    album, temp = _pop_normalized(temp)
    artist, temp = _pop_normalized(temp)
    genre, temp = _pop_normalized(temp)
    # The next component is a category folder (not width-normalized);
    # compilations get a fixed artist name.
    category = temp[temp.rfind("/") + 1:]
    temp = temp[:temp.rfind("/")]
    if category == "__02_Compilations__":
        artist = "__Compilations__"
    elif category == "__01_Favorites__":
        pass
    # Write the normalized values into the MP4 tags and log each one.
    mp4 = MP4(path)
    MyLogger.info(path)
    mp4.tags[TRACK_TITLE] = song
    mp4.tags[ALBUM] = album
    mp4.tags[ALBUM_ARTIST] = artist
    mp4.tags[ALBUM_SORT_ORDER] = conv.do(album)
    mp4.tags[ARTIST] = artist
    mp4.tags[ARTIST_SORT_ORDER] = conv.do(artist)
    mp4.tags[GENRE] = genre
    MyLogger.info("mp4.tags[TRACK_TITLE]", str(mp4.tags[TRACK_TITLE]))
    MyLogger.info("mp4.tags[ALBUM]", str(mp4.tags[ALBUM]))
    MyLogger.info("mp4.tags[ALBUM_ARTIST]", str(mp4.tags[ALBUM_ARTIST]))
    MyLogger.info("mp4.tags[ALBUM_SORT_ORDER]",
                  str(mp4.tags[ALBUM_SORT_ORDER]))
    MyLogger.info("mp4.tags[ARTIST]", str(mp4.tags[ARTIST]))
    MyLogger.info("mp4.tags[ARTIST_SORT_ORDER]",
                  str(mp4.tags[ARTIST_SORT_ORDER]))
    MyLogger.info("mp4.tags[GENRE]", str(mp4.tags[GENRE]))
Example #11
0
 def __init__(self, build_number, jenkins_url, job_name):
     """Store the Jenkins build coordinates and obtain a logger.

     :param build_number: number of the Jenkins build to inspect.
     :param jenkins_url: base URL of the Jenkins server.
     :param job_name: name of the Jenkins job.
     """
     self._jenkins_url = jenkins_url
     self._job_name = job_name
     self._build_number = build_number
     # MyLogger.__call__() — presumably a singleton-style accessor; for a
     # plain class this is equivalent to MyLogger() — TODO confirm.
     self._logger = MyLogger.__call__().get_logger()
Example #12
0
 def SetIgnoreFunction(self, functions):
    """Append *functions* to the ignore list and log the resulting list.

    :param functions: sequence of function names to add to the ignore list.
    """
    self.ignore_functions += functions
    # MyLogger.sakura looks like a project-specific trace call — TODO confirm.
    MyLogger.sakura(self.ignore_functions)
Example #13
0
 def HasFocus(cls, id, focused):
     """Return the key of the widget under *id* whose instance equals *focused*.

     Logs the matching key via MyLogger.critical before returning it;
     returns None when no registered widget matches.
     """
     registry = cls.GetInstance().widgets[id]
     _MISSING = object()
     found = _MISSING
     for name, entry in registry.items():
         if entry['instance'] == focused:
             found = name
             break
     if found is _MISSING:
         return None
     MyLogger.critical(found)
     return found
Example #14
0
 def __init__(self):
     """Initialize: obtain a logger, set the existence flag, parse arguments."""
     self.logger = MyLogger().getLogger();
     self.logger.info('Tool class initial ')
     # Hard-coded flag; presumably consumed by a jdk_exist()-style check
     # elsewhere in the class — TODO confirm.
     self.exist = True
     self.args = self.arg_parse()
Example #15
0
sys.path.append(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), "../storage"))

import yaml
from jinja2 import Environment, FileSystemLoader, Template
from config import config
from DataHandler import DataHandler
import base64
import re

from config import global_vars
from MyLogger import MyLogger

import copy

logger = MyLogger()


def LoadJobParams(jobParamsJsonStr):
    """Deserialize a JSON-encoded job-parameter string.

    :param jobParamsJsonStr: JSON text describing the job.
    :return: the parsed Python object (normally a dict).
    """
    params = json.loads(jobParamsJsonStr)
    return params


def SubmitJob(jobParamsJsonStr):
    """Validate and submit a job described by a JSON string.

    Only the start of the validation is visible in this view; on failure a
    dict carrying an "error" message is returned to the caller.

    :param jobParamsJsonStr: JSON text that must contain a non-blank "jobName".
    :return: a dict; contains "error" when validation fails.
    """
    ret = {}

    jobParams = LoadJobParams(jobParamsJsonStr)

    # jobName is mandatory and must not be blank/whitespace-only.
    if "jobName" not in jobParams or len(jobParams["jobName"].strip()) == 0:
        ret["error"] = "ERROR: Job name cannot be empty"
        return ret
Example #16
0
# from config import config
import pyodbc
import json
import base64
import os

import timeit

from Queue import Queue
import threading

from config import config
from config import global_vars
from MyLogger import MyLogger

logger = MyLogger()

### set to a larger number if flask is running on multithreading
sql_max_connect_num = 35
sql_live_connect_num = 25


class SQLConnManager:
    @staticmethod
    def Connect():
        server = config["database"]["hostname"]
        database = "DLWorkspaceCluster-%s" % config["clusterId"]
        username = config["database"]["username"]
        password = config["database"]["password"]
        # self.driver = '/usr/lib/x86_64-linux-gnu/libodbc.so'
        driver = '{ODBC Driver 13 for SQL Server}'
Example #17
0
from pymongo import MongoClient

import twitter
from MyLogger import MyLogger
from social.celery import app


"""Database config
"""
__IP_ADDRESS__ = "127.0.0.1"
__PORT__ = 27017


""" config for logging
"""
logger = MyLogger.getLogger()
logger.info("TweetHunter start working")


""" config for database
"""
__DB_NAME__ = 'NewsHunter'
try:
    logger.info ("Connecting to database")
    client = MongoClient(__IP_ADDRESS__, __PORT__)
    logger.info ("Connecting to database successfully")
    db = client[__DB_NAME__]
except ValueError:
    logger.error ("Connecting to database error: " + ValueError)

Example #18
0
import json
import os
import re
import requests
import yaml

from MyLogger import MyLogger
from requests.auth import HTTPBasicAuth

# Log file used by this restore script, and the directory holding its
# YAML config files (e.g. credentials.yaml).
restore_path = '/etc/elasticsearch/scripts/restore_elasticsearch.log'
config_dir = '/etc/elasticsearch/scripts'
my_log = MyLogger(restore_path)


def load_config(config_file_path):
    """Load a YAML config file and return the parsed data.

    :param config_file_path: path to a YAML file on disk.
    :return: the deserialized YAML content (typically a dict).
    """
    # FIX: yaml.load(stream) without an explicit Loader is deprecated and a
    # TypeError on PyYAML >= 6. The config is plain local data, so safe_load
    # is the correct, non-code-executing choice.
    with open(config_file_path, 'r') as stream:
        data = yaml.safe_load(stream)
    return data


def check_snapshot(hostport, repo_name, snapshot_name, use_searchguard):
    if use_searchguard:
        data = load_config('{}/credentials.yaml'.format(config_dir))
        user = data['user']
        password = data['password']
        url = 'https://{}/_snapshot/{}/{}' \
            .format(hostport, repo_name, snapshot_name)
        querystring = {'pretty': ''}
        response = requests.request("GET",
                                    url,
                                    params=querystring,