Example #1
0
    def create_mmbot(self, symbol):
        """Create a market-making bot for *symbol* and register it in self.bots.

        The broker backend is chosen from self.brokertype and the market-data
        backend from self.marketdata: "ibkr" selects the Interactive Brokers
        implementation, anything else falls back to the simulated broker /
        Alpha Vantage data source.
        """
        print("CREATE MMBOT %s FOR %s WITH %d$ QUOTA TO RUN LIVE:%d" %
              (self.botname, symbol, self.quota, self.live))
        mylogger = MyLogger(symbol, self.live)

        # Broker backend: real IBKR connection or local simulation.
        if self.brokertype == "ibkr":
            print("CREATE IBKR BROKER")
            broker = IBBroker(self.budget, self.quota, mylogger)
        else:
            print("CREATE SIM MARKET")
            broker = Broker(self.budget, self.quota, mylogger)

        # Market-data backend: IBKR feed or Alpha Vantage.
        if self.marketdata == "ibkr":
            print("CREATE IBKR MARKET DATA")
            market = IBMarket(self.rth)
        else:
            # Fixed typo in log message: was "MARKER DATA".
            print("CREATE ALPHA VANTAGE MARKET DATA")
            market = Market()

        # create and add bot to list of bots
        self.bots.append(
            MMBot(budget=self.quota,
                  symbol=symbol,
                  period=self.period,
                  live=self.live,
                  debug=self.debug,
                  email=self.email,
                  daily=self.daily,
                  broker=broker,
                  market=market,
                  mylogger=mylogger,
                  since=self.since,
                  liquidate=self.liquidate))
Example #2
0
from DataHandler import DataHandler, DataManager
from MyLogger import MyLogger
import json
import requests
import random
from config import config
import timeit
from cache import fcache

logger = MyLogger()


def enum(*sequential, **named):
    """Build an ad-hoc enum class.

    Positional names are numbered 0, 1, 2, ...; keyword arguments supply
    explicit values (and override positional ones on name collision).  The
    resulting class also carries a ``reverse_mapping`` dict from value back
    to member name.
    """
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    members['reverse_mapping'] = {value: name for name, value in members.items()}
    return type('Enum', (), members)


# Access levels; the 0/1/3/7 values look like cumulative bit flags
# (User | Collaborator | Admin) -- confirm against the ACL checks.
Permission = enum(Unauthorized=0, User=1, Collaborator=3, Admin=7)
# Kinds of resources an ACL entry can refer to.
ResourceType = enum(Cluster=1, VC=2, Job=3)

# Sentinel ID space: IDs in [START, END] plus INVALID_ID are presumably
# treated as not-real identities -- verify against the lookup code.
INVALID_RANGE_START = 900000000
INVALID_RANGE_END = 999999998
INVALID_ID = 999999999


class AuthorizationManager:
    # TODO: add a cache to avoid frequent DB calls.

    # Path segment used for cluster-level ACL entries.
    CLUSTER_ACL_PATH = "Cluster"
 def test_facade(self):
     """Smoke test: the MyLogger facade can be built and exercised."""
     facade = MyLogger()
     facade.warn("Facade works!")
     facade.read()
     # Reaching this point without an exception is the whole test.
     self.assertTrue(True)
import json
import os
import requests
import sys
import yaml

from MyLogger import MyLogger

from _datetime import datetime
from requests.auth import HTTPBasicAuth

backup_log_path = '/etc/elasticsearch/scripts/backup_elasticsearch.log'
config_dir = '/etc/elasticsearch/scripts/'
my_log = MyLogger(backup_log_path)


def load_config(config_file_path):
    """Load and return the YAML configuration at *config_file_path*.

    Uses ``yaml.safe_load``: calling ``yaml.load`` without an explicit
    Loader is deprecated (PyYAML >= 5.1) and can instantiate arbitrary
    Python objects from the file.
    """
    with open(config_file_path, 'r') as stream:
        return yaml.safe_load(stream)


def take_snapshot(hostport, repo_name, snapshot_name, use_searchguard):
    querystring = {'pretty': ''}
    headers = {'content-type': "application/json"}
    payload = {
        #"indices": "filebeat-*",
        "ignore_unavailable": True,
        "include_global_state": False
    }
Example #5
0
#!/usr/bin/env python3
#
# (c) 2018 Yoichi Tanibayashi
#

import socket
import time

##### logger
from MyLogger import MyLogger

my_logger = MyLogger(__file__)


#####
class IrSend():
    '''
    LIRC Reference	http://www.lirc.org/html/lircd.html
    '''

    SOCK_PATH = '/var/run/lirc/lircd'

    def __init__(self, debug=False):
        """Set up a per-class logger.

        :param debug: when True, the logger is created in debug mode and
                      this flag is echoed to the debug log.
        """
        self.debug = debug
        # Child logger named after the class, obtained from the
        # module-level MyLogger instance.
        self.logger = my_logger.get_logger(__class__.__name__, self.debug)
        self.logger.debug('debug = %s', self.debug)

    def send1(self, dev, btn):
        self.logger.debug('dev=%s, btn=%s', dev, btn)

        ret = -1
Example #6
0
 def startLogger(self):
     """Create the logger for this instance from self.basePath."""
     wrapper = MyLogger(self.basePath)
     self.l = wrapper.logger
Example #7
0
import datetime
import requests
import threading
import pytz

from datasources.models import DataSource
from importsessions.models import UpdateSession

from DatetimeUtils import DatetimeUtils
from MyLogger import MyLogger

logger = MyLogger('reimport_script.log', MyLogger.INFO)


class UpdateSessionJobsManager:
    """Manages a queue of re-import jobs run with a bounded pool size."""
    LOG_TAG = 'UpdateSessionJobsManager'
    # Presumably the cap on retries for a given date -- confirm against
    # the code that consumes _dates_attempts.
    MAX_ATTEMPTS_PER_DAY = 3
    # NOTE(review): these mutable class attributes are shared by ALL
    # instances; fine for a singleton manager, but confirm intent.
    _dates_attempts = {}
    _queue = []
    _pool_size = 2  # default; overridden per instance in __init__
    _jobs_running = 0
    total_imports = 0
    _finished_relaunch = None

    def __init__(self, pool_size=2):
        # Number of jobs allowed to run concurrently.
        self._pool_size = pool_size

    def enqueue_job(self, date_from, date_to):
        logger.debug(msg='enqueue_job date_from=' + str(date_from) +
                     ' date_to=' + str(date_to),
                     tag=self.LOG_TAG)
Example #8
0
import json
import os
import re
import requests
import yaml

from MyLogger import MyLogger
from requests.auth import HTTPBasicAuth

restore_path = '/etc/elasticsearch/scripts/restore_elasticsearch.log'
config_dir = '/etc/elasticsearch/scripts'
my_log = MyLogger(restore_path)


def load_config(config_file_path):
    """Load and return the YAML configuration at *config_file_path*.

    Uses ``yaml.safe_load``: calling ``yaml.load`` without an explicit
    Loader is deprecated (PyYAML >= 5.1) and can instantiate arbitrary
    Python objects from the file.
    """
    with open(config_file_path, 'r') as stream:
        return yaml.safe_load(stream)


def check_snapshot(hostport, repo_name, snapshot_name, use_searchguard):
    if use_searchguard:
        data = load_config('{}/credentials.yaml'.format(config_dir))
        user = data['user']
        password = data['password']
        url = 'https://{}/_snapshot/{}/{}' \
            .format(hostport, repo_name, snapshot_name)
        querystring = {'pretty': ''}
        response = requests.request("GET",
                                    url,
                                    params=querystring,