Example #1
 def create_uuid(self, directory: Optional[str] = './cache'):
     """Create UID for samples to get the actual name for later use cases"""
     print("START caching file names.")
     if osp.exists(directory):
         self.cache_names = Index(directory)
     else:
         self.cache_names = Index(
             directory, {
                 str(index): str(uuid.uuid5(uuid.NAMESPACE_X500,
                                            str(index)))
                 for index, _ in enumerate(self.dataset)
             })
         # use values as keys
         # self.cache_names.update({
         #         value: key for key, value in self.cache_names.items()
         #     })
     print("END caching file names.")
Example #2
def db_client():
    """Return cached db connection"""
    global DB_CIENT
    if DB_CIENT is None:
        DB_CIENT = Index(iscc_registry.settings.db_dir)
        log.debug(
            f"Initialized ISCC state DB in {iscc_registry.settings.db_dir}")
    return DB_CIENT
Example #3
def get_comp_order_book():
    print("查找comp挂单大于100的")
    futures = ftx.public_get_futures()["result"]
    msg = {}
    name = "comp_alarm"
    ALARM_SIZE = 100
    # msg["COMP-PERP"]["asks"][[187.1, 1.0471], [187.1, 1.0471]]
    comp_pd = [i["name"] for i in futures if "COMP" in i["name"]]
    for comp in comp_pd:
        orderbook = ftx.fetch_order_book(comp, 1)
        for i in orderbook["asks"]:
            price, size = i
            if size >= ALARM_SIZE:
                if comp in msg:
                    msg[comp]["asks"].append(i)
                else:
                    msg[comp] = {"asks": [], "bids": []}
                    msg[comp]["asks"].append(i)
        for i in orderbook["bids"]:
            price, size = i
            if size >= ALARM_SIZE:
                if comp in msg:
                    msg[comp]["bids"].append(i)
                else:
                    msg[comp] = {"asks": [], "bids": []}
                    msg[comp]["bids"].append(i)
    if msg:
        new_msg = {}
        for k, v in msg.items():
            if v["asks"] and v["bids"]:
                new_msg[k] = v
        result = Index("data/result")
        send_txt = ""
        msg = new_msg
        if msg:
            for k, v in msg.items():
                send_txt += k
                send_txt += "\n\n"
                send_txt += json.dumps(v)
                send_txt += "\n\n"

            if name in result:
                before_data = result[name]
                if msg != before_data:
                    sendMail(
                        f"{k} 有挂单超过100了",
                        send_txt,
                        ["*****@*****.**", "*****@*****.**"],
                    )
                    result[name] = msg
            else:
                sendMail("COMP有挂单超过50了", send_txt,
                         ["*****@*****.**", "*****@*****.**"])
                result[name] = msg
Example #4
 def __init__(self, dbdir=None, baseiri=None, clear=False):
     '''
     Versa connection object built from DiskCache collection object
     '''
     self._dbdir = dbdir
     self._db = Index(dbdir)
     if clear: self._db.clear()
     self._ensure_abbreviations()
     #self.create_model()
     self._baseiri = baseiri
     self._abbr_index = 0
     return
Example #5
    def __init__(
        self,
        tmp_dir: "StrPath",
        name: str,
    ):  # pylint: disable=super-init-not-called
        from diskcache import Index

        from dvc.fs.local import LocalFileSystem
        from dvc.utils.fs import makedirs

        self.index_dir = os.path.join(tmp_dir, self.INDEX_DIR, name)
        makedirs(self.index_dir, exist_ok=True)
        self.fs = LocalFileSystem()
        self.index = Index(self.index_dir)
Example #6
    def __init__(self,
                 tmp_dir: "StrPath",
                 name: str,
                 dir_suffix: Optional[str] = None):  # pylint: disable=super-init-not-called
        from diskcache import Index

        from dvc.fs.local import LocalFileSystem
        from dvc.utils.fs import makedirs

        self.index_dir = os.path.join(tmp_dir, self.INDEX_DIR, name)
        makedirs(self.index_dir, exist_ok=True)
        self.fs = LocalFileSystem()
        self.index = Index(self.index_dir)

        if not dir_suffix:
            dir_suffix = self.fs.CHECKSUM_DIR_SUFFIX
        self.dir_suffix = dir_suffix
Example #7
def get_btc_move_diff(futures):
    "获取各个btc move的差价"
    perpetuals = [i for i in futures if i["type"] == "move"]
    perpetual_names = [{"future_name": i["name"]} for i in perpetuals]
    strikePrices = get_future_stats(perpetual_names)
    strikePrices = {i["name"]: i for i in strikePrices}
    btc_moves = []
    for i in perpetuals:
        name = i["name"]
        if strikePrices[name].get("strikePrice", False):
            c = round(i["index"], 4)  # 指数成分市场的平均市价
            mark = i["mark"]  # 期货标记价格
            strikePrice = round(strikePrices[name]["strikePrice"],
                                4)  # underlying price at the start of the expiry day
            diff = round(abs(abs(c - strikePrice) - mark), 4)
            c1 = round(abs(c - strikePrice), 4)  # expected settlement price
            print(
                f"{name}: 行权价:{strikePrice}, BTC指数价:{c}, move价格:{mark},差价:{diff}"
            )

            _append = {
                "index": c,
                "mark": mark,
                "strikePrice": strikePrice,
                "diff": diff,
                "name": name,
                "c1": c1,
            }
            btc_moves.append(_append)
            if diff > 3000:
                result = Index("data/result")
                if name in result:
                    t = result[name]  # time the last email was sent
                    if int(time.time()) - t > 60 * 60:  # more than one hour ago
                        sendMail("FTX MOVE price difference exceeds 3000", json.dumps(_append),
                                 ["*****@*****.**"])
                        result[name] = int(time.time())
                else:
                    sendMail("FTX MOVE 差价大于500了", json.dumps(_append),
                             ["*****@*****.**"])
                    result[name] = int(time.time())
    return sorted(btc_moves, key=lambda k: k["diff"], reverse=True)
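Examples #3 and #7 both use an Index entry to avoid re-sending the same alert email. Distilled into a small helper, the pattern looks roughly like this (the function name and the one-hour window are illustrative, not taken from the original code):

import time

from diskcache import Index

alerts = Index("data/result")

def should_alert(key, min_interval=60 * 60):
    """Return True (and record the time) if no alert was sent for `key` recently."""
    last = alerts.get(key)
    if last is not None and int(time.time()) - last < min_interval:
        return False
    alerts[key] = int(time.time())
    return True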
Example #8
import numpy as np  # type: ignore
import spacy  # type: ignore

from typing import List
from spacy.tokens.doc import Doc  # type: ignore
from sklearn.feature_extraction.text import CountVectorizer  # type: ignore
from sklearn.metrics.pairwise import cosine_similarity  # type: ignore
from diskcache import Cache, Index  # type: ignore
from dataclasses import dataclass
from hashlib import sha256

from ipybible import BIBLE_DATA_DIR

SIM_CACHE: Cache = Cache()
BIBLE_INDEX = Index(str(BIBLE_DATA_DIR))


@dataclass
class SpacyLangModel:
    nlp: spacy
    stop_words: List[str]


def normalize_text(text: str,
                   spacy_model: SpacyLangModel,
                   index_name: Index = BIBLE_INDEX):
    index_key = sha256(text.encode("utf-8")).hexdigest()
    if index_key in index_name:
        return index_name[index_key]
    else:
        doc: Doc = spacy_model.nlp(text.lower())
Example #9
           'nwid': 'b6079f73ca8129ad',
           'objtype': 'network',
           'private': True,
           'remoteTraceLevel': 0,
           'remoteTraceTarget': None,
           'revision': 1,
           'routes': [],
           'rules': [{'not': False, 'or': False, 'type': 'ACTION_ACCEPT'}],
           'rulesSource': '',
           'tags': [],
           'v4AssignMode': {'zt': False},
           'v6AssignMode': {'6plane': False, 'rfc4193': False, 'zt': False}}


# has_aging = False
cache = Index(get_cachedir(dir_name='fpn_test', user_dirs=True))
net_q = Deque(get_cachedir(dir_name='net_queue', user_dirs=True))
max_age = NODE_SETTINGS['max_cache_age']
utc_stamp = datetime.datetime.now(utc)  # use local time for console

client = mock_zt_api_client()


# special test cases
def json_check(data):
    import json

    json_dump = json.dumps(data, indent=4, separators=(',', ': '))
    json_load = json.loads(json_dump)
    assert data == json_load
Example #10
    def __init__(self):
        self.cache_dir = resource_filename(__name__, '_cache')
        self.cache = Index(self.cache_dir)
        self.domain_names = []

        self._prepopulate()
Example #11
import ipywidgets as widgets  # type: ignore
import numpy as np  # type: ignore
import matplotlib.pyplot as plt  # type: ignore
import hashlib  # type: ignore

from pathlib import Path
from PIL import Image  # type: ignore
from wordcloud import ImageColorGenerator, WordCloud  # type: ignore
from diskcache import Index  # type: ignore

from ipybible import IMG_DATA_DIR

LOVE_MASK_IMG = IMG_DATA_DIR / "love.png"
CLOUD_INDEX = Index()


def hash_txt(text: str) -> str:
    hash_object = hashlib.sha256(text.encode("utf-8"))
    hex_dig = hash_object.hexdigest()
    return hex_dig


def generate_cloud(text: str, mask_img: Path = LOVE_MASK_IMG):
    hashed_text = hash_txt(text)
    out = widgets.Output()
    mask = np.array(Image.open(mask_img))
    with out:
        if hashed_text in CLOUD_INDEX:
            wordcloud_bible = CLOUD_INDEX[hashed_text]
        else:
            wordcloud_bible = WordCloud(
Example #12
from ..config import index_path
from diskcache import Index

index = Index(index_path.get())


Example #13
from web3 import Web3, WebsocketProvider
import json
from sendMail import sendMail
from diskcache import Index

result = Index("data/result")

import os, sys, time

w3 = Web3(
    WebsocketProvider(
        "wss://mainnet.infura.io/ws/v3/cd42b3642f1441629f66000f8e544d5d",
        websocket_timeout=30,
    ))

with open("erc20.json") as f:
    erc20abi = json.loads(f.read())

comp = w3.eth.contract(address="0xc00e94Cb662C3520282E6f5717214004A7f26888",
                       abi=erc20abi)


def go():
    a1 = comp.events.Transfer.createFilter(fromBlock="latest",
                                           toBlock="pending")
    print("开始检测大于500的comp转账")
    while True:
        c = a1.get_new_entries()
        for i in c:
            amount = i["args"]["amount"]
            amount = w3.fromWei(amount, "ether")
Example #14
from networkx import Graph
from .common import get_largest_element_sequence

# Occasionally users specify market orders on uniswap

# Orders are considered market orders if their limit price distance
# to the obtained price is greater than
IS_MARKET_ORDER_TOL = 0 #.001

# Fix market orders so that they are at this distance from the obtained price
LIMIT_PRICE_TOL = 0.1



uniswap = UniswapClient()
disk_cache = Index(".cache")

print(f"Cache size: {len(disk_cache)}")


def load_swaps(filename):
    """Parse a csv file to a list of dicts."""
    r = []
    with open(filename, "r") as f:
        reader = csv.reader(f)
        first = True
        for row in reader:
            # Skip header.
            if first:
                first = False
                continue
Example #15
 def __init__(self, iterable=(), directory=None):
     self.index = Index(directory)
     self.update(*iterable)
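Example #15 is only the constructor of a set-like wrapper around Index. A rough sketch of what the rest of such a wrapper could look like, assuming members are stored as Index keys (the class name and method bodies are assumptions, not the original project's code):

from collections.abc import MutableSet

from diskcache import Index

class PersistentSet(MutableSet):
    def __init__(self, iterable=(), directory=None):
        self.index = Index(directory)
        for item in iterable:
            self.index[item] = None      # members stored as keys

    def __contains__(self, item):
        return item in self.index

    def __iter__(self):
        return iter(self.index)

    def __len__(self):
        return len(self.index)

    def add(self, item):
        self.index[item] = None

    def discard(self, item):
        self.index.pop(item, None)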
Example #16
PREDICTION_LOOP_SLEEP = float(os.getenv("PREDICTION_LOOP_SLEEP", "0.06"))
BATCH_COLLECTION_SLEEP_IF_EMPTY_FOR = float(
    os.getenv("BATCH_COLLECTION_SLEEP_IF_EMPTY_FOR", "60")
)
BATCH_COLLECTION_SLEEP_FOR_IF_EMPTY = float(
    os.getenv("BATCH_COLLECTION_SLEEP_FOR_IF_EMPTY", "1")
)
MANAGER_LOOP_SLEEP = float(os.getenv("MANAGER_LOOP_SLEEP", "8"))

_request_index = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.request_index")
_results_cache = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.results_cache")
_metrics_cache = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.metrics_cache")
_meta_index = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.META_INDEX")

REQUEST_INDEX = Index(_request_index)
RESULTS_INDEX = Cache(_results_cache)
METRICS_CACHE = Cache(_metrics_cache)
META_INDEX = Index(_meta_index)

META_INDEX["IS_FILE_INPUT"] = IS_FILE_INPUT
META_INDEX["PREDICTION_LOOP_SLEEP"] = PREDICTION_LOOP_SLEEP
META_INDEX["BATCH_COLLECTION_SLEEP_IF_EMPTY_FOR"] = BATCH_COLLECTION_SLEEP_IF_EMPTY_FOR
META_INDEX["BATCH_COLLECTION_SLEEP_FOR_IF_EMPTY"] = BATCH_COLLECTION_SLEEP_FOR_IF_EMPTY
META_INDEX["MANAGER_LOOP_SLEEP"] = MANAGER_LOOP_SLEEP
META_INDEX["TOTAL_REQUESTS"] = 0

FASTDEPLOY_UI_PATH = os.getenv(
    "FASTDEPLOYUI",
    os.path.join(os.path.split(os.path.abspath(__file__))[0], "fastdeploy-ui"),
)
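Because an Index can be opened concurrently from several processes, the META_INDEX above can also be read and updated from worker processes. A small sketch of bumping the request counter atomically with Index.transact() (the directory path is illustrative; the key name follows the example above):

from diskcache import Index

meta = Index("queue_dir/example_queue.META_INDEX")   # same directory as META_INDEX

with meta.transact():                                # group the read and write into one transaction
    meta["TOTAL_REQUESTS"] = meta.get("TOTAL_REQUESTS", 0) + 1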
Example #17

def archivar_resultado(main_tex, main_name, problemas, filename, formato):
    """Vuelca el resultado a disco, a un archivo .zip o .tar.gz"""

    nombre = filename
    if formato == "zip":
        dump = dumper.ZipFile(nombre + ".zip")
    else:
        dump = dumper.TarFile(nombre + ".tar.gz")
    with dump as d:
        d.dump(main_tex, problemas)
    return d.name


cache = Index("/tmp/json2latex/cache")


def json2latex(data, main_name="examen.tex", formato="tgz"):
    progress = RQprogress()
    md5 = hashlib.md5("{}{}{}".format(data, main_name,
                                      formato).encode("utf8")).hexdigest()
    if md5 in cache:
        return cache[md5]
    datos = json.loads(data)
    main_tex, problemas = (converter.Examen2Tex(
        to_latex=True, progress=progress).convert_to_exam(datos, skip=False))
    progress.update("Volcando resultados a fichero")
    f = archivar_resultado(main_tex=main_tex,
                           main_name=main_name,
                           problemas=problemas,
Example #18
# Target:   Python 3.6

import datetime

from diskcache import Index
from node_tools import update_state, get_cachedir
from node_tools import ENODATA, NODE_SETTINGS

try:
    from datetime import timezone
    utc = timezone.utc
except ImportError:
    from daemon.timezone import UTC
    utc = UTC()

cache = Index(get_cachedir())
max_age = NODE_SETTINGS['max_cache_age']
utc_stamp = datetime.datetime.now(utc)  # use local time for console

# reset timestamp if needed
if 'utc-time' in cache:
    stamp = cache['utc-time']
    cache_age = utc_stamp - stamp  # this is a timedelta
    print('Cache age is: {} sec'.format(cache_age.seconds))
    print('Maximum cache age: {} sec'.format(max_age))
    if cache_age.seconds > max_age:
        print('Cache data is too old!!')
        print('Stale data will be removed!!')
        cache.clear()

size = len(cache)
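The snippet only reads and ages out the 'utc-time' stamp; somewhere after the cache is repopulated, the stamp has to be written back. A one-line sketch of that assumed counterpart step:

# assumed counterpart elsewhere in the tool: record the refresh time after updating the cache
cache['utc-time'] = datetime.datetime.now(utc)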
Example #19
            peer_keys = find_keys(cache, 'peer')
            print('Returned peer keys: {}'.format(peer_keys))
            load_cache_by_type(cache, peer_data, 'peer')

            # get/display all available network data
            await client.get_data('controller/network')
            print('{} networks found'.format(len(client.data)))
            net_list = client.data
            net_data = []
            for net_id in net_list:
                # print(net_id)
                # Get details about each network
                await client.get_data('controller/network/{}'.format(net_id))
                # pprint(client.data)
                net_data.append(client.data)

            # load_cache_by_type(cache, net_data, 'net')
            # net_keys = find_keys(cache, 'net')
            # print('{} network keys found'.format(len(net_list)))
            # pprint(net_data)

        except Exception as exc:
            # print(str(exc))
            raise exc


cache = Index(get_cachedir(dir_name='ctlr_data'))
# cache.clear()
loop = asyncio.get_event_loop()
loop.run_until_complete(main())