Example #1
# Inferred imports for this snippet (get_config, get_subs_name and get_mac
# are project-local helpers and are assumed to be in scope):
import logging

from google.api_core.exceptions import AlreadyExists
from google.cloud import pubsub_v1
from google.protobuf.duration_pb2 import Duration


def get_or_create_subscription():
    conf = get_config()["google_pub_sub"]
    project_id, topic_id = conf["project_id"], conf["topic_id"]
    subscription_id = get_subs_name(conf["subscription"].get(
        "type", "schedule-consumer"))

    subscriber = pubsub_v1.SubscriberClient()
    publisher = pubsub_v1.PublisherClient()

    sub_path = subscriber.subscription_path(project_id, subscription_id)
    topic_path = publisher.topic_path(project_id, topic_id)

    try:
        subscriber.create_subscription(
            request={
                "name": sub_path,
                "topic": topic_path,
                "message_retention_duration": Duration(
                    seconds=conf["subscription"].get(
                        "message_retention_duration", 86400)),
                "ack_deadline_seconds": conf["subscription"].get(
                    "ack_deadline_seconds", 300),
                # deliver only messages tagged with this device's MAC address
                "filter": f'attributes.mac = "{get_mac()}"',
            })
        logging.info(f"{sub_path} created")
    except AlreadyExists:
        logging.info(f"{sub_path} already exists")

    return sub_path
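To consume from the subscription this helper returns, the standard google-cloud-pubsub streaming pull pattern would look roughly like this; the callback body is illustrative only:

def callback(message):
    logging.info(f"received: {message.data!r}")
    message.ack()

subscriber = pubsub_v1.SubscriberClient()
future = subscriber.subscribe(get_or_create_subscription(), callback=callback)
try:
    future.result()  # block until the stream fails or is cancelled
except KeyboardInterrupt:
    future.cancel()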
Example #2

def main():

    config = get_config(service_name='temperature_sensor', log=log)
    log.info("header: datetime, device_uuid, device_tags, temperature_celsius")
    action_func = action_func_factory(log=log, config=config)
    batch_func = batch_send_factory(log=log,
                                    config=config,
                                    batch_limit=10,
                                    verify_ssl_certificate=False)
    intervaled_ma(log=log,
                  action_func=action_func,
                  batch_func=batch_func,
                  min_seconds_between_actions=10)
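intervaled_ma and the two factories are project-local helpers that are not shown in this listing. As a rough mental model only, the loop presumably works something like this hypothetical sketch:

import time

def intervaled_ma(log, action_func, batch_func, min_seconds_between_actions):
    # Hypothetical reimplementation: take one measurement per interval and
    # hand the accumulated buffer to batch_func, which is assumed to flush
    # it once the batch limit is reached and return what remains.
    buffer = []
    while True:
        started = time.monotonic()
        buffer.append(action_func())
        buffer = batch_func(buffer)
        elapsed = time.monotonic() - started
        time.sleep(max(0.0, min_seconds_between_actions - elapsed))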
Example #3
def main():

    log.info("log stats started")
    config = get_config(service_name='stat_reporter', log=log)
    action_func = action_func_factory(log=log, config=config)
    batch_func = batch_send_factory(log=log,
                                    config=config,
                                    batch_limit=1,
                                    verify_ssl_certificate=False)
    intervaled_ma(log=log,
                  action_func=action_func,
                  batch_func=batch_func,
                  min_seconds_between_actions=100)
Example #4

    def __init__(self):
        # apscheduler imports, shown in-method as in Example #6 below
        from apscheduler.executors.pool import ProcessPoolExecutor
        from apscheduler.jobstores.redis import RedisJobStore
        from apscheduler.schedulers.background import BackgroundScheduler

        self.jobstores = {
            'default': RedisJobStore(),
            'redis': RedisJobStore()
        }
        self.executors = {
            'default': ProcessPoolExecutor(
                max_workers=get_config()["scheduler"].get(
                    "process_pool_max_workers", 20))
        }
        self.job_defaults = {
            'coalesce': True,      # collapse missed runs into a single run
            'max_instances': 1     # never run the same job concurrently
        }
        self.scheduler = BackgroundScheduler(jobstores=self.jobstores,
                                             executors=self.executors,
                                             job_defaults=self.job_defaults)
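A minimal usage sketch: because the executor is a process pool and the job store is Redis, scheduled callables must be picklable and importable, so a module-level function is used. SchedulerService is an assumed name for the class whose __init__ is shown above:

def heartbeat():
    print("tick")

svc = SchedulerService()                      # hypothetical class name
svc.scheduler.add_job(heartbeat, 'interval', seconds=30,
                      id='heartbeat', replace_existing=True)
svc.scheduler.start()                         # runs in a background thread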
Example #5
def config_app(p_app):
    """
    Configure the Flask app's database settings.
    :param p_app: Flask application instance
    :return: None
    """
    config = get_config('aliyun_mysql')
    db_type = config['db_type']
    host = config['host']
    port = config['port']
    user = config['user']
    password = config['password']

    p_app.config['SQLALCHEMY_DATABASE_URI'] = (
        f'{db_type}+pymysql://{user}:{password}@{host}:{port}/watero')
    p_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False  # don't track object modifications
    p_app.config['SQLALCHEMY_ECHO'] = False  # don't echo raw SQL statements
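A minimal usage sketch, assuming Flask and flask_sqlalchemy:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
config_app(app)       # sets SQLALCHEMY_DATABASE_URI and related options
db = SQLAlchemy(app)  # binds SQLAlchemy to the configured app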
Example #6
    def __init__(self):
        import adafruit_rfm9x
        import board
        import busio
        from digitalio import DigitalInOut

        cs = DigitalInOut(board.CE1)
        reset = DigitalInOut(board.D25)
        spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO)
        self.__config = get_config()["lora"]
        self.__actions = self.__config["actions"]
        self.rfm9x = adafruit_rfm9x.RFM9x(
            spi, cs, reset,
            self.__config["frecuency"])  # key spelled "frecuency" in the project's config
        self.serve_action = self.__actions["serve"]

        self.success_led = 13
        self.fail_led = 26
        self.warn_led = 19
        self.__setup_leds()
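A minimal usage sketch of the radio configured above, using the documented adafruit_rfm9x send/receive API; LoraNode is an assumed name for the enclosing class, and serve_action is assumed to be a string command:

node = LoraNode()                                   # hypothetical class name
node.rfm9x.send(bytes(node.serve_action, "utf-8"))  # transmit a command
packet = node.rfm9x.receive(timeout=5.0)            # None if nothing arrives
if packet is not None:
    print(packet.decode("utf-8", errors="ignore"))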
Example #7
import json
import os
import random

import numpy as np
import torch

if __name__ == "__main__":

    args = argparser()
    print("saving file to {}".format(args.prefix))

    # create workspace
    workspace_dir = "experiments/{}".format(args.prefix)
    os.makedirs(workspace_dir, exist_ok=True)

    test_log = open("{}/test.log".format(workspace_dir), "w")

    input_size, _, test_loader = get_config(args.data)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)
    random.seed(0)
    np.random.seed(0)

    encoder = GlobalEncoder(stride=args.encoder_stride,
                            input_size=input_size,
                            output_size=args.code_size)
    # load encoder from checkpoint
    encoder.load_state_dict(
        torch.load(args.encoder_ckpt)["encoder_state_dict"])
    encoder = encoder.to(args.device)

    decoder = DeconvDecoder(input_size=encoder.output_size,
                            output_size=input_size)
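The snippet stops after constructing the decoder. A minimal evaluation sketch, under the assumption that encoder and decoder are callable modules mapping image -> code -> image and that test_loader yields (input, label) pairs:

import torch.nn.functional as F

decoder = decoder.to(args.device)
encoder.eval()
with torch.no_grad():
    for x, _ in test_loader:
        x = x.to(args.device)
        recon = decoder(encoder(x))   # reconstruct the batch
        mse = F.mse_loss(recon, x).item()
        test_log.write(f"mse: {mse:.6f}\n")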
Example #8
import json
import os
import random

import numpy as np
import torch

if __name__ == "__main__":

    args = argparser()
    print("saving file to {}".format(args.prefix))

    # create workspace
    workspace_dir = "experiments/{}".format(args.prefix)
    os.makedirs(workspace_dir, exist_ok=True)

    train_log = open("{}/train.log".format(workspace_dir), "w")
    test_log = open("{}/test.log".format(workspace_dir), "w")

    input_size, ndf, num_channels, train_loader, test_loader = get_config(args)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)
    random.seed(0)
    np.random.seed(0)

    encoder = GlobalEncoder(ndf=ndf,
                            num_channels=num_channels,
                            output_size=args.code_size,
                            input_size=input_size)

    if args.global_dim:
        DIM = GlobalDIM(encoder, type=args.mi_estimator)

    else:
        DIM = LocalDIM(encoder, type=args.mi_estimator)
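Only the estimator construction is shown here. Training would presumably optimize the encoder and the DIM estimator jointly; the following lines are an assumption, not code from the source:

params = list(encoder.parameters()) + list(DIM.parameters())
optimizer = torch.optim.Adam(params, lr=1e-4)  # learning rate assumed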
Example #9

    def __init__(self, database, collection, where, json_file, export):
        # Inferred imports (shown in-method as in Example #6); loads/dumps are
        # assumed to come from bson.json_util. get_config, SPAIN_LANGUAGES and
        # get_spain_places_regex are project-local.
        import pathlib
        import sys
        from bson.json_util import dumps, loads
        from pymongo import MongoClient, errors
        from tqdm import tqdm

        if where == 'local':

            # local data file; the path is taken directly from the json_file argument

            try:
                client = MongoClient('localhost', 27017)
                self.__db = client[database]

                # create and seed the collection from line-delimited JSON
                # if it does not exist in the database yet
                if collection not in self.__db.list_collection_names():
                    with open(json_file, 'r') as f:
                        file_data = [loads(line) for line in f]
                    self.__db[collection].insert_many(file_data)

                # print the version of MongoDB server if connection successful
                print("server version:", client.server_info()["version"])
            except errors.ServerSelectionTimeoutError as err:
                client = None
                print("pymongo ERROR:", err)

            self.__database = database
            self.__collection = collection

        if where == 'remote':

            # connection configuration file; the path is taken from the json_file argument

            script_parent_dir = pathlib.Path(__file__).parents[1]
            config_fn = script_parent_dir.joinpath(json_file)
            config = get_config(config_fn)
            connection_dict = {
                'host': config['mongodb']['host'],
                'port': int(config['mongodb']['port']),
                'username': config['mongodb']['username'],
                'password': config['mongodb']['password']
            }

            self.__database = database
            self.__collection = collection

            uri = "mongodb://%s:%s@%s:%s/%s" % (
                connection_dict['username'], connection_dict['password'],
                connection_dict['host'], connection_dict['port'], database)

            try:
                client = MongoClient(
                    uri, serverSelectionTimeoutMS=3000)  # 3 second timeout
                self.__db = client[self.__database]
                # print the version of MongoDB server if connection successful
                print("server version:", client.server_info()["version"])
            except errors.ServerSelectionTimeoutError as err:
                client = None
                print("pymongo ERROR:", err)

            if export == 'yes':

                export_path = '../sna/data/es-tweets.hpai.json'

                self.__query = {'$and': [
                    {'lang': {'$in': SPAIN_LANGUAGES}},
                    {'$or': [
                        {'place.country': 'Spain'},
                        {'user.location': {'$in': get_spain_places_regex()}}
                    ]}
                ]}

                print("Exporting cursor...")
                cursor = self.search(self.__query)
                with open(export_path, "w") as file:
                    for document in tqdm(cursor, total=470000):
                        file.write(dumps(document))
                        file.write("\n")
                sys.exit("Bye!")
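Only __init__ is shown, so the class name is unknown; MongoTweetStore below is hypothetical. Local mode seeds a collection from a line-delimited JSON file, remote mode connects via a config file:

store = MongoTweetStore(database='tweets',       # hypothetical class name
                        collection='es_tweets',
                        where='local',
                        json_file='es-tweets.json',
                        export='no')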
Example #10
# Setup/Import ----------------------------------------------------------------
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, ValidationError
from typing import Dict, List, Union, Any
from typing_extensions import Literal
import os.path as path
import mlflow
import json
from mlflow.tracking import MlflowClient
from utils.get_config import get_config
import time
from fastapi.encoders import jsonable_encoder
from starlette.responses import HTMLResponse, JSONResponse

# Grab Config
conf = get_config()

# URI
uri = conf['uri']
mlClient = MlflowClient(tracking_uri=uri)
mlflow.set_tracking_uri(uri)

# Experiment Name
experiment_name = conf['experiment_name']
mlflow.set_experiment(experiment_name)
experiment_id = mlflow.get_experiment_by_name(experiment_name).experiment_id

router = APIRouter()

# Process Endpoint ------------------------------------------------------------
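The listing ends at the section header, so the endpoint itself is not shown. A hypothetical sketch of what a process endpoint might look like with the objects set up above; the route path and payload shape are assumptions:

class ProcessRequest(BaseModel):
    run_name: str
    params: Dict[str, Any] = {}

@router.post("/process")
def process(req: ProcessRequest):
    # log a run under the configured experiment
    with mlflow.start_run(experiment_id=experiment_id, run_name=req.run_name):
        mlflow.log_params(req.params)
    return JSONResponse({"status": "ok", "run_name": req.run_name})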