Example #1
def kl_inner_stop(circuit: Circuit, data: Data, app=None, genetic=False):
    """
    it is the end of the current pass, restore the best cut of this pass;
    terminates the outer loop if mincut doesn't improve or it reaches 6 iterations
    """
    data.restore_best_cut()  # restore block data structures

    # update the distribution for the restored best cut
    update_distribution(circuit, data)
    # update the gains for the restored best cut
    calculate_gains(circuit, data)

    if genetic:
        return

    logging.info("iteration {}: best mincut = {}".format(
        data.iteration, data.cutsize))

    if app is not None:
        app.update_canvas(data)
    data.iteration += 1

    # continue for up to 6 iterations or until mincut stops improving
    if data.iteration <= 6 and data.mincut != data.prev_mincut:
        data.prev_mincut = data.mincut
        if app is not None:
            app.root.after(1000, kl_inner_loop, circuit, data, app)
    elif app is not None:
        app.update_partition_button(True)
Example #2
def predict(args, model, field):

    model.eval()
    Dataset = Data(args)

    fn_in = args.input_file
    # if 'cand_paths' in fn_in:
    #     fn_out = fn_in.replace('cand_paths','best_path')
    # else:
    #     fn_out = fn_in.replace('paths','predict_path')
    if not args.output_file:
        fn_out = fn_in.replace('cand_paths', 'best_path')
    else:
        fn_out = args.output_file

    with open(fn_in, 'r') as f:
        raw_data = json.load(f)

    output_data = {}

    topk = args.topk

    for line in raw_data:
        if 'q_ws' in line.keys():
            q, q_ws, paths, paths_ws = line['q'], line['q_ws'], line[
                'paths'], line['paths_ws']
        else:
            q, paths = line['q'], line['paths']

        one_question = Dataset.numericalize(field, [q])  # every inner element is 2-D
        one_question = [t[0] for t in one_question]  # now each inner element is 1-D

        one_question = (t for t in one_question)

        paths_input = [
            ''.join([del_des(item) for item in path]) for path in paths
        ]
        one_cands = Dataset.numericalize(field, paths_input)
        batches_cands = data_batchlize(args.test_batch_size, one_cands)

        # character-level scores
        char_scores = occupied(
            q, [''.join([del_des(i) for i in p]) for p in paths])
        char_scores = torch.Tensor(char_scores)
        # model-level scores
        model_scores = model.cal_score(one_question, batches_cands)
        all_scores = alpha * char_scores + (1 - alpha) * model_scores

        if len(all_scores) > 0 and topk == 1:
            index = torch.argmax(all_scores)
            output_data[q] = paths[index]
        elif len(all_scores) > 0 and topk > 1:
            sorted_scores, index = torch.sort(all_scores, descending=True)
            output_data[q] = [paths[i] for i in index[:topk]]
        else:
            print(q, 'no path')

    with open(fn_out, 'w') as f:
        json.dump(output_data, f, ensure_ascii=False)
Example #3
 def initialize(self):
     Data.initialize()
     Config.initialize()
     self.Delete = None
     self.Search = None
     self.Plaster = None
     self.Config = None
     self.currentMenu = None
Example #4
def update_distribution(circuit: Circuit, data: Data):
    """
    update the distribution for each net
    """
    for net in circuit.nets:
        data.reset_net_distribution(net)
        for cell in net.cells:
            block_id = data.get_node_block_id(cell)
            data.inc_net_distribution(net, block_id)
Example #5
    def post(self):
        self.set_header("Content-Type", "application/json")
        folk_id = self.get_argument("folk_id", '')

        type_list = Type.get_all()
        for i in type_list:
            key = str(i.type_id)
            value = self.get_argument(key, '')
            if value:
                try:
                    Data.create_data(folk_id, key, value)
                except Exception:
                    # the record already exists, so update it instead
                    Data.update_data_by_folk_key(folk_id, key, value)

        self.write(json.dumps("OK"))
Example #6
 def get_data_info(self, tables, type_id, dev_id, owner, start_time,
                   end_time):
     data_list = []
     for table in tables:
         data_list.extend(
             Data.find_by(
                 'where device_id = ? and type_id = ? and owner= ? and created_at between ? and ?',
                 dev_id,
                 type_id,
                 owner,
                 start_time,
                 end_time,
                 sub_name=str(table.index)))
     res = {}
     dev = Device.get(dev_id)
     data_info = DataParser.get_instance().get_data_type(
         dev.dev_type, type_id, owner)
     res['name'] = data_info['name']
     res['type_id'] = data_info['type_id']
     res['unit'] = data_info['unit']
     res['values'] = []
     for data_item in data_list:
         res['values'].append(
             [data_item.created_at * 1000, data_item.value])
     return res
Example #7
    def __init__(self, data=None):
        # avoid a shared mutable default argument: create a fresh Data() per instance
        self.data = data if data is not None else Data()

        print('Starting the plotter')
        self._vp_config()
        self._plot()
        plt.show()
Example #8
 def merge(self):
     average = (self.red.average + self.green.average +
                self.blue.average) / 3
     median = (self.red.median + self.green.median + self.blue.median) / 3
     minimum = (self.red.minimum + self.green.minimum +
                self.blue.minimum) / 3
     maximum = (self.red.maximum + self.green.maximum +
                self.blue.maximum) / 3
     return Data(self.red.index, average, median, minimum, maximum)
Example #9
 def get_users_list(self):
     if self.users_cache is None:
         driver = self.app.driver
         self.app.session.being_on_users_page()
         self.users_cache = []
         for element in driver.find_elements_by_xpath("(//tr/td/span[contains(@title, 'Auto.test.user')])"):
             #ActionChains(driver).pause(0.01).perform()
             text = element.text
             self.users_cache.append(Data(name = text))
     return list(self.users_cache)
Example #10
    def from_json(cls, json):
        if json is None:
            return ApiResponsePayload()

        if type(json) is list:
            return [
                ApiResponsePayload.from_json(apiResponsePayload)
                for apiResponsePayload in json
            ]

        return ApiResponsePayload(Data.from_json(json.get('data')))
Example #11
 def create_data(self):
     try:
         data = Data(None, t.now().isoformat(), None, self.user.id)
         uri = "{0}/{1}/{2}".format(self.opts['url'],
                                    self.opts['prefix'],
                                    self.opts['data'])
         body = urllib.urlencode(data)
         req = self.http.fetch(uri, method='POST', headers=self.header, body=body)
         response = json.loads(req.body)['data']
         dump = response['body']
         data.id = dump['id']
         return data
     except httpclient.HTTPError as e:
         print e.message
         self.http.close()
         return e
     except Exception as e:
         print e.message
         self.http.close()
         return e
Example #12
def init_partition(circuit, block_ids=None) -> Data:
    """
    randomly partition the nodes equally, if the block_ids
    is not specified.
    :return: data container for the current partition
    """
    pmax = get_pmax(circuit)
    nets_size = circuit.get_nets_size()
    n = circuit.get_cells_size()

    if block_ids is None:
        random_cids = random.sample(range(n), n)
        block_ids = [cid % 2 for cid in random_cids]

    # initialize a data container for the current partition
    data = Data(pmax, nets_size, block_ids)
    # update the nets distribution in the current partition
    update_distribution(circuit, data)
    # update the gain for each node in the current partition
    calculate_gains(circuit, data)
    # update the cutsize of the current partition
    data.cutsize = calculate_cutsize(circuit, data)

    logging.info("initial cutsize = {}".format(data.cutsize))

    # initialize the best partition and prev mincut
    data.store_best_cut()
    data.prev_mincut = data.mincut

    return data
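
A note on get_pmax used above: it is not shown in these examples, but in KL/FM-style partitioning it is typically the maximum number of nets incident to any single cell, which bounds every node's gain. A minimal sketch under that assumption (hypothetical, not the original helper):

def get_pmax(circuit):
    # Assumed meaning: the largest number of nets attached to any one cell.
    # With the gain rule of Example #34, a node's gain then lies in [-pmax, pmax].
    return max(len(cell.nets) for cell in circuit.cells)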
Example #13
def kl_inner_loop(circuit: Circuit, data: Data, app=None, genetic=False):
    """
    perform the inner loop of the Kernighan-Lin Partition algorithm
    """
    # select the max gain node from blocks
    max_gain_node = select_max_gain_node(data)

    # move the max gain node to another block
    #   - update the gain for each node
    #   - update the nets distribution
    #   - update the cutsize
    move_node_another_block(max_gain_node, data)

    # if cutsize is the minimum for this pass, store the cut
    if data.cutsize < data.mincut:
        data.store_best_cut()

    data.print_blocks_size()

    if app is None or app.quick:
        if data.has_unlocked_nodes():
            kl_inner_loop(circuit, data, app, genetic)
        else:
            kl_inner_stop(circuit, data, app, genetic)
    else:
        app.update_canvas(data)
        # the inner loop stops once no unlocked nodes remain
        if data.has_unlocked_nodes():
            app.root.after(1, kl_inner_loop, circuit, data, app, genetic)
        else:  # inner loop exit
            app.root.after(1000, kl_inner_stop, circuit, data, app, genetic)
Example #14
    def get_data_info(self, tables, type_id, dev_id, start_time, end_time):
        data_list = []
        for table in tables:
            data_list.extend(Data.find_by('where device_id = ? and type_id = ? and created_at between ? and ?', dev_id, type_id, start_time, end_time, sub_name = str(table.index)))

        dev  = Device.get(dev_id)
        data_info = DataParser.get_instance().get_data_type(dev.dev_type, type_id)

        if data_info['duration'] == 0:
            return self.deal_for_single(data_list, data_info)

        if data_info['duration'] > 0:
            return self.deal_for_accumulate(data_list, data_info)
Example #15
def test4():
    data = Data()
    data.add_user(123456, 'Will', User("Will", Schedule()))
    [print('%s: %s\n' % (k, v)) for k, v in data.db['users'].items()]
    print(data.db)
    # print(data.db['users']['Will'].schedule.events['Skate'].time.in_time(data.db['users']['Michael'].schedule.courses['CSCI140'].time.start))
    #print(data.db)
    data.write_data()
Example #16
 def get_data_info(self, tables, type_id, dev_id, owner, start_time, end_time):
     data_list = []
     for table in tables:
         data_list.extend(Data.find_by('where device_id = ? and type_id = ? and owner= ? and created_at between ? and ?', dev_id, type_id, owner, start_time, end_time, sub_name = str(table.index)))
     res = {}
     dev  = Device.get(dev_id)
     data_info = DataParser.get_instance().get_data_type(dev.dev_type, type_id, owner)
     res['name'] = data_info['name']
     res['type_id'] = data_info['type_id']
     res['unit'] = data_info['unit']
     res['values'] = []
     for data_item in data_list:
         res['values'].append([data_item.created_at*1000, data_item.value])
     return res
Example #17
 def convert_to_test_model(self, file):
     data = self.convert_documents_to_test_model(file=file)
     if data != -1:
         try:
             self.data = Data(settings=Settings(
                 target=file['settings']['target'],
                 scaner_port=file['settings']['scaner_port'],
                 scaner_boundrate=file['settings']['scaner_boundrate'],
                 have_cassa=file['settings']['have_cassa']),
                              data=data)
             return 0
         except:
             self.err.append('There is exception')
             return -1
     else:
         return -1
Example #18
 def build_chart(self, data):
     chart = self.me()
     for attr in chart.get_attributes():
         val = self.get_attribute(attr.get_name()).value
         v = lib.string_utils.unquote(val)
         if v != "":
             attr.set_value(v)
     if not data.is_valid():
         logging.info('Invalid')
         data = Data.default()
     for item in data.as_list():
         chart.add_row(item.name, item.value)
     chart.set_range(data.min, data.max)
     d = Decorator(chart)
     d.add_background("ffffff")
     return d
Example #19
    def get(self):
        folk_id = self.get_argument("id", '')
        today = time.strftime("%Y-%m-%d", time.localtime())

        # list of types
        type_list = Data.get_all_data(folk_id, today)
        if not type_list:
            type_list = Type.get_all()
        else:
            # TODO fill in the attributes that have not been assigned a value
            pass

        folk = Folk.get_folk(0, folk_id)
        self.echo('admin_daily.html', {
                'title': "Data entry",
                'objs': type_list,
                'folk': folk,
        }, layout='_layout_admin.html')
Example #20
def select_max_gain_node(data: Data):
    """
    choose the max-gain node from the two blocks while maintaining the balance constraint
    """
    block0_size, block0_max_gain = data.get_block_size(
        0), data.peek_block_max_gain(0)
    block1_size, block1_max_gain = data.get_block_size(
        1), data.peek_block_max_gain(1)

    if block0_size > block1_size or (block0_size == block1_size
                                     and block0_max_gain > block1_max_gain):
        return data.pop_block_max_gain(0)
    elif block0_size < block1_size or (block0_size == block1_size
                                       and block0_max_gain < block1_max_gain):
        return data.pop_block_max_gain(1)
    else:  # break tie
        return data.pop_block_max_gain(random.choice([0, 1]))
Example #21
    def calculate_for(self, index, x, y):
        """
        Calculate the values for the point
        """
        origin = Point(x, y, self.star.line.width)
        neighbors = []
        for point in points.neighbors(origin, self.width, self.height):
            value = self.raw_data[point.y][point.x]
            if isinstance(value, numpy.ndarray):
                neighbors.append(mean(value))
            else:
                neighbors.append(value)

        min_value = min(neighbors)
        max_value = max(neighbors)
        average_value = mean(neighbors)
        median_value = median(neighbors)

        return Data(index, float(average_value), float(median_value),
                    float(min_value), float(max_value), origin)
Example #22
 def write_into_db(self, data_instance):
     # write data into mongodb
     package = Data.pack(data_instance)
     print("IOData.write(): ", package)
     result = data_instance.to_json()
     s = ""
     for r in result:
         v = result[r]
         if isinstance(v, float):
             v = round(v, 2)
         if s == "":
             s = str(v)
         else:
             s += ',' + str(v)
     try:
         self.write_t("write", "Writing ", s)
         iobase.mongo.data.insert(result)
         self.write_t("write", "Done")
     except Exception as e:
         print(e)
         self.write_t("write", "Write failed.")
Example #23
    def get_data_info(self, tables, type_id, dev_id, start_time, end_time):
        data_list = []
        for table in tables:
            data_list.extend(
                Data.find_by(
                    'where device_id = ? and type_id = ? and created_at between ? and ?',
                    dev_id,
                    type_id,
                    start_time,
                    end_time,
                    sub_name=str(table.index)))

        dev = Device.get(dev_id)
        data_info = DataParser.get_instance().get_data_type(
            dev.dev_type, type_id)

        if data_info['duration'] == 0:
            return self.deal_for_single(data_list, data_info)

        if data_info['duration'] > 0:
            return self.deal_for_accumulate(data_list, data_info)
Example #24
    def __init__(self):
        self.model = Data()

        self.application = tkinter.Tk()
        self.application.title("Osztályozó")

        self.insert_after = tkinter.Button(
            self.application,
            text='insert new task after',
            command=self.insert_task_after_selected)
        self.insert_after.pack()

        self.task_hierarchy = tkinter.ttk.Treeview(self.application,
                                                   columns=('weight', ))
        self.task_hierarchy.heading('weight', text='Weight')
        self.edit_popup = None
        self.display_task(self.model.task)
        self.task_hierarchy.bind(
            '<<TreeviewSelect>>',
            lambda args: print(self.task_hierarchy.selection()))
        self.task_hierarchy.bind("<Button-3>", self.engage_edit)
        self.task_hierarchy.pack()

        self.application.mainloop()
Example #25
from model.data import Data
from model.course import Course
from model.event import Event
from model.event_time import EventTime
import discord
import configparser
import time

data = Data()

config = configparser.ConfigParser()
config.read('config.ini')

TOKEN = config['SECRETS']['client_id']

client = discord.Client()

commands = {
    "!help": "display available commands",
    "!info": "display user information",
    "!register": "add yourself as a user",
    "!unregister": "remove yourself as a user",
    "!addcourse": "add a course to your schedule",
    "!removecourse": "remove a course from your schedule",
    "!clearcourses": "remove all courses from your schedule",
    "!addevent": "add an event to your schedule",
    "!removeevent": "remove an event from your schedule",
    "!clearevents": "remove all events from your schedule",
    "!clearschedule": "remove all courses and events from your schedule",
    "!free": "display who's free at the current time"
}

Example #26
def parseDate(dateStr):
    """ Get date from YYYY/MM/DD string """
    year, month, day = dateStr.split('/')
    return dt.datetime(year=int(year), month=int(month), day=int(day))


if __name__ == "__main__":

    n = len(sys.argv)

    fromDate = parseDate(sys.argv[1]) if n > 1 else None
    toDate = parseDate(sys.argv[2]) if n > 2 else None

    data = Data(dataFolder=getDataFile(), fromDate=fromDate, toDate=toDate)
    reservations = data.getReservations()

    def _filterReservation(r):
        isCEM = (r.isNationalFacility() and r.getCemCode() == 'cem00262')
        return r.resource.get() in MICROSCOPES and isCEM

    reservations = filter(_filterReservation, reservations)
    stats = {
        TITAN: {
            'cem': 0,
            'fac': 0,
            'sll': 0,
            'dbb': 0,
            'mmk': 0
        },
Example #27
parser.add_argument('--phase', dest='phase', default='train', help='train, test')
parser.add_argument('--save_freq', dest='save_freq', type=int, default=5, help='save a model every save_freq iterations')
parser.add_argument('--print_freq', dest='print_freq', type=int, default=100, help='print the debug information every print_freq iterations')
parser.add_argument('--continue_train', dest='continue_train', type=bool, default=False, help='if continue training, load the latest model: 1: true, 0: false')
parser.add_argument('--checkpoint_dir', dest='checkpoint_dir', default='./checkpoint', help='models are saved here')
parser.add_argument('--sample_dir', dest='sample_dir', default='./sample', help='sample are saved here')
parser.add_argument('--test_dir', dest='test_dir', default='./test', help='test sample are saved here')
parser.add_argument('--L1_lambda', dest='L1_lambda', type=float, default=10.0, help='weight on L1 term in objective')
parser.add_argument('--use_resnet', dest='use_resnet', type=bool, default=True, help='generator network using residual blocks')
parser.add_argument('--use_lsgan', dest='use_lsgan', type=bool, default=True, help='gan loss defined in lsgan')
parser.add_argument('--max_size', dest='max_size', type=int, default=50, help='max size of image pool, 0 means do not use image pool')
parser.add_argument('--is_training', dest='is_training', type=bool, default=False, help='is training or not')

args = parser.parse_args()

data=Data(args.img_size,is_train=args.is_training,test_img_path='./data/x_domain/Aaron_Eckhart_0001.jpg')
origin_img,_=data.next_batch(1)
xgan=XGAN(args,encoder,decoder,discriminator,data)
img,img1 = xgan.test('')
# show_img(data.next_batch(1)[0][0])

def uni(img):
    tmp = (img[0]+1)/2
    #tmp1=tmp-tmp.min()
    #tmp1=(tmp-tmp.min())/(tmp.max()-tmp.min())
    return tmp
show_img(origin_img[0])
show_img(img[0])
show_img(img1[0])

show_img(uni(origin_img))
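
A caveat on the argparse flags above: type=bool does not parse strings, so passing --continue_train False or --is_training False still yields True, because any non-empty string is truthy. A common workaround (not part of the original snippet) is a small converter:

import argparse

def str2bool(v):
    # argparse's type=bool treats any non-empty string (even "False" or "0") as True,
    # so map the usual textual spellings to a real boolean instead.
    if isinstance(v, bool):
        return v
    if v.lower() in ('yes', 'true', 't', '1'):
        return True
    if v.lower() in ('no', 'false', 'f', '0'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected')

# e.g. parser.add_argument('--is_training', type=str2bool, default=False, ...)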
Example #28
# Setup
import dash
from dash.dependencies import Input, Output, State
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from config import config, about
from model.data import Data
from model.model import Model
from model.result import Result

# Read data
data = Data()
data.get_data()

# App Instance
print(config.root+'\\app\\assets')
app = dash.Dash(name=config.name, assets_folder=config.root+'\\app\\assets',
                external_stylesheets=[dbc.themes.LUX, config.fontawesome])
app.title = config.name

# Navbar
navbar = dbc.Nav(className="nav nav-pills", children=[
    # logo/home
    dbc.NavItem(html.Img(src=app.get_asset_url("logo.PNG"), height="40px")),
    # about
    dbc.NavItem(html.Div([
        dbc.NavLink("About", href="/", id="about-popover", active=False),
        dbc.Popover(id="about", is_open=False, target="about-popover", children=[
            dbc.PopoverHeader("How it works"), dbc.PopoverBody(about.txt)
        ])
    ])),
    # links
    dbc.DropdownMenu(label="Links", nav=True, children=[
        dbc.DropdownMenuItem([html.I(className="fa fa-linkedin"),
                              "  Contacts"], href=config.contacts, target="_blank"),
Example #29
 def post(self):
     json_object = Data.instance().to_json()
     json_object["is_high_frequency"] = Data.instance().high_frequency_record()
     json_object["high_frequency_interval"] = model.data.HIGH_FREQUENCY_INTERVAL
     json_string = json.dumps(json_object)
     self.write(json_string)
Example #30
# -*- coding: utf-8 -*-

import os

from config import Config

from model.data import Data
from model.network import Network

if __name__ == '__main__':
    dir_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            Config.CURRENT_MODEL_BASE_PATH)
    data = Data()
    data.load_data_from_file(os.path.join(dir_path,
                                          'data.nosync/all_data.npy'))
    model = Network(data, os.path.join(dir_path, 'log.nosync/network/run1'))
    model.train()
Example #31
 def __init__(self, debug=False):
     self.data = Data()
     self.debug = debug
Example #32
class Interpreter:
    def __init__(self, debug=False):
        self.data = Data()
        self.debug = debug

    def proceed(self, file):
        n = 0
        print('Starting the interpreter')
        for line in file:
            n += 1
            if self.debug: print('===== Line', n, '=====')
            line = line.strip()

            if line.startswith('#'):
                print(line.lstrip("#"))
                continue

            (key, sp, data) = line.partition(":")
            if self.debug: print(key, data)

            if key == "AVP":  # добавляем подэкран
                self.data.new_viewport()
                continue

            if key.startswith("ACS"):  # добавляем график свечек
                name = self._get_name(key)
                vp = int(key.partition(')')[2])
                self.data.new_candle(vp, name)
                continue

            if key.startswith("CS"):  # добавляем значения свечек
                name = self._get_name(key)
                split = data.split(';')
                time = datetime.strptime(split[0], "%Y.%m.%d %H:%M:%S")
                self.data.add_candle(name, time, float(split[1]),
                                     float(split[2]), float(split[3]),
                                     float(split[4]))
                continue

            if key.startswith("AG"):  # добавляем график линию
                name = self._get_name(key)
                vp = int(key.partition(')')[2])
                self.data.new_graph(vp, name)
                continue

            if key.startswith("AD"):  # добавляем график точки
                name = self._get_name(key)
                vp = int(key.partition(')')[2])
                options = self._get_options(data)

                self.data.new_dots(vp, name, options)
                continue

            if key.startswith("("):  # добавляем значения точек или линии
                name = self._get_name(key)
                split = data.split(';')
                time = datetime.strptime(split[0], "%Y.%m.%d %H:%M:%S")
                self.data.add_value(name, time, float(split[1]))
                continue

            print("Ошибка в строке", n, ":")
            print("  ", line)
            print('   Неизвестный ключ:', key)

        print('====================')
        print("Обработано", n, "строк")
        return self.data

    def _get_name(self, txt):
        return txt.partition("(")[2].partition(')')[0]

    def _get_options(self, txt):
        split = txt.split(' ')
        # print(split)
        options = dict()
        for item in split:
            (key, sp, data) = item.partition('=')
            options[key] = data.strip('"').strip("'")
        # print(options)
        return options
Example #33
def test_generator(app):

    testdata = [
        Data(name="Auto.test.place_{}",
             placeId=(''.join(choice(digits) for i in range(5))))
    ]

    file = config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "../data/places.json")

    with open(file, "w") as out:
        jsonpickle.set_encoder_options("json", indent=2)
        out.write(jsonpickle.encode(testdata))

    testdata = [
        Data(email="sergey.verkhovodko+{}@synesis.ru".format(''.join(
            choice(digits) for i in range(2))),
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=3)
    ]

    file = config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "../data/mailcheck.json")

    with open(file, "w") as out:
        jsonpickle.set_encoder_options("json", indent=2)
        out.write(jsonpickle.encode(testdata))

    testdata = [
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=3),
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=2),
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=1),
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=0)
    ]

    file = config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "../data/allusersdata.json")

    with open(file, "w") as out:
        jsonpickle.set_encoder_options("json", indent=2)
        out.write(jsonpickle.encode(testdata))

    testdata = [
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=0)
    ]

    file = config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "../data/fordel.json")

    with open(file, "w") as out:
        jsonpickle.set_encoder_options("json", indent=2)
        out.write(jsonpickle.encode(testdata))

    testdata = [
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=2),
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=1),
        Data(email="AutoTestUser_{}_{}@ki.ki",
             name="Auto.test.user_{}_{}",
             userId=(''.join(choice(digits) for i in range(5))),
             phone=(''.join(choice(digits) for i in range(15))),
             role=0)
    ]

    file = config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "../data/statiosmoke.json")

    with open(file, "w") as out:
        jsonpickle.set_encoder_options("json", indent=2)
        out.write(jsonpickle.encode(testdata))
Example #34
def calculate_gains(circuit: Circuit, data: Data):
    """
    calculate the gain for each node
    """
    for cell in circuit.cells:
        data.reset_node_gain(cell)
        F = data.get_node_block_id(cell)  # from block id
        T = (F + 1) % 2  # to block id
        for net in cell.nets:
            if data.get_net_distribution(net, F) == 1:
                data.inc_node_gain(cell)
            if data.get_net_distribution(net, T) == 0:
                data.dec_node_gain(cell)
        data.unlock_node(cell, F)
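
The two conditions in the loop above are the usual gain rule: a net whose "from" block contains only this cell becomes uncut after the move (+1), and a net with no cells in the "to" block becomes newly cut (-1). A tiny self-contained check of that rule, using plain integers instead of the project's Data class:

def net_gain_contribution(dist_from, dist_to):
    # dist_from / dist_to are the net's cell counts in the cell's own block
    # and in the opposite block, as returned by get_net_distribution above.
    gain = 0
    if dist_from == 1:  # the cell is the net's last member in its own block
        gain += 1
    if dist_to == 0:    # the net has no member in the other block yet
        gain -= 1
    return gain

assert net_gain_contribution(1, 2) == 1   # moving the cell uncuts the net
assert net_gain_contribution(2, 0) == -1  # moving the cell newly cuts the net
assert net_gain_contribution(1, 0) == 0   # a single-cell net is unaffected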
Example #35
    def read(self):
        t = int(time.time())
        lt = time.localtime(t)
        if t == IOStream.timestamp:
            print("IOData.read() was broken, cause the time has been used.")
            return

        # print(lt.tm_sec, IOData.timestamp, t)
        IOStream.timestamp = t

        master = modbus.ModbusMaster.instance()
        values = master.read()
        if values:
            log.Log.instance().d("modbus" + str(values))
        else:
            # no value read from modbus
            log.Log.instance().d("modbus: None")
            self.write("Read modbus: None")
            return

        d = Data.instance()
        if values:
            d.timestamp = t
            d.pressure = DataDefines.round('pressure', values[0])
            d.water_level = DataDefines.round('water_level', values[1])
            d.ac_flow = values[2]
            d.in_flow = DataDefines.round('in_flow', values[3])
            d.v1 = values[4]
            d.v2 = values[5]
            d.v3 = values[6]
            d.c1 = DataDefines.round('c1', values[7])
            d.c2 = DataDefines.round('c2', values[8])
            d.c3 = DataDefines.round('c3', values[9])
            d.power_con = values[10]
            d.reactive_power = values[11]
            d.power_factor = DataDefines.round('power_factor', values[12])
            d.frequency = values[13]
            d.energy = DataDefines.round('energy', values[14])
            d.set_on_off(t, values[15])

        if d.high_frequency_record():  # high speed write into database every second
            if not self._hfr:
                self._hfr = True
                self._hfr_time = int(time.time())
                self._hfr_count = 0
                print("Start high frequency record...")
                self.write("---------------------------------")
                self.write("-> Start high frequency record...")
                self.write("---------------------------------")
            self._hfr_count += 1
            tornado.ioloop.IOLoop.instance().add_callback(
                self.write_into_db, d)
        else:
            if self._hfr:
                self._hfr = False
                print("End high frequency record...",
                      int(time.time()) - self._hfr_time, self._hfr_count)
                self.write("---------------------------------")
                self.write("-> End high frequency record...",
                           int(time.time()) - self._hfr_time, self._hfr_count)
                self.write("---------------------------------")
            if lt.tm_sec == 0:  # low speed write into database every minute
                tornado.ioloop.IOLoop.instance().add_callback(
                    self.write_into_db, d)
Example #36
def is_cut(net: Net, data: Data) -> bool:
    """
    :return: True if the given net is cut, otherwise False
    """
    return (data.get_net_distribution(net, 0) >
            0) and (data.get_net_distribution(net, 1) > 0)
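
calculate_cutsize, referenced in Example #12 but not shown in these examples, is simply the number of cut nets; a minimal sketch consistent with is_cut above (an assumption, not the original implementation):

def calculate_cutsize(circuit, data):
    # A net contributes 1 to the cutsize exactly when it has cells in both blocks,
    # i.e. when is_cut(net, data) returns True.
    return sum(1 for net in circuit.nets if is_cut(net, data))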