Example #1
    def __init__(self, *args, vav_nums=1, **kwargs):
        super(VavDevice, self).__init__(*args, **kwargs)
        for _ in range(0, vav_nums):
            vav_id = ':vav-{0}'.format(gen_uuid())
            zone_id = ':zone-{0}'.format(gen_uuid())
            self._register_vav_objs(vav_id, zone_id)

        self.add_capability(SemanticQueryServices)
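
Throughout these examples, gen_uuid is presumably the standard-library uuid.uuid4 aliased at import time. A minimal, self-contained sketch of the ID-minting loop above, with the VavDevice plumbing stripped away (register_vavs is a hypothetical stand-in for _register_vav_objs):

# Minimal sketch, assuming gen_uuid is uuid.uuid4 aliased at import.
from uuid import uuid4 as gen_uuid

def register_vavs(vav_nums=1):
    """Mint a namespaced VAV/zone identifier pair for each requested VAV."""
    pairs = []
    for _ in range(vav_nums):
        vav_id = ':vav-{0}'.format(gen_uuid())
        zone_id = ':zone-{0}'.format(gen_uuid())
        pairs.append((vav_id, zone_id))
    return pairs

print(register_vavs(2))  # two (vav_id, zone_id) pairs with fresh uuid4 suffixes
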
Example #2
    def test_db_output_create(self, init_models, init_sites, init_buildings):
        output_db = OutputDB()
        # check that database is empty
        result = output_db.get_all()
        assert list(result) == []

        # get service and model id
        models = list(ModelDB().get_all())
        model_id, service_id = models[0].id, models[0].service_id

        # create an item
        output = OutputEvent(service_id, model_id, id=str(gen_uuid()))
        new_output_id_evt = output_db.create(output)
        assert new_output_id_evt is not None

        # check that database is not empty now
        result = output_db.get_all()
        outputs = list(result)
        assert len(outputs) == 1
        assert outputs[0].module_id == service_id
        assert outputs[0].model_id == model_id

        # test with time series
        output = OutputTimeSeries(service_id,
                                  model_id,
                                  init_buildings[0],
                                  ValuesDescription('Temperature',
                                                    'DegreeCelsius', 20),
                                  id=str(gen_uuid()))
        new_output_id_ts = output_db.create(output)
        assert new_output_id_ts is not None

        result = output_db.get_all()
        outputs = list(result)
        assert len(outputs) == 2
        # pick the time-series output out of the two stored items
        output_ts = outputs[0] if isinstance(
            outputs[0], OutputTimeSeries) else outputs[1]
        assert output_ts.module_id == service_id
        assert output_ts.model_id == model_id
        assert output_ts.localization == output.localization
        assert output_ts.values_desc.unit == output.values_desc.unit
        assert output_ts.values_desc.kind == output.values_desc.kind
        assert output_ts.values_desc.sampling == output.values_desc.sampling

        # test relation between service and output
        model = ModelDB().get_by_id(model_id)
        assert set(model.event_output_ids) == {new_output_id_evt}
        assert set(model.timeseries_output_ids) == {new_output_id_ts}
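
The test depends on project-specific fixtures and classes (OutputDB, OutputEvent, OutputTimeSeries). A stripped-down pytest sketch of the same create-then-verify pattern, using a hypothetical in-memory store and again assuming gen_uuid is uuid.uuid4:

# Hedged sketch: InMemoryOutputDB is a made-up stand-in, not the real OutputDB.
from uuid import uuid4 as gen_uuid

class InMemoryOutputDB:
    def __init__(self):
        self._items = {}

    def create(self, item):
        self._items[item['id']] = item
        return item['id']

    def get_all(self):
        return list(self._items.values())

def test_output_create():
    db = InMemoryOutputDB()
    assert db.get_all() == []                 # store starts empty
    item = {'id': str(gen_uuid()), 'kind': 'Temperature'}
    new_id = db.create(item)
    assert new_id is not None                 # create() returns the generated id
    assert len(db.get_all()) == 1             # exactly one item after the insert
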
Example #3
    def predict_dep(self, target_srcids=[]):
        pred_g = self.new_graph(empty=True)
        occs = self.get_occs()
        for occ in occs:
            if self.target_building == 'ebu3b':
                srcid = occ.split('#')[-1]
                qstr = """
                select ?point where {{
                  ?occ bf:srcid "{0}" .
                  ?occ bf:isPointOf ?something .
                  ?point bf:isPointOf ?something .
                  ?point a/rdfs:subClassOf* brick:point .
                }}
                """.format(srcid)
            else:
                qstr = """
                select ?point where {{
                  {0} bf:isPointOf ?something .
                  ?point bf:isPointOf ?something .
                  ?point a/rdfs:subClassOf* brick:point .
                }}
                """.format(occ.n3())
            res = query_sparql(self.true_g, qstr)
            points = [row['point'] for row in res]
            random_obj = create_uri(str(gen_uuid()))  # This would be a VAV.
            for point in points:
                insert_triple(pred_g, (point, BF['isPointOf'], random_obj))
                insert_triple(pred_g, (random_obj, RDF['type'], BRICK['VAV']))

        pred_g.serialize('test.ttl', format='turtle')
        self.pred_g = pred_g
        print('Quiver done')
        return pred_g
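
create_uri, insert_triple, BF and BRICK come from the surrounding project. A hedged rdflib-only sketch of the core idea, minting a fresh VAV URI with uuid4 and attaching point URIs to it (namespace URLs and point names are illustrative):

# Sketch with plain rdflib; namespaces and point URIs are illustrative only.
from uuid import uuid4 as gen_uuid
from rdflib import Graph, Namespace, RDF

BRICK = Namespace('https://brickschema.org/schema/Brick#')
BF = Namespace('https://brickschema.org/schema/BrickFrame#')
EX = Namespace('http://example.com/building#')

pred_g = Graph()
points = [EX['znt-101'], EX['occ-101']]      # hypothetical point URIs
vav = EX['vav-{0}'.format(gen_uuid())]       # freshly minted VAV URI
for point in points:
    pred_g.add((point, BF['isPointOf'], vav))
pred_g.add((vav, RDF.type, BRICK['VAV']))

pred_g.serialize('test.ttl', format='turtle')
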
Example #4
 def predict_raw(self, target_srcids=[]):
     pred_g = self.new_graph(empty=True)
     if self.target_building == 'ebu3b':
         qstr = """
         select ?occ ?occ_srcid ?point ?point_srcid where {
             ?occ a brick:occupied_command.
             ?occ bf:srcid ?occ_srcid .
             ?occ bf:isPointOf ?something .
             ?point bf:isPointOf ?something .
             ?point bf:srcid ?point_srcid .
             ?occ bf:isPointOf ?something .
         }
         """
     else:
         raise Exception('qstr should be rewritten for {0}'.format(
             self.target_building))
     # TODO: Add confidences (==1)
     res = query_sparql(self.true_g, qstr)
     vav_dict = {}
     #for
     #    random_obj = create_uri(str(gen_uuid())) # This would be a VAV.
     for row in res:
         occ_srcid = str(row['occ_srcid'])
         if occ_srcid not in vav_dict:
             vav_dict[occ_srcid] = create_uri(str(
                 gen_uuid()))  # This would be a VAV.
         vav = vav_dict[occ_srcid]
         occ = create_uri(occ_srcid)
         point = create_uri(str(row['point_srcid']))
         insert_triple(pred_g, (point, BF['isPointOf'], vav))
         insert_triple(pred_g, (occ, BF['isPointOf'], vav))
         insert_triple(pred_g, (vav, RDF['type'], BRICK['vav']))
     self.pred_g = pred_g
     return pred_g
Example #5
    def __init__(self,
                 sparql_url,
                 brick_version,
                 base_ns='',
                 load_schema=True):
        BRICK_VERSION = brick_version
        self.sparql_url = sparql_url
        self.sparql = SPARQLWrapper(endpoint=self.sparql_url,
                                    updateEndpoint=self.sparql_url + '-auth')
        self.sparql.queryType = SELECT
        self.sparql.setCredentials('dba', 'dba')
        self.sparql.setHTTPAuth(DIGEST)
        if not base_ns:
            base_ns = 'http://example.com/'
        self.base_graph = 'urn:' + str(gen_uuid())
        self.sparql.addDefaultGraph(self.base_graph)
        self.namespaces = {
            '': BASE,
            'brick': BRICK,
            'bf': BF,
            'rdfs': RDFS,
            'rdf': RDF,
            'owl': OWL,
            'foaf': FOAF
        }
        sparql_prefix = ''
        #for prefix, ns in self.namespaces.items():
        #    ns_n3 = ns.uri.n3()
        #    sparql_prefix += 'prefix {0}: {1}\n'.format(prefix, ns_n3)
        #sparql_prefix += '\n'

        self._init_brick_constants()
        if load_schema:
            self.load_schema()
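
The uuid-specific part here is the unique default-graph name minted per instance ('urn:' plus a uuid4), together with the commented-out prefix header built from the namespace map. A small standard-library-only sketch of both pieces, with illustrative namespace URLs:

# Sketch: unique per-session graph URN plus a SPARQL prefix header.
from uuid import uuid4 as gen_uuid

base_graph = 'urn:' + str(gen_uuid())        # one scratch graph per session
namespaces = {
    'brick': 'https://brickschema.org/schema/Brick#',  # illustrative URLs
    'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
}
sparql_prefix = ''
for prefix, ns in namespaces.items():
    sparql_prefix += 'prefix {0}: <{1}>\n'.format(prefix, ns)

print(base_graph)
print(sparql_prefix)
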
Example #6
 def __init__(self, uuid=None):
     # Set the type to the name of the class.
     self.__dict__['type'] = self.__class__.__name__.lower()
     # If there is a uuid, instantiate it.
     if uuid:
         self.get(uuid)
     # Otherwise, generate a uuid - our object is new.
     else:
         self._id = str(gen_uuid())
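
A compact runnable sketch of the same base-model idea, assuming gen_uuid is uuid.uuid4; the lookup branch is reduced to a comment since get() belongs to the original project:

# Sketch: a base class that stamps every new instance with a uuid4 string id.
from uuid import uuid4 as gen_uuid

class Model:
    def __init__(self, uuid=None):
        self.type = self.__class__.__name__.lower()
        if uuid:
            self._id = uuid        # real code would load the stored object here
        else:
            self._id = str(gen_uuid())

class Sensor(Model):
    pass

print(Sensor().type, Sensor()._id)   # 'sensor' plus a fresh uuid4 string
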
Example #7
 def _get_verbosity(kwargs):
     """Private static helper method to get the verbosity settings from **kwargs."""
     verbose = kwargs.get('verbose', False)
     verbose_period = kwargs.get('verbose_period', 1)
     log = kwargs.get('log', None)
     if log == 'unique':
         uuid = gen_uuid().hex[:12]
         log = logger.easy_setup(uuid, console_output=True, filename="log_{}.txt".format(uuid))
     if log is None:
         log = LOCAL_LOG
     return verbose, verbose_period, log
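
The 'unique' branch derives a short per-run log name from uuid4().hex. A hedged sketch of that idea with the standard logging module in place of the project's logger.easy_setup helper:

# Sketch: per-run log file named after the first 12 hex digits of a uuid4.
import logging
from uuid import uuid4 as gen_uuid

def make_unique_logger():
    uid = gen_uuid().hex[:12]
    log = logging.getLogger(uid)
    log.setLevel(logging.INFO)
    log.addHandler(logging.FileHandler("log_{}.txt".format(uid)))
    log.addHandler(logging.StreamHandler())   # console output as well
    return log

make_unique_logger().info("started")
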
Example #8
    def test_db_output_update(self, init_models, init_buildings):
        building_ids, _ = init_buildings

        models = list(ModelDB().get_all())
        model_id, service_id = models[0].id, models[0].service_id

        output_db = OutputDB()
        output = OutputTimeSeries(
            service_id, model_id, building_ids[0],
            ValuesDescription('Temperature', 'DegreeCelsius', 20),
            id=str(gen_uuid()))
        new_output_id_ts = output_db.create(output)

        # get all items
        result = output_db.get_all()
        outputs = list(result)
        assert len(outputs) == 1
        assert [item.id for item in outputs] == [new_output_id_ts]

        # get an item by its ID!
        # for an output, the ID to be used is the URL
        output = output_db.get_by_id(outputs[0].id)

        # update item data
        new_sampling = 400
        new_model_id = models[1].id
        new_kind = 'Energy'
        new_unit = 'Joule'
        output.sampling = new_sampling
        output.model_id = new_model_id
        output.values_desc.kind = new_kind
        output.values_desc.unit = new_unit
        output_db.update(output.id, output)

        # check that item has really been updated in database
        updated_output = output_db.get_by_id(output.id)
        assert updated_output.id == output.id
        assert updated_output.model_id == new_model_id
        assert updated_output.module_id == output.module_id
        assert updated_output.localization == output.localization
        assert updated_output.values_desc.unit == new_unit
        assert updated_output.values_desc.kind == new_kind
        assert updated_output.values_desc.sampling == (
            output.values_desc.sampling)

        # delete an item by its ID
        output_db.remove(output.id)

        # get an item by its ID
        with pytest.raises(ItemNotFoundError):
            # it has been removed...
            output_db.get_by_id(output.id)
Example #9
 async def post(self,
                request: Request,
                create_entities: CreateEntitiesRequest = Body(..., description='A dictionary to describe entities to create. Keys are Brick Classes and values are the number of instances to create for the Class'),
                graph: str = Query(configs['brick']['base_graph'], description=graph_desc),
                token: HTTPAuthorizationCredentials = jwt_security_scheme,
                ) -> EntitiesCreateResponse:
     resp = defaultdict(list)
     for brick_type, entities_num in create_entities.items():
         for _ in range(entities_num):
             uri = UUID[str(gen_uuid())]
             await self.brick_db.add_triple(uri, RDF.type, URIRef(brick_type))
             # TODO: Check the brick_type based on the parameter in the future
             resp[brick_type].append(str(uri))
     return dict(resp)
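
Stripped of the FastAPI and Brick-database plumbing, the handler boils down to: for every requested class, mint the requested number of uuid-based URIs and report them back. A sketch of just that loop, with a hypothetical namespace prefix in place of the UUID namespace object:

# Sketch of the minting loop; UUID_NS is a hypothetical namespace prefix.
from collections import defaultdict
from uuid import uuid4 as gen_uuid

UUID_NS = 'http://example.com/uuid#'

def create_entities(request):
    resp = defaultdict(list)
    for brick_type, entities_num in request.items():
        for _ in range(entities_num):
            uri = UUID_NS + str(gen_uuid())
            resp[brick_type].append(uri)   # real code also stores an rdf:type triple
    return dict(resp)

print(create_entities({'https://brickschema.org/schema/Brick#VAV': 2}))
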
Example #10
 def add_entities_json(self, entities):
     for entity in entities:
         entity_type = entity['type']
         entity_id = entity.get('entity_id', None)
         if not entity_id:
             entity_id = str(gen_uuid())
             entity['entity_id'] = entity_id
         self.db.add_brick_instance(entity_id, entity_type)
         for prop, obj in entity['relationships']:
             self.db.add_triple(':' + entity_id, prop, ':' + obj)
         name = entity.get('name', None)
         if name:
             self.db.add_triple(':' + entity_id, 'bf:hasName', name)
     return entities
Example #11
    def add_to_db_and_exec(code, svars):
        uuid = str(gen_uuid())
        path = CodeExec.add_to_db(uuid, svars)

        try:
            with open(path, "w") as f:
                f.write(code)

        except IOError:
            return "Something went wrong"

        RamsimRunner.handle_request(uuid)

        return uuid
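
A hedged sketch of the write-then-hand-off step: the submitted code is saved under a uuid4 key and that key is returned as the job handle (run_job is a hypothetical stand-in for CodeExec.add_to_db plus RamsimRunner.handle_request):

# Sketch: persist submitted code under a uuid4 handle, then trigger the runner.
import os
import tempfile
from uuid import uuid4 as gen_uuid

def add_and_run(code, run_job=lambda uid: None):
    uid = str(gen_uuid())
    path = os.path.join(tempfile.gettempdir(), uid + '.py')
    try:
        with open(path, "w") as f:
            f.write(code)
    except IOError:
        return "Something went wrong"
    run_job(uid)                     # hand the uuid to the execution backend
    return uid

print(add_and_run("print('hello')"))
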
Example #12
 def add_reminder(self, hour: int, minute: int, weekday: int,
                  concept: int) -> Union[str, None]:
     log("ReminderData: Adding reminder...")
     # %H accepts hours 0-23 and %M minutes 0-59; weekday stays 1-7
     if 0 <= hour <= 23 and 0 <= minute <= 59 and 1 <= weekday <= 7:
         str_time = str(hour) + ":" + str(minute) + ":00"
         r_time = datetime.strptime(str_time, '%H:%M:%S').time()
         r_id = str(gen_uuid())
         reminder = (r_time, weekday, concept, r_id)
         log("ReminderData: Created reminder " + r_id)
         self._reminders.append(reminder)
         log(self._reminders)
         self._db_reminders[r_id] = reminder
         self._sort()
         log("ReminderData: Executing add callbacks")
         for f in self._add_callbacks.values():
             f(r_id)
         return r_id
Example #13
 async def add_entities_json_deprecated(self, entities):
     # TODO: Implement this
     raise HTTPException(status_code=501)
     for entity in entities:
         entity_type = entity['type']
         entity_id = entity.get('entity_id', None)
         if not entity_id:
             entity_id = str(gen_uuid())
             entity['entity_id'] = entity_id
         entity_id = URIRef(entity_id)
         self.brick_db.add_brick_instance(entity_id, entity_type)
         for prop, obj in entity['relationships']:
             obj = URIRef(obj)
             self.brick_db.add_triple(entity_id, prop, obj)
         name = entity.get('name', None)
         if name:
             self.brick_db.add_triple(entity_id, 'bf:hasName', name)
     return entities
Example #14
    async def _bulk_upsert_data(self, data, col_name):
        temp_table = '_temp_{0}'.format(gen_uuid().hex)
        async with self.pool.acquire() as conn:
            await conn.execute("""
CREATE TEMPORARY TABLE {temp_table} (
uuid TEXT, time TIMESTAMP, {col_name} {data_type})
            """.format(col_name=col_name,
                       temp_table=temp_table,
                       data_type=self.column_type_map[col_name]))
            await conn.copy_records_to_table(temp_table, records=data)
            res = await conn.execute("""
INSERT INTO {target_table} (uuid, time, {col_name})
SELECT * FROM {temp_table}
ON CONFLICT (time, uuid)
DO UPDATE SET {col_name}=EXCLUDED.{col_name}
WHERE {target_table}.{col_name} <> EXCLUDED.{col_name};
DROP TABLE {temp_table};
            """.format(target_table=self.TABLE_NAME,
                       temp_table=temp_table,
                       col_name=col_name))
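
The asyncpg upsert stages incoming rows in a temporary table whose name is suffixed with uuid4().hex so that concurrent calls never collide. A simplified, runnable sqlite3 analogue of the same staging-table-plus-upsert shape (table and column names are illustrative; sqlite needs version 3.24+ for ON CONFLICT ... DO UPDATE):

# Simplified sqlite3 analogue of the uuid-named staging-table upsert.
import sqlite3
from uuid import uuid4 as gen_uuid

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE data (uuid TEXT, time TEXT, value REAL, '
             'PRIMARY KEY (time, uuid))')

temp_table = '_temp_{0}'.format(gen_uuid().hex)    # unique name per call
conn.execute('CREATE TEMPORARY TABLE {0} (uuid TEXT, time TEXT, value REAL)'
             .format(temp_table))
rows = [('sensor-1', '2020-01-01T00:00:00', 21.5)]
conn.executemany('INSERT INTO {0} VALUES (?, ?, ?)'.format(temp_table), rows)
# WHERE true avoids the INSERT ... SELECT / ON CONFLICT parsing ambiguity.
conn.execute('INSERT INTO data SELECT * FROM {0} WHERE true '
             'ON CONFLICT (time, uuid) DO UPDATE SET value=excluded.value'
             .format(temp_table))
conn.execute('DROP TABLE {0}'.format(temp_table))
print(conn.execute('SELECT * FROM data').fetchall())
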
Example #15
 def create_entity(self, entity_type):
     return str(gen_uuid())
Example #16
def process_file(file_path, base_sandbox_path, final_path, id):
    filename = os.path.basename(file_path)
    status = {"error":False, "state":"started", "result":None, "file_to_remove":file_path, \
              "exception":None, "sandbox_id":id, "filename":filename, "file_to_mail":None, \
              "file_list":[], "file_info_str":None, "json_failed":False}
    try:
        filename = os.path.basename(file_path)
        if not filename.endswith(".zip"):
            logging.warning(
                "Random shit received: {}, ignoring!".format(filename))
            status["state"] = "finished"
            status["error"] = True
            status["result"] = "random_nonzip_shit"
            os.remove(file_path)
            status["file_to_remove"] = None
            raise ValueError("Got random shit")
        status["state"] = "processing_magic"
        logging.info("{}: processing {}".format(id, filename))
        # read in binary mode so libmagic sees raw bytes (and avoid shadowing type())
        with open(file_path, "rb") as f:
            file_type = magic.from_buffer(f.read(1024))
        logging.info("{}: '{}' file".format(id, file_type))
        status["state"] = "checking_magic"
        if not file_type.startswith("Zip archive") or not is_zipfile(file_path):
            logging.warning(
                "Random .zip-imitating shit received: {}, ignoring!".format(
                    filename))
            status["state"] = "finished"
            status["error"] = True
            status["result"] = "random_fakezip_shit"
            os.remove(file_path)
            status["file_to_remove"] = None
            raise ValueError("Got random shit pretending to be ZIP")
        #TODO: check file size
        status["state"] = "moving_into_sandbox_base"
        os.rename(file_path, os.path.join(base_sandbox_path, filename))
        status["file_to_remove"] = os.path.join(base_sandbox_path, filename)
        logging.info("{}: Moved file to the base sandbox folder".format(id))
        sandbox_dir = os.path.join(base_sandbox_path, str(id))
        logging.info("Cleaning sandbox dir: {}".format(sandbox_dir))
        status["state"] = "cleaning_sandbox_base"
        clean_dir(sandbox_dir)
        sandbox_base_path = os.path.join(base_sandbox_path, filename)
        sandboxed_file_path = os.path.join(sandbox_dir, filename)
        status["state"] = "moving_into_sandbox"
        shutil.move(sandbox_base_path, sandboxed_file_path)
        status["file_to_remove"] = sandboxed_file_path
        logging.info("{}: Moved the file into the sandbox".format(id))
        status["state"] = "extracting_into_sandbox"
        with ZipFile(sandboxed_file_path, 'r') as zf:
            status["file_list"] = list(zf.namelist())
            status["file_info_to_str"] = dump_zipinfos_to_str(zf.infolist())
            zf.extractall(sandbox_dir)
        status["state"] = "removing_original"
        logging.info("{}: Removing original file: {}".format(
            id, sandboxed_file_path))
        os.remove(sandboxed_file_path)
        status["file_to_remove"] = None
        status["state"] = "generating_dest_filename"
        # filename, maybe extension
        fme = filename.rsplit('.', 1)
        if len(fme) == 1:
            logging.warning("lol wtf {} has no extension?".format(filename))
            result_path = os.path.join(final_path,
                                       "{}-{}".format(filename, gen_uuid()))
        elif len(fme) == 2:
            # Expected result
            result_path = os.path.join(
                final_path, "{}-{}.{}".format(fme[0], gen_uuid(), fme[-1]))
        else:
            logging.warning(
                "lol wtf len({}.rsplit('.', 1)) != 2 ?".format(filename))
            result_path = os.path.join(final_path,
                                       "{}-{}".format(filename, gen_uuid()))
        status["state"] = "packing_files"
        with ZipFile(result_path, 'w', ZIP_DEFLATED) as zf:
            for fn in status["file_list"]:
                zf.write(os.path.join(sandbox_dir, fn), fn)
        status["file_to_mail"] = result_path
        status["state"] = "success"
    except Exception as e:
        logging.exception("Failure during archive processing!")
        status["error"] = True
        status["exception"] = [
            traceback.format_exc(),
            {k: str(v)
             for k, v in inspect.trace()[-1][0].f_locals.items()}
        ]
        if status["file_to_remove"]:
            try:
                os.remove(status["file_to_remove"])
            except:
                logging.exception("Failure during file removal!")
                status["exception"].append([traceback.format_exc()])
    heading = "ZeroPhone bugreport upload fail" if status.get(
        "error", False) else "ZeroPhone bugreport uploaded"
    files = [status["file_to_mail"]] if status.get("file_to_mail",
                                                   None) else []
    for key in status.keys():
        if status[key] is None:
            status[key] = "None"
    logging.info(status)
    try:
        text = json.dumps(status)
    except:
        logging.exception("Status-to-JSON conversion failed!")
        status["json_failed"] = True
        text = str(status)
    try:
        sendMail(config["mail_destination"], 'ZeroPhone bugreport <*****@*****.**>', \
                 heading, text, files, server=config.get('mail_server', None))
    except Exception as e:
        try:
            status["exception"].append([traceback.format_exc()])
            status["exception"].append([{
                k: str(v)
                for k, v in inspect.trace()[-1][0].f_locals.items()
            }])
            logging.exception(status)
        except:
            logging.exception("wtf")
    return id
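
The uuid4 call in this handler serves one purpose: the destination filename gets a uuid spliced between base name and extension so repeated uploads never overwrite each other. A minimal sketch of just that step:

# Sketch: splice a uuid4 between base name and extension to avoid collisions.
import os
from uuid import uuid4 as gen_uuid

def unique_dest(filename, final_path):
    fme = filename.rsplit('.', 1)          # filename, maybe extension
    if len(fme) == 2:
        name = "{0}-{1}.{2}".format(fme[0], gen_uuid(), fme[1])
    else:                                  # no extension at all
        name = "{0}-{1}".format(filename, gen_uuid())
    return os.path.join(final_path, name)

print(unique_dest("bugreport.zip", "/tmp"))
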
Example #17
def uuid4():
    """
    Method to generate uuid as primary key for student and student_class
    :return: string as uuid
    """
    return str(gen_uuid())
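
The docstring says this wrapper supplies primary keys for student and student_class; such a callable is typically wired in as a column default. A hedged SQLAlchemy 1.4-style sketch (the Student model and its columns are hypothetical):

# Hedged sketch: the uuid4() wrapper used as a SQLAlchemy column default.
from uuid import uuid4 as gen_uuid
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

def uuid4():
    return str(gen_uuid())

class Student(Base):                 # hypothetical model
    __tablename__ = 'student'
    id = Column(String(36), primary_key=True, default=uuid4)
    name = Column(String(100))
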
Example #18
 def register_remove_callback(self, f: callable) -> str:
     log("ReminderData: Registering new remove callback")
     identifier = str(gen_uuid())
     self._remove_callbacks[identifier] = f
     log("ReminderData: Registered " + identifier)
     return identifier
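
A small generic sketch of the same registry idea: callbacks are stored under a uuid4 key and the key doubles as the deregistration handle (CallbackRegistry is a made-up name, not part of the original ReminderData class):

# Sketch: uuid4 strings as opaque handles for registered callbacks.
from uuid import uuid4 as gen_uuid

class CallbackRegistry:
    def __init__(self):
        self._callbacks = {}

    def register(self, f):
        identifier = str(gen_uuid())
        self._callbacks[identifier] = f
        return identifier              # handle for later removal

    def unregister(self, identifier):
        self._callbacks.pop(identifier, None)

reg = CallbackRegistry()
handle = reg.register(print)
reg.unregister(handle)
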
Example #19
            if isinstance(val, float):
                if np.isnan(val):
                    continue
            if tag in h_tags:
                if val == 'M':
                    val = 'm:'
                datum[tag] = val
        data.append(datum)


# Read rows to instantiate Brick
g = Graph() # init graph
entity_dict = defaultdict(list) # To validate if all entities are instantiated.
for row in data:
    entity_h_tags = set()
    identifier = gen_uuid()
    raw_ref_dict = []
    for (tag, value) in row.items():
        if tag == 'id':
            identifier = quote_plus(value)
        elif value in ['Marker', 'm:']:
            entity_h_tags.add(tag)
        elif 'Ref' in tag:
            ref_type = tag[:-3]
            ref_id = quote_plus(value[1:]) # remove '@'
            entity_dict[ref_type].append(ref_id)
            raw_ref_dict.append((ref_type, ref_id)) 

    # Determine is-a relationship from the tag set.
    if identifier == '%401d552c40-54c9904c+%22AHU+03+Supply+Air+Pressure+Filter+DP%22':
        pdb.set_trace()
Example #20
def generate_uuid():
    uuid = str(gen_uuid())
    config["uuid"] = uuid
    save_config(config)
    PrettyPrinter("Generated new UUID!", i, o, 3)
Example #21
def uuid4():
    return str(gen_uuid())