def configure(self, env):
    """Configure HiveServer2; stacks older than HDP 2.2 need the Tez jars
    installed explicitly first."""
    import params
    env.set_params(params)
    # De Morgan of the original check: missing stack version, or a version
    # below 2.2, means the Tez jars are not shipped with the stack.
    stack_version = params.hdp_stack_version
    if stack_version == "" or compare_versions(stack_version, '2.2') < 0:
        install_tez_jars()
    hive(name='hiveserver2')
def configure(self, env):
    """Configure the Hive client, then copy the bundled HBase handler jar
    into the client lib directory and refresh its unversioned symlink."""
    import params
    env.set_params(params)
    hive(name='client')
    # The service package dir is the path component before '/scripts'.
    service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
    commands = (
        format("cp -rf {service_packagedir}/scripts/hive-hbase-handler-1.2.1.jar /usr/hdp/current/hive-client/lib/"),
        format("ln -sf /usr/hdp/current/hive-client/lib/hive-hbase-handler-1.2.1.jar hive-hbase-handler.jar"),
    )
    for cmd in commands:
        Execute('echo "Running ' + cmd + '" as root')
        # Best effort: a failed copy/link is logged but does not abort.
        Execute(cmd, ignore_failures=True)
def on_entity_destroyed(self, entity):
    """Remove the destroyed entity's rigid body from the physics world."""
    parent = entity.get_parent()
    nodepath = parent.find("+BulletRigidBodyNode")
    self._world.remove_rigid_body(nodepath.node())


def build_physics_manager(cls, i, ex, args):
    """Physics manager hive: ticks the physics world at a pulled tick rate
    and exposes angular/linear velocity accessors to the parent hive."""
    i.tick_rate = hive.property(cls, "tick_rate", 'int')
    i.pull_tick_rate = hive.pull_in(i.tick_rate)
    ex.tick_rate = hive.antenna(i.pull_tick_rate)

    i.do_update = hive.triggerfunc(cls.update)
    # Refresh the tick rate immediately before each world update.
    hive.trigger(i.do_update, i.pull_tick_rate, pretrigger=True)
    i.on_tick = hive.triggerable(i.do_update)
    ex.tick = hive.entry(i.on_tick)

    ex.on_entity_destroyed = hive.plugin(cls.on_entity_destroyed, "entity.on_destroyed", policy=hive.SingleRequired)
    ex.on_entity_created = hive.plugin(cls.on_entity_created, "entity.on_created", policy=hive.SingleRequired)

    ex.get_angular_velocity = hive.plugin(cls.get_angular_velocity, "entity.angular_velocity.get", export_to_parent=True)
    ex.set_angular_velocity = hive.plugin(cls.set_angular_velocity, "entity.angular_velocity.set", export_to_parent=True)
    # BUG FIX: the linear-velocity plugins were registered under the
    # angular-velocity identifiers ("entity.angular_velocity.*"), colliding
    # with the pair above; register them under their own identifiers.
    ex.get_linear_velocity = hive.plugin(cls.get_linear_velocity, "entity.linear_velocity.get", export_to_parent=True)
    ex.set_linear_velocity = hive.plugin(cls.set_linear_velocity, "entity.linear_velocity.set", export_to_parent=True)


PhysicsManager = hive.hive("PhysicsManager", build_physics_manager, builder_cls=_PhysicsManagerClass)
import hive


def build_encode(i, ex, args):
    """Encode a string into bytes using a configurable text encoding."""
    args.encoding = hive.parameter('str', 'utf-8')
    ex.encoding = hive.variable('str', args.encoding)

    # Input: string to encode.
    i.string = hive.variable("str")
    i.pull_string = hive.pull_in(i.string)
    ex.string = hive.antenna(i.pull_string)

    # Output: the encoded bytes.
    i.bytes_ = hive.variable('bytes')
    i.pull_bytes_ = hive.pull_out(i.bytes_)
    ex.bytes_ = hive.output(i.pull_bytes_)

    def encode(self):
        self._bytes_ = self._string.encode(self.encoding)

    i.do_encoding = hive.modifier(encode)

    # Pulling the bytes first refreshes the input string, then re-encodes.
    hive.trigger(i.pull_bytes_, i.pull_string, pretrigger=True)
    hive.trigger(i.pull_string, i.do_encoding)


Encode = hive.hive("Encode", build_encode)
def configure(self, env):
    """Apply cluster parameters, install Hive, and configure the client."""
    import params
    env.set_params(params)
    install_hive()
    hive(name='client')
def print_house(self):
    # Report the house resolved through the injected getter.
    print("Found House for", self.name, self.get_house())


def set_get_house(self, get_house_func):
    # Socket callback: store the house getter supplied by a parent hive.
    self.get_house = get_house_func
    print("SET FUNC", get_house_func, self.name, self)


def build_dog(cls, i, ex, args):
    """Dog hive: a print entry point plus a socket for a house getter."""
    i.print_house = hive.triggerable(cls.print_house)
    ex.print_house = hive.entry(i.print_house)
    ex.some_socket = hive.socket(cls.set_get_house, identifier="get.house", data_type="float")


DogHive = hive.hive("DogHive", build_dog, Dog)


def declare_filler(meta_args):
    """Meta-parameter: remaining nesting depth (default 2)."""
    meta_args.i = hive.parameter("int", 2)


def build_filler(i, ex, args, meta_args):
    """Recursively nest FillerHives until the depth counter reaches zero,
    then terminate the chain with a DogHive."""
    print("NEW FILLER", meta_args.i)
    if meta_args.i:
        i.inner = FillerHive(meta_args.i - 1, import_namespace=True)
    else:
        i.inner = DogHive(import_namespace=True, name="DOGGY")
    # Hook the inner hive in either case.
    ex.inner = hive.hook(i.inner)
from math import sqrt

import hive


def normalise_modifier(self):
    # Divide each component by the vector length to obtain a unit vector.
    x, y, z = self._vector
    length = sqrt(x ** 2 + y ** 2 + z ** 2)
    self._result = tuple(component / length for component in (x, y, z))


def build_normalise(i, ex, args):
    """Find the unit vector for a given vector"""
    # Input vector.
    i.vector = hive.variable("vector")
    i.pull_vector = hive.pull_in(i.vector)
    ex.vector = hive.antenna(i.pull_vector)

    # Normalised output vector.
    i.result = hive.variable("vector")
    i.pull_result = hive.pull_out(i.result)
    ex.result = hive.output(i.pull_result)

    i.calculate = hive.modifier(normalise_modifier)
    # Recompute immediately before the result is pulled.
    hive.trigger(i.pull_result, i.calculate, pretrigger=True)


Normalise = hive.hive("Normalise", build_normalise)
def start(self, env):
    """Configure, then start, the HiveServer2 service."""
    self.configure(env)
    hive(action='start', service='hive-server2')
import hive
from .event import EventHandler


class OnStopClass:
    """Builder class: wires the hive's internal on_stop hook to the
    application's "stop" event."""

    def __init__(self):
        self._hive = hive.get_run_hive()

    def set_add_handler(self, add_handler):
        # Register for the "stop" event; 'match' mode requires an exact match.
        callback = self._hive._on_stop
        add_handler(EventHandler(callback, ("stop", ), mode='match'))


def build_on_stop(cls, i, ex, args):
    """Listen for quit event"""
    ex.get_add_handler = hive.socket(cls.set_add_handler, "event.add_handler")
    i.on_stop = hive.triggerfunc()
    ex.on_stop = hive.hook(i.on_stop)


OnStop = hive.hive("OnStop", build_on_stop, builder_cls=OnStopClass)
import hive


def build_zip(i, ex, args):
    """Merge two iterables into a single iterable"""
    # The two input iterables.
    i.iterable_a = hive.attribute()
    i.iterable_b = hive.attribute()
    i.pull_a = hive.pull_in(i.iterable_a)
    i.pull_b = hive.pull_in(i.iterable_b)
    ex.a = hive.antenna(i.pull_a)
    ex.b = hive.antenna(i.pull_b)

    # Output: the zipped iterator.
    i.result = hive.attribute("iterator")
    i.pull_result = hive.pull_out(i.result)
    ex.result = hive.output(i.pull_result)

    def zip_inputs(self):
        self._result = zip(self._iterable_a, self._iterable_b)

    i.do_zip = hive.modifier(zip_inputs)

    # Pull chain: refresh a, then b, then rebuild the zip object.
    hive.trigger(i.pull_result, i.pull_a, pretrigger=True)
    hive.trigger(i.pull_a, i.pull_b)
    hive.trigger(i.pull_b, i.do_zip)


Zip = hive.hive("Zip", build_zip)
def configure(self, env):
    """Configure HiveServer2, installing the Tez jars on pre-HDP-2.2 stacks."""
    import params
    env.set_params(params)
    # HDP 2.2+ ships the Tez jars with the stack itself.
    if not params.stack_is_hdp22_or_further:
        install_tez_jars()
    hive(name='hiveserver2')
def configure(self, env):
    """Apply cluster parameters and configure the Hive client."""
    import params
    env.set_params(params)
    hive(name="client")
def status(self, env):
    """Report the current status of the HiveServer2 service."""
    hive(action='status', service='hive-server2')
def stop(self, env):
    """Stop the HiveServer2 service."""
    hive(action='stop', service='hive-server2')
def configure(self, env):
    """Apply cluster parameters and configure the Hive Metastore."""
    import params
    env.set_params(params)
    hive(name='metastore')
import hive


def do_while(self):
    # Keep firing trig_out for as long as the freshly-pulled condition holds.
    while True:
        self.condition_in()
        if not self.condition:
            return
        self.trig_out()


def build_while(i, ex, args):
    """Trigger output while condition is True"""
    ex.condition = hive.attribute()
    i.condition_in = hive.pull_in(ex.condition)
    ex.condition_in = hive.antenna(i.condition_in)

    i.trig = hive.triggerfunc()
    ex.trig_out = hive.hook(i.trig)

    i.trig_in = hive.modifier(do_while)
    ex.trig_in = hive.entry(i.trig_in)


While = hive.hive("While", build_while)
import hive
from json import dumps


def build_dumps(i, ex, args):
    """Interface to JSON dumps function"""

    def serialise(self):
        self._result = dumps(self._object_)

    # Serialised JSON string output.
    i.result = hive.attribute('str')
    # Object to serialise.
    i.object_ = hive.attribute()

    i.pull_result = hive.pull_out(i.result)
    ex.result = hive.output(i.pull_result)

    i.pull_object = hive.pull_in(i.object_)
    ex.object_ = hive.antenna(i.pull_object)

    i.do_dumps = hive.modifier(serialise)

    # Pulling the result refreshes the input object, then serialises it.
    hive.trigger(i.pull_result, i.pull_object, pretrigger=True)
    hive.trigger(i.pull_object, i.do_dumps)


Dumps = hive.hive("Dumps", build_dumps)
except: invokedUsername = '' if folderID != '': count = 1 max_count = int(addon.getSetting(PLUGIN_NAME+'_numaccounts')) loop = True while loop: instanceName = PLUGIN_NAME+str(count) try: username = addon.getSetting(instanceName+'_username') if username == invokedUsername: #let's log in service = hive.hive(PLUGIN_URL,addon,instanceName, user_agent) loop = False except: break if count == max_count: #fallback on first defined account service = hive.hive(PLUGIN_URL,addon,PLUGIN_NAME+'1', user_agent) break count = count + 1 service.buildSTRM(path + '/'+title,folderID) elif filename != '':
import hive


def decompose_modifier(self):
    # Refresh the source vector, then split it into its scalar components.
    self._vector.pull()
    vector = self._vector
    self._x = vector[0]
    self._y = vector[1]
    self._z = vector[2]


def build_decompose(i, ex, args):
    """Decompose a vector into its x, y and z components"""
    i.refresh = hive.modifier(decompose_modifier)

    # One float output per component; each pull refreshes the vector first.
    for name in ('x', 'y', 'z'):
        attr = hive.variable("float")
        setattr(i, name, attr)
        pull_out = hive.pull_out(attr)
        setattr(ex, name, hive.output(pull_out))
        hive.trigger(pull_out, i.refresh, pretrigger=True)

    # Input vector.
    i.vector = hive.variable("vector")
    i.pull_vector = hive.pull_in(i.vector)
    ex.vector = hive.antenna(i.pull_vector)


Decompose = hive.hive("Decompose", build_decompose)
h.connect(i.call, i.woof) i.woof2 = h.modifier(woof2) i.bark = h.triggerfunc() h.trigger(i.bark, i.woof) i.woofed = h.triggerfunc() ex.woofs = h.property(cls, "woofs") ex.name = h.property(cls, "name") ex.woofs2 = h.variable(data_type="int", start_value=0) ex.woof = h.entry(i.woof) ex.woofed = h.hook(i.woofed) ex.bark = h.hook(i.bark) ex.call = h.hook(i.call) dog = h.hive("dog", build_dog, Dog) spot = dog("Spot") spike = dog("Spike") print(3) print(spot.name) #=> Spot spot.call() #=> CALL Spot WOOF Spot 1 h.connect(spot.call, spot._woof2) spot.call() #=> CALL Spot WOOF Spot 2 WOOF2 Spot 1 print("SPOT WOOFS", spot.woofs, spot.woofs2) #=> SPOT WOOFS 2 1 print(4) spot.bark() #=> WOOF Spot 3 print(5) spike.call() #=> CALL Spike WOOF Spike 1 spike.call() #=> CALL Spike WOOF Spike 2
def build_collision(cls, i, ex, args):
    """Interface to collision events for bound hive"""
    # Collision data captured by the builder class for the latest hit.
    i.hit_entity = hive.property(cls, "hit_entity_id", "int.entity_id")
    i.hit_position = hive.property(cls, "hit_position", "vector")
    i.hit_normal = hive.property(cls, "hit_normal", "vector")
    i.hit_impulse = hive.property(cls, "hit_impulse", "vector")

    i.pull_hit_entity = hive.pull_out(i.hit_entity)
    i.pull_hit_position = hive.pull_out(i.hit_position)
    i.pull_hit_normal = hive.pull_out(i.hit_normal)
    i.pull_hit_impulse = hive.pull_out(i.hit_impulse)

    ex.hit_entity = hive.output(i.pull_hit_entity)
    ex.hit_position = hive.output(i.pull_hit_position)
    ex.hit_normal = hive.output(i.pull_hit_normal)
    ex.hit_impulse = hive.output(i.pull_hit_impulse)

    # Fired whenever a collision is reported.
    i.on_collided = hive.triggerfunc()
    ex.on_collided = hive.hook(i.on_collided)

    # Sockets for the entity-lookup and event-registration services.
    ex.get_get_entity_id = hive.socket(cls.set_get_entity_id, "entity.get_bound")
    ex.get_add_handler = hive.socket(cls.set_add_handler, "event.add_handler")


Collision = hive.hive("Collision", build_collision, builder_cls=_CollisionClass)
def main(zone_area=8.85, zone_ratio=1.179, zone_height=2.5, azimuth=90,
         absorptance=.5, wall_u=4.083, wall_ct=165.6, ground=0, roof=1,
         shading=[0, 0.5, 0, 0], living_room=False, exp=[1, 1, 0, 0],
         wwr=[0, 0.219, 0, 0], open_fac=[0, 0.45, 0, 0], glass_fs=.87,
         equipment=0, lights=5, bldg_ratio=0.85, floor_height=0, door=True,
         bound='hive', input_file='seed.json', output='teste_model.epJSON',
         construction="", convert=False):
    ## Main function that creates the epJSON model.
    # NOTE(review): the list defaults (shading/exp/wwr/open_fac) are mutable
    # default arguments; they are only indexed here, never mutated, but
    # callers should not mutate the shared default objects.
    ## INPUTS:
    ## zone_area - The area of the "APP" in square meters.
    ## zone_ratio - Ratio between y length (walls 1 and 3), and x length
    #  (walls 0 and 2). Or: Ratio = zone_y/zone_x.
    ## zone_height - Distance from floor to ceiling in meters.
    ## azimuth - Angle from north.
    ## absorptance - The value of the absorptance of walls and roof.
    ## wall_u - The value of transmittance of the walls (concrete+eps
    #  approach only).
    ## wall_ct - The value of thermal capacity of the walls (concrete+eps
    #  approach only).
    ## ground - Condition of exposure: 0 = Adiabatic, 1 = Outdoors.
    ## roof - Condition of exposure: 0 = Adiabatic, 1 = Outdoors.
    ## shading - the length of horizontal shading in meters.
    ## living_room - Defines schedules for occupation: True = living
    #  room occupation pattern, False = bedroom occupation pattern.
    ## exp - List with condition of exposure of walls: 0 = not exposed,
    #  1 = exposed (Outdoors).
    ## wwr - List with WWR of the walls.
    ## open_fac - List with the opening factors of windows.
    ## glass_fs - SHGC of the windows' glass.
    ## equipment - Equipment loads in Watts
    ## lights - Lights loads in Watts per square meters.
    ## bldg_ratio - The ratio of the reference building.
    ## floor_height - Distance from zone's floor to the ground in meters.
    ## door - Condition to create or not a door in the zone.
    ## bound - String that defines the boundary condition of internal
    #  walls. May be "hive", "double", or "adiabatic".
    ## input_file - The name of the seed file. The seed files contains
    #  the information that do not depend on the input variables.
    ## output - The name of the generated epJSON model.
    ## construction - The name of a json file with a construction object
    #  called "wall_construction" and the materials objects.
    ## convert - Condition to generate a .idf model. energyplus has to
    #  be an environment variable for it to work!

    print(output)

    #### Defining zone's x and y length --------------------------------
    zone_x = (zone_area / zone_ratio)**(1 / 2)
    zone_y = (zone_area / zone_x)

    #### Defining Occupation condition ---------------------------------
    if living_room:
        occupation_sch = 'livingroom_occup'
        light_sch = 'livingroom_lights'
        activity_sch = 'livingroom_activity'
        number_of_people = 4
    else:
        occupation_sch = 'bedroom_occup'
        light_sch = 'bedroom_lights'
        activity_sch = 'bedroom_activity'
        number_of_people = 2

    #### START BUILDING OBJECTS ----------------------------------------
    model = dict()

    ##### Building -----------------------------------------------------
    # Keyed by the output name with the 7-character '.epJSON' suffix removed.
    model["Building"] = {
        output[:-7]: {
            "north_axis": azimuth,
            "loads_convergence_tolerance_value": 0.04,
            "maximum_number_of_warmup_days": 25,
            "solar_distribution": "FullInteriorAndExteriorWithReflections",
            "temperature_convergence_tolerance_value": 0.4,
            "idf_max_extensible_fields": 0,
            "idf_max_fields": 8,
            "terrain": "City"
        }
    }

    ##### ZONE ---------------------------------------------------------
    model["Zone"] = {"room": {"z_origin": floor_height}}

    ##### Building Surface ---------------------------------------------
    model["BuildingSurface:Detailed"] = {
        # Ceiling
        "ceiling": {
            "vertices": [{
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }]
        },
        # Floor
        "floor": {
            "vertices": [{
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }]
        },
        # Walls: 0 = up, 1 = right, 2 = down, 3 = left
        "wall-0": {
            "vertices": [{
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }]
        },
        "wall-1": {
            "vertices": [{
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }]
        },
        "wall-2": {
            "vertices": [{
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }]
        },
        "wall-3": {
            "vertices": [{
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": zone_height
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": 0.0
            }, {
                "vertex_x_coordinate": 0.0,
                "vertex_y_coordinate": 0.0,
                "vertex_z_coordinate": zone_height
            }]
        }
    }

    # Top Condition
    if roof == 0:
        ceiling_bound = {
            "outside_boundary_condition": "Adiabatic",
            "sun_exposure": "NoSun",
            "surface_type": "Ceiling",
            "wind_exposure": "NoWind"
        }
    else:
        ceiling_bound = {
            "outside_boundary_condition": "Outdoors",
            "sun_exposure": "SunExposed",
            "surface_type": "Roof",
            "wind_exposure": "WindExposed"
        }
    model["BuildingSurface:Detailed"]["ceiling"].update(ceiling_bound)

    # Bottom condition
    if ground == 0:
        ground_bound = {
            "outside_boundary_condition": "Adiabatic",
            "sun_exposure": "NoSun",
            "wind_exposure": "NoWind"
        }
    else:
        ground_bound = {
            "outside_boundary_condition": "Ground",
            "sun_exposure": "NoSun",
            "wind_exposure": "NoWind"
        }
    model["BuildingSurface:Detailed"]["floor"].update(ground_bound)

    # Wall exposition condition
    exposed_wall = {
        "outside_boundary_condition": "Outdoors",
        "sun_exposure": "SunExposed",
        "wind_exposure": "WindExposed"
    }
    adiabatic_wall = {
        "outside_boundary_condition": "Adiabatic",
        "sun_exposure": "NoSun",
        "wind_exposure": "NoWind"
    }
    hive_wall = {
        "outside_boundary_condition": "Surface",
        "sun_exposure": "NoSun",
        "wind_exposure": "NoWind"
    }

    # Any non-exposed wall needs the AFN zone/crack scaffolding.
    if sum(exp) < 4:
        model['AirflowNetwork:MultiZone:Zone'] = {}
        model["AirflowNetwork:MultiZone:Surface:Crack"] = {
            'crack': {
                "air_mass_flow_coefficient_at_reference_conditions": 0.01,
                "air_mass_flow_exponent": 0.667,
                "idf_max_extensible_fields": 0,
                "idf_max_fields": 4
            }
        }

    hive_cracks = {}
    hive_externalnodes = {}
    for i in range(4):
        if exp[i] > 0:
            model["BuildingSurface:Detailed"]["wall-" +
                                              str(i)].update(exposed_wall)
        else:
            if bound == 'hive':
                # Build a neighbouring "hive" zone behind this wall and link
                # the two walls as an interzone Surface pair.
                model["BuildingSurface:Detailed"]["wall-" +
                                                  str(i)].update(hive_wall)
                model["BuildingSurface:Detailed"]["wall-" + str(
                    i)]["outside_boundary_condition_object"] = "hive_" + str(
                        i) + "_wall-" + str((i + 2) % 4)
                model["Zone"][
                    "hive_" +
                    str(i)], hive_afn, hive_surfaces, door_return = hive(
                        i, zone_x, zone_y, zone_height, floor_height, ground,
                        roof, door)
                afn_zone = hive_afn['zone']
                cracks_return = hive_afn['cracks']
                externalnodes_return = hive_afn['nodes']
                model['AirflowNetwork:MultiZone:Zone'].update(afn_zone)
                model["BuildingSurface:Detailed"].update(hive_surfaces)
                hive_cracks.update(cracks_return)
                hive_externalnodes.update(externalnodes_return)
                # NOTE(review): hive_door is only bound when some hive zone
                # returns a door; it is read later under `if door and
                # bound == 'hive'` — presumably hive() always returns a door
                # in that configuration. Verify against hive().
                if len(door_return) > 0:
                    hive_door = door_return
            elif bound == 'double' or bound == 'doublewall':
                model["BuildingSurface:Detailed"]["wall-" +
                                                  str(i)].update(exposed_wall)
            else:
                model["BuildingSurface:Detailed"]["wall-" + str(i)].update(
                    adiabatic_wall)

    #### FENESTRATION --------------------------------------------------
    model["FenestrationSurface:Detailed"] = {}
    for i in range(4):
        if wwr[i] > 0:
            # Window is vertically centred; height fraction = wwr[i].
            window_z1 = zone_height * (1 - wwr[i]) * .5
            window_z2 = window_z1 + (zone_height * wwr[i])
            # Horizontal extents per wall (inset by 0.1% to avoid touching
            # the wall edges).
            if i == 0:
                window_x1 = zone_x * .999
                window_x2 = zone_x * .001
                window_y1 = zone_y
                window_y2 = zone_y
            elif i == 1:
                window_x1 = zone_x
                window_x2 = zone_x
                window_y1 = zone_y * .001
                window_y2 = zone_y * .999
            elif i == 2:
                window_x1 = zone_x * .001
                window_x2 = zone_x * .999
                window_y1 = 0
                window_y2 = 0
            else:
                window_x1 = 0
                window_x2 = 0
                window_y1 = zone_y * .999
                window_y2 = zone_y * .001
            model["FenestrationSurface:Detailed"]["window_" + str(i)] = {
                "building_surface_name": "wall-" + str(i),
                "construction_name": "glass_construction",
                "number_of_vertices": 4.0,
                "surface_type": "Window",
                "vertex_1_x_coordinate": window_x1,
                "vertex_1_y_coordinate": window_y1,
                "vertex_1_z_coordinate": window_z2,
                "vertex_2_x_coordinate": window_x1,
                "vertex_2_y_coordinate": window_y1,
                "vertex_2_z_coordinate": window_z1,
                "vertex_3_x_coordinate": window_x2,
                "vertex_3_y_coordinate": window_y2,
                "vertex_3_z_coordinate": window_z1,
                "vertex_4_x_coordinate": window_x2,
                "vertex_4_y_coordinate": window_y2,
                "vertex_4_z_coordinate": window_z2
            }
    for obj in model['FenestrationSurface:Detailed']:
        model['FenestrationSurface:Detailed'][obj].update({
            "idf_max_extensible_fields": 0,
            "idf_max_fields": 22
        })

    #### SHADING -------------------------------------------------------
    z_shading = floor_height + zone_height
    # checks if there is shading in model
    for shade in shading:
        if shade > 0:
            model['Shading:Building:Detailed'] = {}
    if shading[0] > 0.01:
        model['Shading:Building:Detailed']['shading_0'] = {
            "idf_max_extensible_fields": 12,
            "idf_max_fields": 15,
            'transmittance_schedule_name': '',
            'number_of_vertices': 4,
            "vertices": [{
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": zone_y + shading[0],
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y + shading[0],
                "vertex_z_coordinate": z_shading
            }]
        }
    if shading[1] > 0.01:
        model['Shading:Building:Detailed']['shading_1'] = {
            "idf_max_extensible_fields": 12,
            "idf_max_fields": 15,
            'transmittance_schedule_name': '',
            'number_of_vertices': 4,
            "vertices": [{
                "vertex_x_coordinate": zone_x + shading[1],
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x + shading[1],
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }]
        }
    if shading[2] > 0.01:
        model['Shading:Building:Detailed']['shading_2'] = {
            "idf_max_extensible_fields": 12,
            "idf_max_fields": 15,
            'transmittance_schedule_name': '',
            'number_of_vertices': 4,
            "vertices": [{
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": -shading[2],
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": zone_x,
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": -shading[2],
                "vertex_z_coordinate": z_shading
            }]
        }
    if shading[3] > 0.01:
        model['Shading:Building:Detailed']['shading_3'] = {
            "idf_max_extensible_fields": 12,
            "idf_max_fields": 15,
            'transmittance_schedule_name': '',
            'number_of_vertices': 4,
            "vertices": [{
                "vertex_x_coordinate": -shading[3],
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": 0,
                "vertex_z_coordinate": z_shading
            }, {
                "vertex_x_coordinate": 0,
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading,
            }, {
                "vertex_x_coordinate": -shading[3],
                "vertex_y_coordinate": zone_y,
                "vertex_z_coordinate": z_shading
            }]
        }

    #### THERMAL LOADS -------------------------------------------------
    # Equipment loads only apply to the living-room pattern; lights and
    # people use the schedules selected above for either pattern.
    if living_room:
        model["ElectricEquipment"] = {
            "equipment_loads": {
                "design_level": equipment,
                "design_level_calculation_method": "EquipmentLevel",
                "end_use_subcategory": "General",
                "fraction_latent": 0,
                "fraction_lost": 0,
                "fraction_radiant": 0.3,
                "idf_max_extensible_fields": 0,
                "idf_max_fields": 11,
                "schedule_name": "livingroom_equipment",
                "zone_or_zonelist_name": "room"
            }
        }
    model["Lights"] = {
        "lights": {
            "watts_per_zone_floor_area": lights,
            "schedule_name": light_sch
        }
    }
    model["People"] = {
        "people": {
            "number_of_people": number_of_people,
            "number_of_people_schedule_name": occupation_sch,
            "activity_level_schedule_name": activity_sch
        }
    }

    #### MATERIALS -----------------------------------------------------
    model["WindowMaterial:SimpleGlazingSystem"] = {
        "glass_material": {
            "solar_heat_gain_coefficient": glass_fs
        }
    }

    #### AFN OBJECTS ---------------------------------------------------
    # AFN Simulation Control
    if bldg_ratio <= 1:  # x/y
        wind_azimuth = azimuth % 180
    else:
        # Normalise the ratio to <= 1 and rotate the long axis 90 degrees.
        bldg_ratio = 1 / bldg_ratio
        wind_azimuth = (azimuth + 90) % 180
    model["AirflowNetwork:SimulationControl"] = {
        "Ventilacao": {
            "azimuth_angle_of_long_axis_of_building": wind_azimuth,
            "ratio_of_building_width_along_short_axis_to_width_along_long_axis": bldg_ratio
        }
    }

    # AFN Surface
    if bound == 'hive':
        model["AirflowNetwork:MultiZone:ExternalNode"] = hive_externalnodes
        model["AirflowNetwork:MultiZone:Surface"] = hive_cracks
    else:
        model["AirflowNetwork:MultiZone:ExternalNode"] = {}
        model["AirflowNetwork:MultiZone:Surface"] = {}
    for i in range(4):
        if wwr[i] > 0:
            model["AirflowNetwork:MultiZone:Surface"][
                "AirflowNetwork:MultiZone:Surface " + str(i)] = {
                    "external_node_name": "window_" + str(i) + "_Node",
                    "indoor_and_outdoor_enthalpy_difference_upper_limit_for_minimum_venting_open_factor": 300000.0,
                    "indoor_and_outdoor_temperature_difference_upper_limit_for_minimum_venting_open_factor": 100.0,
                    "leakage_component_name": "detailed_window",
                    "surface_name": "window_" + str(i),
                    "ventilation_control_mode": "Temperature",
                    "ventilation_control_zone_temperature_setpoint_schedule_name": "Temp_setpoint",
                    "venting_availability_schedule_name": "VN",  # occupation_sch,
                    "window_door_opening_factor_or_crack_factor": open_fac[i]
                }
            model["AirflowNetwork:MultiZone:ExternalNode"][
                "window_" + str(i) + "_Node"] = {
                    "idf_max_extensible_fields": 0,
                    "idf_max_fields": 5,
                    "symmetric_wind_pressure_coefficient_curve": "No",
                    "wind_angle_type": "Absolute",
                    "wind_pressure_coefficient_curve_name": "side_" + str(i) + "_coef"
                }
    for obj in model["AirflowNetwork:MultiZone:Surface"]:
        model["AirflowNetwork:MultiZone:Surface"][obj].update({
            "idf_max_extensible_fields": 0,
            "idf_max_fields": 12
        })

    # Effective opening area per facade, used by the Cp calculation.
    window_areas = []
    for i in range(4):
        if i % 2 == 0:
            window_areas.append(wwr[i] * open_fac[i] * zone_x)
        else:
            window_areas.append(wwr[i] * open_fac[i] * zone_y)

    if door:
        with open(SEED_DOOR_FILE, 'r') as file:
            seed_door = json.loads(file.read())
        model["FenestrationSurface:Detailed"]["door"] = seed_door["door"]
        model["AirflowNetwork:MultiZone:Surface"][
            "AirflowNetwork:MultiZone:Surface 5"] = seed_door[
                "AirflowNetwork:MultiZone:Surface 5"]
        if bound == 'hive':
            model[
                "AirflowNetwork:MultiZone:WindPressureCoefficientValues"] = cp_calc(
                    bldg_ratio,
                    azimuth=azimuth,
                    window_areas=window_areas,
                    cp_eq=False)
            model["FenestrationSurface:Detailed"].update(hive_door)
        else:
            model[
                "AirflowNetwork:MultiZone:WindPressureCoefficientValues"] = cp_calc(
                    bldg_ratio,
                    azimuth=azimuth,
                    window_areas=window_areas,
                    cp_eq=True)
            # Without hive zones the door vents to its own external node.
            model["AirflowNetwork:MultiZone:Surface"][
                "AirflowNetwork:MultiZone:Surface 5"][
                    "external_node_name"] = "door_Node"
            model["AirflowNetwork:MultiZone:ExternalNode"][
                "door_Node"] = seed_door["door_Node"]
    else:
        model[
            "AirflowNetwork:MultiZone:WindPressureCoefficientValues"] = cp_calc(
                bldg_ratio,
                azimuth=azimuth,
                window_areas=window_areas,
                cp_eq=False)

    #### DEFINING CONSTRUCTION AND MATERIALS ---------------------------
    if len(construction) > 0:
        # External construction definition file takes precedence.
        with open(construction, 'r') as file:
            construction_wall = json.loads(file.read())
        update(model, construction_wall)
    else:
        construction_wall = concrete_wall(wall_u, wall_ct, absorptance)
        update(model, construction_wall)

    #### EMS PROGRAM ---------------------------------------------------
    model["EnergyManagementSystem:Program"] = {}
    with open(EMS_PROGRAM_FILE, 'r') as file:
        ems_program = json.loads(file.read())
    if living_room:
        model["EnergyManagementSystem:Program"]["ems_program"] = ems_program[
            "living_room"]
    else:
        model["EnergyManagementSystem:Program"]["ems_program"] = ems_program[
            "bedroom"]

    #### BRING SEED TO MODEL -------------------------------------------
    with open(input_file, 'r') as file:
        seed = json.loads(file.read())
    update(model, seed)
    with open(output, 'w') as file:
        file.write(json.dumps(model))

    #### CONVERT TO IDF ------------------------------------------------
    if convert:
        os.system('energyplus -x -c ' + output)
        # Clean up EnergyPlus conversion by-products (platform-specific).
        if os.name == 'posix':
            os.system('rm eplusout*')
            os.system('rm sqlite.err')
        else:
            os.system('del eplusout*')
            os.system('del sqlite.err')
def configure(self, env):
    """Apply cluster parameters and configure the Hive Metastore."""
    import params
    env.set_params(params)
    hive(name='metastore')
import hive


def build_print(i, ex, args):
    """Output object to Python stdout"""
    ex.value = hive.variable()
    i.value_in = hive.push_in(ex.value)
    ex.value_in = hive.antenna(i.value_in)

    # Print the stored value each time a new one is pushed in.
    def echo(self):
        print(self.value)

    i.func = hive.modifier(echo)
    hive.trigger(i.value_in, i.func)


Print = hive.hive("Print", build_print)
""" body = generator_string.replace("\n", "\n ") declaration_string = declaration.format(body) exec(declaration_string, locals(), globals()) return generator def on_new_generator(self): generator_func = create_generator_func(self.generator_body) self.generator = generator_func() def build_generator(i, ex, args): """Define and instantiate a new generator when pulled""" args.generator_body = hive.parameter(("str", "code")) ex.generator = hive.variable() ex.generator_body = hive.variable(("str", "code"), args.generator_body) i.create_generator = hive.modifier(on_new_generator) i.generator_out = hive.pull_out(ex.generator) ex.generator_out = hive.output(i.generator_out) hive.trigger(i.generator_out, i.create_generator, pretrigger=True) Generator = hive.hive("Generator", build_generator)
import hive


def compose_modifier(self):
    # Pack the scalar components into a single tuple.
    self._result = (self._x, self._y, self._z)


def build_compose(i, ex, args):
    """Compose a euler from its x, y and z components"""
    i.compose_vector = hive.modifier(compose_modifier)

    i.result = hive.attribute("euler")
    i.pull_result = hive.pull_out(i.result)

    # One float input per component; each is refreshed before the result
    # is pulled.
    for name in ('x', 'y', 'z'):
        attr = hive.attribute("float")
        setattr(i, name, attr)
        pull_in = hive.pull_in(attr)
        setattr(ex, name, hive.antenna(pull_in))
        hive.trigger(i.pull_result, pull_in, pretrigger=True)

    # Re-pack the tuple once the inputs have been refreshed.
    hive.trigger(i.pull_result, i.compose_vector, pretrigger=True)
    ex.result = hive.output(i.pull_result)


Compose = hive.hive("Compose", build_compose)
import hive


def build_decode(i, ex, args):
    """Decode bytes into a string using a configurable text encoding."""
    args.encoding = hive.parameter('str', 'utf-8')
    ex.encoding = hive.variable('str', args.encoding)

    # Output: the decoded string.
    i.string = hive.variable("str")
    i.pull_string = hive.pull_out(i.string)
    ex.string = hive.output(i.pull_string)

    # Input: bytes to decode.
    i.bytes_ = hive.variable('bytes')
    i.pull_bytes_ = hive.pull_in(i.bytes_)
    ex.bytes_ = hive.antenna(i.pull_bytes_)

    def decode(self):
        self._string = self._bytes_.decode(self.encoding)

    i.do_encoding = hive.modifier(decode)

    # Pulling the string first refreshes the input bytes, then re-decodes.
    hive.trigger(i.pull_string, i.pull_bytes_, pretrigger=True)
    hive.trigger(i.pull_bytes_, i.do_encoding)


Decode = hive.hive("Decode", build_decode)
import hive
from json import loads


def build_loads(i, ex, args):
    """Interface to JSON loads function"""

    def deserialise(self):
        self._result = loads(self._object_)

    # Parsed result.
    i.result = hive.attribute('str')
    # JSON text to parse.
    i.object_ = hive.attribute()

    i.pull_result = hive.pull_out(i.result)
    ex.result = hive.output(i.pull_result)

    i.pull_object = hive.pull_in(i.object_)
    ex.object_ = hive.antenna(i.pull_object)

    i.do_loads = hive.modifier(deserialise)

    # Pulling the result refreshes the input text, then parses it.
    hive.trigger(i.pull_result, i.pull_object, pretrigger=True)
    hive.trigger(i.pull_object, i.do_loads)


Loads = hive.hive("Loads", build_loads)
def build_and(i, ex, args):
    """Logical AND: fire trig_out only when both pulled inputs are truthy."""
    ex.a_value = hive.attribute(("bool", ), False)
    ex.b_value = hive.attribute(("bool", ), False)
    i.a = hive.pull_in(ex.a_value)
    i.b = hive.pull_in(ex.b_value)
    ex.a = hive.antenna(i.a)
    ex.b = hive.antenna(i.b)

    def on_trigger(this):
        # Refresh both inputs, then gate the output trigger.
        this._pull_inputs()
        if this.a_value and this.b_value:
            this.trig_out()

    i.trig_out = hive.triggerfunc()
    i.trig_in = hive.modifier(on_trigger)

    # Update attributes before calling modifier
    i.pull_inputs = hive.triggerfunc()
    hive.trigger(i.pull_inputs, i.a, pretrigger=True)
    hive.trigger(i.pull_inputs, i.b, pretrigger=True)

    ex.trig_out = hive.hook(i.trig_out)
    ex.trig_in = hive.entry(i.trig_in)


AND = hive.hive("AND", build_and)
import hive
from importlib import import_module


def do_import_from_path(self):
    # Resolve the dotted path to an actual module object.
    self._module = import_module(self._import_path)


def build_import(i, ex, args):
    """Interface to python import mechanism"""
    i.do_import = hive.modifier(do_import_from_path)

    # Input: dotted import path.
    i.import_path = hive.variable("str")
    i.pull_import_path = hive.pull_in(i.import_path)
    ex.import_path = hive.antenna(i.pull_import_path)

    # Output: the imported module.
    i.module = hive.variable("module")
    i.pull_module = hive.pull_out(i.module)
    ex.module = hive.output(i.pull_module)

    # Refresh the path, then perform the import, before the module is pulled.
    hive.trigger(i.pull_module, i.pull_import_path, pretrigger=True)
    hive.trigger(i.pull_module, i.do_import, pretrigger=True)


Import = hive.hive("Import", build_import)
import hive


def build_cycle(i, ex, args):
    """Emit trigger to trig_out when N triggers to trig_in are received, where N = period_in"""
    ex.period = hive.attribute("int", 0)
    ex.counter = hive.attribute("int", 0)

    i.period_in = hive.pull_in(ex.period)
    i.counter_out = hive.pull_out(ex.counter)
    ex.index = hive.output(i.counter_out)
    ex.period_in = hive.antenna(i.period_in)
    # Each incoming trigger first refreshes the period.
    ex.trig_in = hive.entry(i.period_in)

    def cycle(self):
        # Count the trigger; wrap and fire once a full period has elapsed.
        self.counter += 1
        if self.counter >= self.period:
            self.counter -= self.period
            self._output()

    i.trigger = hive.modifier(cycle)
    hive.trigger(i.period_in, i.trigger)

    i.output = hive.triggerfunc()
    ex.trig_out = hive.hook(i.output)


Cycle = hive.hive("Cycle", build_cycle)
def install(self, env):
    """Install the required packages and lay down the Hive configuration."""
    import params
    env.set_params(params)
    self.install_packages(env, params.exclude_packages)
    hive(action='config')
def build_set(i, ex, args):
    """Perform set operation on two sets"""
    # The two input sets.
    i.a = hive.variable('set')
    i.pull_a = hive.pull_in(i.a)
    ex.a = hive.antenna(i.pull_a)

    i.b = hive.variable('set')
    i.pull_b = hive.pull_in(i.b)
    ex.b = hive.antenna(i.pull_b)

    i.result = hive.variable('set')

    # One pull-out output per supported set operation.
    for op_name, op in SET_SET_OPERATIONS.items():
        pull_op = hive.pull_out(i.result)
        setattr(i, "pull_{}".format(op_name), pull_op)
        setattr(ex, op_name, hive.output(pull_op))

        # BUG FIX: bind the current operation as a default argument.
        # Previously `op` was captured by late-binding closure, so every
        # modifier applied the *last* operation in SET_SET_OPERATIONS.
        def do_operation(self, op=op):
            self._result = op(self._a, self._b)

        mod = hive.modifier(do_operation)
        setattr(i, "do_{}".format(op_name), mod)

        # Refresh input a, then compute, before this output is pulled.
        hive.trigger(pull_op, i.pull_a, pretrigger=True)
        hive.trigger(pull_op, mod, pretrigger=True)

    # Pulling a also refreshes b.
    hive.trigger(i.pull_a, i.pull_b)


FrozenSet = hive.hive("FrozenSet", build_set)
import hive


def do_count_up(self):
    # Increment, then broadcast the new count.
    self.count += 1
    self.count_out.push()


def do_count_down(self):
    # Decrement, then broadcast the new count.
    self.count -= 1
    self.count_out.push()


def build_count(i, ex, args):
    """Simple integer counter"""
    args.start_value = hive.parameter("int", 0)
    ex.count = hive.attribute("int", args.start_value)

    i.do_count_up = hive.modifier(do_count_up)
    ex.increment = hive.entry(i.do_count_up)

    i.do_count_down = hive.modifier(do_count_down)
    ex.decrement = hive.entry(i.do_count_down)

    # Listeners receive the count after every change.
    i.count_out = hive.push_out(ex.count)
    ex.count_out = hive.output(i.count_out)


Count = hive.hive("Count", build_count)
def configure(self, env):
    """Apply cluster parameters and configure HiveServer2."""
    import params
    env.set_params(params)
    hive(name='hiveserver2')
from math import sqrt

import hive


def length_modifier(self):
    # Euclidean length from the first three vector components.
    v = self._vector
    self._result = sqrt(v[0] ** 2 + v[1] ** 2 + v[2] ** 2)


def build_determinant(i, ex, args):
    """Calculate the determinant (length) of a vector"""
    # Input vector.
    i.vector = hive.variable("vector")
    i.pull_vector = hive.pull_in(i.vector)
    ex.vector = hive.antenna(i.pull_vector)

    # Scalar length output.
    i.result = hive.variable("float")
    i.pull_result = hive.pull_out(i.result)
    ex.result = hive.output(i.pull_result)

    i.calculate = hive.modifier(length_modifier)
    # Recompute immediately before the result is pulled.
    hive.trigger(i.pull_result, i.calculate, pretrigger=True)


Determinant = hive.hive("Determinant", build_determinant)
def add_on_stopped(self, on_stopped):
    # Collect a callback to invoke when the process stops.
    self._on_stopped.append(on_stopped)


def start(self):
    """Invoke every registered start callback."""
    for callback in self._on_started:
        callback()


def stop(self):
    """Invoke every registered stop callback."""
    for callback in self._on_stopped:
        callback()


def build_process(cls, i, ex, args):
    """Process hive: start/stop entries that fan out to registered callbacks."""
    # Startup / End callback
    ex.get_on_started = hive.socket(cls.add_on_started, identifier="on_started", policy=hive.MultipleOptional)
    ex.get_on_stopped = hive.socket(cls.add_on_stopped, identifier="on_stopped", policy=hive.MultipleOptional)

    i.on_started = hive.triggerable(cls.start)
    i.on_stopped = hive.triggerable(cls.stop)

    ex.on_started = hive.entry(i.on_started)
    ex.on_stopped = hive.entry(i.on_stopped)


Process = hive.hive("Process", build_process, builder_cls=ProcessClass)
def configure(self, env):
    """Apply cluster parameters and write the HiveServer2 configuration."""
    import params
    env.set_params(params)
    hive(action='config', service='hive-server2')